diff --git a/install.sh b/awips_install.sh
similarity index 98%
rename from install.sh
rename to awips_install.sh
index f73080d2c7..84f6802ffa 100755
--- a/install.sh
+++ b/awips_install.sh
@@ -2,7 +2,7 @@
 # about: AWIPS install manager
 # devorg: Unidata Program Center
 # author:
-# use: ./install.sh (--cave|--edex|--database|--ingest|--help)
+# use: ./awips_install.sh (--cave|--edex|--database|--ingest|--help)
 
 dir="$( cd "$(dirname "$0")" ; pwd -P )"
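The renamed installer keeps the flag interface shown in the usage line above. A usage sketch for a standalone ingest host (invocation details such as sudo are assumptions, the flags come from the script header):

    chmod +x awips_install.sh
    sudo ./awips_install.sh --ingest    # or --cave, --edex, --database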
diff --git a/build/README.md b/build/README.md
index 854b78f0f1..f1981e80fc 100644
--- a/build/README.md
+++ b/build/README.md
@@ -6,7 +6,7 @@ We create and use the Docker image unidata/awips-ade to build the two RHEL
 binary distributions of AWIPS (RPMS). The directory **awips2-builds/build/awips-ade/**
 contains Dockerfiles for 64-bit EL6 and EL7 CentOS.
 
-    ./build/awips-ade/docker_build.sh ${os_version}
+    ./build/awips-ade/awips-ade.sh ${os_version}
 
 where **${os_version}** is either *el6* or *el7*.
 
@@ -14,8 +14,8 @@ ADE Docker images will be named with the latest major release and OS version
 
     docker images
     REPOSITORY          TAG          IMAGE ID      CREATED     SIZE
-    unidata/awips-ade   17.1.1-el6   77ea90df5919  16 min ago  4.13GB
-    unidata/awips-ade   17.1.1-el7   f030be21eda9  23 min ago  3.95GB
+    unidata/awips-ade   18.1.1-el6   77ea90df5919  16 min ago  4.13GB
+    unidata/awips-ade   18.1.1-el7   f030be21eda9  23 min ago  3.95GB
 
 ## Build AWIPS RPMs
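Putting the README's build step together, a usage sketch for an el7 host (the command and the expected image name are taken from the README text above; a working Docker install is assumed):

    ./build/awips-ade/awips-ade.sh el7
    docker images | grep awips-ade    # expect unidata/awips-ade  18.1.1-el7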
diff --git a/build/awips-ade/Dockerfile.awips-ade.el7 b/build/awips-ade/Dockerfile.awips-ade.el7
index 0c1d5814f9..db6d82ac8c 100644
--- a/build/awips-ade/Dockerfile.awips-ade.el7
+++ b/build/awips-ade/Dockerfile.awips-ade.el7
@@ -1,27 +1,12 @@
-FROM awips-devel:el7
+FROM unidata/awips-devel:el7
 ENV VERSION 18.1.1
 ENV RELEASE 1
 MAINTAINER Michael James
 USER root
-RUN yum update yum -y
-
 RUN groupadd fxalpha && useradd -G fxalpha awips
-RUN yum groupinstall "Development tools" -y
-RUN yum install epel-release -y
-RUN yum clean all -y
-
-ENV systemDeps="wget rsync git net-tools"
-ENV rpmDeps="gcc gcc-c++ rpm-build readline-devel createrepo"
-ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
-ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
-ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel"
-ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"
-
-RUN yum install $systemDeps $rpmDeps $qpidDeps $pythonDeps $awipsDeps $httpDeps -y
-
 RUN wget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/el7-dev.repo
 RUN yum -y clean all
 RUN yum groupinstall awips2-ade -y
diff --git a/build/awips-ade/Dockerfile.awips-devel.el7 b/build/awips-ade/Dockerfile.awips-devel.el7
new file mode 100644
index 0000000000..de8086575b
--- /dev/null
+++ b/build/awips-ade/Dockerfile.awips-devel.el7
@@ -0,0 +1,23 @@
+FROM centos:7
+ENV VERSION 18.1.1
+ENV RELEASE 1
+MAINTAINER Michael James
+
+USER root
+
+RUN yum update yum -y
+
+RUN yum groupinstall "Development tools" -y
+RUN yum install epel-release -y
+RUN yum clean all -y
+
+ENV systemDeps="wget rsync git net-tools"
+ENV rpmDeps="gcc gcc-c++ rpm-build readline-devel createrepo"
+ENV qpidDeps="boost-devel cmake make ruby libuuid-devel"
+ENV pythonDeps="tk-devel tcl-devel atlas-devel compat-libf2c-34 libgfortran geos-devel libpng-devel freetype"
+ENV awipsDeps="netcdf netcdf-devel hdf5-devel lzo-devel bzip2-devel qt-devel"
+ENV httpDeps="autoconf findutils libselinux-devel libxml2-devel lua-devel openldap-devel openssl-devel pcre-devel pkgconfig perl zlib-devel apr-util-devel apr-devel"
+
+RUN yum install $systemDeps $rpmDeps $qpidDeps $pythonDeps $awipsDeps $httpDeps -y
+
+ENTRYPOINT ["/bin/bash"]
diff --git a/build/awips-ade/docker_build.sh b/build/awips-ade/awips-ade.sh
similarity index 100%
rename from build/awips-ade/docker_build.sh
rename to build/awips-ade/awips-ade.sh
diff --git a/build/awips-ade/devel_build.sh b/build/awips-ade/awips-devel.sh
similarity index 100%
rename from build/awips-ade/devel_build.sh
rename to build/awips-ade/awips-devel.sh
diff --git a/build/buildEnvironment.sh b/build/buildEnvironment.sh
index 606f6ae345..782c1bec0b 100755
--- a/build/buildEnvironment.sh
+++ b/build/buildEnvironment.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 # Version
 export AWIPSII_VERSION="18.1.1"
-export AWIPSII_RELEASE="1"
+export AWIPSII_RELEASE="4"
 # Author
 export AWIPSII_BUILD_VENDOR="UCAR"
 export AWIPSII_BUILD_SITE="Unidata"
diff --git a/build/edex-ingest/Dockerfile.edex b/build/edex-ingest/Dockerfile.edex
deleted file mode 100644
index 4b5881508d..0000000000
--- a/build/edex-ingest/Dockerfile.edex
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM centos:7
-ENV VERSION 18.1.1
-ENV RELEASE 1
-MAINTAINER Michael James
-USER root
-
-RUN yum update yum -y
-RUN groupadd fxalpha && useradd -G fxalpha awips
-RUN yum install epel-release wget -y
-RUN yum clean all -y
-RUN wget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/el7-dev.repo
-RUN wget https://www.unidata.ucar.edu/software/awips2/install.sh
-RUN chmod +x install.sh
-RUN ./install.sh --ingest
-
-ENTRYPOINT ["/bin/bash"]
diff --git a/build/edex-ingest/docker_build.sh b/build/edex-ingest/docker_build.sh
deleted file mode 100755
index 200398171c..0000000000
--- a/build/edex-ingest/docker_build.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash -v
-dir="$( cd "$(dirname "$0")" ; pwd -P )"
-pushd $dir
-. ../buildEnvironment.sh
-
-existing=$(sudo docker images |grep edex-ingest | grep $1 | awk '{ print $3 }')
-if [ ! -z "$existing" ]; then
-  sudo docker rmi $existing
-fi
-pushd /awips2/repo/awips2-builds/build/edex-ingest
-sudo docker build -t unidata/edex-ingest -f Dockerfile.edex .
-dockerID=$(sudo docker images | grep edex-ingest | grep latest | awk '{print $3}' | head -1 )
-sudo docker tag $dockerID unidata/edex-ingest:${AWIPSII_VERSION}
-sudo docker rmi unidata/edex-ingest:latest
-sudo docker push unidata/edex-ingest
diff --git a/build/rsync.cave b/build/rsync.cave
new file mode 100644
index 0000000000..4d708a13d7
--- /dev/null
+++ b/build/rsync.cave
@@ -0,0 +1,45 @@
+edexOsgi/* cave/* localization/*
+javaUtilities/* rpms pythonPackages
+*.pdf
+../awips2-nativelib/*
+../awips2-core/common/*
+../awips2-core/edex/*
+../awips2-core/features/*
+../awips2-core/viz/*
+../awips2-core-foss/lib/*
+../awips2-foss/lib/*
+../awips2-hazards/edex/*
+../awips2-hazards/common/*
+../awips2-hazards/viz/*
+../awips2-ncep/common/*
+../awips2-ncep/viz/*
+../awips2-ncep/features/*
+../awips2-ncep/edex/*
+../awips2-goesr/edexOsgi/*
+../awips2-goesr/cave/*
+../awips2-unidata/*
+../python-awips
+../awips2-cimss/viz/*
+../awips2-cimss/edex/*
+../awips2-cimss/features/*
+../awips2-cimss/common/*
+../awips2-data-delivery/common/*
+../awips2-data-delivery/edex/*
+../awips2-data-delivery/features/*
+../awips2-data-delivery/viz/*
+../awips2-drawing/viz/*
+../awips2-drawing/features/*
+../awips2-gsd/viz/*
+../awips2-gsd/features/*
+../awips2-nasa/edex/*
+../awips2-ogc/foss/*
+../awips2-ogc/edex/*
+../awips2-ogc/features/*
+../awips2-ohd/edex/*
+../awips2-ohd/apps/*
+../awips2-ohd/features/*
+../awips2-ohd/lib/*
+../awips2-swpc/common/*
+../awips2-swpc/edex/*
+../awips2-swpc/viz/*
+../awips2-swpc/features/*
diff --git a/build/rsync.dirs b/build/rsync.dirs
index 36bec06f0a..ad08ff741c 100644
--- a/build/rsync.dirs
+++ b/build/rsync.dirs
@@ -10,10 +10,6 @@ javaUtilities/* rpms pythonPackages
 ../awips2-foss/lib/*
 ../awips2-rpm/foss
 ../awips2-rpm/installers
-../awips2-nws/common/*
-../awips2-nws/viz/*
-../awips2-nws/features/*
-../awips2-nws/edex/*
 ../awips2-hazards/edex/*
 ../awips2-hazards/common/*
 ../awips2-hazards/viz/*
@@ -29,10 +25,6 @@ javaUtilities/* rpms pythonPackages
 ../awips2-cimss/edex/*
 ../awips2-cimss/features/*
 ../awips2-cimss/common/*
-../awips2-collaboration/viz/*
-../awips2-collaboration/features/*
-../awips2-collaboration/common/*
-../awips2-collaboration/foss/*
 ../awips2-data-delivery/common/*
 ../awips2-data-delivery/edex/*
 ../awips2-data-delivery/features/*
diff --git a/build/setup.sh b/build/setup.sh
index 65de694229..4cfe029dd4 100755
--- a/build/setup.sh
+++ b/build/setup.sh
@@ -21,13 +21,17 @@
 if [ $rpmname = "buildCAVE" ]; then
   for dn in `cat build/repos| grep -v static| grep -v nativelib |grep -v awips2-rpm`
   do
     echo $dn
-    if [ -d /awips2/repo/$dn ]; then dirs+=" -v /awips2/repo/${dn}:/awips2/repo/${dn} ";fi
+    if [ -d /awips2/repo/$dn ]; then
+      dirs+=" -v /awips2/repo/${dn}:/awips2/repo/${dn} "
+    fi
   done
 else
   for dn in `cat build/repos`
   do
     echo $dn
-    if [ -d /awips2/repo/$dn ]; then dirs+=" -v /awips2/repo/${dn}:/awips2/repo/${dn} ";fi
+    if [ -d /awips2/repo/$dn ]; then
+      dirs+=" -v /awips2/repo/${dn}:/awips2/repo/${dn} "
+    fi
   done
 fi
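The setup.sh loop above only accumulates a `-v` bind-mount flag in `$dirs` for repos that actually exist under /awips2/repo. A minimal sketch of how such a string would be consumed, assuming a hypothetical container run (the image tag follows buildEnvironment.sh; the exact invocation in setup.sh is not shown in this hunk):

    # each existing /awips2/repo/<name> becomes a bind mount in the build container
    sudo docker run --rm -it ${dirs} unidata/awips-ade:18.1.1-el7 /bin/bash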
diff --git a/cave/build/features.txt b/cave/build/features.txt
index f3228d019e..79b5c28be8 100644
--- a/cave/build/features.txt
+++ b/cave/build/features.txt
@@ -2,13 +2,11 @@ com.raytheon.uf.common.base.feature
 com.raytheon.uf.viz.dataplugin.obs.feature
 com.raytheon.uf.viz.sounding.feature
 com.raytheon.uf.viz.cots.feature
-com.raytheon.uf.viz.registry.feature
 com.raytheon.uf.viz.common.core.feature
 com.raytheon.uf.viz.dataplugins.feature
 com.raytheon.viz.feature.awips
 com.raytheon.uf.viz.application.feature
 com.raytheon.uf.viz.base.feature
-com.raytheon.uf.viz.archive.feature
 com.raytheon.uf.viz.gisdatastore.feature
 com.raytheon.viz.dataaccess.feature
 com.raytheon.uf.viz.localization.perspective.feature
@@ -21,7 +19,6 @@ com.raytheon.uf.viz.kml.export.feature
 com.raytheon.viz.radar.feature
 com.raytheon.uf.viz.grid.feature
 com.raytheon.uf.viz.displays.feature
-com.raytheon.viz.hydro.feature
 com.raytheon.uf.viz.d2d.damagepath.feature
 com.raytheon.uf.viz.d2d.xy.feature
 com.raytheon.viz.volumebrowser.feature
@@ -31,7 +28,6 @@ com.raytheon.uf.viz.npp.feature
 com.raytheon.uf.viz.vtec.feature
 com.raytheon.viz.text.feature
 com.raytheon.viz.warngen.feature
-com.raytheon.viz.gfe.feature
 com.raytheon.uf.viz.d2d.ui.awips.feature
 com.raytheon.uf.viz.d2d.gfe.feature
 com.raytheon.uf.viz.ncep.dataplugins.feature
@@ -46,19 +42,6 @@ com.raytheon.uf.viz.ncep.npp.feature
 com.raytheon.uf.viz.ncep.perspective.feature
 com.raytheon.uf.viz.d2d.skewt.feature
 gov.noaa.gsd.viz.ensemble.feature
-gov.noaa.nws.mdl.viz.awipsref.feature
-gov.noaa.nws.obs.viz.geodata.feature
-com.raytheon.uf.viz.server.edex.feature
-com.raytheon.uf.viz.dat.feature
-com.raytheon.uf.viz.datadelivery.feature
 edu.wisc.ssec.cimss.viz.convectprob.feature
 gov.noaa.nws.mdl.viz.boundaryTool.common.feature
 com.raytheon.uf.viz.satellite.goesr.feature
-com.raytheon.uf.viz.ohd.feature
-com.raytheon.uf.viz.scan.feature
-edu.ucar.unidata.uf.viz.feature
-gov.noaa.nws.ncep.swpc.viz.feature
-gov.noaa.nws.ocp.uf.viz.gisdatastore.feature
-gov.noaa.nws.ocp.viz.climate.feature
-gov.noaa.nws.ocp.viz.firewx.feature
-gov.noaa.nws.ocp.viz.psh.feature
diff --git a/cave/build/p2-build.xml b/cave/build/p2-build.xml
index eb489433af..66ce483f69 100644
--- a/cave/build/p2-build.xml
+++ b/cave/build/p2-build.xml
@@ -148,9 +148,6 @@
-
@@ -181,9 +178,6 @@
-
-
-
@@ -217,15 +211,9 @@
-
-
-
-
-
-
@@ -235,9 +223,6 @@
-
-
-
@@ -247,12 +232,6 @@
-
-
-
-
-
-
@@ -274,18 +253,27 @@
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -293,11 +281,9 @@
-
-
-
-
+
+
diff --git a/cave/build/static/linux/cave/caveEnvironment/bin/MonitorTestMode_script b/cave/build/static/linux/cave/caveEnvironment/bin/MonitorTestMode_script
deleted file mode 100755
index 810cc4a461..0000000000
--- a/cave/build/static/linux/cave/caveEnvironment/bin/MonitorTestMode_script
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-export DISPLAY=":0.0"
-export FXA_HOME=/awips2/cave/caveEnvironment
-export TMCP_HOME=/awips2/cave/caveEnvironment
-
-$FXA_HOME/bin/MonitorTestMode >& /dev/null &
-
-
-
diff --git a/cave/build/static/linux/cave/caveEnvironment/bin/runTMCP b/cave/build/static/linux/cave/caveEnvironment/bin/runTMCP
deleted file mode 100755
index bbb4085f2c..0000000000
--- a/cave/build/static/linux/cave/caveEnvironment/bin/runTMCP
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/sh
-
-# determine where the script is being ran from.
-path_to_script=`readlink -f $0`
-RUN_FROM_DIR=`dirname ${path_to_script}`
-BASE_ENV_DIR=`dirname ${RUN_FROM_DIR}`
-
-#DR 18113 rehost. /awips2/fxa/... Has kicked the bit-bucket.
-export TMCP_HOME=/awips2/cave/caveEnvironment
-export FXA_HOME=/awips2/cave/caveEnvironment
-
-if [ ! -n "${TMCP_HOME}" ]
-then
-   echo -e "\e[1;31mTMCP_HOME is not set.\e[m"
-   echo -e "\e[0;32mSetting TMCP_HOME to '${BASE_ENV_DIR}'.\e[m"
-   export TMCP_HOME=${BASE_ENV_DIR}
-else
-   echo "TMCP_HOME is '${TMCP_HOME}'"
-fi
-
-if [ ! -n "${FXA_HOME}" ]
-then
-   echo -e "\e[1;31mFXA_HOME is not set.\e[m"
-   echo -e "\e[0;32mSetting FXA_HOME to '${BASE_ENV_DIR}'.\e[m"
-   export FXA_HOME=${BASE_ENV_DIR}
-else
-   echo "FXA_HOME is '${FXA_HOME}'"
-fi
-
-# determine if 'FXA_WARNGEN_PRODUCT_ID' needs to be set
-HOST=`uname -n`
-TMP_HOST_NUMBER=`uname -n | awk '{print substr($1, 3, 1);}'`
-ALT_HOST_PART=`uname -n | awk '{print substr($1, 3, length($1) - 1);}'`
-ALT_HOST="xt"${ALT_HOST_PART}
-ping -c 1 -w 1 ${ALT_HOST} >/dev/null 2>/dev/null
-RC=`echo $?`
-
-if [ "${RC}" = "0" ]
-then
-   if [ ! -n "${FXA_WARNGEN_PRODUCT_ID}" ]
-   then
-      echo -e "\e[1;31mFXA_WARNGEN_PRODUCT_ID is not set.\e[m"
-      echo -e "\e[0;32mSetting FXA_WARNGEN_PRODUCT_ID to '${TMP_HOST_NUMBER}'.\e[m"
-      export FXA_WARNGEN_PRODUCT_ID=${TMP_HOST_NUMBER}
-   else
-      echo "FXA_WARNGEN_PRODUCT_ID is '${FXA_WARNGEN_PRODUCT_ID}'."
-   fi
-else
-   echo -e "\e[1;31mPartner host \""${ALT_HOST}"\" is unreachable by network!\e[m"
-   echo ${ALT_HOME}
-   echo
-fi
-
-export LD_LIBRARY_PATH=$TMCP_HOME/lib:$LD_LIBRARY_PATH
-
-# for TMCP logs
-if [ ! -d $HOME/caveData/tmcpLogs ]; then
-   mkdir -p $HOME/caveData/tmcpLogs
-fi
-export LOG_DIR=$HOME/caveData/tmcpLogs
-
-
-$TMCP_HOME/bin/tmcp
-
diff --git a/cave/build/static/linux/cave/caveEnvironment/bin/showBanner_script b/cave/build/static/linux/cave/caveEnvironment/bin/showBanner_script
deleted file mode 100755
index cea8099178..0000000000
--- a/cave/build/static/linux/cave/caveEnvironment/bin/showBanner_script
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-export DISPLAY=:0.0
-export FXA_HOME=/awips2/cave/caveEnvironment
-if [ $6 = "kde" ]
-then
-  kstart --alldesktops $FXA_HOME/bin/showBanner $2 $3 $4 $5 &
-else
-  $FXA_HOME/bin/showBanner $2 $3 $4 $5 &
-fi
-
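The deleted runTMCP derived a partner "xt" workstation name by slicing the output of `uname -n` before pinging it. A worked example of that slicing, using the hypothetical hostname lx1abc (the awk expressions are copied from the script above):

    HOST=lx1abc
    TMP_HOST_NUMBER=$(echo $HOST | awk '{print substr($1, 3, 1);}')             # -> "1"
    ALT_HOST="xt"$(echo $HOST | awk '{print substr($1, 3, length($1) - 1);}')   # -> "xt1abc"
    ping -c 1 -w 1 ${ALT_HOST}    # reachable => FXA_WARNGEN_PRODUCT_ID is set to "1"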
- **/
-package com.raytheon.uf.viz.damagepath;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import org.apache.commons.lang3.StringUtils;
-import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.core.runtime.IStatus;
-import org.eclipse.core.runtime.Status;
-import org.eclipse.core.runtime.jobs.Job;
-import org.geotools.data.simple.SimpleFeatureCollection;
-
-import com.raytheon.uf.common.damagepath.request.ExportToLdadRequest;
-import com.raytheon.uf.common.json.JsonException;
-import com.raytheon.uf.common.json.geo.SimpleGeoJsonService;
-import com.raytheon.uf.common.serialization.comm.IServerRequest;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.viz.core.exception.VizException;
-import com.raytheon.uf.viz.core.localization.LocalizationManager;
-import com.raytheon.uf.viz.core.requests.ThriftClient;
-import com.raytheon.viz.ui.cmenu.AbstractRightClickAction;
-
-/**
- * Legend right-click action to take the current {@code DamagePathLayer} data
- * and export it to LDAD in GeoJSON format.
- *
- * <pre>
- * 
- * SOFTWARE HISTORY
- * 
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- -------------------------- 
- * Jun 08, 2015  #4355     dgilling    Initial creation
- * Jun 18, 2015  #4354     dgilling    Support FeatureCollections so each 
- *                                     polygon can have its own properties.
- * Mar 11, 2016  #5288     dgilling    Rewrite to spawn async Job.
- * 
- * </pre>
- * - * @author dgilling - * @version 1.0 - */ - -public class ExportToLdadAction extends AbstractRightClickAction { - - private static class ExportDamagePathToLdadJob extends Job { - - private static final String PLUGIN_ID = "com.raytheon.uf.viz.damagepath"; - - private final DamagePathLayer layer; - - protected ExportDamagePathToLdadJob(DamagePathLayer layer) { - super("Exporting Damage Path to LDAD"); - this.layer = layer; - } - - @Override - protected IStatus run(IProgressMonitor monitor) { - try (ByteArrayOutputStream outStream = new ByteArrayOutputStream()) { - SimpleFeatureCollection featureCollection = layer - .buildFeatureCollection(); - - new SimpleGeoJsonService().serialize(featureCollection, - outStream); - byte[] jsonData = outStream.toByteArray(); - - try { - String siteID = LocalizationManager.getInstance() - .getCurrentSite(); - IServerRequest request = new ExportToLdadRequest(siteID, - jsonData); - String errorMsg = (String) ThriftClient - .sendRequest(request); - if (StringUtils.isNotEmpty(errorMsg)) { - String msg = "Could not export damage path data to LDAD: " - + errorMsg; - statusHandler.error(msg); - return new Status(IStatus.ERROR, PLUGIN_ID, msg); - } - } catch (VizException e) { - String msg = "Error processing ExportToLdadRequest."; - statusHandler.error(msg, e); - return new Status(IStatus.ERROR, PLUGIN_ID, msg, e); - } - } catch (JsonException | IOException e) { - String msg = "Error serializing Damage Path data to GeoJSON."; - statusHandler.error(msg, e); - return new Status(IStatus.ERROR, PLUGIN_ID, msg, e); - } - - statusHandler.info("Damage Path successfully exported."); - return Status.OK_STATUS; - } - } - - protected static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(ExportToLdadAction.class); - - public ExportToLdadAction() { - super("Export to LDAD"); - } - - @Override - public void run() { - new ExportDamagePathToLdadJob((DamagePathLayer) getSelectedRsc()) - .schedule(); - } -} diff --git a/cave/com.raytheon.uf.viz.derivparam.ui/plugin.xml b/cave/com.raytheon.uf.viz.derivparam.ui/plugin.xml index b90930b111..03414b0685 100644 --- a/cave/com.raytheon.uf.viz.derivparam.ui/plugin.xml +++ b/cave/com.raytheon.uf.viz.derivparam.ui/plugin.xml @@ -32,4 +32,4 @@ extensionFilter=".txt,.xml,.py"> - \ No newline at end of file + diff --git a/cave/com.raytheon.uf.viz.grib/src/com/raytheon/uf/viz/grib/wizard/save/GribWizardSave.java b/cave/com.raytheon.uf.viz.grib/src/com/raytheon/uf/viz/grib/wizard/save/GribWizardSave.java index b442ecc59d..f63272218b 100644 --- a/cave/com.raytheon.uf.viz.grib/src/com/raytheon/uf/viz/grib/wizard/save/GribWizardSave.java +++ b/cave/com.raytheon.uf.viz.grib/src/com/raytheon/uf/viz/grib/wizard/save/GribWizardSave.java @@ -61,7 +61,9 @@ public abstract class GribWizardSave { GridModel model = data.getOrCreateModel(); String centerName = GribTableLookup.getInstance() - .getTableValue(-1, -1, "0", model.getCenter()).toString(); + .getTableValue(model.getCenter(), Integer.parseInt(model.getSubCenter()), + "0", model.getCenter()).toString(); + if (centerName == null || centerName.isEmpty()) { centerName = "UNK"; } else { diff --git a/cave/com.raytheon.uf.viz.grid.radar/localization/parameterMapping/radar/RadarProductCodes.xml b/cave/com.raytheon.uf.viz.grid.radar/localization/parameterMapping/radar/RadarProductCodes.xml index 184d7e4f4c..15a5189abd 100644 --- a/cave/com.raytheon.uf.viz.grid.radar/localization/parameterMapping/radar/RadarProductCodes.xml +++ 
b/cave/com.raytheon.uf.viz.grid.radar/localization/parameterMapping/radar/RadarProductCodes.xml @@ -1,27 +1,4 @@ - - diff --git a/cave/com.raytheon.uf.viz.grid.radar/localization/volumebrowser/VbSources/radar.xml b/cave/com.raytheon.uf.viz.grid.radar/localization/volumebrowser/VbSources/radar.xml index 3d97d36b30..b000f77f40 100644 --- a/cave/com.raytheon.uf.viz.grid.radar/localization/volumebrowser/VbSources/radar.xml +++ b/cave/com.raytheon.uf.viz.grid.radar/localization/volumebrowser/VbSources/radar.xml @@ -1,27 +1,5 @@ - - + \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.grid.radar/src/com/raytheon/uf/viz/grid/radar/RadarProductCodeMapping.java b/cave/com.raytheon.uf.viz.grid.radar/src/com/raytheon/uf/viz/grid/radar/RadarProductCodeMapping.java index 8fda9705ce..53906fd62b 100644 --- a/cave/com.raytheon.uf.viz.grid.radar/src/com/raytheon/uf/viz/grid/radar/RadarProductCodeMapping.java +++ b/cave/com.raytheon.uf.viz.grid.radar/src/com/raytheon/uf/viz/grid/radar/RadarProductCodeMapping.java @@ -49,6 +49,7 @@ import com.raytheon.viz.grid.xml.ParameterMapping; * Mar 22, 2010 4473 rjpeter Initial creation * Nov 07, 2361 2361 njensen Use JAXBManager for XML * Aug 15, 2017 6332 bsteffen Move to viz.grid.radar plugin + * Oct 31, 2018 mjames Remove site and user-level files * * * @@ -75,26 +76,10 @@ public class RadarProductCodeMapping { LocalizationLevel.BASE), "/parameterMapping/radar/RadarProductCodes.xml"); - File siteFile = pm.getFile( - pm.getContext(LocalizationType.CAVE_STATIC, - LocalizationLevel.SITE), - "/parameterMapping/radar/RadarProductCodes.xml"); - - File userFile = pm.getFile( - pm.getContext(LocalizationType.CAVE_STATIC, - LocalizationLevel.USER), - "/parameterMapping/radar/RadarProductCodes.xml"); - try { JAXBManager jaxb = new JAXBManager(ParameterList.class); loadParameters(baseFile, jaxb); - if (siteFile.exists()) { - loadParameters(siteFile, jaxb); - } - if (userFile.exists()) { - loadParameters(userFile, jaxb); - } } catch (Exception e) { statusHandler.handle(Priority.PROBLEM, "Error occurred loading radar product code to grid parameter mappings", diff --git a/cave/com.raytheon.uf.viz.hpe/src/com/raytheon/uf/viz/hpe/util/HpeUtils.java b/cave/com.raytheon.uf.viz.hpe/src/com/raytheon/uf/viz/hpe/util/HpeUtils.java index 17f528b367..b3da6eb863 100644 --- a/cave/com.raytheon.uf.viz.hpe/src/com/raytheon/uf/viz/hpe/util/HpeUtils.java +++ b/cave/com.raytheon.uf.viz.hpe/src/com/raytheon/uf/viz/hpe/util/HpeUtils.java @@ -20,8 +20,6 @@ package com.raytheon.uf.viz.hpe.util; import com.raytheon.uf.common.dataplugin.grid.GridRecord; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; /** * HPE Utilities @@ -42,9 +40,6 @@ import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; */ public class HpeUtils { - private static final String HPE = "HPE"; - - private static final String BIAS_HPE = "BiasHPE"; /** * Determine if this title represents an HPE model. 
@@ -55,19 +50,9 @@ public class HpeUtils { * */ public static boolean isHpe(GridRecord gridRecord) { - String title = null; if (gridRecord != null) { - DatasetInfo info = DatasetInfoLookup.getInstance().getInfo( - gridRecord.getDatasetId()); - if (info != null) { - title = info.getTitle(); - } + return gridRecord.getDatasetId().contains("HPE"); } - - if (title == null) { - return false; - } - - return HPE.equals(title) || BIAS_HPE.equals(title); + return false; } } diff --git a/cave/com.raytheon.uf.viz.localization.edex.config/plugin.xml b/cave/com.raytheon.uf.viz.localization.edex.config/plugin.xml deleted file mode 100644 index 7b0faa2b66..0000000000 --- a/cave/com.raytheon.uf.viz.localization.edex.config/plugin.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/plugin.xml b/cave/com.raytheon.uf.viz.monitor.ffmp/plugin.xml index dd2a720c74..2c6736e513 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/plugin.xml +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/plugin.xml @@ -99,27 +99,4 @@ sortID="116"> - - - - - - - - diff --git a/cave/com.raytheon.uf.viz.monitor.scan/plugin.xml b/cave/com.raytheon.uf.viz.monitor.scan/plugin.xml index 40c3992731..f356fa394f 100644 --- a/cave/com.raytheon.uf.viz.monitor.scan/plugin.xml +++ b/cave/com.raytheon.uf.viz.monitor.scan/plugin.xml @@ -44,21 +44,4 @@ renderingOrderId="CONTOUR"> - - - - - - diff --git a/cave/com.raytheon.uf.viz.npp.nucaps/plugin.xml b/cave/com.raytheon.uf.viz.npp.nucaps/plugin.xml index 61c480609b..b317c386a6 100644 --- a/cave/com.raytheon.uf.viz.npp.nucaps/plugin.xml +++ b/cave/com.raytheon.uf.viz.npp.nucaps/plugin.xml @@ -27,13 +27,4 @@ type="nucaps"> - - - - diff --git a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/ui/ThinClientConnectivityDialog.java b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/ui/ThinClientConnectivityDialog.java index 46603d2e24..35df1c1eec 100644 --- a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/ui/ThinClientConnectivityDialog.java +++ b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/ui/ThinClientConnectivityDialog.java @@ -19,34 +19,11 @@ **/ package com.raytheon.uf.viz.thinclient.ui; -import java.io.IOException; - -import org.eclipse.jface.preference.IPersistentPreferenceStore; import org.eclipse.jface.preference.IPreferenceStore; -import org.eclipse.swt.SWT; -import org.eclipse.swt.events.SelectionAdapter; -import org.eclipse.swt.events.SelectionEvent; -import org.eclipse.swt.layout.GridData; -import org.eclipse.swt.layout.GridLayout; -import org.eclipse.swt.widgets.Button; -import org.eclipse.swt.widgets.Composite; -import org.eclipse.swt.widgets.Group; -import org.eclipse.swt.widgets.Label; - -import com.raytheon.uf.common.localization.msgs.GetServersResponse; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; -import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.viz.core.comm.ConnectivityManager; -import com.raytheon.uf.viz.core.comm.ConnectivityManager.ConnectivityResult; -import com.raytheon.uf.viz.core.comm.IConnectivityCallback; -import com.raytheon.uf.viz.core.exception.VizException; import com.raytheon.uf.viz.core.localization.ConnectivityPreferenceDialog; -import com.raytheon.uf.viz.core.localization.LocalizationConstants; -import com.raytheon.uf.viz.core.localization.LocalizationManager; -import 
com.raytheon.uf.viz.core.localization.ServerRemembrance; import com.raytheon.uf.viz.thinclient.Activator; -import com.raytheon.uf.viz.thinclient.ThinClientUriUtil; import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants; /** @@ -93,7 +70,7 @@ public class ThinClientConnectivityDialog extends ConnectivityPreferenceDialog { private String proxyAddress; public ThinClientConnectivityDialog(boolean checkAlertViz) { - super(checkAlertViz, "Unidata AWIPS"); + super(checkAlertViz); IPreferenceStore store = Activator.getDefault().getPreferenceStore(); dataRefreshMethod = store .getString(ThinClientPreferenceConstants.P_DATA_REFRESH_METHOD); diff --git a/cave/com.raytheon.viz.aviation/plugin.xml b/cave/com.raytheon.viz.aviation/plugin.xml index b252a66ff8..6265dc5452 100644 --- a/cave/com.raytheon.viz.aviation/plugin.xml +++ b/cave/com.raytheon.viz.aviation/plugin.xml @@ -71,21 +71,4 @@ - - - - - - diff --git a/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh b/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh index 8eddb59d56..71ae5ca4e4 100644 --- a/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh +++ b/cave/com.raytheon.viz.gfe/GFESuite/gfeclient.sh @@ -74,7 +74,7 @@ then else echo "Display not set, creating offscreen x on port $$" extendLibraryPath "-noX" - Xvfb :$$ -screen 0 1280x1024x24 -nolisten tcp & + Xvfb :$$ -screen 0 1280x1024x30 -nolisten tcp & xvfb=$! export DISPLAY=":$$.0" #don't use shader when no display set diff --git a/cave/com.raytheon.viz.gfe/localization/bundles/maps/stormsurgeww.xml b/cave/com.raytheon.viz.gfe/localization/bundles/maps/stormsurgeww.xml deleted file mode 100644 index 00b83e3b3b..0000000000 --- a/cave/com.raytheon.viz.gfe/localization/bundles/maps/stormsurgeww.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - mapdata.stormsurgeww
- StormSurgeWW -
-
-
-
-
-
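The GFE procedure revised below (TCStormSurgeThreat.py) repeatedly snaps epoch times to 6-hour boundaries when building its InundationTiming inventory (see baseGuidanceTime, makeInundationTiming, and makeTimingTRs in the diff). A minimal shell illustration of that snapping arithmetic, using an arbitrary example time (the variable names here are illustrative only):

    SIX_HR=$((6 * 3600))
    t=1538343000                          # arbitrary epoch seconds
    base=$(( t / SIX_HR * SIX_HR ))       # integer-divide-then-multiply snaps down to a 6-hour boundary
    echo "first timing range: $base -> $((base + SIX_HR))"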
diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py index 843af3bc1d..5f642539b8 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py @@ -1,792 +1,793 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# StormSurgeThreat -# -# Author: Tom LeFebvre/Pablo Santos -# April 20, 2012 - To use gridded MSL TO NAVD and MSL to MLLW -# corrections and to get rid of Very Low. -# Last Modified: June 7, 2012 Shannon White - To fix the handling of time -# for A2 so it works for both real time and displaced real time -# Migrated TC Coastal Flood for AWIPS2. Updated 6/22/2012. S.O. -# March 11, 2014 to adapt to new PSURGE 2.0/PHISH and VDATUM Datasets in A1. PS -# May 21, 2014: for new PHISH but in AWIPS 2: PS/SW -# Aug 13, 2014: To rename SurgeHtPlustTide to InundationMax and incorporate InundationTiming. PS -# Sept 17, 2014: To finalize changes and clean up for 2015initial Baseline Check in. -# -# Sept 18, 2014: Added code to pull grids from NHC via ISC if PHISH not -# Available on time. Left inactive (commented out) for the moment until that can be fully tested later -# in 2014 or in 2015. -# LeFebvre/Santos, July 27, 2015: Expanded Manual options to include Replace and Add options. -# This allows sites to specify manually different threat levels across different edit areas and time ranges. -# See 2015HTIUserGuide for details. -# -# Feb 11, 2016 LeFebvre (16.2.1): Added code to create zero grids and manual grids when -# PSURGE not available. Added checks for current guidance for PHISH and ISC options. -# -# April 14, 2016: Lefebvre/Santos: Added multabledb to restore ISC option -# -# Last Modified: -# 6/20/2016 - Santos: Added code to fix issue of old grid not being deleted when running Manual/Add option. -# 7/15/2016 - Lefebvre/Santos: Added Code to improved Manual Options, numpy compatibility and future builds, -# common methods. Fixed Smoothing Algorithm. inundation grid zeroed out where MHHW <=0. -# 9/8/2016 - Santos: Updated copyISC method to better handle when grids missing in ISC db. -# VERSION 17.1.1 = The one checked in. -# 9/26/16 - LeFebvre - Removed commented out code to pass code review. -# 10/20/16 - Santos - Removed code that stops procedure from running when guidance for current -# advisory is not available and instead advises forecaster. -# 11/3/2016: Santos - Addressed Code Review Comments. -# 12/21/2016: Santos - Added option to adjust InundationMax from manually adjusted InundationTiming grid. -# Also when running with PHISH or PETSS option computes InundationMax from comp max of InundationTiming for consistency. Previously -# they were both retrieved indply from model source and with smoothing it would result in minor differences between -# InundationMax and InundationTiming. -# 01/08/2017: Modified BE CAREFUL line when alerting forecaster PSURGE Data is still from a previous cycle. -# 01/09/2017: Renamed UpdateInunMax in GUI for clarity. Also, introduced on Jan 2017 SWiT ability for procedure to force InundationMax that are > 1 and < 1.5 to 1.5. 
-# This is because TCV rounds to nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that -# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not -# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. -# 07/20/2017: Enabled PETSS option for 2018. PS -# 10/11/2017: LeFebvre - GFE: tool failed due to an old grid being present (DR 20309) -# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETTS Guidance out to 102 hours, -# improved UpdateInunMax option and made changes to makeInundationTiming methods to accomodate new TCs for -# the TPCSurgeProb and PETSS dbs. -# 03/20/2018 Check in Pablo's fix. -# 4/3/2018 - Additional fixes needed to enable Manual options to work out to 102 hours. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards - -MenuItems = ["Populate"] - -import TropicalUtility, LogStream -import SmartScript -import numpy as np -import TimeRange -import AbsTime -import time -import sys - -VariableList = [("DEFAULT: Typical. Should only be changed in coordination with NHC SS Unit", "", "label"), - ("Forecast Confidence? - (Applies to PHISH/PETSS Only)", "Typical (10% Exceedance; for most systems anytime within 48 hours)", -## "radio", ["Low (Prob-only; 10% Exceedance; for ill behaved systems)", - "radio", ["Typical (10% Exceedance; for most systems anytime within 48 hours)", - "Medium (20% Exceedance; for well-behaved systems within 12 hours of event)", - "High (30% Exceedance; for well-behaved systems within 6-12 hours of event)", - "Higher (40% Exceedance; for well-behaved systems within 6 hours of the event)", - "Highest (50% Exceedance; for well-behaved systems at time of the event)"]), - ("Grid Smoothing?", "Yes", "radio", ["Yes","No"]), - ("Make grids from \nPHISH, PETSS, ISC, or Manually?", "PHISH", "radio", ["PHISH", "PETSS", "ISC", "Manually Replace", "Manually Add", "UpdateInunMax (Edit Inundation Timing Grids)"]), - ("Manual Inundation settings: Time ranges below relative to advisory model cycle", "", "label"), - ("Inundation Height:", 1.0, "scale", [0.0, 3.0], 0.1), - ("Start Hour for Inundation Timing", 0, "scale", [0.0, 96.0], 6.0), - ("End Hour for Inundation Timing", 6, "scale", [0.0, 102.0], 6.0), - ] - -class Procedure (TropicalUtility.TropicalUtility): - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - # Compute a base time for this guidance - def baseGuidanceTime(self): - startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600) - return startTime - - # Method to get the average topography for each grid point - def getAvgTopoGrid(self, topodb): - - siteID = self.getSiteID() -# print "********************\n TOPO IS: ", topodb - dbName = siteID + "_D2D_" + topodb - - weName = "avgTopo" -# timeRange = TimeRange.allTimes().toJavaObj() - trList = self.GM_getWEInventory(weName, dbName, "SFC") - - #print "NED Topo list is", trList - - if len(trList)== 0: - #print "CRAP!!!" 
- return - for tr in trList: -# print "My time is", tr - topoGrid = self.getGrids(dbName, weName, "SFC", tr, mode="First") - - # Convert topography from meters to feet - topoGrid /= 0.3048 - min = -16000 - max = 16000.0 - mask2 = (topoGrid > max) - topoGrid[topoGrid < min] = -80 - topoGrid[mask2] = self.getTopo()[mask2] - return topoGrid - - # Make a time range of x hours duration from the current time - def makeNewTimeRange(self, hours): - - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime) - endTime = startTime + (hours * 3600) - timeRange = TimeRange.TimeRange(startTime, endTime) - - return timeRange - - # Method to find all database versions for the specified model - def getModelIDList(self, matchStr): - - # Make a list of all available parameters - availParms = self.availableParms() - - # Initialize a list of the database identifiers we want to keep - modelList = [] - - # Look through every parameter, then check the database id - for pName, level, dbID in availParms: - modelId = dbID.modelIdentifier() - if matchStr in modelId: - if modelId not in modelList: - modelList.append(modelId) - - return modelList - - # Method to get the selected exceedance height data - def getExceedanceHeight(self, modelName, pctStr, level): - - ap = self.availableParms() - dbName = self.getSiteID() + "_D2D_" + modelName - - modelIDList = self.getModelIDList(modelName) - modelIDList.sort() - - if len(modelIDList) == 0: - return None - - surgeModel = modelIDList[-1] - - weName = "Surge" + pctStr + "Pct" - trList = self.GM_getWEInventory(weName, dbName, level) - - if len(trList) == 0: # No grids found for this database - return None - - baseTime = self.baseGuidanceTime() - - if baseTime > trList[0].startTime().unixTime(): - #modelCycle = AbsTime.AbsTime(self.baseGuidanceTime() - (6*3600)) - message = "BE CAREFUL: " + modelName + " IS STILL FROM A PREVIOUS ADVISORY/MODEL CYCLE" - self.statusBarMsg(message, "A") - #return None - - #print "Retrieving ", weName, " at ", level - # Make a new time range to span all current data - timeRange = self.GM_makeTimeRange(trList[0].startTime().unixTime(), - trList[-1].endTime().unixTime()) - - grid = self.getGrids(dbName, weName, level, timeRange, mode="Max") - -# for tr in trList: -# grid = self.getGrids(dbName, weName, level, tr, mode="Max") - - # Convert current surge values from meters to feet - mask = (grid <= -100) - grid /= 0.3048 - grid[mask] = -80.0 -# grid[mask] = np.where(mask,surgeVal*3.28, np.float32(-80.0)) - - return grid # convert meters to feet - - # Method to create the inundation timing grids - def makeInundationTiming(self, modelName, pctStr, level, smoothThreatGrid, mutableID, ssea, MHHWMask): - - dbName = self.getSiteID() + "_D2D_" + modelName - weName = "Surge" + pctStr + "Pctincr" - #print "Attempting to retrieve: ", weName, level - # get the StormSurgeProb inventory - surgeTRList = self.GM_getWEInventory(weName, dbName, level) - if len(surgeTRList) == 0: - self.statusBarMsg("No PHISH grid found.", "U") - return - - # Make timeRanges for all 13 grids. 
Start with the beginning of the first Phish grid - baseTime = int(surgeTRList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - endTime = int(surgeTRList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - if endTime < surgeTRList[-1].endTime().unixTime(): - endTime += 6 * 3600 - trList = self.makeTimingTRs(baseTime, endTime) - - timingGrids = [] - - self.deleteAllGrids(["InundationTiming"]) - for tr in trList: - - if tr in surgeTRList: - phishGrid = self.getGrids(dbName, weName, level, tr) - else: - phishGrid = self.empty() - -# -# For consistency we need to add smoothing here too as we do in execute. -# - if phishGrid is None: - self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S") - continue - - #print "SmoothThreatGrid:", smoothThreatGrid - if smoothThreatGrid == "Yes": -# mask = np.greater(phishGrid, 0.0) & ssea -# phishGrid = np.where(np.greater(phishGrid, 0.0), self.GM_smoothGrid(phishGrid, 3, mask), phishGrid) - mask = (phishGrid > 0.0) & ssea - smoothedPhish = self.GM_smoothGrid(phishGrid, 3, mask) - phishGrid[mask] = smoothedPhish[mask] - - # Convert units from meters to feet -# mask = (phishGrid <= -100) - grid = phishGrid / 0.3048 -# grid[mask] = -80.0 -# grid = np.where(phishGrid>-100, phishGrid*3.28, np.float32(-80.0)) # Convert units from meters to feet - - grid.clip(0.0, 100.0, grid) - grid[~ssea] = 0.0 - grid[MHHWMask] = 0.0 - timingGrids.append(grid) - self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) - - return trList,timingGrids - - def makeInundationMaxGrid(self, timingGrids, trList): - - itCube = np.array(timingGrids) - maxGrid = np.amax(itCube, axis=0) - - now = int(self._gmtime().unixTime() / 3600) * 3600 - maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange) - - return maxGrid - -#************************************************************************************** -# THis procedure was written to extract MSL to NAVD corrections from the VDATUMS D2D -# Database. It is not yet implemented because the VDATUMS database has not been -# finalized. - - def deleteAllGrids(self, weList): - - for weName in weList: - trList = self.GM_getWEInventory(weName) - if len(trList) == 0: - continue - start = trList[0].startTime().unixTime() - end = trList[-1].endTime().unixTime() - tr = self.GM_makeTimeRange(start, end) - - self.deleteCmd([weName], tr) - - return - - def getVDATUM(self, weName, limit): - siteID = self.getSiteID() - dbName = siteID + "_D2D_VDATUMS" - - grid = self.getGrids(dbName, weName, "SFC", TimeRange.allTimes(), - mode="First") - - if grid is None: - msgStr = weName + " does not exist in the VDATUMS model. " - self.statusBarMsg(msgStr, "S") - - mask = (grid <= limit) - grid /= 0.3048 - grid[mask] = -80.0 - - # Converted from meters to feet - return grid - -# This procedure was written to extract MSL to NAVD88 corrections from the VDATUMS D2D -# Database. - - def getMSLtoNAVD(self): - return self.getVDATUM("MSLtoNAVD88", -0.40) - - -# This procedure was written to extract MSL to MLLW corrections from the VDATUMS D2D -# Database. - - def getMSLtoMLLW(self): - return self.getVDATUM("MSLtoMLLW", 0.0) - -# This procedure was written to extract MSL to MHHW corrections from the VDATUMS D2D -# Database. - - def getMSLtoMHHW(self): - return self.getVDATUM("MSLtoMHHW", -3.09) - -# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D -# Database. 
- def getNAVDtoMLLW(self): - return self.getVDATUM("NAVD88toMLLW", -2.20) - -# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D -# Database. - - def getNAVDtoMHHW(self): - return self.getVDATUM("NAVD88toMHHW", -3.40) - - # Copies the specified weather elements in elementList into the Fcst database. - def copyISCGridstoFcst(self, elementList, mutableID): - - # Initialize all the grids we plan to return - - surgePctGrid = None - surgePctGridMSL = None - surgePctGridMLLW = None - surgePctGridMHHW = None - surgePctGridNAVD = None - - baseTime = self.baseGuidanceTime() - - # Remove all the grids first before replacing them later - - self.deleteCmd(elementList, TimeRange.allTimes()) - - # Ensure we're not fetching older ISC grids to avoid the ISC purge bug by - # fetching ISC grids within a specific window. - allTimes = TimeRange.allTimes() - iscStart = AbsTime.AbsTime(baseTime - (10 * 3600)) # 10 hours before the baseTime - iscEnd = allTimes.endTime() # Latest time possible - ISCTRWindow = TimeRange.TimeRange(iscStart, iscEnd) - -# Amended To distinguish when inundation grids are available but not datum ones. - for weName in elementList: - #print "Processing ISC ", weName - GridsCheck = True - iscWeName = weName + "nc" - # get the inventory for the ISC grids - - try: - trList = self.GM_getWEInventory(iscWeName, "ISC", "SFC", ISCTRWindow) - except: - GridsCheck = False - - if len(trList) == 0: - GridsCheck = False - - if (weName == "InundationMax" or weName == "InundationTiming") and not GridsCheck: - self.statusBarMsg("No inundation grids found in ISC database for " + iscWeName + ". Stopping. Revert Forecast db.", "S") - return None, None, None, None, None - - if not GridsCheck: - self.statusBarMsg("No datum grids in ISC database for " + iscWeName + ". Proceeding without it.", "S") - - # Make sure that the ISC grids are current - if GridsCheck: - if baseTime > trList[0].startTime().unixTime(): - if weName == "InundationMax" or weName == "InundationTiming": - self.statusBarMsg("ISC grids for inundation element " + iscWeName + " are not current. They correspond to a previous cycle. Aborting. Revert Forecast db.", "S") - return None, None, None, None, None - else: - self.statusBarMsg("ISC grids for datum element " + iscWeName + " are not current. They correspond to a previous cycle. 
Proceeding without it.", "S") - GridsCheck = False - - for tr in trList: - grid = self.getGrids("ISC", iscWeName, "SFC", tr) - if iscWeName == "InundationMaxnc" or iscWeName == "InundationTimingnc": - grid.clip(0.0, 100.0, grid) - else: - grid.clip(-30.0, 100.0, grid) - - if iscWeName == "InundationTimingnc": - self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) - elif iscWeName == "InundationMaxnc": - surgePctGrid = grid - self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) - elif iscWeName == "SurgeHtPlusTideMSLnc" and GridsCheck: - surgePctGridMSL = grid - elif iscWeName == "SurgeHtPlusTideMLLWnc" and GridsCheck: - surgePctGridMLLW = grid - elif iscWeName == "SurgeHtPlusTideMHHWnc" and GridsCheck: - surgePctGridMHHW = grid - elif iscWeName == "SurgeHtPlusTideNAVDnc" and GridsCheck: - surgePctGridNAVD = grid - - return surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD - - # Make a list of timeRanges that will be used to make InundationTiming grids - def makeTimingTRs(self, baseTime, endTime): - # Make the inundation timing grids - trList = [] - start = baseTime - end = baseTime + 6 * 3600 - while end <= endTime: - tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end)) - trList.append(tr) - start = end - end += 6 * 3600 - - return trList - - def getTimingGrids(self): - - baseTime = self.baseGuidanceTime() - endTime = baseTime + 102 * 3600 - gridList= [] - trList = self.makeTimingTRs(baseTime, endTime) - - for tr in trList: - timingGrid = self.empty() - gridList.append(timingGrid) - - return trList, gridList - - def execute(self, varDict, editArea, timeRange): - - t0 = time.time() - - self._timeRange = timeRange - - mutableID = self.mutableID() - - # List of elements - # See if we should copy from ISC. If so, do the copy and exit - smoothThreatGrid = varDict["Grid Smoothing?"] - - makeOption = varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] - topodb = "NED" - - ssea = self.encodeEditArea("StormSurgeWW_EditArea") - - Topo = self.getAvgTopoGrid(topodb) - - confidenceStr = varDict["Forecast Confidence? - (Applies to PHISH/PETSS Only)"] - - # extract the percent value from this string - pctPos = confidenceStr.find("%") - pctStr = confidenceStr[pctPos - 2:pctPos] - - threatWEName = "StormSurgeThreat" - - #print "pctStr is: ", pctStr - surgePctGrid = None - surgePctGridMSL = None - surgePctGridMLLW = None - surgePctGridNHHW = None - surgePctGridNAVD = None - - if makeOption == "PHISH" or makeOption == "PETSS": - - # Now get the psurge - if makeOption == "PHISH": - modelName = "TPCSurgeProb" - else: - modelName = "PETSS" - surgePctGrid = self.getExceedanceHeight(modelName, pctStr, "FHAG0") - if surgePctGrid is None: - message = "No inundation data found for " + modelName - self.statusBarMsg(message, "S") - return - - phishMask = ~ssea - surgePctGrid[phishMask] = 0.0 - surgePctGridNAVD = self.getExceedanceHeight(modelName, pctStr, "SFC") - if surgePctGridNAVD is None: - message = "No Surge plus Tide NAVD data found for " + modelName - self.statusBarMsg(message, "S") - return - - surgePctGridNAVD[phishMask] = -80.0 - if surgePctGrid is None or surgePctGridNAVD is None: - return - - # - # The following lines are the gridded vdatum corrections. 
- # - msltonavd = self.getMSLtoNAVD() - msltomllw = self.getMSLtoMLLW() - msltomhhw = self.getMSLtoMHHW() - navdtomllw = self.getNAVDtoMLLW() - navdtomhhw = self.getNAVDtoMHHW() - - # Apply 3x3 smooth within the surge zone - # for values greater than 1 as to not underplay areas adjacent to zero value pixels. - # If you apply a smoother, for consistency among storm surge plus tide and derived - # grids, it must be done here. - if smoothThreatGrid == "Yes": - #mask = np.greater(surgePctGrid, 0.0) & ssea - #surgePctGrid = np.where(np.greater(surgePctGrid, 0.0), self.GM_smoothGrid(surgePctGrid,3, mask), surgePctGrid) - -# mask = np.greater(surgePctGridNAVD, -10.0) & ssea -# surgePctGridNAVD = np.where(np.greater(surgePctGridNAVD, -10.0), self.GM_smoothGrid(surgePctGridNAVD,3, mask), surgePctGridNAVD) - - mask = (surgePctGridNAVD > -10.0) & ssea - surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, mask) - -# surgePctGridMSL= np.where(mask1, surgePctGridNAVD - msltonavd, np.float32(-80.0)) # MSL Grid - navdMask = (surgePctGridNAVD > -80.0) - mask = (msltonavd > -80.0) & navdMask & ssea - - # MSL Grid - surgePctGridMSL = surgePctGridNAVD - msltonavd - surgePctGridMSL[~mask] = -80.0 - -# surgePctGridMLLW = np.where(np.greater(navdtomllw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ -# surgePctGridNAVD + navdtomllw, np.float32(-80.0)) # MLLW Grid - - # MLLW Grid - mask = (navdtomllw > -80.0) & navdMask - surgePctGridMLLW = surgePctGridNAVD + navdtomllw - surgePctGridMLLW[~mask] = -80.0 - -# surgePctGridMHHW = np.where(np.greater(navdtomhhw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ -# surgePctGridNAVD + navdtomhhw, np.float32(-80.0)) # MHHW Grid - # MHHW Grid - mask = (navdtomhhw > -80.0) & navdMask - surgePctGridMHHW = surgePctGridNAVD + navdtomhhw - surgePctGridMHHW[~mask] = -80.0 - -# surgeDiffMLLWMHHW = np.where(np.greater(surgePctGridMLLW,-80.0) & np.greater(surgePctGridMHHW, -80.0), \ -# surgePctGridMLLW-surgePctGridMHHW, np.float32(-80.0)) # Diff Grid Between MLLW and MHHW - - # Diff Grid Between MLLW and MHHW (i.e tidal range) - mask = (surgePctGridMLLW > -80.0) & (surgePctGridMHHW > -80.0) - surgeDiffMLLWMHHW = surgePctGridMLLW - surgePctGridMHHW - surgeDiffMLLWMHHW[~mask] = -80.0 - - # Mask - MHHWMask = surgePctGridMHHW <= 0.0 - - #surgePctGrid[MHHWMask] = 0.0 - - trList,timingGrids = self.makeInundationTiming(modelName, pctStr, "FHAG0", smoothThreatGrid, mutableID, ssea, MHHWMask) - #surgePctGrid and InundationMax recomputed from InundationTiming sequence for consistency - surgePctGrid = self.makeInundationMaxGrid(timingGrids, trList) - - elif makeOption == "ISC": - - elementList = ["InundationMax","InundationTiming", "SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] - surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD = self.copyISCGridstoFcst(elementList, mutableID) - # if you look in CopyISC method if either InundationMax or InundationTiming is missing the procedure stops all together and notifies forecaster. 
- if surgePctGrid is None: - return - - elif makeOption == "Manually Replace" or makeOption == "Manually Add": - - inundationHeight = float(varDict["Inundation Height:"]) - inunStartHour = float(varDict["Start Hour for Inundation Timing"]) - inunEndHour = float(varDict["End Hour for Inundation Timing"]) - - selectedMask = self.encodeEditArea(editArea) - if not selectedMask.any(): - self.statusBarMsg("Please define an area over which to assign the inundation values.", "S") - return - - modifyMask = selectedMask & ssea - if not modifyMask.any(): - self.statusBarMsg("Please define an area that intersects the StormSurgeEditArea to assign the inundation values.", "S") - return # Calculate the intersection of the SSEditArea and selected editAre - - if inunStartHour >= inunEndHour: - self.statusBarMsg("Please define the end hour after the start hour.", "S") - return - - surgePctGrid = self.empty() - - # Fetch the old grids if we're adding - if varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] == "Manually Add": - imTRList = self.GM_getWEInventory("InundationMax", mutableID, "SFC") - if len(imTRList) > 0: - imTR = imTRList[0] - surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR) - - surgePctGrid[modifyMask] = inundationHeight - - # Make the timing grids - baseTime = self.baseGuidanceTime() - if makeOption == "Manually Replace": # Make new grids and replace all IT grids - trList, timingGrids = self.getTimingGrids() - - for i in range(len(trList)): - # only modify grids in the specified time range - start = trList[i].startTime().unixTime() - end = trList[i].endTime().unixTime() - - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - timingGrids[i] = surgePctGrid # populate only where needed - - timeRange = TimeRange.allTimes() - self.deleteCmd(["InundationTiming"], timeRange) - for i in range(len(trList)): - timingGrids[i].clip(0.0, 100.0, timingGrids[i]) - self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i]) - - elif makeOption == "Manually Add": # Just replace the selected grid points over the selected time - # Fetch the existing IT grids - itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all.", "S") - return - #Fetch the grids - itGrids = [] - trList = [] - for tr in itTRList: - start = tr.startTime().unixTime() - end = tr.endTime().unixTime() - #print "Checking tr:", tr - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr) - itGrids.append(grid) - trList.append(tr) - - if len(itGrids) == 0: - self.statusBarMsg("No InundationTiming grids found for selected start and end hours.", "S") - return - - # Surgically insert grid values into the InundationTiming grids over the selected hours - for i in range(len(trList)): - itGrids[i][modifyMask] = inundationHeight # poke in the values - - self.createGrid(mutableID, "InundationTiming", "SCALAR", itGrids[i], trList[i]) - - timingGrids = [] - for tr in itTRList: - grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) - grid[~ssea] = 0.0 - timingGrids.append(grid) - - surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) - - elif makeOption == "UpdateInunMax (Edit Inundation Timing Grids)": - - self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", 
"SurgeHtPlusTideMLLW"]) - - itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") - - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") - return - - timingGrids = [] - - # Fetch all the timing grids - for tr in itTRList: - grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) - grid[~ssea] = 0.0 - timingGrids.append(grid) - self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) - self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) - - # Finally create the surge grid which will be saved as the InundationMax - - surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) - - #return - # Done with manual options - -# Next line introduced on Jan 2017 SWiT. It forces points in InundationMax that are > 1 and < 1.5 to 1.5. This is because TCV rounds to -# nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that -# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not -# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. - - surgePctGrid[(surgePctGrid > 1.0) & (surgePctGrid < 1.5)] = 1.5 - - threatKeys = self.getDiscreteKeys(threatWEName) - - # Define a mapping between UI names and key names - # keyMap = {"Very Low" :"Very Low", - keyMap = {"Elevated" : "Elevated", - "Moderate" : "Mod", - "High" : "High", - "Extreme" : "Extreme", - } - - threshDict = {} # a dict to store thresholds from the UI - - for key in list(keyMap.keys()): - - if keyMap[key] == "Extreme": - threshDict[keyMap[key]] = 9 - elif keyMap[key] == "High": - threshDict[keyMap[key]] = 6 - elif keyMap[key] == "Mod": - threshDict[keyMap[key]] = 3 - elif keyMap[key] == "Elevated": - threshDict[keyMap[key]] = 1 - - #print "threshDict[keyMap[key]]: ", keyMap[key], threshDict[keyMap[key]] - - # make a timeRange - 6 hours long - elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] - - # make a new timeRange that will be used to create new grids - timeRange = self.makeNewTimeRange(6) - - # Remove old guidance grids and replace them with the new grids - # Delete the old grids first - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime - 48*3600) - endTime = startTime + 240*3600 - deleteTimeRange = TimeRange.TimeRange(startTime, endTime) - - for elem in elementList: - self.deleteCmd([elem], deleteTimeRange) - - if makeOption != "Manually Replace" and makeOption != "Manually Add" and makeOption != "UpdateInunMax (Edit Inundation Timing Grids)": - if surgePctGridMSL is not None: - surgePctGridMSL.clip(-30.0, 100.0, surgePctGridMSL) - self.createGrid(mutableID, "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL, - timeRange, precision=2) - if surgePctGridMLLW is not None: - surgePctGridMLLW.clip(-30.0, 100.0, surgePctGridMLLW) - self.createGrid(mutableID, "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW, - timeRange, precision=2) - if surgePctGridNAVD is not None: - surgePctGridNAVD.clip(-30.0, 100.0, surgePctGridNAVD) - self.createGrid(mutableID, "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD, - timeRange, precision=2) - if surgePctGridMHHW is not None: - 
-                surgePctGridMHHW.clip(-30.0, 100.0, surgePctGridMHHW)
-                self.createGrid(mutableID, "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW,
-                                timeRange, precision=2)
-
-        # Make the grid. Start with the existing grid if we have one otherwise zeros
-        coastalThreat = self.empty(np.int8)
-        surgePctGrid.clip(0.0, 100.0, surgePctGrid)
-        self.createGrid(mutableID, "InundationMax", "SCALAR", surgePctGrid, timeRange, precision=2)
-
-        # Yet another list to define the order in which we set grid values
-        # This order must be ranked lowest to highest
-        #keyList = ["Very Low", "Elevated", "Mod", "High", "Extreme"]
-        keyList = ["Elevated", "Mod", "High", "Extreme"]
-
-        # Set the grid values based on the surgePctGrid grid and thresholds
-        for key in keyList:
-            #print "THRESHOLD FOR KEY IS: ", key, threshDict[key]
-            thresh = threshDict[key]
-            keyIndex = self.getIndex(key, threatKeys)
-            #coastalMask = ssea & np.greater_equal(surgePctGrid, thresh)
-            coastalMask = ssea & np.greater(surgePctGrid, thresh)
-            coastalThreat[coastalMask] = keyIndex
-
-# create the CoastalThreat Grid
-        self.createGrid(mutableID, threatWEName, "DISCRETE",
-                        (coastalThreat, threatKeys), timeRange,
-                        discreteKeys=threatKeys,
-                        discreteOverlap=0,
-                        discreteAuxDataLength=2,
-                        defaultColorTable="Hazards")
-
-        t1 = time.time()
-        LogStream.logEvent("Finished TCStormSurgeThreat in %f.4 ms" % ((t1-t0) * 1000))
-
-        return
-
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# StormSurgeThreat
+#
+# Author: Tom LeFebvre/Pablo Santos
+# April 20, 2012 - To use gridded MSL to NAVD and MSL to MLLW
+# corrections and to get rid of Very Low.
+# Last Modified: June 7, 2012 Shannon White - To fix the handling of time
+# for A2 so it works for both real time and displaced real time
+# Migrated TC Coastal Flood for AWIPS2. Updated 6/22/2012. S.O.
+# March 11, 2014 to adapt to new PSURGE 2.0/PHISH and VDATUM Datasets in A1. PS
+# May 21, 2014: for new PHISH but in AWIPS 2: PS/SW
+# Aug 13, 2014: To rename SurgeHtPlusTide to InundationMax and incorporate InundationTiming. PS
+# Sept 17, 2014: To finalize changes and clean up for the 2015 initial baseline check-in.
+#
+# Sept 18, 2014: Added code to pull grids from NHC via ISC if PHISH not
+# available on time. Left inactive (commented out) for the moment until that can be fully tested later
+# in 2014 or in 2015.
+# LeFebvre/Santos, July 27, 2015: Expanded Manual options to include Replace and Add options.
+# This allows sites to manually specify different threat levels across different edit areas and time ranges.
+# See 2015HTIUserGuide for details.
+#
+# Feb 11, 2016 LeFebvre (16.2.1): Added code to create zero grids and manual grids when
+# PSURGE not available. Added checks for current guidance for PHISH and ISC options.
+#
+# April 14, 2016: LeFebvre/Santos: Added mutable db to restore ISC option.
+#
+# Last Modified:
+# 6/20/2016 - Santos: Added code to fix issue of old grid not being deleted when running Manual/Add option.
+# 7/15/2016 - LeFebvre/Santos: Added code to improve Manual options, numpy compatibility and future builds,
+# common methods. Fixed smoothing algorithm. Inundation grid zeroed out where MHHW <= 0.
+# 9/8/2016 - Santos: Updated copyISC method to better handle when grids are missing in the ISC db.
+# VERSION 17.1.1 = The one checked in.
+# 9/26/16 - LeFebvre - Removed commented out code to pass code review.
+# 10/20/16 - Santos - Removed code that stops procedure from running when guidance for current
+# advisory is not available and instead advises forecaster.
+# 11/3/2016: Santos - Addressed Code Review Comments.
+# 12/21/2016: Santos - Added option to adjust InundationMax from manually adjusted InundationTiming grid.
+# Also, when running with the PHISH or PETSS option, computes InundationMax from the composite max of
+# InundationTiming for consistency. Previously they were both retrieved independently from the model
+# source, and with smoothing this would result in minor differences between InundationMax and InundationTiming.
+# 01/08/2017: Modified BE CAREFUL line when alerting forecaster PSURGE data is still from a previous cycle.
+# 01/09/2017: Renamed UpdateInunMax in GUI for clarity. Also, introduced at the Jan 2017 SWiT the ability
+# for the procedure to force InundationMax values that are > 1 and < 1.5 to 1.5. This is because TCV rounds
+# to the nearest one foot for categorical HTI threat level consistency with the inundation graphic. Not doing
+# this would cause TCV to throw away zones that might have more than 3% coverage of inundation > 1 but less
+# than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not do because it would
+# then include zones in TCV with inundation forecasts of less than 1 but >= 0.5, overdoing the threat.
+# 07/20/2017: Enabled PETSS option for 2018. PS
+# 10/11/2017: LeFebvre - GFE: tool failed due to an old grid being present (DR 20309)
+# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETSS guidance out to 102 hours,
+# improved the UpdateInunMax option and made changes to the makeInundationTiming methods to accommodate
+# new TCs for the TPCSurgeProb and PETSS dbs.
+# 03/20/2018: Check in Pablo's fix.
+# 4/3/2018 - Additional fixes needed to enable Manual options to work out to 102 hours.
+# 9/18/2018 - Make ISC default and have 8-hour InundationMax grid created when running
+# ----------------------------------------------------------------------------
+
+##
+# This is an absolute override file, indicating that a higher priority version
+# of the file will completely replace a lower priority version of the file.
+##
+
+# The MenuItems list defines the GFE menu item(s) under which the
+# Procedure is to appear.
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards
+
+MenuItems = ["Populate"]
+
+import TropicalUtility, LogStream
+import SmartScript
+import numpy as np
+import TimeRange
+import AbsTime
+import time
+import sys
+
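+# Editor's note (illustrative, not part of the delivered procedure): every
+# VariableList entry below surfaces in execute() through varDict, keyed by the
+# entry's label string. A minimal sketch with hypothetical values:
+#
+#     varDict = {"Grid Smoothing?": "Yes",
+#                "Inundation Height:": 1.1}
+#     smoothThreatGrid = varDict["Grid Smoothing?"]    # -> "Yes"
+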
+VariableList = [("DEFAULT: Typical. Should only be changed in coordination with NHC SS Unit", "", "label"),
+                ("Forecast Confidence? - (Applies to PHISH/PETSS Only)", "Typical (10% Exceedance; for most systems anytime within 48 hours)",
+##               "radio", ["Low (Prob-only; 10% Exceedance; for ill behaved systems)",
+                 "radio", ["Typical (10% Exceedance; for most systems anytime within 48 hours)",
+                           "Medium (20% Exceedance; for well-behaved systems within 12 hours of event)",
+                           "High (30% Exceedance; for well-behaved systems within 6-12 hours of event)",
+                           "Higher (40% Exceedance; for well-behaved systems within 6 hours of the event)",
+                           "Highest (50% Exceedance; for well-behaved systems at time of the event)"]),
+                ("Grid Smoothing?", "Yes", "radio", ["Yes", "No"]),
+                ("Make grids from \nPHISH, PETSS, ISC, or Manually?", "ISC", "radio",
+                 ["PHISH", "PETSS", "ISC", "Manually Replace", "Manually Add", "UpdateInunMax (Edit Inundation Timing Grids)"]),
+                ("Manual Inundation settings: Time ranges below relative to advisory model cycle", "", "label"),
+                ("Inundation Height:", 1.1, "scale", [0.0, 3.0], 0.1),
+                ("Start Hour for Inundation Timing", 0, "scale", [0.0, 96.0], 6.0),
+                ("End Hour for Inundation Timing", 6, "scale", [0.0, 102.0], 6.0),
+                ]
+
+class Procedure (TropicalUtility.TropicalUtility):
+    def __init__(self, dbss):
+        TropicalUtility.TropicalUtility.__init__(self, dbss)
+
+    # Compute a base time for this guidance
+    def baseGuidanceTime(self):
+        startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600)
+        return startTime
+
+    # Method to get the average topography for each grid point
+    def getAvgTopoGrid(self, topodb):
+
+        siteID = self.getSiteID()
+#        print "********************\n TOPO IS: ", topodb
+        dbName = siteID + "_D2D_" + topodb
+
+        weName = "avgTopo"
+#        timeRange = TimeRange.allTimes().toJavaObj()
+        trList = self.GM_getWEInventory(weName, dbName, "SFC")
+
+        #print "NED Topo list is", trList
+
+        if len(trList) == 0:    # no topo grids available
+            return
+
+        for tr in trList:
+#            print "My time is", tr
+            topoGrid = self.getGrids(dbName, weName, "SFC", tr, mode="First")
+
+            # Convert topography from meters to feet
+            topoGrid /= 0.3048
+            min = -16000
+            max = 16000.0
+            mask2 = (topoGrid > max)
+            topoGrid[topoGrid < min] = -80
+            topoGrid[mask2] = self.getTopo()[mask2]
+            return topoGrid
+
+    # Make a time range of x hours duration from the current time
+    def makeNewTimeRange(self, hours):
+
+        cTime = int(self._gmtime().unixTime() / 3600) * 3600
+        startTime = AbsTime.AbsTime(cTime)
+        endTime = startTime + (hours * 3600)
+        timeRange = TimeRange.TimeRange(startTime, endTime)
+
+        return timeRange
+
+    # Method to find all database versions for the specified model
+    def getModelIDList(self, matchStr):
+
+        # Make a list of all available parameters
+        availParms = self.availableParms()
+
+        # Initialize a list of the database identifiers we want to keep
+        modelList = []
+
+        # Look through every parameter, then check the database id
+        for pName, level, dbID in availParms:
+            modelId = dbID.modelIdentifier()
+            if matchStr in modelId:
+                if modelId not in modelList:
+                    modelList.append(modelId)
+
+        return modelList
+
+    # Method to get the selected exceedance height data
+    def getExceedanceHeight(self, modelName, pctStr, level):
+
+        dbName = self.getSiteID() + "_D2D_" + modelName
+
+        modelIDList = self.getModelIDList(modelName)
+        modelIDList.sort()
+
+        if len(modelIDList) == 0:
+            return None
+
+        surgeModel = modelIDList[-1]
+
+        weName = "Surge" + pctStr + "Pct"
+        trList = self.GM_getWEInventory(weName, dbName, level)
+
+        if len(trList) == 0:    # No grids found for this database
+            return None
+
+        baseTime = self.baseGuidanceTime()
+
+        if baseTime > trList[0].startTime().unixTime():
+            #modelCycle = AbsTime.AbsTime(self.baseGuidanceTime() - (6*3600))
+            message = "BE CAREFUL: " + modelName + " IS STILL FROM A PREVIOUS ADVISORY/MODEL CYCLE"
+            self.statusBarMsg(message, "A")
+            #return None
+
+        #print "Retrieving ", weName, " at ", level
+        # Make a new time range to span all current data
+        timeRange = self.GM_makeTimeRange(trList[0].startTime().unixTime(),
+                                          trList[-1].endTime().unixTime())
+
+        grid = self.getGrids(dbName, weName, level, timeRange, mode="Max")
+
+#        for tr in trList:
+#            grid = self.getGrids(dbName, weName, level, tr, mode="Max")
+
+        # Convert current surge values from meters to feet
+        mask = (grid <= -100)
+        grid /= 0.3048
+        grid[mask] = -80.0
+#        grid[mask] = np.where(mask,surgeVal*3.28, np.float32(-80.0))
+
+        return grid    # surge heights in feet; -80.0 flags missing data
+
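+    # Editor's note (illustrative, not part of the delivered procedure): for a
+    # confidence choice of "Typical (10% Exceedance; ...)", pctStr is "10" and
+    # the parameter fetched above is "Surge10Pct" -- the surge height that has
+    # a 10% chance of being exceeded. A hypothetical call:
+    #
+    #     surgePctGrid = self.getExceedanceHeight("TPCSurgeProb", "10", "FHAG0")
+    #
+    # returns that exceedance grid in feet, with -80.0 flagging missing data.
+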
+    # Method to create the inundation timing grids
+    def makeInundationTiming(self, modelName, pctStr, level, smoothThreatGrid, mutableID, ssea, MHHWMask):
+
+        dbName = self.getSiteID() + "_D2D_" + modelName
+        weName = "Surge" + pctStr + "Pctincr"
+        #print "Attempting to retrieve: ", weName, level
+        # get the StormSurgeProb inventory
+        surgeTRList = self.GM_getWEInventory(weName, dbName, level)
+        if len(surgeTRList) == 0:
+            self.statusBarMsg("No PHISH grid found.", "U")
+            return None, None    # callers must check for None before using the results
+
+        # Make timeRanges for all 13 grids. Start with the beginning of the first PHISH grid
+        baseTime = int(surgeTRList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600)    # snap to 6 hour period
+        endTime = int(surgeTRList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600)    # snap to 6 hour period
+        if endTime < surgeTRList[-1].endTime().unixTime():
+            endTime += 6 * 3600
+        trList = self.makeTimingTRs(baseTime, endTime)
+
+        timingGrids = []
+
+        self.deleteAllGrids(["InundationTiming"])
+        for tr in trList:
+
+            if tr in surgeTRList:
+                phishGrid = self.getGrids(dbName, weName, level, tr)
+            else:
+                phishGrid = self.empty()
+
+            #
+            # For consistency we need to add smoothing here too as we do in execute.
+            #
+            if phishGrid is None:
+                self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S")
+                continue
+
+            #print "SmoothThreatGrid:", smoothThreatGrid
+            if smoothThreatGrid == "Yes":
+#                mask = np.greater(phishGrid, 0.0) & ssea
+#                phishGrid = np.where(np.greater(phishGrid, 0.0), self.GM_smoothGrid(phishGrid, 3, mask), phishGrid)
+                mask = (phishGrid > 0.0) & ssea
+                smoothedPhish = self.GM_smoothGrid(phishGrid, 3, mask)
+                phishGrid[mask] = smoothedPhish[mask]
+
+            # Convert units from meters to feet
+#            mask = (phishGrid <= -100)
+            grid = phishGrid / 0.3048
+#            grid[mask] = -80.0
+#            grid = np.where(phishGrid>-100, phishGrid*3.28, np.float32(-80.0)) # Convert units from meters to feet
+
+            grid.clip(0.0, 100.0, grid)
+            grid[~ssea] = 0.0
+            grid[MHHWMask] = 0.0
+            timingGrids.append(grid)
+            self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1)
+
+        return trList, timingGrids
+
+    def makeInundationMaxGrid(self, timingGrids, trList):
+
+        itCube = np.array(timingGrids)
+        maxGrid = np.amax(itCube, axis=0)
+
+        now = int(self._gmtime().unixTime() / 3600) * 3600
+        maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600)
+
+        self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange)
+
+        return maxGrid
+
+#**************************************************************************************
+# This procedure was written to extract MSL to NAVD corrections from the VDATUMS D2D
+# Database. It is not yet implemented because the VDATUMS database has not been
+# finalized.
+
+    def deleteAllGrids(self, weList):
+
+        for weName in weList:
+            trList = self.GM_getWEInventory(weName)
+            if len(trList) == 0:
+                continue
+            start = trList[0].startTime().unixTime()
+            end = trList[-1].endTime().unixTime()
+            tr = self.GM_makeTimeRange(start, end)
+
+            self.deleteCmd([weName], tr)
+
+        return
+
+    def getVDATUM(self, weName, limit):
+        siteID = self.getSiteID()
+        dbName = siteID + "_D2D_VDATUMS"
+
+        grid = self.getGrids(dbName, weName, "SFC", TimeRange.allTimes(),
+                             mode="First")
+
+        if grid is None:
+            msgStr = weName + " does not exist in the VDATUMS model. "
+            self.statusBarMsg(msgStr, "S")
+
+        mask = (grid <= limit)
+        grid /= 0.3048
+        grid[mask] = -80.0
+
+        # Converted from meters to feet
+        return grid
+
+# This procedure was written to extract MSL to NAVD88 corrections from the VDATUMS D2D
+# Database.
+
+    def getMSLtoNAVD(self):
+        return self.getVDATUM("MSLtoNAVD88", -0.40)
+
+
+# This procedure was written to extract MSL to MLLW corrections from the VDATUMS D2D
+# Database.
+
+    def getMSLtoMLLW(self):
+        return self.getVDATUM("MSLtoMLLW", 0.0)
+
+# This procedure was written to extract MSL to MHHW corrections from the VDATUMS D2D
+# Database.
+
+    def getMSLtoMHHW(self):
+        return self.getVDATUM("MSLtoMHHW", -3.09)
+
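+# Editor's note (hypothetical values, not part of the delivered procedure):
+# the getVDATUM-based helpers here all follow one pattern -- read a static
+# offset grid from the VDATUMS D2D database, flag points at or below the given
+# limit as missing, and convert meters to feet. For a single point:
+#
+#     offset_m = 0.67                        # raw VDATUMS offset, meters
+#     missing = offset_m <= limit            # limit differs per helper
+#     offset_ft = offset_m / 0.3048          # about 2.2 ft
+#     if missing: offset_ft = -80.0          # missing-data flag
+#
+# The -80.0 flag then propagates into every grid derived from the offset.
+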
+# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D
+# Database.
+
+    def getNAVDtoMLLW(self):
+        return self.getVDATUM("NAVD88toMLLW", -2.20)
+
+# This procedure was written to extract NAVD88 to MHHW corrections from the VDATUMS D2D
+# Database.
+
+    def getNAVDtoMHHW(self):
+        return self.getVDATUM("NAVD88toMHHW", -3.40)
+
+    # Copies the specified weather elements in elementList into the Fcst database.
+    def copyISCGridstoFcst(self, elementList, mutableID):
+
+        # Initialize all the grids we plan to return
+
+        surgePctGrid = None
+        surgePctGridMSL = None
+        surgePctGridMLLW = None
+        surgePctGridMHHW = None
+        surgePctGridNAVD = None
+
+        baseTime = self.baseGuidanceTime()
+
+        # Remove all the grids first before replacing them later
+
+        self.deleteCmd(elementList, TimeRange.allTimes())
+
+        # Ensure we're not fetching older ISC grids to avoid the ISC purge bug by
+        # fetching ISC grids within a specific window.
+        allTimes = TimeRange.allTimes()
+        iscStart = AbsTime.AbsTime(baseTime - (10 * 3600))    # 10 hours before the baseTime
+        iscEnd = allTimes.endTime()    # Latest time possible
+        ISCTRWindow = TimeRange.TimeRange(iscStart, iscEnd)
+
+        # Amended to distinguish when inundation grids are available but not datum ones.
+        for weName in elementList:
+            #print "Processing ISC ", weName
+            GridsCheck = True
+            iscWeName = weName + "nc"
+            # get the inventory for the ISC grids
+
+            try:
+                trList = self.GM_getWEInventory(iscWeName, "ISC", "SFC", ISCTRWindow)
+            except:
+                trList = []    # keep trList defined so the length check below is safe
+                GridsCheck = False
+
+            if len(trList) == 0:
+                GridsCheck = False
+
+            if (weName == "InundationMax" or weName == "InundationTiming") and not GridsCheck:
+                self.statusBarMsg("No inundation grids found in ISC database for " + iscWeName + ". Stopping. Revert Forecast db.", "S")
+                return None, None, None, None, None
+
+            if not GridsCheck:
+                self.statusBarMsg("No datum grids in ISC database for " + iscWeName + ". Proceeding without it.", "S")
+
+            # Make sure that the ISC grids are current
+            if GridsCheck:
+                if baseTime > trList[0].startTime().unixTime():
+                    if weName == "InundationMax" or weName == "InundationTiming":
+                        self.statusBarMsg("ISC grids for inundation element " + iscWeName + " are not current. They correspond to a previous cycle. Aborting. Revert Forecast db.", "S")
+                        return None, None, None, None, None
+                    else:
+                        self.statusBarMsg("ISC grids for datum element " + iscWeName + " are not current. They correspond to a previous cycle. Proceeding without it.", "S")
+                        GridsCheck = False
+
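+            # Editor's note (illustrative, not part of the delivered
+            # procedure): baseGuidanceTime() snaps "now minus two hours" down
+            # to the previous 6-hour synoptic cycle:
+            #
+            #     baseTime = int((now - 2*3600) / (6*3600)) * (6*3600)
+            #
+            # e.g. now = 2030Z gives baseTime = 1800Z, so an ISC grid whose
+            # inventory starts at 1200Z is treated above as a previous cycle.
+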
+            for tr in trList:
+                grid = self.getGrids("ISC", iscWeName, "SFC", tr)
+                if iscWeName == "InundationMaxnc" or iscWeName == "InundationTimingnc":
+                    grid.clip(0.0, 100.0, grid)
+                else:
+                    grid.clip(-30.0, 100.0, grid)
+
+                if iscWeName == "InundationTimingnc":
+                    self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2)
+                elif iscWeName == "InundationMaxnc":
+                    surgePctGrid = grid
+                    self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2)
+                elif iscWeName == "SurgeHtPlusTideMSLnc" and GridsCheck:
+                    surgePctGridMSL = grid
+                elif iscWeName == "SurgeHtPlusTideMLLWnc" and GridsCheck:
+                    surgePctGridMLLW = grid
+                elif iscWeName == "SurgeHtPlusTideMHHWnc" and GridsCheck:
+                    surgePctGridMHHW = grid
+                elif iscWeName == "SurgeHtPlusTideNAVDnc" and GridsCheck:
+                    surgePctGridNAVD = grid
+
+        return surgePctGrid, surgePctGridMSL, surgePctGridMLLW, surgePctGridMHHW, surgePctGridNAVD
+
+    # Make a list of timeRanges that will be used to make InundationTiming grids
+    def makeTimingTRs(self, baseTime, endTime):
+        # Make the inundation timing grids
+        trList = []
+        start = baseTime
+        end = baseTime + 6 * 3600
+        while end <= endTime:
+            tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end))
+            trList.append(tr)
+            start = end
+            end += 6 * 3600
+
+        return trList
+
+    def getTimingGrids(self):
+
+        baseTime = self.baseGuidanceTime()
+        endTime = baseTime + 102 * 3600
+        gridList = []
+        trList = self.makeTimingTRs(baseTime, endTime)
+
+        for tr in trList:
+            timingGrid = self.empty()
+            gridList.append(timingGrid)
+
+        return trList, gridList
+
+    def execute(self, varDict, editArea, timeRange):
+
+        t0 = time.time()
+
+        self._timeRange = timeRange
+
+        mutableID = self.mutableID()
+
+        # See if we should copy from ISC. If so, do the copy and exit
+        smoothThreatGrid = varDict["Grid Smoothing?"]
+
+        makeOption = varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"]
+        topodb = "NED"
+
+        ssea = self.encodeEditArea("StormSurgeWW_EditArea")
+
+        Topo = self.getAvgTopoGrid(topodb)
+
+        confidenceStr = varDict["Forecast Confidence? - (Applies to PHISH/PETSS Only)"]
+
+        # extract the percent value from this string
+        pctPos = confidenceStr.find("%")
+        pctStr = confidenceStr[pctPos - 2:pctPos]
+
+        threatWEName = "StormSurgeThreat"
+
+        #print "pctStr is: ", pctStr
+        surgePctGrid = None
+        surgePctGridMSL = None
+        surgePctGridMLLW = None
+        surgePctGridMHHW = None
+        surgePctGridNAVD = None
+
+        if makeOption == "PHISH" or makeOption == "PETSS":
+
+            # Now get the psurge
+            if makeOption == "PHISH":
+                modelName = "TPCSurgeProb"
+            else:
+                modelName = "PETSS"
+            surgePctGrid = self.getExceedanceHeight(modelName, pctStr, "FHAG0")
+            if surgePctGrid is None:
+                message = "No inundation data found for " + modelName
+                self.statusBarMsg(message, "S")
+                return
+
+            phishMask = ~ssea
+            surgePctGrid[phishMask] = 0.0
+            surgePctGridNAVD = self.getExceedanceHeight(modelName, pctStr, "SFC")
+            if surgePctGridNAVD is None:
+                message = "No Surge plus Tide NAVD data found for " + modelName
+                self.statusBarMsg(message, "S")
+                return
+
+            surgePctGridNAVD[phishMask] = -80.0
+
+            #
+            # The following lines are the gridded vdatum corrections.
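+            # Editor's note: the derivations below reduce to per-point
+            # arithmetic (hypothetical numbers, all heights in feet):
+            #
+            #     surgePctGridMSL  = surgePctGridNAVD - msltonavd
+            #     surgePctGridMLLW = surgePctGridNAVD + navdtomllw
+            #     surgePctGridMHHW = surgePctGridNAVD + navdtomhhw
+            #
+            # e.g. a NAVD88 value of 6.0 ft with navdtomllw = 2.3 ft yields
+            # 8.3 ft above MLLW; any point where either input is -80.0 is
+            # kept at the -80.0 missing-data flag.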
+            #
+            msltonavd = self.getMSLtoNAVD()
+            msltomllw = self.getMSLtoMLLW()
+            msltomhhw = self.getMSLtoMHHW()
+            navdtomllw = self.getNAVDtoMLLW()
+            navdtomhhw = self.getNAVDtoMHHW()
+
+            # Apply a 3x3 smoother within the surge zone for values greater than 1,
+            # so as not to underplay areas adjacent to zero-value pixels.
+            # If you apply a smoother, for consistency among storm surge plus tide and derived
+            # grids, it must be done here.
+            if smoothThreatGrid == "Yes":
+                #mask = np.greater(surgePctGrid, 0.0) & ssea
+                #surgePctGrid = np.where(np.greater(surgePctGrid, 0.0), self.GM_smoothGrid(surgePctGrid,3, mask), surgePctGrid)
+
+#                mask = np.greater(surgePctGridNAVD, -10.0) & ssea
+#                surgePctGridNAVD = np.where(np.greater(surgePctGridNAVD, -10.0), self.GM_smoothGrid(surgePctGridNAVD,3, mask), surgePctGridNAVD)
+
+                mask = (surgePctGridNAVD > -10.0) & ssea
+                surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, mask)
+
+#            surgePctGridMSL= np.where(mask1, surgePctGridNAVD - msltonavd, np.float32(-80.0)) # MSL Grid
+            navdMask = (surgePctGridNAVD > -80.0)
+            mask = (msltonavd > -80.0) & navdMask & ssea
+
+            # MSL Grid
+            surgePctGridMSL = surgePctGridNAVD - msltonavd
+            surgePctGridMSL[~mask] = -80.0
+
+#            surgePctGridMLLW = np.where(np.greater(navdtomllw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \
+#                                        surgePctGridNAVD + navdtomllw, np.float32(-80.0)) # MLLW Grid
+
+            # MLLW Grid
+            mask = (navdtomllw > -80.0) & navdMask
+            surgePctGridMLLW = surgePctGridNAVD + navdtomllw
+            surgePctGridMLLW[~mask] = -80.0
+
+#            surgePctGridMHHW = np.where(np.greater(navdtomhhw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \
+#                                        surgePctGridNAVD + navdtomhhw, np.float32(-80.0)) # MHHW Grid
+
+            # MHHW Grid
+            mask = (navdtomhhw > -80.0) & navdMask
+            surgePctGridMHHW = surgePctGridNAVD + navdtomhhw
+            surgePctGridMHHW[~mask] = -80.0
+
+#            surgeDiffMLLWMHHW = np.where(np.greater(surgePctGridMLLW,-80.0) & np.greater(surgePctGridMHHW, -80.0), \
+#                                         surgePctGridMLLW-surgePctGridMHHW, np.float32(-80.0)) # Diff Grid Between MLLW and MHHW
+
+            # Diff grid between MLLW and MHHW (i.e., the tidal range)
+            mask = (surgePctGridMLLW > -80.0) & (surgePctGridMHHW > -80.0)
+            surgeDiffMLLWMHHW = surgePctGridMLLW - surgePctGridMHHW
+            surgeDiffMLLWMHHW[~mask] = -80.0
+
+            # Mask of points where surge plus tide does not rise above MHHW
+            MHHWMask = surgePctGridMHHW <= 0.0
+
+            #surgePctGrid[MHHWMask] = 0.0
+
+            trList, timingGrids = self.makeInundationTiming(modelName, pctStr, "FHAG0", smoothThreatGrid, mutableID, ssea, MHHWMask)
+            if trList is None:    # no PHISH/PETSS increment grids were available
+                return
+
+            # surgePctGrid and InundationMax recomputed from the InundationTiming sequence for consistency
+            surgePctGrid = self.makeInundationMaxGrid(timingGrids, trList)
+
+        elif makeOption == "ISC":
+
+            elementList = ["InundationMax", "InundationTiming", "SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW",
+                           "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW"]
+            surgePctGrid, surgePctGridMSL, surgePctGridMLLW, surgePctGridMHHW, surgePctGridNAVD = self.copyISCGridstoFcst(elementList, mutableID)
+            # If either InundationMax or InundationTiming is missing, copyISCGridstoFcst
+            # stops the procedure altogether and notifies the forecaster.
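+            # Editor's note: ISC copies of these elements carry an "nc"
+            # suffix, which is why copyISCGridstoFcst queries
+            #
+            #     iscWeName = weName + "nc"    # e.g. "InundationMaxnc"
+            #
+            # A None surgePctGrid below means the mandatory inundation
+            # elements were missing or stale; missing datum elements only
+            # skip their corresponding SurgeHtPlusTide* grids.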
+            if surgePctGrid is None:
+                return
+
+        elif makeOption == "Manually Replace" or makeOption == "Manually Add":
+
+            inundationHeight = float(varDict["Inundation Height:"])
+            inunStartHour = float(varDict["Start Hour for Inundation Timing"])
+            inunEndHour = float(varDict["End Hour for Inundation Timing"])
+
+            selectedMask = self.encodeEditArea(editArea)
+            if not selectedMask.any():
+                self.statusBarMsg("Please define an area over which to assign the inundation values.", "S")
+                return
+
+            # Calculate the intersection of the StormSurgeEditArea and the selected editArea
+            modifyMask = selectedMask & ssea
+            if not modifyMask.any():
+                self.statusBarMsg("Please define an area that intersects the StormSurgeEditArea to assign the inundation values.", "S")
+                return
+
+            if inunStartHour >= inunEndHour:
+                self.statusBarMsg("Please define the end hour after the start hour.", "S")
+                return
+
+            surgePctGrid = self.empty()
+
+            # Fetch the old grids if we're adding
+            if makeOption == "Manually Add":
+                imTRList = self.GM_getWEInventory("InundationMax", mutableID, "SFC")
+                if len(imTRList) > 0:
+                    imTR = imTRList[0]
+                    surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR)
+
+            surgePctGrid[modifyMask] = inundationHeight
+
+            # Make the timing grids
+            baseTime = self.baseGuidanceTime()
+            if makeOption == "Manually Replace":    # Make new grids and replace all IT grids
+                trList, timingGrids = self.getTimingGrids()
+
+                for i in range(len(trList)):
+                    # only modify grids in the specified time range
+                    start = trList[i].startTime().unixTime()
+                    end = trList[i].endTime().unixTime()
+
+                    if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour:
+                        timingGrids[i] = surgePctGrid    # populate only where needed
+
+                timeRange = TimeRange.allTimes()
+                self.deleteCmd(["InundationTiming"], timeRange)
+                for i in range(len(trList)):
+                    timingGrids[i].clip(0.0, 100.0, timingGrids[i])
+                    self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i])
+
+            elif makeOption == "Manually Add":    # Just replace the selected grid points over the selected time
+                # Fetch the existing IT grids
+                itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC")
+                if len(itTRList) == 0:
+                    self.statusBarMsg("No InundationTiming grids found at all.", "S")
+                    return
+
+                # Fetch the grids
+                itGrids = []
+                trList = []
+                for tr in itTRList:
+                    start = tr.startTime().unixTime()
+                    end = tr.endTime().unixTime()
+                    #print "Checking tr:", tr
+                    if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour:
+                        grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr)
+                        itGrids.append(grid)
+                        trList.append(tr)
+
+                if len(itGrids) == 0:
+                    self.statusBarMsg("No InundationTiming grids found for selected start and end hours.", "S")
+                    return
+
+                # Surgically insert grid values into the InundationTiming grids over the selected hours
+                for i in range(len(trList)):
+                    itGrids[i][modifyMask] = inundationHeight    # poke in the values
+
+                    self.createGrid(mutableID, "InundationTiming", "SCALAR", itGrids[i], trList[i])
+
+                timingGrids = []
+                for tr in itTRList:
+                    grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr)
+                    grid[~ssea] = 0.0
+                    timingGrids.append(grid)
+
+                surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList)
+
+        elif makeOption == "UpdateInunMax (Edit Inundation Timing Grids)":
+
+            self.deleteAllGrids(["InundationMax", "SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW",
+                                 "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW"])
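+            # Editor's note: this option recomputes InundationMax as the
+            # pointwise maximum of the (possibly hand-edited) InundationTiming
+            # sequence; for grids g1..gN, makeInundationMaxGrid below computes
+            #
+            #     maxGrid = np.amax(np.array([g1, ..., gN]), axis=0)
+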
"SurgeHtPlusTideMLLW"]) + + itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") + + if len(itTRList) == 0: + self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") + return + + timingGrids = [] + + # Fetch all the timing grids + for tr in itTRList: + grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) + grid[~ssea] = 0.0 + timingGrids.append(grid) + self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) + self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) + + # Finally create the surge grid which will be saved as the InundationMax + + surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) + + #return + # Done with manual options + +# Next line introduced on Jan 2017 SWiT. It forces points in InundationMax that are > 1 and < 1.5 to 1.5. This is because TCV rounds to +# nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that +# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not +# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. + + surgePctGrid[(surgePctGrid > 1.0) & (surgePctGrid < 1.5)] = 1.5 + + threatKeys = self.getDiscreteKeys(threatWEName) + + # Define a mapping between UI names and key names + # keyMap = {"Very Low" :"Very Low", + keyMap = {"Elevated" : "Elevated", + "Moderate" : "Mod", + "High" : "High", + "Extreme" : "Extreme", + } + + threshDict = {} # a dict to store thresholds from the UI + + for key in keyMap.keys(): + + if keyMap[key] == "Extreme": + threshDict[keyMap[key]] = 9 + elif keyMap[key] == "High": + threshDict[keyMap[key]] = 6 + elif keyMap[key] == "Mod": + threshDict[keyMap[key]] = 3 + elif keyMap[key] == "Elevated": + threshDict[keyMap[key]] = 1 + + #print "threshDict[keyMap[key]]: ", keyMap[key], threshDict[keyMap[key]] + + # make a timeRange - 6 hours long + elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] + + # make a new timeRange that will be used to create new grids + timeRange = self.makeNewTimeRange(8) + + # Remove old guidance grids and replace them with the new grids + # Delete the old grids first + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime - 48*3600) + endTime = startTime + 240*3600 + deleteTimeRange = TimeRange.TimeRange(startTime, endTime) + + for elem in elementList: + self.deleteCmd([elem], deleteTimeRange) + + if makeOption != "Manually Replace" and makeOption != "Manually Add" and makeOption != "UpdateInunMax (Edit Inundation Timing Grids)": + if surgePctGridMSL is not None: + surgePctGridMSL.clip(-30.0, 100.0, surgePctGridMSL) + self.createGrid(mutableID, "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL, + timeRange, precision=2) + if surgePctGridMLLW is not None: + surgePctGridMLLW.clip(-30.0, 100.0, surgePctGridMLLW) + self.createGrid(mutableID, "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW, + timeRange, precision=2) + if surgePctGridNAVD is not None: + surgePctGridNAVD.clip(-30.0, 100.0, surgePctGridNAVD) + self.createGrid(mutableID, "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD, + timeRange, precision=2) + if surgePctGridMHHW is not None: + 
+                surgePctGridMHHW.clip(-30.0, 100.0, surgePctGridMHHW)
+                self.createGrid(mutableID, "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW,
+                                timeRange, precision=2)
+
+        # Make the threat grid, starting from all zeros
+        coastalThreat = self.empty(np.int8)
+        surgePctGrid.clip(0.0, 100.0, surgePctGrid)
+        self.createGrid(mutableID, "InundationMax", "SCALAR", surgePctGrid, timeRange, precision=2)
+
+        # Yet another list to define the order in which we set grid values
+        # This order must be ranked lowest to highest
+        #keyList = ["Very Low", "Elevated", "Mod", "High", "Extreme"]
+        keyList = ["Elevated", "Mod", "High", "Extreme"]
+
+        # Set the grid values based on the surgePctGrid grid and thresholds
+        for key in keyList:
+            #print "THRESHOLD FOR KEY IS: ", key, threshDict[key]
+            thresh = threshDict[key]
+            keyIndex = self.getIndex(key, threatKeys)
+            #coastalMask = ssea & np.greater_equal(surgePctGrid, thresh)
+            coastalMask = ssea & np.greater(surgePctGrid, thresh)
+            coastalThreat[coastalMask] = keyIndex
+
+        # create the CoastalThreat Grid
+        self.createGrid(mutableID, threatWEName, "DISCRETE",
+                        (coastalThreat, threatKeys), timeRange,
+                        discreteKeys=threatKeys,
+                        discreteOverlap=0,
+                        discreteAuxDataLength=2,
+                        defaultColorTable="Hazards")
+
+        t1 = time.time()
+        LogStream.logEvent("Finished TCStormSurgeThreat in %.4f ms" % ((t1-t0) * 1000))
+
+        return
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.gfe/plugin.xml b/cave/com.raytheon.viz.gfe/plugin.xml index 30d2545ec9..94d26972f1 100644 --- a/cave/com.raytheon.viz.gfe/plugin.xml +++ b/cave/com.raytheon.viz.gfe/plugin.xml @@ -2012,61 +2012,6 @@ recursive="true" value="gfe/weGroups"> [55 deleted plugin.xml lines; element content not preserved in this extraction] diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridDataListing.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridDataListing.java index 583ffb4898..3226007bdd 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridDataListing.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridDataListing.java @@ -33,8 +33,6 @@ import com.raytheon.uf.common.datalisting.DataListing; import com.raytheon.uf.common.datalisting.impl.DefaultDataListing; import com.raytheon.uf.common.dataplugin.grid.GridConstants; import com.raytheon.uf.common.dataplugin.grid.GridRecord; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.dataplugin.level.Level; import com.raytheon.uf.common.dataplugin.level.LevelFactory; import com.raytheon.uf.common.dataplugin.level.MasterLevel; @@ -96,14 +94,8 @@ public class GridDataListing extends DefaultDataListing { protected Map getFormattedValues(String key, Collection values) { if (GridConstants.DATASET_ID.equals(key)) { Map formatted = new LinkedHashMap<>(); - DatasetInfoLookup lookup = DatasetInfoLookup.getInstance(); for (String value : values) { - DatasetInfo info = lookup.getInfo(value); - if (info == null) { - formatted.put(value, value); - } else { - formatted.put(value, info.getTitle() + " (" + value + ")"); - } + formatted.put(value, value); } return sortByValue(formatted); } else if (GridInventory.PARAMETER_QUERY.equals(key)) { diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridProductBrowserDataFormatter.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridProductBrowserDataFormatter.java index eb1424e172..8eeb6161c6 100644 ---
a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridProductBrowserDataFormatter.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/GridProductBrowserDataFormatter.java @@ -26,8 +26,6 @@ import java.util.Comparator; import java.util.List; import java.util.Map; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.dataplugin.level.Level; import com.raytheon.uf.common.dataplugin.level.LevelFactory; import com.raytheon.uf.common.dataplugin.level.MasterLevel; @@ -69,16 +67,9 @@ public class GridProductBrowserDataFormatter { String[] parameters) { List labels = new ArrayList(); if (GridInventory.MODEL_NAME_QUERY.equals(param)) { - DatasetInfoLookup lookup = DatasetInfoLookup.getInstance(); for (int i = 0; i < parameters.length; i++) { - DatasetInfo info = lookup.getInfo(parameters[i]); - if (info == null) { - labels.add(new ProductBrowserLabel(parameters[i], - parameters[i])); - } else { - labels.add(new ProductBrowserLabel(info.getTitle() + " (" - + parameters[i] + ")", parameters[i])); - } + labels.add(new ProductBrowserLabel(parameters[i], + parameters[i])); } Collections.sort(labels); return labels; diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java index f3051566f7..5395d4c563 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java @@ -25,8 +25,8 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; @@ -64,6 +64,7 @@ import com.raytheon.viz.grid.GridExtensionManager; * Mar 03, 2016 5439 bsteffen Allow grid derived parameters from edex * Aug 15, 2017 6332 bsteffen Move radar specific logic to extension * Aug 23, 2017 6125 bsteffen Split common updating code to GridInventoryUpdater. + * Nov 30, 2018 7673 bsteffen Prevent full queue from blocking. 
* * * @@ -121,8 +122,7 @@ public class GridUpdater extends GridInventoryUpdater { private final Map> updateMap = new HashMap<>(); - private final BlockingQueue uriUpdateQueue = new ArrayBlockingQueue<>( - 512); + private final BlockingQueue uriUpdateQueue = new LinkedBlockingQueue<>(); private final Job sendDerivedAlerts = new Job( "Sending Derived Grid Alerts") { diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/VizGridInventory.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/VizGridInventory.java index 9bbc3d8685..d061370748 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/VizGridInventory.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/VizGridInventory.java @@ -33,8 +33,6 @@ import java.util.Set; import com.raytheon.uf.common.dataplugin.grid.GridConstants; import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants; import com.raytheon.uf.common.dataplugin.grid.GridInfoRecord; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.dataplugin.grid.derivparam.CommonGridInventory; import com.raytheon.uf.common.dataplugin.grid.derivparam.GridInventoryUpdater; import com.raytheon.uf.common.dataplugin.grid.derivparam.cache.CoverageUtils; @@ -212,7 +210,6 @@ public class VizGridInventory extends CommonGridInventory protected DataTree createBaseTree() throws DataCubeException { DataTree newTree = super.createBaseTree(); initGatherModels(newTree); - initAliasModels(newTree); GridExtensionManager.addToBaseTree(newTree, derParLibrary); return newTree; } @@ -239,74 +236,6 @@ public class VizGridInventory extends CommonGridInventory return rval; } - /** - * Prepare an alias map, from a modelName to all modelNames that it - * includes, from highest res to lowest res - * - * @param newGridTree - */ - private void initAliasModels(DataTree newGridTree) { - sourceAliases.clear(); - DatasetInfoLookup lookup = DatasetInfoLookup.getInstance(); - for (String modelName : newGridTree.getSources()) { - DatasetInfo info = lookup.getInfo(modelName); - if (info != null && info.getAlias() != null) { - SourceNode source = newGridTree.getSourceNode(modelName); - SourceNode dest = newGridTree.getSourceNode(info.getAlias()); - if (source != null && dest != null) { - List aliases = sourceAliases.get(dest.getValue()); - if (aliases == null) { - aliases = new ArrayList<>(); - aliases.add(dest.getValue()); - sourceAliases.put(dest.getValue(), aliases); - } - aliases.add(source.getValue()); - } - } - } - for (Entry> aliases : sourceAliases.entrySet()) { - Collections.sort(aliases.getValue(), new Comparator() { - - @Override - public int compare(String model1, String model2) { - try { - // attempt to figure out which model is the highest - // resolution. 
- Collection coverages1 = CoverageUtils - .getInstance().getCoverages(model1); - Collection coverages2 = CoverageUtils - .getInstance().getCoverages(model2); - if (coverages1.isEmpty()) { - return 1; - } else if (coverages2.isEmpty()) { - return -1; - } - double total1 = 0; - double total2 = 0; - for (GridCoverage coverage : coverages1) { - total1 += coverage.getDx(); - total1 += coverage.getDy(); - } - for (GridCoverage coverage : coverages2) { - total2 += coverage.getDx(); - total2 += coverage.getDy(); - } - Double res1 = total1 / coverages1.size(); - Double res2 = total2 / coverages2.size(); - return res1.compareTo(res2); - } catch (DataCubeException e) { - statusHandler.handle(Priority.PROBLEM, - "Unable to create model aliases, problems with " - + model1 + " and " + model2, - e); - return 0; - } - } - - }); - } - } - public Set getAvailableLevels(Map query) { Set levels = new HashSet<>(); List nodes = evaluateRequestConstraints(query); diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java index f4e92f6b5f..6f07997d53 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/rsc/general/D2DGridResource.java @@ -35,8 +35,6 @@ import org.opengis.referencing.operation.TransformException; import com.raytheon.uf.common.dataplugin.PluginDataObject; import com.raytheon.uf.common.dataplugin.grid.GridRecord; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.datastorage.Request; import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.geospatial.MapUtil; @@ -314,13 +312,8 @@ public class D2DGridResource extends GridResource } } LegendParameters legendParams = new LegendParameters(); - DatasetInfo info = DatasetInfoLookup.getInstance() - .getInfo(record.getDatasetId()); - if (info == null) { - legendParams.model = record.getDatasetId(); - } else { - legendParams.model = info.getTitle(); - } + + legendParams.model = record.getDatasetId(); legendParams.level = record.getLevel(); legendParams.parameter = record.getParameter().getName(); legendParams.ensembleId = record.getEnsembleId(); diff --git a/cave/com.raytheon.viz.hydro/plugin.xml b/cave/com.raytheon.viz.hydro/plugin.xml index e0603e625c..0268062614 100644 --- a/cave/com.raytheon.viz.hydro/plugin.xml +++ b/cave/com.raytheon.viz.hydro/plugin.xml @@ -957,21 +957,6 @@ - - - - - - * * @author mpduff @@ -367,12 +369,21 @@ public class XmrgResource extends } } sampleData = new ArrayList(data.length); - for (short s : data) { - float f = (float) Math.floor(cvt.convert(s)); - buf.put(f); - // mm/100 to inch - sampleData.add(s * 0.03937f / 100); + // Map <0 and 0 to the first two color segments of the color scale, respectively + if (s < 0) { + buf.put(0.0f); + sampleData.add(0.0f); + } else if (s == 0) { + buf.put(1.0f); + sampleData.add(0.0f); + } else { + // Map values >0 to appropriate color bar segment + float f = (float) Math.floor(cvt.convert(s)); + buf.put(f); + // mm/100 to inch + sampleData.add(s * 0.03937f / 100); + } } buf.rewind(); @@ -408,15 +419,13 @@ public class XmrgResource extends buf = FloatBuffer.allocate(data.length); sampleData = new ArrayList(data.length); for (short s : data) { + // Map <0 and 0 to the first two color segments of the color 
scale, respectively if (s < 0) { buf.put(0.0f); sampleData.add(0.0f); - } else if (s > 0 && s < 25) { - short ns = 10; - float f = (short) cvt.convert(ns); - buf.put(f); - // mm/100 to inch - sampleData.add(s * 0.03937f / 100); + } else if (s == 0) { + buf.put(1.0f); + sampleData.add(0.0f); } else { float f = (float) Math.floor(cvt.convert(s)); buf.put(f); @@ -621,9 +630,22 @@ public class XmrgResource extends data = xmrg.getData(); } buf = FloatBuffer.allocate(data.length); + sampleData = new ArrayList(data.length); for (short s : data) { - float f = (float) Math.floor(cvt.convert(s)); - buf.put(f); + // Map <0 and 0 to the first two color segments of the color scale, respectively + if (s < 0) { + buf.put(0.0f); + sampleData.add(0.0f); + } else if (s == 0) { + buf.put(1.0f); + sampleData.add(0.0f); + } else { + // Map values >0 to appropriate color bar segment + float f = (float) Math.floor(cvt.convert(s)); + buf.put(f); + // mm/100 to inch + sampleData.add(s * 0.03937f / 100); + } } buf.rewind(); Rectangle extent = xmrg.getHrapExtent(); diff --git a/cave/com.raytheon.viz.lpi/localization/bundles/maps/88Ds.xml b/cave/com.raytheon.viz.lpi/localization/bundles/maps/88Ds.xml deleted file mode 100644 index 573707b3d8..0000000000 --- a/cave/com.raytheon.viz.lpi/localization/bundles/maps/88Ds.xml +++ /dev/null @@ -1,51 +0,0 @@ [deleted map bundle XML (51 lines); recoverable fields: PLAN_VIEW display, 88D.lpi, "WSR-88D Station Locs"] \ No newline at end of file diff --git a/cave/com.raytheon.viz.lpi/localization/bundles/maps/FAA/volcanoes.xml b/cave/com.raytheon.viz.lpi/localization/bundles/maps/FAA/volcanoes.xml deleted file mode 100644 index 07bb898eb5..0000000000 --- a/cave/com.raytheon.viz.lpi/localization/bundles/maps/FAA/volcanoes.xml +++ /dev/null @@ -1,51 +0,0 @@ [deleted map bundle XML (51 lines); recoverable fields: PLAN_VIEW display, volcanoes.lpi, "Volcano Locations"] \ No newline at end of file diff --git a/cave/com.raytheon.viz.lpi/localization/bundles/maps/fireWxStations.xml b/cave/com.raytheon.viz.lpi/localization/bundles/maps/fireWxStations.xml deleted file mode 100644 index a5fa1ecff8..0000000000 --- a/cave/com.raytheon.viz.lpi/localization/bundles/maps/fireWxStations.xml +++ /dev/null @@ -1,51 +0,0 @@ [deleted map bundle XML (51 lines); recoverable fields: PLAN_VIEW display, fireWxSta.lpi, "Fire Wx Stations"] \ No newline at end of file diff --git a/cave/com.raytheon.viz.lpi/localization/bundles/maps/spotters.xml b/cave/com.raytheon.viz.lpi/localization/bundles/maps/spotters.xml deleted file mode 100644 index 2f58e6f8ba..0000000000 --- a/cave/com.raytheon.viz.lpi/localization/bundles/maps/spotters.xml +++ /dev/null @@ -1,51 +0,0 @@ [deleted map bundle XML (51 lines); recoverable fields: PLAN_VIEW display, spotters.lpi, "Spotters"] \ No newline at end of file diff --git a/cave/com.raytheon.viz.mpe.ui/plugin.xml b/cave/com.raytheon.viz.mpe.ui/plugin.xml index 4fb51a657b..36d075b7f7 100644 --- a/cave/com.raytheon.viz.mpe.ui/plugin.xml +++ b/cave/com.raytheon.viz.mpe.ui/plugin.xml @@ -1847,22 +1847,6 @@ [16 deleted plugin.xml lines; element content not preserved in this extraction] diff --git a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/actions/OtherPrecipOptions.java b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/actions/OtherPrecipOptions.java index 2cf55ab5f1..7b9bfc4e36 100644 --- a/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/actions/OtherPrecipOptions.java +++ b/cave/com.raytheon.viz.mpe.ui/src/com/raytheon/viz/mpe/ui/actions/OtherPrecipOptions.java @@ -44,8 +44,6 @@ import com.raytheon.viz.mpe.util.MakeMat; import com.raytheon.viz.mpe.util.MakeRsel; import
com.raytheon.viz.mpe.util.QCStations; import com.raytheon.viz.mpe.util.QCTStations; -import com.raytheon.viz.mpe.util.ReadFreezingStationList; -import com.raytheon.viz.mpe.util.ReadTemperatureStationList; import com.raytheon.viz.mpe.util.RenderPcp; import com.raytheon.viz.mpe.util.RenderT; import com.raytheon.viz.mpe.util.RenderT6; @@ -67,6 +65,7 @@ import com.raytheon.viz.mpe.util.WriteQPFGrids; * been retrieved. * Dec 15, 2017 6547 bkowal Remove unnecessary adjustment now that the larger underlying problem * has been resolved. + * Oct 3, 2018 7496 smanoj Fix temperature and freezing station size error. * * * @@ -684,9 +683,7 @@ public class OtherPrecipOptions { } else if (clientdata == 2) { int m; - int num_zstations; - ReadFreezingStationList rfl = new ReadFreezingStationList(); - num_zstations = rfl.getNumZstations(); + int num_zstations = DailyQcUtils.freezing_stations.size(); RenderZ rz = new RenderZ(); /* render Grids and MAZ for four 6hr precipitation */ @@ -823,8 +820,7 @@ public class OtherPrecipOptions { // logMessage ("Gridding temperature and building MATs"); - ReadTemperatureStationList rt = new ReadTemperatureStationList(); - int num_tstations = rt.getNumTstations(); + int num_tstations = DailyQcUtils.temperature_stations.size(); BadTValues bt = new BadTValues(); bt.update_bad_tvalues(DailyQcUtils.pcpn_day); diff --git a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/AbstractPrismDataReader.java b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/AbstractPrismDataReader.java index 130d666ea6..f8e683af48 100644 --- a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/AbstractPrismDataReader.java +++ b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/AbstractPrismDataReader.java @@ -39,7 +39,8 @@ import com.raytheon.viz.mpe.core.MPEDataManager; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Sep 29, 2017 6407 bkowal Initial creation - * + * Sep 26, 2018 7482 smanoj Fix the issue with data conversion + * * * * @author bkowal @@ -123,8 +124,8 @@ public abstract class AbstractPrismDataReader { + " does not contain sufficient data. Expected data length = " + (MaxX * MaxY) + "; actual data length = " + xmrgFile.getData().length + "."); - } - + } + final short[] data = xmrgFile.getData(); int index = 0; for (int i = MaxY - 1; i >= 0; i--) { @@ -137,7 +138,8 @@ public abstract class AbstractPrismDataReader { } else { f = (float) dataToImage.convert(s); } - float aa = (float) (Math.floor((f * 10))); + + float aa = (float) (Math.floor((f * getconvFactor()))); int bb = (int) aa; destination[monthIndex][i][j] = bb; } @@ -145,7 +147,11 @@ public abstract class AbstractPrismDataReader { return null; } - + + protected abstract float getconvFactor(); + protected abstract float handleNegativeValue( final UnitConverter dataToImage, final short value); + + } \ No newline at end of file diff --git a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/DailyQcUtils.java b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/DailyQcUtils.java index 606bb4c12b..99af2929e7 100644 --- a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/DailyQcUtils.java +++ b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/DailyQcUtils.java @@ -88,7 +88,8 @@ import com.vividsolutions.jts.geom.Coordinate; * after closing and reopening DQC. * Dec 15, 2017 6547 bkowal Remove unnecessary adjustment now that the larger underlying problem * has been resolved. 
- * Jan 24, 2018 6547 bkowal Adjust station quality function index order. + * Jan 24, 2018 6547 bkowal Adjust station quality function index order. + * Sep 26, 2018 7482 smanoj Fix the issue with Month in the filenames * * * @@ -1831,7 +1832,7 @@ public class DailyQcUtils { dbuf = String.format("%s%s_%04d%02d%02d", zgrid_file, ztimefile[dqcTimeStringIndex][k], otime.get(Calendar.YEAR), - otime.get(Calendar.MONTH + 1), + otime.get(Calendar.MONTH) + 1, otime.get(Calendar.DAY_OF_MONTH)); num = 100 + (m * 4) + kk; @@ -1882,7 +1883,7 @@ public class DailyQcUtils { dbuf = String.format("%s%s_%04d%02d%02d", tgrid_file, ttimefile[dqcTimeStringIndex][k], otime.get(Calendar.YEAR), - otime.get(Calendar.MONTH + 1), + otime.get(Calendar.MONTH) + 1, otime.get(Calendar.DAY_OF_MONTH)); if (k < 4) { diff --git a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyPrecip.java b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyPrecip.java index 2ee5272b5d..3b6ba50ad2 100644 --- a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyPrecip.java +++ b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyPrecip.java @@ -36,6 +36,8 @@ import javax.measure.unit.SI; * Feb 3, 2015 16993 snaples fixed color scale data conversion issue. * Mar 2, 2015 15660 snaples Fixed problem with color scale using wrong values. Causing grids to be all zeros. * Oct 03, 2017 6407 bkowal Cleanup. Updated to extend {@link AbstractPrismDataReader}. + * Sep 27, 2018 7482 smanoj Fix the issue with data conversion + * * * * @author snaples @@ -46,7 +48,9 @@ public class MeanMonthlyPrecip extends AbstractPrismDataReader { private final String VERSION = "111511"; private static Isoh isoh; - + + private static final float convFactor = 25.4f; + public MeanMonthlyPrecip() { super(NonSI.INCH, SI.MILLIMETER); } @@ -115,4 +119,9 @@ public class MeanMonthlyPrecip extends AbstractPrismDataReader { short value) { return 0; } + + @Override + protected float getconvFactor(){ + return convFactor; + } } \ No newline at end of file diff --git a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyTemp.java b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyTemp.java index 81f99d1c93..cb2fb5a436 100644 --- a/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyTemp.java +++ b/cave/com.raytheon.viz.mpe/src/com/raytheon/viz/mpe/util/MeanMonthlyTemp.java @@ -34,6 +34,7 @@ import javax.measure.unit.NonSI; * Apr 16, 2012 mgamazaychik DR9602 - changed how max and min * temperature data are read from PRISM * Oct 03, 2017 6407 bkowal Cleanup. Updated to extend {@link AbstractPrismDataReader}. 
+ * Sep 27, 2018 7482 smanoj Fix the issue with data conversion * * * @@ -43,7 +44,9 @@ import javax.measure.unit.NonSI; public class MeanMonthlyTemp extends AbstractPrismDataReader { private static MaxMin maxmin; - + + private static final float convFactor = 10.0f; + public MeanMonthlyTemp() { super(NonSI.FAHRENHEIT, NonSI.FAHRENHEIT.divide(10)); } @@ -126,4 +129,9 @@ public class MeanMonthlyTemp extends AbstractPrismDataReader { return (float) dataToImage.convert(value); } } + + @Override + protected float getconvFactor(){ + return convFactor; + } } \ No newline at end of file diff --git a/cave/com.raytheon.viz.product.awips/icons/cave-icon.icns b/cave/com.raytheon.viz.product.awips/icons/cave-icon.icns new file mode 100644 index 0000000000..2b519a39f8 Binary files /dev/null and b/cave/com.raytheon.viz.product.awips/icons/cave-icon.icns differ diff --git a/cave/com.raytheon.viz.product.awips/icons/cave.png b/cave/com.raytheon.viz.product.awips/icons/cave.png new file mode 100644 index 0000000000..d3889baf1a Binary files /dev/null and b/cave/com.raytheon.viz.product.awips/icons/cave.png differ diff --git a/cave/com.raytheon.viz.radar/localization/menus/radar/dualPol/baseRadarFourPanel.xml b/cave/com.raytheon.viz.radar/localization/menus/radar/dualPol/baseRadarFourPanel.xml index e9b3426f84..d95eac0964 100644 --- a/cave/com.raytheon.viz.radar/localization/menus/radar/dualPol/baseRadarFourPanel.xml +++ b/cave/com.raytheon.viz.radar/localization/menus/radar/dualPol/baseRadarFourPanel.xml @@ -19,12 +19,6 @@ further_licensing_information. --> - - - - - \ No newline at end of file + diff --git a/cave/com.raytheon.viz.satellite/localization/bundles/satellite/goesr/goesrFullDisk.xml b/cave/com.raytheon.viz.satellite/localization/bundles/satellite/goesr/goesrFullDisk.xml index cc1d08bae6..84e5ded026 100644 --- a/cave/com.raytheon.viz.satellite/localization/bundles/satellite/goesr/goesrFullDisk.xml +++ b/cave/com.raytheon.viz.satellite/localization/bundles/satellite/goesr/goesrFullDisk.xml @@ -36,7 +36,7 @@ constraintType="EQUALS" /> - diff --git a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml index facda95944..09ebb93690 100644 --- a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml +++ b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml @@ -96,22 +96,22 @@ - + - + - + - + diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/gfs.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/gfs.xml deleted file mode 100644 index 06fee311fb..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/gfs.xml +++ /dev/null @@ -1,51 +0,0 @@ [deleted map bundle XML (51 lines); recoverable fields: PLAN_VIEW display, basemaps/GFSmodelBufr.spi, "GFS"] diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/goes.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/goes.xml deleted file mode 100644 index 7713b8cf88..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/goes.xml +++ /dev/null @@ -1,56 +0,0 @@ [deleted map bundle XML (56 lines); recoverable fields: PLAN_VIEW display, basemaps/goesBufr.spi, "GOES"] \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/nam.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/nam.xml deleted file mode 100644 index
f8a7b50877..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/nam.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/modelBufr.spi - NAM - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/raob.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/raob.xml deleted file mode 100644 index fce39e5606..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/Sounding Locs/raob.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/raob.spi - RAOB Station Locations - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/acarsAirports.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/acarsAirports.xml deleted file mode 100644 index 8e6b8db662..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/acarsAirports.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/airports.spi - ACARS Airport Locations - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/buoy.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/buoy.xml deleted file mode 100644 index 2a213b0af5..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/buoy.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/BUOY.spi - Fixed Buoy Locations - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/ldad.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/ldad.xml deleted file mode 100644 index 93b7e51be3..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/ldad.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/ldad15.spi - LDAD Stations - - - - - - diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/ldadPrecip.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/ldadPrecip.xml deleted file mode 100644 index a0c7214bab..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/ldadPrecip.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/ldad15prcp.spi - LDAD Precip - - - - - - diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/metars.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/metars.xml deleted file mode 100644 index ccdbbf1bea..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/metars.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/MTR.spi - METAR Station Locations - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/profilers.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/profilers.xml deleted file mode 100644 index 0c1c76bd70..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/profilers.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - PLAN_VIEW - - - - - - basemaps/profiler.spi - NPN Profilers - - - - - - \ No newline at end of file diff --git a/cave/com.raytheon.viz.spi/localization/bundles/maps/synoptic.xml b/cave/com.raytheon.viz.spi/localization/bundles/maps/synoptic.xml deleted file mode 100644 index b26ca6a420..0000000000 --- a/cave/com.raytheon.viz.spi/localization/bundles/maps/synoptic.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - 
[deleted XML map bundle; surviving fragments: PLAN_VIEW, basemaps/synoptic.spi, "Synoptic Station Locations"]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/DataSelectionMenuItems.xml b/cave/com.raytheon.viz.volumebrowser/DataSelectionMenuItems.xml
index aacad417b5..739838945e 100644
--- a/cave/com.raytheon.viz.volumebrowser/DataSelectionMenuItems.xml
+++ b/cave/com.raytheon.viz.volumebrowser/DataSelectionMenuItems.xml
@@ -19,27 +19,30 @@ further_licensing_information.
 -->
[element markup lost in extraction; the volume browser source names survive and are shown without their tags]
-ENPwave
-GFS40
-GFS90
-GFSensemble
-GFSGuide
-GriddedMOS
+CMC
+ESTOFS
+ETSS
+GFS
+GFS20
 GWW
+MOSGuide
+HRRR
 HPCGuide
-LAMP
-MSAS
+LAMP2p5
 NAM12
 NAM40
-NAM80
-NAMWX
-NICIE
-TPC-HurWind
+NAVGEM
+NationalBlend
+PROB3HR
+NOHRSC-SNOW
+HPCqpf
+HPCqpfNDFD
+fnmocWave
+SeaIce
+SPCGuide
+RTMA
 TPCWindProb
-TPCWindProb_Prelim
-WNAwave
-RTG_SST-Analysis
-DMD
+URMA25
 GFSBufr
 GoesBufr
 MDCRS
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/WaveWatch.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/WaveWatch.xml
index 5e56c18344..3e8a571cce 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/WaveWatch.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/WaveWatch.xml
@@ -40,10 +40,5 @@
         menuText="Sfc Vorticity and Wind" id=""
         useReferenceTime="true">
[XML markup lost in extraction; several menu entries removed and a trailing newline added]
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/cmcFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/cmcFamilies.xml
index a5ede62d5b..f5d2be4fac 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/cmcFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/grid/cmcFamilies.xml
@@ -51,10 +51,6 @@
         menuText="Lightning Threat" id=""
         useReferenceTime="true">
[XML markup lost in extraction]
@@ -188,4 +184,4 @@
 -->
[XML markup lost in extraction; trailing newline added]
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/ConvectModelFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/ConvectModelFamilies.xml
index 41c5721aae..2be9c263d9 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/ConvectModelFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/ConvectModelFamilies.xml
@@ -18,614 +18,604 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction; the several hundred menu-contribution lines in this hunk are unrecoverable]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFamily.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFamily.xml
index f7ac447f2f..4943d3e216 100755
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFamily.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFamily.xml
@@ -18,57 +18,62 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFnFourPanelFamily.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFnFourPanelFamily.xml
index 58012b9f37..45963c7b6a 100755
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFnFourPanelFamily.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterFnFourPanelFamily.xml
@@ -18,57 +18,62 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterModelFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterModelFamilies.xml
index f5a3d1bf5f..6ec3a5142d 100755
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterModelFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/WinterModelFamilies.xml
@@ -18,356 +18,383 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFamilies.xml
index cb8ca3fad9..7a342e8720 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFamilies.xml
@@ -18,124 +18,133 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFourPanelFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFourPanelFamilies.xml
index 7a7d0a7ee8..5136343ae4 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFourPanelFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseFourPanelFamilies.xml
@@ -18,69 +18,80 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseStdEnvPackage.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseStdEnvPackage.xml
index 0bcd744339..abc7f5ed08 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseStdEnvPackage.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseStdEnvPackage.xml
@@ -18,33 +18,37 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseSurfaceFamilies.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseSurfaceFamilies.xml
index 2503ac05d2..5f0e153f73 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseSurfaceFamilies.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/baseSurfaceFamilies.xml
@@ -18,89 +18,94 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction; trailing newline added]
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/hailFamily.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/hailFamily.xml
index 0ec245bab7..9649370685 100755
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/hailFamily.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/hailFamily.xml
@@ -18,57 +18,62 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/index.xml b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/index.xml
index bc5a1546b5..38c416f6f4 100644
--- a/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/index.xml
+++ b/cave/com.raytheon.viz.volumebrowser/localization/menus/volume/index.xml
@@ -18,40 +18,68 @@ See_the_AWIPS_II_Master_Rights_File_("Master_Rights_File.pdf")_for
 further_licensing_information.
 -->
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.volumebrowser/plugin.xml b/cave/com.raytheon.viz.volumebrowser/plugin.xml
index 3abaa41296..b8af7035c2 100644
--- a/cave/com.raytheon.viz.volumebrowser/plugin.xml
+++ b/cave/com.raytheon.viz.volumebrowser/plugin.xml
@@ -56,7 +56,14 @@
         application="D2D"
         localizationType="CAVE_STATIC,COMMON_STATIC"
         name="Volume Browser"
-        value="level/mappings"
+        value="volumebrowser"
+        recursive="true">
+[further markup lost in extraction; adds a menu contribution with locationURI="menu:models?after=top" and label="Volume Browser"]
@@ -81,6 +88,13 @@
+[added markup lost in extraction]
diff --git a/cave/com.raytheon.viz.volumebrowser/src/com/raytheon/viz/volumebrowser/GridAlterBundleContributor.java b/cave/com.raytheon.viz.volumebrowser/src/com/raytheon/viz/volumebrowser/GridAlterBundleContributor.java
index f776432ba1..b3824b21b9 100644
--- a/cave/com.raytheon.viz.volumebrowser/src/com/raytheon/viz/volumebrowser/GridAlterBundleContributor.java
+++ b/cave/com.raytheon.viz.volumebrowser/src/com/raytheon/viz/volumebrowser/GridAlterBundleContributor.java
@@ -26,7 +26,6 @@ import java.util.List;
 import java.util.Map;
 
 import com.raytheon.uf.common.dataplugin.grid.GridConstants;
-import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup;
 import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
 import com.raytheon.uf.common.menus.vb.VbSource;
 import com.raytheon.uf.common.menus.vb.VbSourceList;
@@ -103,18 +102,14 @@ public class GridAlterBundleContributor extends AlterBundleContributorAdapter {
         if (selectedString != null) {
             reqMap.put(GridConstants.DATASET_ID, new RequestConstraint(
                     selectedString));
-            DatasetInfoLookup lookup = DatasetInfoLookup.getInstance();
             // next, need to modify for other displays (not plan view)
             if (data instanceof VarHeightResourceData) {
-                ((VarHeightResourceData) data).setSource(lookup.getInfo(
-                        selectedString).getTitle());
+                ((VarHeightResourceData) data).setSource(selectedString);
             } else if (data instanceof TimeSeriesResourceData) {
-                ((TimeSeriesResourceData) data).setSource(lookup.getInfo(
-                        selectedString).getTitle());
+                ((TimeSeriesResourceData) data).setSource(selectedString);
             } else if (data instanceof CrossSectionResourceData) {
-                ((CrossSectionResourceData) data).setSource(lookup.getInfo(
-                        selectedString).getTitle());
+                ((CrossSectionResourceData) data).setSource(selectedString);
             }
         }
     }
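The GridAlterBundleContributor hunk above stops routing the volume browser source label through DatasetInfoLookup and labels var-height, time-series, and cross-section displays with the dataset ID itself; presumably a looked-up title is unavailable (null) for models absent from the lookup table, while the raw ID always exists. A minimal standalone sketch of that behavioral difference (not AWIPS source; the plain Map stands in for DatasetInfoLookup, and the dataset IDs are illustrative):

    import java.util.Map;

    public final class SourceLabelDemo {

        // Old behavior: label is the looked-up dataset title. Returns null for
        // any dataset not registered in the table, which the caller would then
        // dereference.
        static String legacyLabel(Map<String, String> titleLookup, String datasetId) {
            return titleLookup.get(datasetId);
        }

        // New behavior: the dataset ID itself is the label, so models without a
        // registered title still display.
        static String newLabel(String datasetId) {
            return datasetId;
        }

        public static void main(String[] args) {
            Map<String, String> titles = Map.of("GFS", "Global Forecast System");
            System.out.println(legacyLabel(titles, "HRRR")); // null for an unregistered model
            System.out.println(newLabel("HRRR"));            // "HRRR" always works
        }
    }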
diff --git a/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenExtensionArea.xml b/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenExtensionArea.xml
deleted file mode 100644
index d30d80da82..0000000000
--- a/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenExtensionArea.xml
+++ /dev/null
@@ -1,55 +0,0 @@
[deleted XML map bundle; surviving fragments: PLAN_VIEW, "WarnGen Extension Area", com.raytheon.viz.warngen.gui.WarngenExtensionAreaLayer]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenloc.xml b/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenloc.xml
deleted file mode 100644
index 39a31cfca5..0000000000
--- a/cave/com.raytheon.viz.warngen/localization/bundles/maps/warngenloc.xml
+++ /dev/null
@@ -1,58 +0,0 @@
[deleted XML map bundle; surviving fragments: PLAN_VIEW, mapdata.warngenloc, "WarnGenLoc"]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllCWASPS.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllCWASPS.xml
deleted file mode 100644
index 07b630c602..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllCWASPS.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllLocalWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllLocalWarnings.xml
deleted file mode 100644
index 23b40454d6..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllLocalWarnings.xml
+++ /dev/null
@@ -1,325 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllMarineWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllMarineWarnings.xml
deleted file mode 100644
index 80927d587c..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllMarineWarnings.xml
+++ /dev/null
@@ -1,75 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllNationalWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllNationalWarnings.xml
deleted file mode 100644
index 80c097b0a6..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllNationalWarnings.xml
+++ /dev/null
@@ -1,358 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-AR.xml
deleted file mode 100644
index b2a3740c0b..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-AR.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-CR.xml
deleted file mode 100644
index dfd922e6ad..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-CR.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-ER.xml
deleted file mode 100644
index 4646632bd4..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-ER.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-PR.xml
deleted file mode 100644
index 989dfc8536..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-PR.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-SR.xml
deleted file mode 100644
index 69230d1cc4..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-SR.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-WR.xml
deleted file mode 100644
index 005947d7e0..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings-WR.xml
+++ /dev/null
@@ -1,364 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings.xml
deleted file mode 100644
index 935431b4bf..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllRegionalWarnings.xml
+++ /dev/null
@@ -1,374 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml
new file mode 100644
index 0000000000..a6cb75cedc
--- /dev/null
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml
@@ -0,0 +1,31 @@
[new XML bundle; content lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllWarnings.xml
index ad83ad28d9..cae0e32d68 100644
--- a/cave/com.raytheon.viz.warnings/localization/bundles/AllWarnings.xml
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/AllWarnings.xml
@@ -9,7 +9,7 @@
[markup lost in extraction; the resource is redefined with isRequeryNecessaryOnTimeMatch="true" name="Warnings", and the long per-product resource list is collapsed to a single entry]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/FloodWWA.xml b/cave/com.raytheon.viz.warnings/localization/bundles/FloodWWA.xml
new file mode 100644
index 0000000000..ad2584ca8c
--- /dev/null
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/FloodWWA.xml
@@ -0,0 +1,37 @@
[new XML bundle; content lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/FloodWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/FloodWarnings.xml
deleted file mode 100644
index 1d7b67c454..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/FloodWarnings.xml
+++ /dev/null
@@ -1,103 +0,0 @@
[deleted XML bundle; content lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAFloodWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAFloodWarnings.xml
deleted file mode 100644
index bf236763e2..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAFloodWarnings.xml
+++ /dev/null
@@ -1,239 +0,0 @@
[deleted XML bundle; content lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWASPS.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWASPS.xml
deleted file mode 100644
index 0f2836f0d3..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWASPS.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAWarnings.xml
deleted file mode 100644
index 0f2be082c0..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalCWAWarnings.xml
+++ /dev/null
@@ -1,164 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalLocalizedExtremeWeatherWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalLocalizedExtremeWeatherWarnings.xml
deleted file mode 100644
index 0f4833a67c..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalLocalizedExtremeWeatherWarnings.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalMarineWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalMarineWarnings.xml
deleted file mode 100644
index f91cda74f2..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalMarineWarnings.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalFloodWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalFloodWarnings.xml
deleted file mode 100644
index ce4a029a53..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalFloodWarnings.xml
+++ /dev/null
@@ -1,205 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalLocalizedExtremeWeatherWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalLocalizedExtremeWeatherWarnings.xml
deleted file mode 100644
index a3326fd980..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalLocalizedExtremeWeatherWarnings.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalMarineWarning.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalMarineWarning.xml
deleted file mode 100644
index 4d42f301a3..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalMarineWarning.xml
+++ /dev/null
@@ -1,67 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalSPS.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalSPS.xml
deleted file mode 100644
index a42c5efa34..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalSPS.xml
+++ /dev/null
@@ -1,61 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalWarnings.xml
deleted file mode 100644
index 699a76b47d..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalRegionalWarnings.xml
+++ /dev/null
@@ -1,199 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsLocal.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsLocal.xml
deleted file mode 100644
index d3be467859..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsLocal.xml
+++ /dev/null
@@ -1,54 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsNational.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsNational.xml
deleted file mode 100644
index abf8a35b4a..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsNational.xml
+++ /dev/null
@@ -1,54 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsOffice.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsOffice.xml
deleted file mode 100644
index 26e4935e8d..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsOffice.xml
+++ /dev/null
@@ -1,53 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsRegion.xml b/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsRegion.xml
deleted file mode 100644
index bf10182ab9..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/LocalStormReportsRegion.xml
+++ /dev/null
@@ -1,54 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/MarineWWA.xml b/cave/com.raytheon.viz.warnings/localization/bundles/MarineWWA.xml
new file mode 100644
index 0000000000..ae4f7a13f3
--- /dev/null
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/MarineWWA.xml
@@ -0,0 +1,37 @@
[new XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/MarineWarning.xml b/cave/com.raytheon.viz.warnings/localization/bundles/MarineWarning.xml
deleted file mode 100644
index 160d2ff1fd..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/MarineWarning.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/NationalConvWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/NationalConvWarnings.xml
deleted file mode 100644
index 0978f14133..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/NationalConvWarnings.xml
+++ /dev/null
@@ -1,164 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/NationalFloodWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/NationalFloodWarnings.xml
deleted file mode 100644
index 6630e84bed..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/NationalFloodWarnings.xml
+++ /dev/null
@@ -1,199 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/NationalLocalizedExtremeWeatherWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/NationalLocalizedExtremeWeatherWarnings.xml
deleted file mode 100644
index 3d31bc2d69..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/NationalLocalizedExtremeWeatherWarnings.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalCWAWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalCWAWarnings.xml
deleted file mode 100644
index 67e11359e1..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalCWAWarnings.xml
+++ /dev/null
@@ -1,167 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-AR.xml
deleted file mode 100644
index 135761af52..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-AR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-CR.xml
deleted file mode 100644
index 4ea31d0ffa..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-CR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-ER.xml
deleted file mode 100644
index 2a5fa600d7..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-ER.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-PR.xml
deleted file mode 100644
index a83adea911..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-PR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-SR.xml
deleted file mode 100644
index 8c28acff9e..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-SR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-WR.xml
deleted file mode 100644
index 082506a371..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalFloodWarnings-WR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-AR.xml
deleted file mode 100644
index 7d6857c01e..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-AR.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-CR.xml
deleted file mode 100644
index 3d95231fc3..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-CR.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-ER.xml
deleted file mode 100644
index 621a41a620..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-ER.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-PR.xml
deleted file mode 100644
index f48e3732bc..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-PR.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-SR.xml
deleted file mode 100644
index b53aaa0bc5..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-SR.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-WR.xml
deleted file mode 100644
index ede1a36334..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalLocalizedExtremeWeatherWarnings-WR.xml
+++ /dev/null
@@ -1,65 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-AR.xml
deleted file mode 100644
index d0fbcdd00a..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-AR.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-CR.xml
deleted file mode 100644
index 075b2dde41..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-CR.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-ER.xml
deleted file mode 100644
index 03f8b78c01..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-ER.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-PR.xml
deleted file mode 100644
index 2b1ff5e05f..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-PR.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-SR.xml
deleted file mode 100644
index e82d528666..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-SR.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-WR.xml
deleted file mode 100644
index 75d4efee75..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalMarineWarning-WR.xml
+++ /dev/null
@@ -1,66 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-AR.xml
deleted file mode 100644
index 58851450de..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-AR.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-CR.xml
deleted file mode 100644
index b659a5546b..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-CR.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-ER.xml
deleted file mode 100644
index 5d1bf33e42..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-ER.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-PR.xml
deleted file mode 100644
index fd65bdfeb7..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-PR.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-SR.xml
deleted file mode 100644
index c8294ba075..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-SR.xml
+++ /dev/null
@@ -1,60 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-WR.xml
deleted file mode 100644
index 87589eed8d..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalSPS-WR.xml
+++ /dev/null
@@ -1,68 +0,0 @@
[deleted XML bundle; content lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-AR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-AR.xml
deleted file mode 100644
index fb770508b3..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-AR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-CR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-CR.xml
deleted file mode 100644
index ef8bf42707..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-CR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-ER.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-ER.xml
deleted file mode 100644
index 790f4a8504..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-ER.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-PR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-PR.xml
deleted file mode 100644
index 62a0acceff..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-PR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-SR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-SR.xml
deleted file mode 100644
index 22ee279b55..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-SR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-WR.xml b/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-WR.xml
deleted file mode 100644
index 141891a8ab..0000000000
--- a/cave/com.raytheon.viz.warnings/localization/bundles/RegionalWarnings-WR.xml
+++ /dev/null
@@ -1,200 +0,0 @@
[deleted XML bundle; content lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/StormReports.xml b/cave/com.raytheon.viz.warnings/localization/bundles/StormReports.xml
index 68b47d7f6f..ec1af35f18 100644
--- a/cave/com.raytheon.viz.warnings/localization/bundles/StormReports.xml
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/StormReports.xml
@@ -1,23 +1,4 @@
[XML markup lost in extraction]
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/WindWarnings.xml b/cave/com.raytheon.viz.warnings/localization/bundles/WindWWA.xml
similarity index 81%
rename from cave/com.raytheon.viz.warnings/localization/bundles/WindWarnings.xml
rename to cave/com.raytheon.viz.warnings/localization/bundles/WindWWA.xml
index a38b61420d..8217e68841 100644
--- a/cave/com.raytheon.viz.warnings/localization/bundles/WindWarnings.xml
+++ b/cave/com.raytheon.viz.warnings/localization/bundles/WindWWA.xml
@@ -8,7 +8,7 @@
[markup lost in extraction; the resource is redefined with isRequeryNecessaryOnTimeMatch="true" name="Wind Watches, Warnings, Advisories"]
diff --git a/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml b/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml
index fe78b27aa4..f6165748e9 100644
--- a/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml
+++ b/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml
@@ -19,30 +19,31 @@ further_licensing_information.
 -->
[menu markup lost in extraction; trailing newline added]
diff --git a/cave/com.raytheon.viz.warnings/localization/menus/warnings/index.xml b/cave/com.raytheon.viz.warnings/localization/menus/warnings/index.xml
index 4a5900b681..cdeef3168f 100644
--- a/cave/com.raytheon.viz.warnings/localization/menus/warnings/index.xml
+++ b/cave/com.raytheon.viz.warnings/localization/menus/warnings/index.xml
@@ -1,23 +1,4 @@
[menu markup lost in extraction]
diff --git a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java
index f6862e9905..22bacfb927 100644
--- a/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java
+++ b/cave/com.raytheon.viz.warnings/src/com/raytheon/viz/warnings/rsc/AbstractWWAResource.java
@@ -20,6 +20,7 @@ import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
 import com.raytheon.uf.common.dataplugin.warning.EmergencyType;
 import com.raytheon.uf.common.dataplugin.warning.PracticeWarningRecord;
 import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
+import com.raytheon.uf.common.dataplugin.warning.util.WarningLookups;
 import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
 import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
 import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
@@ -32,6 +33,7 @@ import com.raytheon.uf.common.time.TimeRange;
 import com.raytheon.uf.common.time.util.TimeUtil;
 import com.raytheon.uf.viz.core.DrawableString;
 import com.raytheon.uf.viz.core.IGraphicsTarget;
+import com.raytheon.uf.viz.core.RGBColors;
 import com.raytheon.uf.viz.core.IGraphicsTarget.HorizontalAlignment;
 import com.raytheon.uf.viz.core.IGraphicsTarget.LineStyle;
 import com.raytheon.uf.viz.core.IGraphicsTarget.VerticalAlignment;
@@ -91,6 +93,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
 * Oct 16, 2015  4971     bsteffen     Do not reverse order of text.
 * Nov 05, 2015  5070     randerso     Adjust font sizes for dpi scaling
 * Aug 22, 2016  5842     dgilling     Remove dependency on viz.texteditor plugin.
+* Dec 19, 2018  ----     mjames@ucar  Added phensig color table lookup.
 *
 *
@@ -244,6 +247,9 @@
                 }
                 sb.append(text);
             }
+            sb.append("\n\n");
+            sb.append(record.getOverviewText());
+            sb.append(record.getSegText());
             return sb.toString();
         }
     }
@@ -254,7 +260,7 @@
             }
         }
 
-        return "NO DATA";
+        return null;
     }
 
     protected void disposeEntry(final WarningEntry entry) {
@@ -366,6 +372,11 @@
                 initShape(target, entry.record);
                 entry.project = false;
             }
+
+            RGB displaycolor = color;
+            if ( ! record.getPil().equals("SPS")) {
+                displaycolor = RGBColors.getRGBColor(getPhensigColor(record.getPhensig()));
+            }
record.getPil().equals("SPS")) { + displaycolor = RGBColors.getRGBColor(getPhensigColor(record.getPhensig())); + } if (entry != null && entry.wireframeShape != null) { LineStyle lineStyle = LineStyle.SOLID; @@ -383,7 +394,7 @@ public abstract class AbstractWWAResource extends target.drawWireframeShape( entry.wireframeShape, - getCapability(ColorableCapability.class).getColor(), + displaycolor, outlineWidth, lineStyle); } else if (entry != null && entry.shadedShape != null) { target.drawShadedShape(entry.shadedShape, 1); @@ -406,6 +417,7 @@ public abstract class AbstractWWAResource extends double mapWidth = descriptor.getMapWidth() * paintProps.getZoomLevel() / 1000; String[] textToPrint = getText(record, mapWidth); + if (warningsFont == null) { warningsFont = target.initializeFont(target .getDefaultFont().getFontName(), 9, @@ -414,8 +426,7 @@ public abstract class AbstractWWAResource extends 12); } - DrawableString params = new DrawableString(textToPrint, - color); + DrawableString params = new DrawableString(textToPrint, displaycolor); params.font = warningsFont; params.setCoordinates(d[0], d[1]); params.horizontalAlignment = HorizontalAlignment.RIGHT; @@ -428,7 +439,7 @@ public abstract class AbstractWWAResource extends // moves over text to add EMER in a different font textToPrint[1] = String.format("%1$-23" + "s", textToPrint[1]); - params.setText(textToPrint, color); + params.setText(textToPrint, displaycolor); DrawableString emergencyString = new DrawableString( params); @@ -436,7 +447,7 @@ public abstract class AbstractWWAResource extends d[1] + (paintProps.getZoomLevel()) * 90); emergencyString.font = emergencyFont; emergencyString.setText(new String[] { "", "", - " " + EmergencyType.EMER, "" }, color); + " " + EmergencyType.EMER, "" }, displaycolor); target.drawStrings(emergencyString); } @@ -591,20 +602,26 @@ public abstract class AbstractWWAResource extends addRecord(sort(pdos)); } + protected String getPhensigColor(String phensig){ + WarningLookups lookup = new WarningLookups(); + return lookup.getPhensig(phensig).color; + } + + protected String getPhensigName(String phensig){ + WarningLookups lookup = new WarningLookups(); + return lookup.getPhensig(phensig).name; + } + protected String[] getText(AbstractWarningRecord record, double mapWidth) { - String vid = record.getPhensig(); - String phen = record.getPhen(); - String[] textToPrint = new String[] { "", "", "", "" }; + + String[] textToPrint = new String[] { "", "" }; - textToPrint[0] = record.getProductClass(); - if ((vid != null && phen != null) - && (vid.equals("TO.A") || vid.equals("SV.A") - || phen.equals("FL") || phen.equals("FA"))) { - textToPrint[0] += "." + vid; + if ( ! record.getPil().equals("SPS")) { + textToPrint[0] = getPhensigName(record.getPhensig()); + } else { + textToPrint[0] = "Special Weather Statement"; } - textToPrint[0] += "." 
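The new getPhensigColor()/getPhensigName() helpers in the hunk above construct a fresh WarningLookups table on every paint-time call and assume every phensig resolves to an entry. A cached, null-safe variant — a sketch only, not part of the commit, and assuming the entry type returned by getPhensig() (called PhensigValue here) exposes the same name and color fields dereferenced above — could look like:

    // Hypothetical hardening of the helpers added above: parse the lookup
    // tables once, and fall back to a fixed color name for unknown phensigs.
    private WarningLookups phensigLookups;

    protected String getPhensigColor(String phensig) {
        if (phensigLookups == null) {
            phensigLookups = new WarningLookups(); // built once, then reused
        }
        WarningLookups.PhensigValue entry = phensigLookups.getPhensig(phensig);
        return entry != null ? entry.color : "White"; // hypothetical fallback
    }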
diff --git a/deltaScripts/17.1.1-p2/DR6224/CleaupISCWriteLocks.sh b/deltaScripts/17.1.1-p2/DR6224/CleaupISCWriteLocks.sh
deleted file mode 100755
index fe827650f9..0000000000
--- a/deltaScripts/17.1.1-p2/DR6224/CleaupISCWriteLocks.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/bash
-# This script removes obsolete ISC Write Lock records from the cluster_task table
-#
-/awips2/psql/bin/psql -U awips -d metadata -c "delete from cluster_task where name = 'ISC Write Lock' and details not like '%:%';"
diff --git a/deltaScripts/17.2.1/DR20377/update-state-json-vrh.py b/deltaScripts/17.2.1/DR20377/update-state-json-vrh.py
deleted file mode 100644
index c9b66b66a9..0000000000
--- a/deltaScripts/17.2.1/DR20377/update-state-json-vrh.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# update-state-json-vrh.py - Updates VRH's state.json file prior to installing Qpid SSL certificates onto cpsbn1 and cpsbn2
-#
-# Modification History
-#
-# Name           Date        Comments
-# ---------------------------------------------------------------------------
-# Qihan Zhang    2017-10-11  DR 20377 - Initial creation
-
-import json
-
-with open('/etc/pki/a2pgca/state/state.json', 'r+') as f:
-    data = json.load(f)
-    for target in data['targets']:
-        if target['name'] == 'cp1f' and target['type'] == 'server':
-            target['location_specs'] = ['server:$PX_SERVERS:/awips2/qpid']
-    f.seek(0)
-    json.dump(data, f, indent=4)
-    f.truncate()
diff --git a/deltaScripts/17.2.1/DR20377/update-state-json-vrh.sh b/deltaScripts/17.2.1/DR20377/update-state-json-vrh.sh
deleted file mode 100644
index d626337f4f..0000000000
--- a/deltaScripts/17.2.1/DR20377/update-state-json-vrh.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-# DR 20377 deltaScript for site VRH since Qpid is running on the PXs there
-
-. /data/fxa/INSTALL/awips2/scripts/.global || exit 1
-
-case "${SITE_IDENTIFIER}" in
-    ${remCPCaseArray} ) dir=$(cd "$(dirname "$0")"; pwd)
-        "$dir"/update-state-json-vrh.py
-        a2pgca refresh server:cp1f ;;
-esac
diff --git a/deltaScripts/17.2.1/DR6055/set-nfs-perms.sh b/deltaScripts/17.2.1/DR6055/set-nfs-perms.sh
deleted file mode 100755
index 8f9676f2d2..0000000000
--- a/deltaScripts/17.2.1/DR6055/set-nfs-perms.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-# This script sets the correct rwx permissions for all files on specified NFS
-# mounts. See the file /awips2/fxa/bin/set-nfs-perms.sh for details.
-
-# Run this script on dx1 only, as root. This script will ssh from dx1 into
-# other boxes as necessary to do work.
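# (Aside, not part of the original delta scripts: DR6055 above and DR6061
# below tighten permissions and fstab options on the AWIPS NFS mounts. The
# effective nodev/nosuid/noexec flags can be spot-checked on a running box,
# assuming util-linux findmnt is installed, with:
#
#     findmnt -t nfs,nfs4 -o TARGET,OPTIONS
#
# which lists every NFS mount point alongside its active mount options.)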
- -bash /awips2/fxa/bin/set-nfs-perms.sh diff --git a/deltaScripts/17.2.1/DR6061/update-nfs-fstab.sh b/deltaScripts/17.2.1/DR6061/update-nfs-fstab.sh deleted file mode 100755 index a02a90faf9..0000000000 --- a/deltaScripts/17.2.1/DR6061/update-nfs-fstab.sh +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/bash - -# This script updates nfs mounts in /etc/fstab to set noexec, nodev, -# nosuid options as necessary, then remounts all nfs mounts in -# /etc/fstab. -# -# Author: tgurney - -if [[ $(id -u) -ne 0 ]]; then - echo $0: Need to be root. - exit 1 -fi - -fstab_location=/etc/fstab -update_fstab=$(mktemp || exit 1) - -cat > $update_fstab << 'EOF' -#!/usr/bin/env python2 - -import re -import sys -import os.path - -FSTAB_PATTERN = r'([^#]\S*)\s+(\S+)\s+(\S+)\s+(\S+)(\s+[0-9]+)?(\s+[0-9]+)?' -MOUNTS = { - '/awips2/edex/data': ['nodev', 'nosuid'], - '/archive': ['nodev', 'noexec', 'nosuid'], - '/awips2/edex/data': ['nodev', 'nosuid'], - '/awips2/edex/data/fxa/trigger': ['nodev', 'noexec', 'nosuid'], - '/awips2/edex/data/manual': ['nodev', 'noexec', 'nosuid'], - '/awips2/edex/data/share': ['nodev', 'nosuid'], - '/awips2/edex/data/utility': ['nodev', 'noexec', 'nosuid'], - '/awips2/rcm/data/config': ['nodev', 'noexec', 'nosuid'], - '/data/fxa/INSTALL/awips2': ['nodev', 'nosuid'], - '/home': ['nodev', 'nosuid'], - '/awips2/bmh/conf': ['nodev', 'noexec', 'nosuid'], - '/awips2/bmh/data': ['nodev', 'noexec', 'nosuid'], - '/awips2/bmh/neospeech/result': ['nodev', 'noexec', 'nosuid'], - '/nsbn_store': ['nodev', 'noexec', 'nosuid'], - '/data_store': ['nodev', 'noexec', 'nosuid'], - '/awips2/GFESuite': ['nodev', 'nosuid'], - '/awips2/qpid/edexMessageStore': ['nodev', 'noexec', 'nosuid'], - '/awips2/qpid/messageStore': ['nodev', 'noexec', 'nosuid'], - '/tmp/awips2/edex/data': ['nodev', 'noexec', 'nosuid'], - '/tmp/awips2/GFESuite': ['nodev', 'noexec', 'nosuid'], - '/tmp/home': ['nodev', 'noexec', 'nosuid'] -} - -for line in sys.stdin: - line = line.strip() - m = re.match(FSTAB_PATTERN, line) - if not m: - print line - continue - - fstab = {'vol': m.group(1), 'mount': m.group(2), - 'fs': m.group(3), 'opts': m.group(4).split(','), - 'dump': m.group(5) or '0', 'pass': m.group(6) or '0' - } - - fstab['mount'] = os.path.abspath(fstab['mount']) - if fstab['fs'] == 'nfs' and fstab['mount'] in MOUNTS: - if 'defaults' in fstab['opts']: - fstab['opts'].remove('defaults') - for opt in MOUNTS[fstab['mount']]: - if opt not in fstab['opts']: - fstab['opts'].append(opt) - - fields = (fstab['vol'], - fstab['mount'], - fstab['fs'], - ','.join(fstab['opts']), - fstab['dump'], - fstab['pass'] - ) - print "%s\t%s\t%s\t%s\t%s %s" % fields -EOF - -tmp_fstab=$(mktemp || exit 1) - -cleanup_exit() { - rm -f $tmp_fstab $update_fstab - exit $1 -} - -echo INFO: Updating "${fstab_location}" -cat "${fstab_location}" | python2 $update_fstab > $tmp_fstab || cleanup_exit 1 -fstab_backup="${fstab_location}.$(date +%Y%m%d.%H%M%S)" -cp "${fstab_location}" $fstab_backup || cleanup_exit 1 -echo INFO: Old fstab was saved to $fstab_backup -mv $tmp_fstab "${fstab_location}" || cleanup_exit 1 -chmod 644 "${fstab_location}" - -for item in $(awk '$3 == "nfs" {print $2}' /etc/mtab); do - for fstab_item in $(grep -Ev '(^#|^\s*$)' "${fstab_location}" | awk '$3 == "nfs" {print $2}'); do - if [[ "$item" == "$fstab_item" ]]; then - if [[ "$item" == /awips2/bmh/neospeech/result* ]]; then - # This particular mount may fail to "mount -o remount" due to strange mount options. 
-                # So we have to unmount and then mount
-                echo INFO: Unmounting $item
-                umount $item
-                echo INFO: Mounting $item
-                mount $item
-            else
-                echo INFO: Remounting $item
-                mount -o remount $item
-            fi
-        fi
-    done
-done
-
-errors=$(mount -fav 2>&1 | grep -v ' : ')
-
-if [[ $? -eq 0 ]]; then
-    failed_location=/tmp/fstab.$(date +%Y%m%d.%H%M%S).failed
-    cp "${fstab_location}" $failed_location
-    echo
-    echo !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-    echo
-    echo ERROR: fstab failed validation! See below errors.
-    echo Original "${fstab_location}" has been restored from backup.
-    echo Failed fstab has been saved to $failed_location
-    echo
-    echo $errors
-    echo
-    echo !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-    cp -v $fstab_backup "${fstab_location}"
-    cleanup_exit 1
-fi
-
-echo INFO: Done.
-cleanup_exit 0
diff --git a/deltaScripts/17.2.1/DR6081/createQpidCertificates.sh b/deltaScripts/17.2.1/DR6081/createQpidCertificates.sh
deleted file mode 100755
index 3c6a62114c..0000000000
--- a/deltaScripts/17.2.1/DR6081/createQpidCertificates.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-# DR 6081 - Enabled certificate-based authentication for qpid.
-
-a2pgca refresh
\ No newline at end of file
diff --git a/deltaScripts/17.2.1/DR6086/secureConnectionUrl.sh b/deltaScripts/17.2.1/DR6086/secureConnectionUrl.sh
deleted file mode 100755
index 2a748227a5..0000000000
--- a/deltaScripts/17.2.1/DR6086/secureConnectionUrl.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-# DR 6086 - Update Radar Server / rcm to utilize SSL to connect to Qpid
-
-sed -i "s/:5672'
[remainder of the sed expression, the following diff header, and most of one
Python delta script lost to markup stripping; the surviving Python tail
pretty-printed an XML file and inserted DSW and SQW entries after a
SegmentedNNN element:]
-\n\s+([^<>\s].*?)\n\s+\g<1>')
-        index += len('') + 1
-        pretty_xml = pretty_xml[:index] + ' DSW\n' +\
-                     ' SQW\n' + pretty_xml[index:]
-    except ValueError:
-        log.error("Could not find SegmentedNNN in file [%s].", file)
-        log.error("Exiting.")
-        sys.exit(-1)
-
-    with open (file, 'w') as out_file:
-        out_file.write(pretty_xml)
-
-    log.info("Delta script complete.")
-
-
-
-if __name__ == '__main__':
-    main()
diff --git a/deltaScripts/18.1.1/DCS16962/createClimateDb.sh b/deltaScripts/18.1.1/DCS16962/createClimateDb.sh
deleted file mode 100755
index 2fa7bf6178..0000000000
--- a/deltaScripts/18.1.1/DCS16962/createClimateDb.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-SQL_SCRIPT="createClimateDb.sql"
-TABLESPACE_DIR="/awips2/database/tablespaces/climate"
-
-# ensure that the sql script is present
-if [ ! -f ${SQL_SCRIPT} ]; then
-    echo "ERROR: the required sql script - ${SQL_SCRIPT} was not found."
-    echo "FATAL: the update has failed!"
-    exit 1
-fi
-
-echo "INFO: update started - creating Climate tablespace directory"
-
-# ensure tablespace directory created
-mkdir -p ${TABLESPACE_DIR}
-if [ ! -d ${TABLESPACE_DIR} ]; then
-    echo "ERROR: the required directory - ${TABLESPACE_DIR} was not created."
-    echo "FATAL: the update has failed!"
-    exit 1
-fi
-
-echo "INFO: creating Climate DB"
-
-# run the update
-/awips2/psql/bin/psql -U awipsadmin -d metadata -f ${SQL_SCRIPT}
-if [ $? -ne 0 ]; then
-    echo "FATAL: the update has failed!"
-    exit 1
-fi
-
-echo "INFO: the update has completed successfully!"
-
-exit 0
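The createClimateDb.sql script in the following hunk clones the existing hmdb database with CREATE DATABASE ... WITH TEMPLATE, and Postgres only permits a template copy while no other session is connected to the template database. A pre-flight check in the style of these scripts — a sketch, not part of the original delta script — could be:

    /awips2/psql/bin/psql -U awipsadmin -d metadata -Atc \
        "SELECT count(*) FROM pg_stat_activity WHERE datname = 'hmdb';"

A result other than 0 means the CREATE DATABASE climate WITH TEMPLATE hmdb statement below would fail until those sessions disconnect.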
diff --git a/deltaScripts/18.1.1/DCS16962/createClimateDb.sql b/deltaScripts/18.1.1/DCS16962/createClimateDb.sql
deleted file mode 100644
index 963c76148a..0000000000
--- a/deltaScripts/18.1.1/DCS16962/createClimateDb.sql
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * This software was developed and / or modified by NOAA/NWS/OCP/ASDT
- *
- * Create climate database
- */
-\set ON_ERROR_STOP 1
-/*
- * Connect to unrelated DB; new one does not exist and cannot be connected to the DB to be copied.
- */
-\connect metadata
-/*
- * Create tablespace.
- */
-CREATE TABLESPACE climate OWNER awipsadmin LOCATION '/awips2/database/tablespaces/climate';
-COMMENT ON TABLESPACE climate IS 'Climate Database tablespace';
-/*
- * Copy hmdb.
- */
-CREATE DATABASE climate WITH TEMPLATE hmdb;
-
-/*
- * Connect to new DB.
- */
-\connect climate
-
-/*
- * Create new tables.
- */
-CREATE TABLE cpg_session
-(
-    cpg_session_id character varying(50) NOT NULL,
-    run_type integer NOT NULL,
-    prod_type integer NOT NULL,
-    state integer NOT NULL,
-    status integer,
-    status_desc character varying(1024),
-    global_config bytea,
-    prod_setting bytea,
-    report_data bytea,
-    prod_data bytea,
-    start_at timestamp without time zone NOT NULL,
-    last_updated timestamp without time zone NOT NULL,
-    pending_expire timestamp without time zone,
-    CONSTRAINT cpg_session_pkey PRIMARY KEY (cpg_session_id)
-)
-WITH (
-    OIDS=FALSE
-);
-ALTER TABLE cpg_session
-    OWNER TO awipsadmin;
-GRANT ALL ON TABLE cpg_session TO awipsadmin;
-GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE cpg_session TO awips;
-GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE cpg_session TO pguser;
-
-CREATE TABLE sent_prod_record
-(
-    prod_id character varying(16) NOT NULL,
-    period_type character varying(16) NULL,
-    prod_type character varying(4) NOT NULL,
-    file_name character varying(50) NULL,
-    prod_text TEXT NOT NULL,
-    send_time timestamp without time zone NOT NULL,
-    user_id character varying(32) NULL,
-    CONSTRAINT sent_prod_record_pkey PRIMARY KEY (prod_id, send_time)
-)
-WITH (
-    OIDS=FALSE
-);
-ALTER TABLE sent_prod_record
-    OWNER TO awipsadmin;
-GRANT ALL ON TABLE sent_prod_record TO awipsadmin;
-GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE sent_prod_record TO awips;
-GRANT SELECT, UPDATE, INSERT, TRUNCATE, DELETE, TRIGGER ON TABLE sent_prod_record TO pguser;
\ No newline at end of file
diff --git a/deltaScripts/18.1.1/DCS20055/updateSailsTags.sh b/deltaScripts/18.1.1/DCS20055/updateSailsTags.sh
deleted file mode 100755
index 01cc48667d..0000000000
--- a/deltaScripts/18.1.1/DCS20055/updateSailsTags.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-# This searches for RadarUpperText xml files in common_static and updates supplemental
-# elevation display tags from sails to productDependentRepElev.
-#
-# The base files should be updated when upgraded to 18.1.1.
-
-COMMON=/awips2/edex/data/utility/common_static
-DIR=`dirname $0`
-
-for dir in `ls ${COMMON}` ; do
-    if [[ "$dir" != "base" && "$dir" != "configured" && -d "${COMMON}/$dir" ]] ; then
-        echo "+++ checking $dir +++"
-
-        for d in `ls ${COMMON}/$dir/` ; do
-            pDir="${COMMON}/$dir/$d/styleRules"
-
-            if [[ -d "$pDir" ]] ; then
-                if [[ -f "$pDir/RadarUpperText.xml" ]] ; then
-                    rm $pDir/*.tmp
-                    sed -e 's/[sails-to-productDependentRepElev expression and input redirect lost to markup stripping] $pDir/RadarUpperText.tmp
-                    cmp -s $pDir/RadarUpperText.xml $pDir/RadarUpperText.tmp
-                    if [[ $?
!= 0 ]] ; then - rm -f $pDir/RadarUpperText.bak - mv $pDir/RadarUpperText.xml $pDir/RadarUpperText.bak - mv $pDir/RadarUpperText.tmp $pDir/RadarUpperText.xml - chmod 664 $pDir/RadarUpperText.xml - chown awips:fxalpha $pDir/RadarUpperText.xml - echo "converted $pDir/RadarUpperText.xml" - else - echo "No conversion needed for $pDir/RadarUpperText.xml" - rm -f $pDirRadarUpperText.tmp - fi - fi - fi - done - - fi -done diff --git a/deltaScripts/18.1.1/DR5898/DeleteConfigureTextProductsTask.sh b/deltaScripts/18.1.1/DR5898/DeleteConfigureTextProductsTask.sh deleted file mode 100755 index a9bb74d014..0000000000 --- a/deltaScripts/18.1.1/DR5898/DeleteConfigureTextProductsTask.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/bash -# This script removes obsolete GfeConfigureTextProducts records from the cluster_task table -# -/awips2/psql/bin/psql -U awips -d metadata -c "delete from cluster_task where name = 'GfeConfigureTextProducts';" diff --git a/deltaScripts/18.1.1/DR6110/6110_update_aag_permissions.py b/deltaScripts/18.1.1/DR6110/6110_update_aag_permissions.py deleted file mode 100755 index 97f6e96179..0000000000 --- a/deltaScripts/18.1.1/DR6110/6110_update_aag_permissions.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python2 - -# This script will add the new AAG permission to userRoles.xml. -# Modified from DR5816/UpdateGFEPermissions.py - -USER_ROLES_PATH = "/awips2/edex/data/utility/common_static/site/*/roles/userRoles.xml" - -# New permissions to be added -PERMISSIONS_TO_BE_ADDED = [ - "com.raytheon.localization.site/common_static/aviation/aag" -] - -import glob -import os -import shutil -import sys -import xml.etree.ElementTree as ET - -def main(): - for path in glob.iglob(USER_ROLES_PATH): - print "Updating", path - shutil.copyfile(path, path + ".bak") - - # Ensure we have an nwsRoleData tree - tree = ET.parse(path) - root = tree.getroot() - if root.tag != "nwsRoleData": - print "ERROR: not a valid userRoles.xml file" - continue - - # Ensure the application for this tree is Localization - app = root.find("application") - if app is None or app.text != "Localization": - print "ERROR: not a localization permissions file" - continue - - # Add new permissions - for id in PERMISSIONS_TO_BE_ADDED: - # see if permission already present - found = False - for permission in root.iter(tag="permission"): - if permission.attrib["id"] == id: - found = True - break - - # if permission not present add it - if not found: - print "Adding new permission %s" % id - ET.SubElement(root, "permission", attrib={"id": id}) - else: - print "Permission %s already present" % id - - for user in root.iterfind("user"): - userId = user.attrib["userId"] - if userId == "ALL": - for permission in PERMISSIONS_TO_BE_ADDED: - found = False - for userPermission in user.iter(tag="userPermission"): - if userPermission.text == permission: - found = True - break - - if not found: - print "Adding permission %s for ALL users" % permission - element = ET.SubElement(user, "userPermission") - element.text = permission - else: - print "Permission %s already present for ALL users" % permission - - tree.write(path, encoding="UTF-8", xml_declaration=True) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/deltaScripts/18.1.1/DR6183/6183_move_cave_static_files.sh b/deltaScripts/18.1.1/DR6183/6183_move_cave_static_files.sh deleted file mode 100755 index 35df17f9a3..0000000000 --- a/deltaScripts/18.1.1/DR6183/6183_move_cave_static_files.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# 6183 - Move AvnFPS localization files to 
common_static. -# -# Author: tgurney -# March 27, 2017 - -did_work=false -echo INFO: Moving AvnFPS localization files to common_static -for old_location in /awips2/edex/data/utility/cave_static/*/*/aviation; do - if [[ ! -e "${old_location}" ]]; then - continue - fi - new_location=${old_location/cave_static/common_static} - echo INFO: Moving "${old_location}" to "${new_location}" - did_work=true - if [[ ! -d "${new_location}" ]]; then - sudo -u awips mkdir -p -m 750 "${new_location}" - else - echo "WARN: ${new_location} already exists. Just copying newer files" - fi - rsync -aux "${old_location}" "${new_location}/.." && - rm -rf --one-file-system "${old_location}" - find "${new_location}" -xdev -type f -name '*.md5' -delete - find "${new_location}" -xdev -type f -name '*.pyc' -delete - find "${new_location}" -xdev -type f -name '*.pyo' -delete - echo INFO: Done moving "${old_location}" to "${new_location}" -done - -if [[ "${did_work}" == "false" ]]; then - echo INFO: There are no files to move. Did nothing -else - echo INFO: Done moving localization files -fi diff --git a/deltaScripts/18.1.1/DR6208/BarrowNameChange.sh b/deltaScripts/18.1.1/DR6208/BarrowNameChange.sh deleted file mode 100755 index e717bb044c..0000000000 --- a/deltaScripts/18.1.1/DR6208/BarrowNameChange.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -/awips2/psql/bin/psql -U awips -d ncep -c "UPDATE stns.cities SET station_id='UTQIAGVI', name='UTQIAGVIK' WHERE station_number='25711' and name='BARROW';" -/awips2/psql/bin/psql -U awips -d hmdb -c "UPDATE sta_agency_codes SET agency_sta_name='UTQIAGVIK (BARROW)' WHERE station_id=23210 and agency_sta_name='BARROW';" -/awips2/psql/bin/psql -U awips -d hmdb -c "UPDATE station_location SET station_name='UTQIAGVIK (BARROW)' WHERE station_id=23210 and station_name='BARROW';" diff --git a/deltaScripts/18.1.1/DR6217/migrateAwipsAdmins.py b/deltaScripts/18.1.1/DR6217/migrateAwipsAdmins.py deleted file mode 100755 index 7d9b2b4719..0000000000 --- a/deltaScripts/18.1.1/DR6217/migrateAwipsAdmins.py +++ /dev/null @@ -1,421 +0,0 @@ -#!/awips2/python/bin/python -# -# This script will grant the new awipsAdmin permission to any user -# having the old awips.user.admin permission. -# -# This will get admins assigned without requiring running a CAVE session -# as user awips or manually editing the users.ini file. -# -# All other users will be granted the default awipsUser role. -# -# This will get most users into the users.ini file so the admin -# can assign permissions without needing them to open a CAVE session -# - -import errno -import glob -import os -import pwd -import re -import stat -import sys -import traceback -import xml.etree.ElementTree as ET - -SETUP_ENV_PATH = "/awips2/edex/bin/setup.env" -SITEID_PATTERN = r'.*\nexport AW_SITE_IDENTIFIER=(\w+)\W.*' - -SITE_ROLES_PATH = "/awips2/edex/data/utility/common_static/site/%s/roles/*.xml" -USERS_INI_PATH = "/awips2/edex/data/utility/common_static/configured/%s/roles/users.ini" -CURRENT_USERS_PATH = "/awips2/edex/data/utility/common_static/site/%s/roles/currentusers.txt" - -SPECIAL_USERS = set(["ALL", "awips", "root"]) -USERNAME_PATTERN = r'^[a-z_][a-z0-9_]{0,30}$' - -USERS_INI_HEADER = """# ----------------------------------------------------------------------------- -# This file should not be manually edited. -# Please use the user administration GUI to modify user roles/permissions. 
-# ----------------------------------------------------------------------------- -# [users] section defines users and their (optional) assigned roles -# Users may only be assigned roles, they may not be assigned permissions. -# -# username = password, role1, role2, ..., roleN -# ----------------------------------------------------------------------------- -[users] -""" - -# The following dicts are for validation of the old nwsroles xml files - -USER_PERMISSION = { - "tag": "userPermission", - "text": True, - } - -USER_ROLE = { - "tag": "userRole", - "text": True, - } - -USER = { - "tag": "user", - "text": False, - "attrib": { - "userId": (True, str), - }, - "elements" : [ - # (required, multiplicity, schema) - (False, 2, USER_PERMISSION), - (False, 2, USER_ROLE), - ] - } - -ROLE_DESCRIPTION = { - "tag": "roleDescription", - "text": True, - } - -ROLE_PERMISSION = { - "tag": "rolePermission", - "text": True, - } - -ROLE = { - "tag": "role", - "text": False, - "attrib": { - "roleId": (True, str) - }, - "elements": [ - # (required, multiplicity, schema) - (False, 1, ROLE_DESCRIPTION), - (True, 2, ROLE_PERMISSION), - ] - } - -PERMISSION_DESCRIPTION = { - "tag": "description", - "text": True, - } - -PERMISSION = { - "tag": "permission", - "text": False, - "attrib": { - "id": (True, str) - }, - "elements": [ - # (required, multiplicity, schema) - (False, 1, PERMISSION_DESCRIPTION) - ] - } - -APPLICATION = { - "tag": "application", - "text": True, - } - -NWS_ROLE_DATA = { - "tag": "nwsRoleData", - "text": False, - "elements": [ - # (required, multiplicity, schema) - (True, 1, APPLICATION), - (True, 2, PERMISSION), - (False, 2, ROLE), - (False, 2, USER) - ] - } - -def formatElement(element): - s = "<" + element.tag - if element.attrib: - for id, value in element.items(): - s += ' %s="%s"' % (id, value) - s += ">" - return s - -def validateSchema(element, schema): - # validates the xml is syntactically correct based on the provided schema - # there is no validation of the content, just the format - - valid = True - - # validate tag - if element.tag != schema["tag"]: - print " ERROR: Unrecognized element <%s>, skipping..." 
% element.tag - return False - - # validate text ignoring whitespace - text = element.text - if text: - text = text.strip() - - if text: - if not schema["text"]: - print 'ERROR: Unexpected text %s found in element <%s>' % (repr(text), element.tag) - valid = False - elif schema["text"]: - print "ERROR: Element <%s> missing text" % element.tag - valid = False - - # validate attributes - expectedAttrib = schema.get("attrib", {}) - - # ensure existing attributes are valid - for key in element.attrib: - if key in expectedAttrib: - value = element.attrib[key] - expectedType = expectedAttrib[key][1] - if type(value) is not expectedType: - print " ERROR: Attribute %s:[%s] of element <%s> is not of expected type %s" % \ - (key, str(value), element.tag, str(expectedType)) - valid = False - else: - print 'ERROR: Unexpected attribute "%s" found in element<%s>' % (key, element.tag) - valid = False - - # ensure required attributes are present - for key in expectedAttrib: - required = expectedAttrib[key][0] - if required and key not in element.attrib: - print 'ERROR: Missing attribute "%s" in element <%s>' % (key, element.tag) - valid = False - - # validate child elements - expectedElements = schema.get("elements", []) - - # ensure existing child elements are valid - childCount = {} - for child in element: - - # find matching child schema - found = False - for required, multiplicity, childSchema in expectedElements: - if child.tag == childSchema["tag"]: - found = True - - # update child count - childCount[child.tag] = childCount.get(child.tag, 0) + 1 - - # validate child element - valid &= validateSchema(child, childSchema) - - if not found: - print 'ERROR: Unexpected child element %s found in element %s' % \ - (formatElement(child), formatElement(element)) - valid = False - - # ensure required children were found and multiplicity was valid - for required, multiplicity, childSchema in expectedElements: - count = childCount.get(childSchema["tag"], 0) - if count == 0 and required: - print 'ERROR: Element %s is missing required child element <%s>' % \ - (formatElement(element), childSchema["tag"]) - valid = False - elif count > 1 and multiplicity == 1: - print 'ERROR: %d <%s> child elements found in element %s where only 1 is allowed' % \ - (count, childSchema["tag"], formatElement(element)) - valid = False - - return valid - -def parseRolesPermissions(root): - permissions = {} - roles = {} - users = {} - application = root.find("application").text.strip() - - # parse permissions - for permission in root.iterfind("permission"): - id = permission.attrib["id"] - description = permission.find("description") - - if description is not None: - description = description.text - - if description is not None: - description = description.strip() - - permissions[id] = description - - - # parse roles - for role in root.iterfind("role"): - roleId = role.attrib["roleId"].strip() - roleDescription = role.find("roleDescription") - - if roleDescription is not None: - roleDescription = roleDescription.text - - if roleDescription is not None: - roleDescription = roleDescription.strip() - - rolePermissions = set() - for rolePermission in role.iterfind("rolePermission"): - rolePermissions.add(rolePermission.text.strip()) - - roles[roleId] = {"roleDescription":roleDescription, - "rolePermissions": rolePermissions - } - - - # parse users - for user in root.iterfind("user"): - userId = user.attrib["userId"].strip() - - userPermissions = set() - for userPermission in user.iterfind("userPermission"): - 
userPermissions.add(userPermission.text.strip()) - - userRoles = set() - for userRole in user.iterfind("userRole"): - userRoles.add(userRole.text.strip()) - - users[userId] = { "userRoles": userRoles, - "userPermissions": userPermissions - } - - - return application, permissions, roles, users - - -def main(): - userName = pwd.getpwuid(os.getuid()).pw_name - if userName not in ['awips', 'root']: - print "ERROR: This script must be run as user root or awips" - return 1 - - # parse site identifier from setup.env - siteId = None - try: - with open(SETUP_ENV_PATH, "r") as env: - contents = env.read() - m = re.match(SITEID_PATTERN, contents) - if m is not None: - siteId = m.group(1) - except: - print "ERROR: Unable to read", SETUP_ENV_PATH, "exiting" - traceback.print_exc() - return 1 - - if siteId is None: - print "ERROR: AW_SITE_IDENTIFIER not found in", SETUP_ENV_PATH - return 1 - - # if users.ini already exists just exit - iniPath = USERS_INI_PATH % siteId - if os.path.exists(iniPath): - print "WARN:", iniPath, "already exists, exiting." - return 1 - - # loop over all user roles xml files looking for users and/or admins - allUsers = set() - admins = set() - paths = glob.glob(SITE_ROLES_PATH % siteId) - for path in paths: - print "INFO: Processing file:", path - try: - tree = ET.parse(path) - root = tree.getroot() - except: - print "ERROR: Unable to parse XML file: %s" % path - traceback.print_exc() - continue - - # ensure file contains valid XML - if not validateSchema(root, NWS_ROLE_DATA): - print "ERROR:", path, "does not contain valid nwsRoleData xml, skipping" - continue - - # parse out roles and permissions into pythn dicts - application, permissions, roles, users = parseRolesPermissions(root) - - for user in users: - allUsers.add(user) - if application == "User Administration" and \ - "awips.user.admin" in users[user]["userPermissions"]: - admins.add(user) - - - # set allUsers to the content of - # /awips2/edex/data/utility/common_static/site/XXX/roles/currentUsers.txt - # if it exists - currentUsersPath = CURRENT_USERS_PATH % siteId - currentUsers = None - try: - with open(currentUsersPath, 'r') as f: - currentUsers = f.readlines() - currentUsers = [x.strip() for x in currentUsers] - except IOError as e: - if e.errno == errno.ENOENT: - print "WARN: %s file not found,\n using list of users for existing roles files" % currentUsersPath - elif e.errno == errno.EACCES: - print "ERROR: Unable to read %s,\n correct file permissions and re-run this script" % currentUsersPath - return 1 - else: - print "ERROR: Error reading %s,\n fix the file and re-run this script" % currentUsersPath - traceback.print_exc() - return 1 - - if currentUsers: - # remove None or empty strings - currentUsers = filter(None, currentUsers) - - # validate user names - for user in currentUsers: - if not re.match(USERNAME_PATTERN, user): - print "ERROR: %s\n contains an invalid username: '%s'\n correct and re-run this script" % (currentUsersPath, user) - return 1 - - allUsers = set(currentUsers) - - # remove special users - allUsers -= SPECIAL_USERS - - # remove admins that are not in allUsers - admins &= allUsers - - # convert allUsers set to a sorted list - # This just makes the file easier for a human - # to look at after running the delta script. 
- # The GUI will always sort the user names - allUsers = sorted(allUsers) - - # output users.ini file - try: - dirPath = os.path.dirname(iniPath) - try: - os.makedirs(dirPath, 0750) - except OSError, e: - if e.errno != errno.EEXIST: - raise - - with open(iniPath, 'w') as out: - out.write(USERS_INI_HEADER) - - for user in allUsers: - role = "awipsUser" - if user in admins: - role = "awipsAdmin" - print "INFO: Granting", user, role, "role" - out.write("%s = password, %s\n" % (user, role)) - os.chmod(iniPath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP) - - # change owner to awips - p = pwd.getpwnam("awips") - os.chown(dirPath, p.pw_uid, p.pw_gid) - os.chown(iniPath, p.pw_uid, p.pw_gid) - except: - print "ERROR: Exception writing to %s" % iniPath - traceback.print_exc() - - # remove any partially written users.ini file - if os.path.isfile(iniPath): - os.remove(iniPath) - return 1 - - print "INFO: Successfully migrated awips admins" - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/deltaScripts/18.1.1/DR6272/6272_xml_remove_currentanimationmode.py b/deltaScripts/18.1.1/DR6272/6272_xml_remove_currentanimationmode.py deleted file mode 100755 index be4e7a022d..0000000000 --- a/deltaScripts/18.1.1/DR6272/6272_xml_remove_currentanimationmode.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/awips2/python/bin/python - -# Removes "currentAnimationMode" elements and attributes from bundle xml files -# Author: tgurney - -from __future__ import print_function - -import os -import shutil -import sys -import subprocess -import traceback -import xml.etree.ElementTree as ET - -def main(): - print("INFO: Starting update of bundle XMLs") - paths = subprocess.check_output("find /awips2/edex/data/utility/*/*/*/bundles -type f -regex '.*\\.xml'", shell=True) - paths = paths.strip().split('\n') - for path in paths: - tree = ET.parse(path) - try: - node = tree.getroot().find('displayList').find('displays').find('descriptor') - except AttributeError as a: - # one of the elements was not found - continue - elementToRemove = node.find('currentAnimationMode') - if elementToRemove is not None or 'currentAnimationMode' in node.attrib: - try: - shutil.copyfile(path, path + ".bak") - print("INFO: Updating " + path) - node.attrib.pop('currentAnimationMode', None) - if elementToRemove is not None: - node.remove(elementToRemove) - try: - tree.write(path, encoding="UTF-8", xml_declaration=True) - except Exception: - traceback.print_exc() - print("INFO: Restoring " + path + " from backup") - shutil.copyfile(path + ".bak", path) - try: - os.remove(path + ".bak") - except Exception: - pass - except Exception: - traceback.print_exc() - continue - print("INFO: Done.") - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/deltaScripts/18.1.1/DR6346/createIncrementalAreaDictionaryAndCityLocation.py b/deltaScripts/18.1.1/DR6346/createIncrementalAreaDictionaryAndCityLocation.py deleted file mode 100755 index 4b5ef3fe23..0000000000 --- a/deltaScripts/18.1.1/DR6346/createIncrementalAreaDictionaryAndCityLocation.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/awips2/python/bin/python2 - -## -# DR 6346 - This script will compare the site overrides for AreaDictionary.py -# and CityLocation.py to the CONFIGURED level versions and create incremental -# overrides of these files. 
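# (Aside, not part of the original script: the incremental-override scheme
# below reduces to loading the CONFIGURED and SITE modules and keeping only
# the keys whose values differ. The same pattern as a minimal Python 2
# sketch, with hypothetical file names:
#
#     import imp
#     base = imp.load_source('base', 'DefaultAreaDictionary.py').AreaDictionary
#     site = imp.load_source('site', 'AreaDictionary.py').AreaDictionary
#     delta = {k: v for k, v in site.items() if base.get(k) != v}
#
# only the entries in delta need to be written into the site-level override.)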
-## - -import copy -import logging -import glob -import imp -import os -import os.path -import pprint -import shutil - - -logging.basicConfig(format='%(asctime)-15s %(levelname)s: %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG) -log = logging.getLogger("createIncrementalAreaDictionaryAndCityLocation.py") - -TEXT_UTILS_BASE_PATH = "/awips2/edex/data/utility/cave_static/site/*/gfe/userPython/textUtilities/regular/" -AREA_DICT_GLOB_PATH = os.path.join(TEXT_UTILS_BASE_PATH, "AreaDictionary.py") -CITY_LOC_GLOB_PATH = os.path.join(TEXT_UTILS_BASE_PATH, "CityLocation.py") - -AREA_DICT_HEADER = """ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# AreaDictionary -# AreaDictionary file -# -# Author: GFE Installation Script -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# Format: -# AreaDictionary = { -# "editArea" : { -# "ugcCode": "STZxxx", -# "ugcName": "EditAreaName", -# "ugcCities": ['City1', 'City2'], -# "ugcTimeZone": "MST7MDT", -# "fullStateName": "COLORADO", -# "partOfState": "NORTHEAST", -# "stateAbbr": "CO", -# "independentCity": 0, -# "locationName": "GeneralAreaName", -# } -# ... -# } -# ugcTimeZone: This field should be replace with the correct time zone -# for that zone. If it is different from the time zone of -# the local WFO, it's time zone will appear in the header of -# some products in parentheses. -# Using any other strings to define -# the time zone may produce undesirable results. -# The time zone may also be a list of time zones in case -# a forecast zone happens to cover an area that contains -# two time zones. -# e.g. "ugcTimeZone" : ["MST7MDT", "PST8PDT"] -# -# ugcCode: This field contains the ugc coding for this area, such as COZ023 -# -# ugcName: This field contains the descriptive name for this area. It -# is used in various products, including Hazard products. This is -# the official county or zone name. -# -# locationName: This field is optional, but provides an alternate name that -# is used in the text of some products to describe the area. The -# FFA product uses this value if available. -# -# ugcCities: This field contains the list of cities for hazard and routine -# products. -# -# fullStateName: This field is used in hazard products to fully describe -# the state in which this edit area resides. -# -# partOfState: This field describes the location within a state (such as -# NORTHEAST) for this area. It is used in hazard products. -# -# stateAbbr: State Abbreviation for the fullStateName. -# -# independentCity: Set to 0 or 1. Some counties (FIPS coding) are actually -# cities. Setting the flag to 1 will instruct those formatters -# to mention independent cities, rather than include this "county" -# in the county list of the product. -# -# wfo: The wfo(s) with responsibility for the area -# -# - -from DefaultAreaDictionary import AreaDictionary - - -""" -CITY_LOCATION_HEADER = """ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# CityLocation -# CityLocation file -# -# Author: GFE Installation Script -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# Format: -# CityLocation = { -# "editArea": {'cityName1' : (latitude, longitude), -# 'cityName2' : (latitude, longitude), -# ... -# } -# ... -# } -# -# editArea: name of edit area as in AreaDictionary -# -# cityName: name of the city - should be the same as in AreaDictionary. -# -# latitude/longitude: city's lat/lon location. -# - -from DefaultCityLocation import CityLocation - - -""" - - -def create_incremental_area_dictionary(): - for site_file in glob.iglob(AREA_DICT_GLOB_PATH): - log.info("Generating incremental override file [%s]...", site_file) - - base_file = site_file.replace("site", "configured", 1) - if not os.path.isfile(base_file): - log.error("Could not find CONFIGURED level file [%s].", base_file) - log.error("Skipping to next file.") - continue - - with open(site_file, 'r') as f: - contents = f.read() - if "from DefaultAreaDictionary import AreaDictionary" in contents: - log.info("Site AreaDictionary file [%s] has already been upgraded.", site_file) - continue - - log.info("Using configured file [%s]...", base_file) - log.info("Using site file [%s]...", site_file) - - configured_module = imp.load_source('base', base_file) - site_module = imp.load_source('override', site_file) - configured_dict = configured_module.AreaDictionary - site_dict = site_module.AreaDictionary - - diffs = diff_dicts(configured_dict, site_dict) - log.debug("AreaDictionary Differences: %r", diffs) - write_override_file(site_file, 'AreaDictionary', diffs, AREA_DICT_HEADER) - - delete_files(base_file + '*') - -def create_incremental_city_location(): - for site_file in glob.iglob(CITY_LOC_GLOB_PATH): - log.info("Generating incremental override file [%s]...", site_file) - - base_file = site_file.replace("site", "configured", 1) - if not os.path.isfile(base_file): - log.error("Could not find CONFIGURED level file [%s].", base_file) - log.error("Skipping to next file.") - continue - - with open(site_file, 'r') as f: - contents = f.read() - if "from DefaultCityLocation import CityLocation" in contents: - log.info("Site CityLocation file [%s] has already been upgraded.", site_file) - continue - - log.info("Using configured file [%s]...", base_file) - log.info("Using site file [%s]...", site_file) - - configured_module = imp.load_source('base', base_file) - site_module = imp.load_source('override', site_file) - configured_dict = configured_module.CityLocation - site_dict = site_module.CityLocation - - diffs = diff_dicts(configured_dict, site_dict) - log.debug("CityLocation Differences: %r", diffs) - write_override_file(site_file, 'CityLocation', diffs, CITY_LOCATION_HEADER) - - delete_files(base_file + '*') - -def diff_dicts(base, override): - differences = [] - - keys = set().union(base.keys(), override.keys()) - # log.debug("Combined keys: %s", keys) - - for key in sorted(keys): - if key not in base: - log.debug("Key [%s] in override, but not base.", key) - differences.append((key, copy.copy(override[key]), True)) - elif key not in override: - log.debug("Key [%s] in base, but not override.", key) - else: - sub_diffs = sub_diff_dicts(base[key], override[key]) - if sub_diffs: - log.debug("Differences for key [%s]: %r", key, sub_diffs) - differences.append((key, sub_diffs, False)) - - 
return differences - -def sub_diff_dicts(base, override, level=0): - differences = {} - - keys = set().union(base.keys(), override.keys()) - # log.debug("Combined keys: %s", keys) - - for key in sorted(keys): - if key not in base: - log.debug("Key [%s] in override, but not base.", key) - differences[key] = copy.copy(override[key]) - elif key not in override: - log.debug("Key [%s] in base, but not override.", key) - else: - if base[key] != override[key]: - differences[key] = override[key] - - return differences - -def write_override_file(file_name, object_name, object_value, header): - backup_file = file_name + ".bak.dr_6346" - log.info("Writing backup file [%s]", backup_file) - try: - shutil.copy(file_name, backup_file) - except: - log.exception("Unable to write backup file [%s]", backup_file) - log.error("Skipping file [%s]", file_name) - return - - log.info("Writing override file [%s]", file_name) - try: - with open(file_name, 'w') as out_file: - printer = pprint.PrettyPrinter() - - out_file.write(header) - for (key, value, added) in sorted(object_value, key=lambda i: i[0]): - if added: - out_file.write("{}[{!r}] = {}".format(object_name, key, printer.pformat(value))) - out_file.write('\n') - else: - for sub_key in sorted(value.keys()): - out_file.write("{}[{!r}][{!r}] = {}".format(object_name, key, sub_key, printer.pformat(value[sub_key]))) - out_file.write('\n') - out_file.write('\n') - except: - log.exception("Unable to write incremental override file [%s]", file_name) - log.critical("Restore backup file [%s] to [%s] before restarting EDEX.", backup_file, file_name) - -def delete_files(file_pattern): - for f in glob.iglob(file_pattern): - try: - os.remove(f) - except: - log.exception("Unable to delete file [%s].", f) - -def main(): - log.info("Starting delta script for DR #6346: creating incremental overrides for AreaDictionary.py and CityLocation.py...") - - create_incremental_area_dictionary() - - create_incremental_city_location() - - log.info("Delta script complete.") - - - -if __name__ == '__main__': - main() diff --git a/deltaScripts/18.1.1/DR6352/6352_add_backupjob_versionrequired_columns.sh b/deltaScripts/18.1.1/DR6352/6352_add_backupjob_versionrequired_columns.sh deleted file mode 100755 index cc264cd6d1..0000000000 --- a/deltaScripts/18.1.1/DR6352/6352_add_backupjob_versionrequired_columns.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -# This script adds two columns 'minVersionRequired' and 'maxVersionRequired' to -# the backup_job table, with the current EDEX version as the default value for -# each - -version=$(rpm -q awips2-database --qf %{VERSION}) - -if [[ $? -ne 0 ]]; then - echo ERROR: Failed to get EDEX version. Cannot continue - exit 1 -fi - -has_column() { - table_name=${1} - column_name=${2} - result=$(psql --user=awipsadmin --db=metadata -Aqtc " - select 1 - from information_schema.columns c - where c.table_name = '${table_name}' - and c.column_name='${column_name}';") - [[ ${result} == "1" ]] - return $? -} - -has_column backup_job minversionrequired -if [[ $? -ne 0 ]]; then - echo INFO: Adding column minversionrequired - psql --user=awipsadmin --db=metadata -1 << EOF - \set ON_ERROR_STOP on - alter table backup_job add column minVersionRequired varchar(16); - update backup_job set minVersionRequired = '${version}' where minVersionRequired is null; - alter table backup_job alter column minVersionRequired set not null; -EOF -else - echo INFO: minversionrequired column already exists. -fi - -has_column backup_job maxversionrequired -if [[ $? 
-ne 0 ]]; then - echo INFO: Adding column maxversionrequired - psql --user=awipsadmin --db=metadata -1 << EOF - \set ON_ERROR_STOP on - alter table backup_job add column maxVersionRequired varchar(16); - update backup_job set maxVersionRequired = '${version}' where maxVersionRequired is null; - alter table backup_job alter column maxVersionRequired set not null; -EOF -else - echo INFO: maxversionrequired column already exists. -fi diff --git a/deltaScripts/18.1.1/DR6355/moveVbLevelMappingFile.sh b/deltaScripts/18.1.1/DR6355/moveVbLevelMappingFile.sh deleted file mode 100755 index 4756ca316b..0000000000 --- a/deltaScripts/18.1.1/DR6355/moveVbLevelMappingFile.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -# -# DR 6355 -# relocate volumebrowser/LevelMappingFile.xml into level/mappings/ -# - -BASE='/awips2/edex/data/utility/' -DEST='level/mappings/' - -for file in `find $BASE -wholename \*/volumebrowser/LevelMappingFile.xml`; -do - dir=`echo $file | sed 's/\/volumebrowser\/LevelMappingFile.xml//g'`; - destDir=${dir}/${DEST} - - if [ ! -d "${destDir}" ]; - then - sudo -u awips mkdir -p ${destDir}; - fi - - echo "Moving ${file} to ${destDir}"; - mv $file $destDir; -done; diff --git a/deltaScripts/18.1.1/DR6362/addAlaskaMarineView.sh b/deltaScripts/18.1.1/DR6362/addAlaskaMarineView.sh deleted file mode 100755 index e5830de077..0000000000 --- a/deltaScripts/18.1.1/DR6362/addAlaskaMarineView.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/sh -# DR #6362 - This script creates the alaska_marine view -SIMPLEVS=("0.064" "0.016" "0.004" "0.001") -# -# Ensure simplification levels exist for both marinezones and offshore -# -for LEV in "${SIMPLEVS[@]}" ; do - echo " Creating simplified geometry level $LEV ..." - SUFFIX=${LEV/./_} - /awips2/psql/bin/psql -d maps -U awipsadmin -q -c " - DO \$\$ - BEGIN - BEGIN - PERFORM AddGeometryColumn('mapdata','marinezones','the_geom_${SUFFIX}','4326',(SELECT type FROM public.geometry_columns WHERE f_table_schema='mapdata' and f_table_name='marinezones' and f_geometry_column='the_geom'),2); - UPDATE mapdata.marinezones SET the_geom_${SUFFIX}=ST_Segmentize(ST_Multi(ST_SimplifyPreserveTopology(the_geom,${LEV})),0.1); - CREATE INDEX marinezones_the_geom_${SUFFIX}_gist ON mapdata.marinezones USING gist(the_geom_${SUFFIX}); - EXCEPTION - WHEN duplicate_column THEN - END; - BEGIN - PERFORM AddGeometryColumn('mapdata','offshore','the_geom_${SUFFIX}','4326',(SELECT type FROM public.geometry_columns WHERE f_table_schema='mapdata' and f_table_name='offshore' and f_geometry_column='the_geom'),2); - UPDATE mapdata.offshore SET the_geom_${SUFFIX}=ST_Segmentize(ST_Multi(ST_SimplifyPreserveTopology(the_geom,${LEV})),0.1); - CREATE INDEX offshore_the_geom_${SUFFIX}_gist ON mapdata.offshore USING gist(the_geom_${SUFFIX}); - EXCEPTION - WHEN duplicate_column THEN - END; - END; - \$\$" -done -# -# Create the alaska_marine view -# -/awips2/psql/bin/psql -d maps -U awipsadmin -q -c " - DROP VIEW IF EXISTS mapdata.alaska_marine; - CREATE OR REPLACE VIEW mapdata.alaska_marine AS - SELECT CAST(ROW_NUMBER() OVER(ORDER BY id) AS INT) GID, * FROM ( - SELECT id, wfo, name, lat, lon, - the_geom, the_geom_0, the_geom_0_064, the_geom_0_016, the_geom_0_004, the_geom_0_001 - FROM mapdata.marinezones WHERE wfo LIKE '%AFC%' or wfo LIKE '%AFG%' or wfo LIKE '%AJK%' - UNION - SELECT id, wfo, name, lat, lon, - the_geom, the_geom_0, the_geom_0_064, the_geom_0_016, the_geom_0_004, the_geom_0_001 - FROM mapdata.offshore WHERE wfo LIKE '%AFC%' or wfo LIKE '%AFG%' or wfo LIKE '%AJK%' - ) a; -" diff --git 
a/deltaScripts/18.1.1/DR6372/6372_move_cave_static_files.sh b/deltaScripts/18.1.1/DR6372/6372_move_cave_static_files.sh deleted file mode 100755 index 1b2a0400d9..0000000000 --- a/deltaScripts/18.1.1/DR6372/6372_move_cave_static_files.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash - -# 6372 - Move AbstractMenuUtil menu localization files to common_static. -# -# Author: mapeters -# Aug 21, 2017 - -# files under menus/ dir that are handled by AbstractMenuUtil -declare -a menu_files=("ffmp/ffmp.xml" - "scan/scan.xml" - "fog/baseFog.xml" - "safeseas/baseSafeSeas.xml" - "snow/baseSnow.xml" - "satellite/baseDerivedProductsImagery.xml" - "satellite/baseComposite.xml" - "upperair/baseRAOB.xml" - "upperair/uaMenus.xml" - "upperair/.upperairMenuTime" - "radar/index.xml" - "radar/dialRadars.xml" - "radar/airportRadars.xml" - "radar/radarindex.xml" - "radar/.radarMenuTime" - "ncepHydro/cpc/cpcMenus.xml" - "ncepHydro/spc/hazardMenus.xml" - "ncepHydro/hpc/hpcMenus.xml" - "ncepHydro/mpc/mpcMenus.xml" - "ncepHydro/nco/ncoMenus.xml" - "ncepHydro/cpc/.ncepHydro/cpcMenuTime" - "ncepHydro/spc/.ncepHydro/spcMenuTime" - "ncepHydro/hpc/.ncepHydro/hpcMenuTime" - "ncepHydro/mpc/.ncepHydro/mpcMenuTime" - "ncepHydro/nco/.ncepHydro/ncoMenuTime" - ) - -function get_fs() { - df -P -- "$1" | awk 'NR==2 {print $1}' -} - -did_work=false -echo INFO: Moving AbstractMenuUtil menu localization files to common_static -for menu_file in "${menu_files[@]}"; do - for old_location in /awips2/edex/data/utility/cave_static/*/*/menus/"${menu_file}"; do - if [[ ! -e "${old_location}" ]]; then - continue - fi - new_location=${old_location/cave_static/common_static} - echo INFO: Moving "${old_location}" to "${new_location}" - did_work=true - new_location_dir=$(dirname "${new_location}") - if [[ ! -d "${new_location_dir}" ]]; then - sudo -u awips mkdir -p -m 750 "${new_location_dir}" - else - echo "WARN: ${new_location} already exists. Just copying newer files" - fi - if rsync -aux "${old_location}" "${new_location}"; then - # cut context directory (e.g. /awips2/.../site/OAX) from old_location - old_location_ctx_dir=$(echo "${old_location}" | cut -d/ -f-8) - # filesystem that context dir is on - ctx_fs=$(get_fs "${old_location_ctx_dir}") - - # remove moved file if on same filesystem as context dir - file_fs=$(get_fs "${old_location}") - if [[ "${file_fs}" = "${ctx_fs}" ]]; then - rm -f "${old_location}" "${old_location}".md5 - fi - - old_location_dir=$(dirname "${old_location}") - # remove empty directories up until the context dir - while [[ "${old_location_ctx_dir}" != "${old_location_dir}" ]]; do - # filesystem that dir is on - dir_fs=$(get_fs "${old_location_dir}") - if [[ "${dir_fs}" = "${ctx_fs}" ]]; then - # remove empty directory if on same filesystem - # stop moving up directory tree if we find non-empty directory - rmdir "${old_location_dir}" 2>/dev/null || break - fi - old_location_dir=$(dirname "${old_location_dir}") - done - fi - rm -f "${new_location}".md5 - echo INFO: Done moving "${old_location}" to "${new_location}" - done -done - -if [[ "${did_work}" == "false" ]]; then - echo INFO: There are no files to move. Did nothing -else - echo INFO: Done moving localization files -fi diff --git a/deltaScripts/18.1.1/DR6373/6373_move_cave_static_files.sh b/deltaScripts/18.1.1/DR6373/6373_move_cave_static_files.sh deleted file mode 100755 index 2da777dbd1..0000000000 --- a/deltaScripts/18.1.1/DR6373/6373_move_cave_static_files.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -# 6373 - Move SCAN localization files to common_static. 
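# (Aside, not part of the original script: this and the neighboring delta
# scripts all move localization files with the same idiom,
#
#     rsync -aux "${old_location}" "${new_location}/.." && \
#         rm -rf --one-file-system "${old_location}"
#
# where -a preserves ownership and permissions, -u skips files that are
# already newer at the destination, and -x / --one-file-system keep both the
# copy and the delete from crossing mount points.)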
-# -# Author: tgurney -# Aug 11, 2017 - -did_work=false -echo INFO: Moving SCAN localization files to common_static -for old_location in /awips2/edex/data/utility/cave_static/*/*/scan; do - if [[ ! -e "${old_location}" ]]; then - continue - fi - new_location=${old_location/cave_static/common_static} - echo INFO: Moving "${old_location}" to "${new_location}" - did_work=true - if [[ ! -d "${new_location}" ]]; then - sudo -u awips mkdir -p -m 750 "${new_location}" - else - echo "WARN: ${new_location} already exists. Just copying newer files" - fi - rsync -aux "${old_location}" "${new_location}/.." && - rm -rf --one-file-system "${old_location}" - find "${new_location}" -xdev -type f -name '*.md5' -delete - echo INFO: Done moving "${old_location}" to "${new_location}" -done - -if [[ "${did_work}" == "false" ]]; then - echo INFO: There are no files to move. Did nothing -else - echo INFO: Done moving localization files -fi diff --git a/deltaScripts/18.1.1/DR6374/6374_move_cave_static_files.sh b/deltaScripts/18.1.1/DR6374/6374_move_cave_static_files.sh deleted file mode 100755 index 6d11b245bc..0000000000 --- a/deltaScripts/18.1.1/DR6374/6374_move_cave_static_files.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# 6347 - Move D2D tools localization files to common_static. -# -# Mostly copied from DR 6183's delta script -# -# Author: njensen -# August 02, 2017 - -did_work=false -echo INFO: Moving D2D tool localization files to common_static -for old_location in /awips2/edex/data/utility/cave_static/*/*/awipsTools; do - if [[ ! -e "${old_location}" ]]; then - continue - fi - new_location=${old_location/cave_static/common_static} - echo INFO: Moving "${old_location}" to "${new_location}" - did_work=true - if [[ ! -d "${new_location}" ]]; then - sudo -u awips mkdir -p "${new_location}" -m 750 - else - echo "WARN: ${new_location} already exists. Just copying newer files" - fi - rsync -aux "${old_location}" "${new_location}/.." && - rm -rf --one-file-system "${old_location}" - find "${new_location}" -xdev -type f -name '*.md5' -delete - find "${new_location}" -xdev -type f -name '*.pyc' -delete - find "${new_location}" -xdev -type f -name '*.pyo' -delete - echo INFO: Done moving "${old_location}" to "${new_location}" -done - -if [[ "${did_work}" == "false" ]]; then - echo INFO: There are no files to move. Did nothing -else - echo INFO: Done moving localization files -fi diff --git a/deltaScripts/18.1.1/DR6402/lightningSourceIndexUpdate.sh b/deltaScripts/18.1.1/DR6402/lightningSourceIndexUpdate.sh deleted file mode 100755 index 5aa707b61c..0000000000 --- a/deltaScripts/18.1.1/DR6402/lightningSourceIndexUpdate.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# 6402 - Add an index on source to the binlightning table in the metadata database. -# -# Author: mapeters -# Oct 31, 2017 - -psql=/awips2/psql/bin/psql -index=binlightning_sourceIndex -table=awips.binlightning - -sql="CREATE INDEX IF NOT EXISTS ${index} ON ${table} USING btree(source);" -${psql} -U awipsadmin -d metadata -c "${sql}" -if [[ $? -ne 0 ]]; then - echo "ERROR: Failed to create index ${index} on table ${table}." - echo "FATAL: The update has failed." - exit 1 -fi - -echo "INFO: the update has completed successfully!" 
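# (Aside, not part of the original script: the plain CREATE INDEX above
# blocks writes to awips.binlightning for the duration of the build. On a
# live EDEX ingest box the same index can be built without blocking writes,
# at the cost of a slower build, e.g.:
#
#     CREATE INDEX CONCURRENTLY IF NOT EXISTS binlightning_sourceIndex
#         ON awips.binlightning USING btree(source);
#
# CONCURRENTLY cannot run inside a transaction block, so it must be issued
# as its own psql statement.)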
-exit 0 diff --git a/deltaScripts/18.1.1/DR6507/6507_move_tablespaces.sh b/deltaScripts/18.1.1/DR6507/6507_move_tablespaces.sh deleted file mode 100755 index c1d5dc4c55..0000000000 --- a/deltaScripts/18.1.1/DR6507/6507_move_tablespaces.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -# #6507 - This script moves all postgres tablespaces to -# /awips2/data/tablespaces. It will also create a symlink from -# /awips2/database/data to /awips2/data if the latter does not exist already. -# -# Run this script as root on all servers that have /awips2/data. -# -# Author: tgurney - -pg_ctl=/awips2/postgresql/bin/pg_ctl -old_data_dir=/awips2/data -new_data_dir=/awips2/database/data -new_tablespace_dir=/awips2/database/tablespaces -pg_tblspc=${new_data_dir}/pg_tblspc - -su - awips -c "${pg_ctl} -D ${new_data_dir} status" - -if [[ "$?" -eq 0 ]]; then - echo "ERROR: Postgres is running. Cannot continue." - exit 1 -fi - -if [[ -e "${old_data_dir}" && ! -h "${old_data_dir}" ]]; then - echo "ERROR: ${old_data_dir} exists and is not a symlink. It needs to be" - echo "moved to ${new_data_dir} first." - exit 1 -fi - -echo INFO: Starting. - -if [[ ! -h "${old_data_dir}" ]]; then - echo "INFO: Symlinking ${new_data_dir} to ${old_data_dir}" - ln -sf "${new_data_dir}" "${old_data_dir}" - chown awips:fxalpha "${old_data_dir}" -fi - -# move tablespaces -mkdir -p ${new_tablespace_dir} -chown -R awips:fxalpha ${new_tablespace_dir} -chmod 700 ${new_tablespace_dir} -for ts_link in "${pg_tblspc}"/* ; do - if [[ -h ${ts_link} ]]; then - this_ts=$(readlink "${ts_link}") - if [[ "$(dirname "${this_ts}")" != "${new_tablespace_dir}" ]]; then - mv -v "${this_ts}" "${new_tablespace_dir}" && \ - rm -vf "${ts_link}" && \ - ln -sv "${new_tablespace_dir}/$(basename ${this_ts})" ${ts_link} - chown awips:fxalpha ${ts_link} - else - echo "INFO: ${this_ts} already in correct location" - fi - fi -done - -echo INFO: Done. diff --git a/deltaScripts/18.1.1/DR7424/addGridCoverageIndex.sh b/deltaScripts/18.1.1/DR7424/addGridCoverageIndex.sh deleted file mode 100644 index 98afdee725..0000000000 --- a/deltaScripts/18.1.1/DR7424/addGridCoverageIndex.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -# #7424 - This script adds an index to the grid_info table's location column to -# allow for faster lookup and deletion of large numbers of grid_coverage. -# -# NOTE: This script will error if the index already exists, but this will not -# negatively impact the system. - -# run the update -echo "DR #7427 - Adding index to grid_info.location..." -/awips2/psql/bin/psql -U awipsadmin -d metadata -c "CREATE INDEX grid_info_location_id_index ON grid_info USING btree (location_id);" \ No newline at end of file diff --git a/deltaScripts/goesr_deploy/convertSatURIs.py b/deltaScripts/goesr_deploy/convertSatURIs.py deleted file mode 100644 index cada569eb1..0000000000 --- a/deltaScripts/goesr_deploy/convertSatURIs.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/python -# -""" - Convert to append the coverageid to the satellite datauris, and - modify the associated satellite hdf5 group names to append the - coverageid. The new groups are added as an alias to the existing - datasets. - Date Ticket# Engineer Description - ------------ ---------- ----------- -------------------------- - 20120711 798 jkorman Initial Development - 20120822 798 jkorman Added ability to account for files containing hdfFileId. 
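# (Aside, not part of the original script: the conversion below aliases the
# old hdf5 groups to new coverage-qualified names with h5py soft links, so
# no dataset bytes are copied. The core pattern, as a minimal standalone
# sketch with a hypothetical file name:
#
#     import h5py
#     with h5py.File('satellite-sample.h5', 'r+') as f:
#         f.create_group('/newGroup')
#         f['/newGroup/Data'] = h5py.SoftLink('/oldGroup/Data')
#
# reads through /newGroup/Data then resolve to the original dataset.)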
-""" -from subprocess import Popen, PIPE -import sys -from time import time -import h5py - -POSTGRES_CMD = "psql -U awips -d metadata -t -q -A -c " -HDF5_LOC = "/awips2/edex/data/hdf5" - -ID_IDX = 0 -DATAURI_IDX = 1 -COVERAGE_IDX = 2 -SECTORID_IDX = 3 -PHYSICAL_ELEMENT_IDX = 4 -REFTIME_IDX = 5 -HDFFILEID_IDX = 6 - -def update_satellite_table(): - """ - Add the interpolationLevels column to the satellite table. - """ - result = queryPostgres("select count(*) from information_schema.columns where table_name='satellite' and column_name='interpolationlevels';") - if(result[0][0] == '0'): - result = queryPostgres("alter table satellite add column interpolationlevels integer;") - print "Adding interpolationlevels column to satellite table" - -def formatFileTime(refTime): - """ - Extract and format the year (YYYY), month (MM), day (DD), and hour (HH) - from the reference time. The output is formatted as YYYY-MM-DD-HH - """ - return refTime[0:4] + "-" + refTime[5:7] + "-" + refTime[8:10] + "-" + refTime[11:13] - -def getFilename(refTime, h5FileId): - """ - Create the satellite data hdf filename corresponding to the given reference time and an - possible hdf5 file identifier. - """ - if(len(h5FileId) > 0): - filename = "satellite-" + formatFileTime(refTime) + "-" + h5FileId + ".h5" - else: - filename = "satellite-" + formatFileTime(refTime) + ".h5" - return filename - -def queryPostgres(sql): - """ - Extract and format the year (YYYY), month (MM), day (DD), and hour (HH) - from the reference time. The output is formatted as YYYY-MM-DD-HH - """ - result = Popen(POSTGRES_CMD + "\"" + sql + "\"", stdout=PIPE, shell=True) - retVal = [] - for line in result.stdout: - retVal.append(line.strip().split("|")) - return retVal - -def get_sectorids(): - """ - Get a list of unique sector identifiers from the satellite table. - """ - return queryPostgres("select distinct sectorid from satellite;") - -def get_satellite_rows(sectorid): - """ - Extract and format the year (YYYY), month (MM), day (DD), and hour (HH) - from the reference time. The output is formatted as YYYY-MM-DD-HH - """ - keys = {} - rows = queryPostgres("select id, dataURI, coverage_gid, sectorid, physicalelement, reftime, hdffileid from satellite where sectorid=" + repr(sectorid) + ";") - for row in rows: - # create the key for this entry. - key = "/satellite/" + row[SECTORID_IDX] + "/" + row[PHYSICAL_ELEMENT_IDX] + "/" + getFilename(row[REFTIME_IDX], row[HDFFILEID_IDX]) - #print "key = " + key - # have we found this key already? - if(key in keys): - # if so, get the row list for this key - rowList = keys[key] - else: - # otherwise create an empty list to put the row in - rowList = [] - # add it to the collection - keys[key] = rowList - # and add the row to the list - rowList.append(row) - return keys - -def process_all_satellite(): - """ - Process all entries in the satellite table. - Do one sector id at a time. 
- """ - sectorids = get_sectorids() - if(sectorids): - for sectorid in sectorids: - print "Processing sector " + sectorid[0] - keys = get_satellite_rows(sectorid[0]) - if(keys): - for key in keys: - print "==========================================================" - print " Processing key = " + key - fname = HDF5_LOC + key - try: - f = h5py.File(fname,'r+') - for row in keys[key]: - dataURI = row[1] - coverage = row[2] - - newGroupName = dataURI + "/" + coverage - group = f.create_group(newGroupName) - group = f.create_group(newGroupName + "/Data-interpolated") - - oldds = row[DATAURI_IDX] + "/Data" - newds = newGroupName + "/Data" - # Link to the old data set - f[newds] = h5py.SoftLink(oldds) - - group = f[row[DATAURI_IDX] + "/Data-interpolated"] - numLevels = 0 - for n in group.keys(): - newds = newGroupName + "/Data-interpolated/" + n - if (n == '0'): - # special case for this link. - # dataset /Data-interpolated/0 points to /Data - oldds = row[DATAURI_IDX] + "/Data" - else: - numLevels += 1 - oldds = row[DATAURI_IDX] + "/Data-interpolated/" + n - f[newds] = h5py.SoftLink(oldds) - updateSql = "update satellite set datauri='" + row[DATAURI_IDX] + "/" + row[COVERAGE_IDX] + "'" - updateSql += ", interpolationlevels=" + repr(numLevels) - updateSql += " where id=" + row[ID_IDX] + ";" - queryPostgres(updateSql) - f.close() - except Exception, e: - print e - print "Error occurred processing file " + fname - else: - print "No keys found for the sector id " + sectorid[0] - else: - print "No sector identifiers found in the satellite table" - -if __name__ == '__main__': - t = time() - update_satellite_table() - process_all_satellite() - print "Total Conversion time %ds" % (time() - t) diff --git a/dist/comps.xml b/dist/comps.xml index 02de9b160f..7c93ae540d 100644 --- a/dist/comps.xml +++ b/dist/comps.xml @@ -88,8 +88,6 @@ awips2-edex-cots awips2-edex-dataplugins awips2-edex-ncep - awips2-edex-datadelivery - awips2-edex-ogc awips2-java awips2-ldm awips2-python @@ -260,8 +258,6 @@ awips2-edex-gfe awips2-edex-ncep awips2-edex-shapefiles - awips2-edex-datadelivery - awips2-edex-ogc awips2-httpd-pypies awips2-httpd-pypies-tools awips2-java diff --git a/docs/appendix/appendix-cots.md b/docs/appendix/appendix-cots.md index bbee3193ad..0ed1ee151f 100644 --- a/docs/appendix/appendix-cots.md +++ b/docs/appendix/appendix-cots.md @@ -1,94 +1,172 @@ -COTS and FOSS used in AWIPS + +## Python for AWIPS + +Component |Version|Description +--------------|:-----:|---------- +Python |2.7.13| Dynamic programming language +python-awips | 18.1.1 | Python AWIPS Data Access Framework +Cycler|0.10.0|Python library for composable style cycles +Cython|0.28.3|Superset of the Python programming language, designed to give C-like performance with code that is mostly written in Python +dateutil|2.7.3|Python extension to the standard datetime module +NumPy |1.9.3 |Numerical Python Scientific package for Python +matplotlib |1.5.3| Python 2D Plotting Library +Jep |3.7.1
3.8.2| Java Python interface +h5py |1.3.0| HDF5 for Python +PyDev |5.4.0 | Python Development Environment +PyParsing|2.2.0|Python class library for the easy construction of recursive-descent parsers +Python QPID | 1.36.0 | Python API for Qpid Messaging +PyTables |3.4.2| Python package for managing hierarchical datasets +pytz|2015.4|World Timezone Definitions for Python +Setuptools|28.6.0|Tools to download, build, install, upgrade, and uninstall Python packages +ScientificPython |2.8.1| Python library for common tasks in scientific computing +Shapely|1.6.4| Python package for manipulation and analysis of planar geometric objects. +Six|1.11.0|Python 2 and 3 Compatibility Library +stomp.py | 4.1.20 | Python client library for accessing messaging servers +werkzeug | 0.14.1| Python WSGI utility library +YAJSW | 12.09 | Yet Another Java Service Wrapper + + +## Apache for AWIPS + +Component |Version|Description +--------------|:-----:|---------- +ActiveMQ |5.14.2 |JMS +ActiveMQ Geronimo | 1.1.1 | +Apache Batik |1.9 |Batik is a Java-based toolkit for applications or applets that want to use images in the Scalable Vector Graphics (SVG) format for various purposes, such as display, generation or manipulation. +Apache Camel |2.18.3 |Enterprise Service Bus +Apache Derby |10.12.1 | +Apache HTTP |4.3.6 | Client and Core +Apache HTTP Server | 2.4.27 | +Apr | 1.6.2 | Apache Portable Runtime Project +Apr-Util | 1.6.0 | Apache Portable Runtime Project +commons-beanutils |1.9.3| Apache Common Libraries +commons-codec |1.10| Apache Common Libraries +commons-collections |3.2.2| Apache Common Libraries +commons-configuration |1.10| Apache Common Libraries +commons-compress |1.10| Apache Common Libraries +commons-cli |1.2| Apache Common Libraries +commons-digester |1.8.1| Apache Common Libraries +commons-io |2.4| Apache Common Libraries +commons-cxf |3.1.14| Apache Common Libraries +commons-lang |2.6| Apache Common Libraries +commons-lang3 |3.4| Apache Common Libraries +commons-management |1.0| Apache Common Libraries +commons-net |3.3| Apache Common Libraries +commons-pool |1.6| Apache Common Libraries +commons-pool2 |2.4.2| Apache Common Libraries +commons-ssl | | Apache Common Libraries +commons-validator |1.2.0| Apache Common Libraries +Mime4J | 0.7 | Parser for e-mail message streams in plain rfc822 and MIME format +MINA |1.1.7 | Network application framework +Qpid |6.1.4| Open Source AMQP (Advanced Message Queuing Protocol) Messaging +Shiro |1.3.2 | Java security framework +Thrift |0.10.0 | Binary Serialization Framework +Velocity|1.7| Templating Engine +WSS4J |2.1.4 | Web Services Security +Xalan | 2.7.2 | +Xerces | 2.9.1 | +XML Resolver | 1.2 | +XML Security | 2.0.6 | +XML Serializer | 2.7.1 | +XML Beans | 2.6.0 | +XML Graphics | 2.2 | +XML Schema | 2.1.0 | + +## Other COTS and FOSS Component |Version|Description --------------|:-----:|---------- -ActiveMQ |5.12.0 |JMS (still used by AlertViz and internally in parts of Camel) -Apache Batik |1.8 |Batik is a Java-based toolkit for applications or applets that want to use images in the Scalable Vector Graphics (SVG) format for various purposes, such as display, generation or manipulation. 
-Apache MINA |1.1.7 | Network application framework -Apache WSS4J |1.6.5 | Web Services Security Ant |1.9.6 | Java Build Tool Ant-Contrib |1.0b3 | Additional useful tasks and types for Ant Antlr |2.7.6 | Parser generator Atomikos TransactionEssentials |3.6.2 | Transaction management system Bitstream Vera Fonts |1.10| Font library from Gnome -bzip2 |none |Stream compression algorithm +Bouncy Castle | jdk15on-1.54 | Java implementation of cryptographic algorithms +bzip2 |0.9.1 |Stream compression algorithm C3p0 |0.9.1 |c3p0 is an easy-to-use library for making traditional JDBC drivers "enterprise-ready" by augmenting them with functionality defined by the jdbc3 spec and the optional extensions to jdbc2. -Camel |2.4 |Enterprise Service Bus -cglib |2.2| Byte Code Generation Library is high level API to generate and transform JAVA byte code. -commons-beanutils |1.8.3| Apache Common Libraries -commons-codec |1.4.1| Apache Common Libraries -commons-collection |3.2| Apache Common Libraries -commons-configuration |1.6| Apache Common Libraries -Commons-cli |1.2| Apache Common Libraries -commons-digester |1.8.1| Apache Common Libraries -commons-cxf |2.5| Apache Common Libraries -commons-httpclient |3.1| Apache Common Libraries -commons-lang |2.3| Apache Common Libraries -commons-logging |1.1.1| Apache Common Libraries -commons-management |1.0| Apache Common Libraries -commons-pool |1.3| Apache Common Libraries -commons-validator |1.2| Apache Common Libraries -cycler|0.10.0|Python library for composable style cycles -Cython|0.27.2|Superset of the Python programming language, designed to give C-like performance with code that is mostly written in Python -dateutil|2.6.1|Python extension to the standard datetime module +cglib |2.1| Byte Code Generation Library is high level API to generate and transform JAVA byte code. 
+distcache | 1.4.5-21 | Distributed session caching dom4j |1.6.1| An open source library for working with XML, XPath, and XSLT on the Java platform using the Java Collections Framework +OpenDAP 2| 1.0.3 | dwr (direct web remoting) Getahead |1.1.3| Java open source library -Eclipse |4.5.1| Java IDE +Eclipse |4.6.1| Java IDE +Eclipse Jetty | 9.2.19 | Servlet Engine and Http Server ehcache |1.3.0 |Caching Support +FITS | | Flexible Image Transport System +GDAL | 2.2.4 | GEOS |3.6.2| Geometry Engine, Required for PostGIS -GeoTools Java API |2.6.4| Java API for Manipulation of Geospatial Data +GeoTools Java API |16.4| Java API for Manipulation of Geospatial Data GRIBJava |8.0| Grib Java Decoder -h5py |1.3.0| HDF5 for Python +Groovy | 2.4.10 | +Guava | 18.0 | Google core libraries for Java +Hamcrest | 1.3 | Java Hamcrest Matchers hdf5 |1.8.4-patch1| Core HDF5 APIs hdf5 |2.5| Core HDF5 APIs -Hibernate |3.5.0| Data Access Layer +Hibernate |4.2.15| Data Access Layer +HIbernate JPA 2.0 API | 1.0.1 | Hibernate API +Istack | 2.21 | Common Utility Code Runtime IzPack |4.2.0| Installer creator for EDEX +Jackson Databind | 2.6.5 | General data-binding functionality for Jackson JAI |1.1.3| Java API for Image Manipulation JAI – Image I/O |1.1| Plug-ins for JAI Jasper |1.900.1| JPEG-2000 codec -Java |1.8u101| Kit for both 32-bit and 64-bit -Jep |2.3+| Java Python interface +Jasypt | 1.9.2 | Java simplified encryption +Java | jdk-8u101 | Kit for both 32-bit and 64-bit +Javax Servlet API | 3.1.0 | +Jaxen | 1.1.4 | Open source X-Path Library +Javassist | 3.18.1 | Java Programming Assistant for bytecode manipulation +JCommander | 1.72 | Java framework for parsing command line parameters +Jdom | 1.1.3 | +Jdom2 | 2.0.6 | +jfreechart | 1.0.19 | +JNA | 4.1.0 | +Joda | 2.9.9 | Java date and time API jogamp |2.3.2| Provides hardware-supported 3D graphics +JSR-275 |1.0 beta | Measures and Units +JUnit | 4.12 | JTS Topology Suite |1.10| Java API for 2D spatial data -lapack |3.0.0| Linear Algebra Package for python +lapack |3.4.2| Linear Algebra Package for python ldm |6.13.6| Local Data Manager Log4J |1.2.16| Logging Component used by Commons Logging +Logback | 1.2.0 | libgfortran |4.1.2| Fortran Library -matplotlib |1.5.3| Python 2D Plotting Library +Mchange Commons Java | 0.2.3.4 | +Mchange c3p0 | 0.9.2.1 | JDBC3 Connection and Statement Pooling +Mockito | 1.9.0 | Mocking framework for unit tests written in Java +mod_wsgi | 3.5 | Apache HTTP Server module that provides a WSGI compliant interface for hosting Python based web applications. Mozilla Rhino |1.6R7| Implementation of JavaScript embedded in Java +NCAR NC2 Libraries | 4.6.10 | ucar.nc2 containing bufr, cdm, grib, httpservices, and udunits NCEP Grib2 Libraries|| Libraries for decoding & encoding data in GRIB2 format     cnvgrib |1.1.8 and 11.9| Fortran GRIB1 <--> GRIB2 conversion utility     g2clib |1.1.8| "C" grib2 encoder/decoder     g2lib |1.1.8 and 1.1.9| Fortran grib2 encoder/decoder and search/indexing routines     w3lib |1.6 and 1.7.1| Fortran grib1 encoder/decoder and utilities -NumPy |1.9.3 |Numerical Python Scientific package for Python -objectweb asm |2.1| ASM is an all-purpose Java bytecode manipulation and analysis framework. It can be used to modify existing classes or dynamically generate classes, directly in binary form +ObjectWeb ASM |2.2| ASM is an all-purpose Java bytecode manipulation and analysis framework. 
It can be used to modify existing classes or dynamically generate classes, directly in binary form +ObjectWeb ASM +OGC Tools GML JTS Converter | 1.0.2 | +Opengis | 1.0.2 | +OpenSAML | 3.1.1 | Portable implementation of the Security Assertion Markup Language (SAML) +org.w3.xml.ext | 1.3.04 | Apache-hosted set of DOM, SAX, and JAXP interfaces +OWASP Enterprise Security API | 2.0.1 | Open source web application security control library for programmers to write low-risk applications +PNGJ | 2.1.1 | Java library for PNG image IO PostGIS |2.4.4| Geographic Object Support for PostgreSQL PostgreSQL |9.5.13| Database Proj |5.1.0| Cartographic Projections library -pydev |1.5| Python Development Environment -PyParsing|2.2.0|Python class library for the easy construction of recursive-descent parsers -PyTables |3.4.2| Python package for managing hierarchical datasets -pytz|2015.4|World Timezone Definitions for Python -Python |2.7.13| Dynamic programming language +Protocol Buffers | 3.3.1 | Core Protocol Buffers library Python megawidgets |1.3.2| Toolkit for building high-level compound widgets in Python using the Tkinter module -Python Setuptools|28.6.0|Tools to download, build, install, upgrade, and uninstall Python packages -Qpid |6.1.4| Open Source AMQP (Advanced Message Queuing Protocol) Messaging -ScientificPython |2.8.1| Python library for common tasks in scientific computing -Shapely|1.6.4| Python package for manipulation and analysis of planar geometric objects. -Six|1.11.0|Python 2 and 3 Compatibility Library -slf4j |1.6.1| The Simple Logging Facade for Java or (SLF4J) serves as a simple facade or abstraction for various logging frameworks -smack |2.2.1| Smack is an Open Source XMPP (Jabber) client library for instant messaging and presence. -stomp.py |revision 18| Python client library for accessing messaging servers +Quartz | 1.8.6 | Enterprise Job Scheduler +Reflections | 0.9.9 | Java runtime metadata analysis +slf4j |1.7.21| The Simple Logging Facade for Java or (SLF4J) serves as a simple facade or abstraction for various logging frameworks +smack |4.1.9| Open Source XMPP (Jabber) client library Spring Framework OSGI |1.2.0| dynamic modules -Spring Framework |2.5.6 |Layered Java/J2EE application platform +Spring Framework |4.2.9 |Layered Java/J2EE application platform Subclipse |1.4.8| Eclipse plugin for Subversion support SWT Add-ons |0.1.1| Add-ons for Eclipse SWT widgets Symphony OGNL |2.7.3| Object-Graph Navigation Language; an expression language for getting/setting properties of Java objects. -Thrift |20080411p1-3| Binary Serialization Framework +SZIP | 2.1 | Compression in HDF Products. 
Tomcat Native |1.1.17| Library for native memory control +UDUNITS | 4.6.10 | C library provides for arithmetic manipulation of units utilconcurrent |1.3.2| Utility classes -Velocity |1.5.0| Templating Engine -werkzeug |0.12.1| Python WSGI utility library Wildfire |3.1.1| Collaboration Server xmltask |1.15.1| Facility for automatically editing XML files as part of an Ant build +Vecmath | 1.3.1 | diff --git a/docs/appendix/maps-database.md b/docs/appendix/maps-database.md new file mode 100644 index 0000000000..96cbe0b97b --- /dev/null +++ b/docs/appendix/maps-database.md @@ -0,0 +1,516 @@ + +## mapdata.airport + +| Column | Type | +|------------|-----------------------| +| arpt_id | character varying(4) | +| name | character varying(42) | +| city | character varying(40) | +| state | character varying(2) | +| siteno | character varying(9) | +| site_type | character varying(1) | +| fac_use | character varying(2) | +| owner_type | character varying(2) | +| elv | integer | +| latitude | character varying(16) | +| longitude | character varying(16) | +| lon | double precision | +| lat | double precision | +| the_geom | geometry(Point,4326) | + +ok + +## mapdata.allrivers + +| Column | Type | +|----------------|--------------------------------| +| ihabbsrf_i | double precision | +| rr | character varying(11) | +| huc | integer | +| type | character varying(1) | +| pmile | double precision | +| pname | character varying(30) | +| owname | character varying(30) | +| pnmcd | character varying(11) | +| ownmcd | character varying(11) | +| dsrr | double precision | +| dshuc | integer | +| usdir | character varying(1) | +| lev | smallint | +| j | smallint | +| termid | integer | +| trmblv | smallint | +| k | smallint | +| the_geom | geometry(MultiLineString,4326) | + + + +## mapdata.artcc +| Column | Type | +|------------|-----------------------------| +| artcc | character varying(4) | +| alt | character varying(1) | +| name | character varying(30) | +| type | character varying(5) | +| city | character varying(40) | +| id | double precision | +| the_geom | geometry(MultiPolygon,4326) | + + + + +## mapdata.basins +| Column | Type | +|----------------|-----------------------------| +| rfc | character varying(7) | +| cwa | character varying(5) | +| id | character varying(8) | +| name | character varying(64) | +| lon | double precision | +| lat | double precision | +| the_geom | geometry(MultiPolygon,4326) | +| the_geom_0 | geometry(MultiPolygon,4326) | +| the_geom_0_064 | geometry(MultiPolygon,4326) | +| the_geom_0_016 | geometry(MultiPolygon,4326) | +| the_geom_0_004 | geometry(MultiPolygon,4326) | +| the_geom_0_001 | geometry(MultiPolygon,4326) | + + + + +## mapdata.canada +| Column | Type | +|----------------|-----------------------------| +| f_code | character varying(5) | +| name_en | character varying(25) | +| nom_fr | character varying(25) | +| country | character varying(3) | +| cgns_fid | character varying(32) | +| the_geom | geometry(MultiPolygon,4326) | + + + + +## mapdata.city +| Column | Type | +|------------|------------------------| +| st_fips | character varying(4) | +| sfips | character varying(2) | +| county_fip | character varying(4) | +| cfips | character varying(4) | +| pl_fips | character varying(7) | +| id | character varying(20) | +| name | character varying(39) | +| elevation | character varying(60) | +| pop_1990 | numeric | +| population | character varying(30) | +| st | character varying(6) | +| warngenlev | character varying(16) | +| warngentyp | character varying(16) | +| 
watch_warn | character varying(3) | +| zwatch_war | double precision | +| prog_disc | integer | +| zprog_disc | double precision | +| comboflag | double precision | +| land_water | character varying(16) | +| recnum | double precision | +| lon | double precision | +| lat | double precision | +| f3 | double precision | +| f4 | character varying(254) | +| f6 | double precision | +| state | character varying(25) | +| the_geom | geometry(Point,4326) | + +## mapdata.county +| Column | Type | +|----------------|-----------------------------| +| state | character varying(2) | +| cwa | character varying(9) | +| countyname | character varying(24) | +| fips | character varying(5) | +| time_zone | character varying(2) | +| fe_area | character varying(2) | +| lon | numeric | +| lat | numeric | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.customlocations +| Column | Type | +|------------|-----------------------------| +| bullet | character varying(16) | +| name | character varying(64) | +| cwa | character varying(12) | +| rfc | character varying(8) | +| lon | numeric | +| lat | numeric | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.cwa +| Column | Type | +|----------------|-----------------------------| +| cwa | character varying(9) | +| wfo | character varying(3) | +| lon | numeric | +| lat | numeric | +| region | character varying(2) | +| fullstaid | character varying(4) | +| citystate | character varying(50) | +| city | character varying(50) | +| state | character varying(50) | +| st | character varying(2) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.firewxaor +| Column | Type | +|----------------|-----------------------------| +| cwa | character varying(3) | +| wfo | character varying(3) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.firewxzones +| Column | Type | +|----------------|-----------------------------| +| state | character varying(2) | +| zone | character varying(3) | +| cwa | character varying(3) | +| name | character varying(254) | +| state_zone | character varying(5) | +| time_zone | character varying(2) | +| fe_area | character varying(2) | +| lon | numeric | +| lat | numeric | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.fix +| Column | Type | +|------------|-----------------------| +| id | character varying(30) | +| type | character varying(2) | +| use | character varying(5) | +| state | character varying(2) | +| min_alt | integer | +| latitude | character varying(16) | +| longitude | character varying(16) | +| lon | double precision | +| lat | double precision | +| the_geom | geometry(Point,4326) | + +## mapdata.highaltitude +| Column | Type | +|------------|--------------------------------| +| awy_des | character varying(2) | +| awy_id | character varying(12) | +| awy_type | character varying(1) | +| airway | character varying(16) | +| newfield1 | double precision | +| the_geom | geometry(MultiLineString,4326) | + +## mapdata.highsea +| Column | Type | +|----------------|-----------------------------| +| wfo | character varying(3) | +| name | character varying(250) | +| lat | numeric | +| lon | numeric | +| id | character varying(5) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.highway +| Column | Type | +|----------------|--------------------------------| +| prefix | character varying(2) | +| pretype | character varying(6) | +| name | character varying(30) | +| type | character varying(6) | +| suffix | character varying(2) | +| class | character varying(1) | +| class_rte | character varying(1) | +| 
hwy_type | character varying(1) |
+| hwy_symbol | character varying(20) |
+| route | character varying(25) |
+| the_geom | geometry(MultiLineString,4326) |
+
+## mapdata.hsa
+| Column | Type |
+|----------------|-----------------------------|
+| wfo | character varying(3) |
+| lon | double precision |
+| lat | double precision |
+| the_geom | geometry(MultiPolygon,4326) |
+
+## mapdata.interstate
+| Column | Type |
+|----------------|--------------------------------|
+| prefix | character varying(2) |
+| pretype | Ushy, Hwy, Ave, Cord, Rt, Loop, I, Sthy |
+| name | character varying(30) |
+| type | character varying(6) |
+| suffix | character varying(2) |
+| hwy_type | I, U, S |
+| hwy_symbol | character varying(20) |
+| route | character varying(25) |
+| the_geom | geometry(MultiLineString,4326) |
+
+## mapdata.isc
+| Column | Type |
+|------------|-----------------------------|
+| wfo | character varying(3) |
+| cwa | character varying(3) |
+| the_geom | geometry(MultiPolygon,4326) |
+
+## mapdata.lake
+| Column | Type |
+|----------------|-----------------------------|
+| name | character varying(40) |
+| feature | character varying(40) |
+| lon | double precision |
+| lat | double precision |
+| the_geom | geometry(MultiPolygon,4326) |
+
+## mapdata.latlon10
+| Column | Type |
+|------------|--------------------------------|
+| the_geom | geometry(MultiLineString,4326) |
+
+## mapdata.lowaltitude
+| Column | Type |
+|------------|--------------------------------|
+| awy_des | character varying(2) |
+| awy_id | character varying(12) |
+| awy_type | character varying(1) |
+| airway | character varying(16) |
+| newfield1 | double precision |
+| the_geom | geometry(MultiLineString,4326) |
+
+## mapdata.majorrivers
+| Column | Type |
+|----------------|--------------------------------|
+| rf1_150_id | double precision |
+| huc | integer |
+| seg | smallint |
+| milept | double precision |
+| seqno | double precision |
+| rflag | character varying(1) |
+| owflag | character varying(1) |
+| tflag | character varying(1) |
+| sflag | character varying(1) |
+| type | character varying(1) |
+| segl | double precision |
+| lev | smallint |
+| j | smallint |
+| k | smallint |
+| pmile | double precision |
+| arbsum | double precision |
+| usdir | character varying(1) |
+| termid | integer |
+| trmblv | smallint |
+| pname | character varying(30) |
+| pnmcd | character varying(11) |
+| owname | character varying(30) |
+| ownmcd | character varying(11) |
+| dshuc | integer |
+| dsseg | smallint |
+| dsmlpt | double precision |
+| editrf1_ | double precision |
+| demand | double precision |
+| ftimped | double precision |
+| tfimped | double precision |
+| dir | double precision |
+| rescode | double precision |
+| center | double precision |
+| erf1__ | double precision |
+| reservoir_ | double precision |
+| pname_res | character varying(30) |
+| pnmcd_res | character varying(11) |
+| meanq | double precision |
+| lowq | double precision |
+| meanv | double precision |
+| lowv | double precision |
+| worka | double precision |
+| gagecode | double precision |
+| strahler | double precision |
+| rr | character varying(11) |
+| dsrr | double precision |
+| huc2 | smallint |
+| huc4 | smallint |
+| huc6 | integer |
+| the_geom | geometry(MultiLineString,4326) |
+
+## mapdata.marinesites
+| Column | Type |
+|------------|-----------------------|
+| st | character varying(3) |
+| name | character varying(50) |
+| prog_disc | bigint |
+| warngenlev | character varying(14) |
+| the_geom | geometry(Point,4326) |
+
+## mapdata.marinezones
+| Column | Type |
+|----------------|-----------------------------|
+| id | character varying(6) |
+| wfo | character varying(3) |
+| gl_wfo | character varying(3) |
+| name | character varying(254) |
+| ajoin0 | character varying(6) |
+| ajoin1 | character varying(6) |
+| lon | numeric |
+| lat | numeric |
+| the_geom | geometry(MultiPolygon,4326) |
+
+## mapdata.mexico
+| Column | Type |
+|----------------|-----------------------------|
+| area | double precision |
+| perimeter | double precision |
+| st_mx_ | double precision |
+| st_mx_id | double precision |
+| name | character varying(66) |
+| country | character varying(127) |
+| 
continent | character varying(127) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.navaid +| Column | Type | +|------------|------------------------| +| id | character varying(30) | +| clscode | character varying(11) | +| city | character varying(40) | +| elv | integer | +| freq | double precision | +| name | character varying(30) | +| status | character varying(30) | +| type | character varying(25) | +| oprhours | character varying(11) | +| oprname | character varying(50) | +| latdms | character varying(16) | +| londms | character varying(16) | +| airway | character varying(254) | +| sym | smallint | +| lon | double precision | +| lat | double precision | +| the_geom | geometry(Point,4326) | + +## mapdata.offshore +| Column | Type | +|------------|-----------------------------| +| id | character varying(50) | +| wfo | character varying(10) | +| lon | numeric | +| lat | numeric | +| location | character varying(70) | +| name | character varying(90) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.railroad +| Column | Type | +|----------------|--------------------------------| +| fnode_ | double precision | +| tnode_ | double precision | +| lpoly_ | double precision | +| rpoly_ | double precision | +| length | numeric | +| railrdl021 | double precision | +| railrdl020 | double precision | +| feature | character varying(18) | +| name | character varying(43) | +| state | character varying(2) | +| state_fips | character varying(2) | +| the_geom | geometry(MultiLineString,4326) | + +## mapdata.rfc +| Column | Type | +|----------------|-----------------------------| +| site_id | character varying(3) | +| state | character varying(2) | +| rfc_name | character varying(18) | +| rfc_city | character varying(25) | +| basin_id | character varying(5) | +| the_geom | geometry(MultiPolygon,4326) | + +## mapdata.specialuse +| Column | Type | +|------------|-----------------------------| +| name | character varying(32) | +| code | character varying(16) | +| yn | smallint | +| alt_desc | character varying(128) | +| artcc | character varying(4) | +| ctr_agen | character varying(128) | +| sch_agen | character varying(128) | +| state | character varying(2) | +| the_geom | geometry(MultiPolygon,4326) | + + + +## mapdata.states +| Column | Type | +|----------------|-----------------------------| +| state | character varying(2) | +| name | character varying(24) | +| fips | character varying(2) | +| lon | numeric | +| lat | numeric | +| the_geom | geometry(MultiPolygon,4326) | + + + +## mapdata.timezones +| Column | Type | +|------------|-----------------------------| +| name | character varying(50) | +| time_zone | character varying(1) | +| standard | character varying(9) | +| advanced | character varying(10) | +| unix_time | character varying(19) | +| lon | double precision | +| lat | double precision | +| the_geom | geometry(MultiPolygon,4326) | + + +## mapdata.warngenloc +| Column | Type | +|------------|-----------------------------| +| name | character varying(254) | +| st | character varying(3) | +| state | character varying(20) | +| population | integer | +| warngenlev | integer | +| cwa | character varying(4) | +| goodness | double precision | +| lat | numeric | +| lon | numeric | +| usedirs | numeric(10,0) | +| supdirs | character varying(20) | +| landwater | character varying(3) | +| recnum | integer | +| the_geom | geometry(MultiPolygon,4326) | + + + +## mapdata.world +| Column | Type | +|------------|-----------------------------| +| name | character varying(30) | +| count | 
double precision | +| first_coun | character varying(2) | +| first_regi | character varying(1) | +| the_geom | geometry(MultiPolygon,4326) | + + + +## mapdata.zone +| Column | Type | +|----------------|-----------------------------| +| state | character varying(2) | +| cwa | character varying(9) | +| time_zone | character varying(2) | +| fe_area | character varying(2) | +| zone | character varying(3) | +| name | character varying(254) | +| state_zone | character varying(5) | +| lon | numeric | +| lat | numeric | +| shortname | character varying(32) | +| the_geom | geometry(MultiPolygon,4326) | diff --git a/docs/cave/d2d-grids.md b/docs/cave/d2d-grids.md index d797cd991a..2dbccba215 100644 --- a/docs/cave/d2d-grids.md +++ b/docs/cave/d2d-grids.md @@ -1,69 +1,69 @@ -# MSLP and Precipitation +## MSLP and Precipitation ![](../images/screenCapture-2016.04.04.13.43.40-20160402_120000.png) -# Sfc Temperature and Wind +## Sfc Temperature and Wind ![](../images/screenCapture-2016.04.04.13.35.35-20160404_000000.png) -# Sfc Dewpoint Temperature +## Sfc Dewpoint Temperature ![](../images/screenCapture-2016.04.04.13.35.51-20160404_000000.png) -# Sfc Relative Humidity +## Sfc Relative Humidity ![](../images/screenCapture-2016.04.04.13.36.06-20160403_150000.png) -# 30mb Mean Dewpoint +## 30mb Mean Dewpoint ![](../images/screenCapture-2016.04.04.13.36.14-20160404_000000.png) -# Precipitable Water +## Precipitable Water ![](../images/screenCapture-2016.04.04.13.36.20-20160404_000000.png) -# Simulated Reflectivity (REFC) +## Simulated Reflectivity (REFC) ![](../images/screenCapture-2016.04.04.13.36.45-20160331_120000.png) -# Lightning Threat +## Lightning Threat -# Precip Type / Moisture Transport +## Precip Type / Moisture Transport ![](../images/screenCapture-2016.04.04.13.36.56-20160404_000000.png) -# Vorticity (500mb) +## Vorticity (500mb) ![](../images/screenCapture-2016.04.04.13.37.03-20160404_000000.png) -# Vertical Velocity (500mb, 700mb, 850mb) +## Vertical Velocity (500mb, 700mb, 850mb) ![](../images/screenCapture-2016.04.04.13.37.10-20160404_000000.png) -# Thickness / Vorticity Advection (Trenberth) +## Thickness / Vorticity Advection (Trenberth) ![](../images/screenCapture-2016.04.04.13.37.27-20160404_000000.png) -# Wind / Height (850mb, 700mb, 500mb, 300mb, 250mb) +## Wind / Height (850mb, 700mb, 500mb, 300mb, 250mb) ![](../images/screenCapture-2016.04.04.13.37.34-20160404_000000.png) -# Potential Vorticity (250mb) +## Potential Vorticity (250mb) -# Helicity / Storm-Relative Flow +## Helicity / Storm-Relative Flow -# Hail Parameters +## Hail Parameters ![](../images/screenCapture-2016.04.04.13.37.56-20160331_150000.png) -# MCS Parameters +## MCS Parameters ![](../images/screenCapture-2016.04.04.13.38.36-20160331_150000.png) -# Isentopic Analysis (270K-320K) +## Isentopic Analysis (270K-320K) ![](../images/screenCapture-2016.04.04.13.41.26-20160404_000000.png) diff --git a/docs/cave/goes-16-satellite.md b/docs/cave/goes-16-satellite.md index 33291371cf..22a99b3af1 100644 --- a/docs/cave/goes-16-satellite.md +++ b/docs/cave/goes-16-satellite.md @@ -1,6 +1,6 @@ -As of Unidata AWIPS 17.1.1, the GOES-R decoder supports the ingest and display of NOAAport provisional products (currently on the NOTHER feed), as well as Level 2b netCDF files. A GOES-R Geostationary Lightning Mapper (GLM) decoder is also provided to handle sensor lightning observations. +The GOES-R decoder supports the ingest and display of NOAAport provisional products (currently on the NOTHER feed), as well as Level 2b netCDF files. 
A GOES-R Geostationary Lightning Mapper (GLM) decoder is also provided to handle sensor lightning observations. GOES-R products are accessible in the menu **Satellite** > **GOES-16 Provisional Products** @@ -72,10 +72,10 @@ To display multi-channel composites requires CAVE for Linux or Windows. Decoded GOES-R satellite images are stored in `/awips2/edex/data/hdf5/satellite/` under sector subdirectories: drwxr-xr-x 18 awips fxalpha PRREGI - drwxr-xr-x 18 awips fxalpha TCONUS - drwxr-xr-x 18 awips fxalpha TFD - drwxr-xr-x 18 awips fxalpha TMESO-1 - drwxr-xr-x 18 awips fxalpha TMESO-2 + drwxr-xr-x 18 awips fxalpha ECONUS + drwxr-xr-x 18 awips fxalpha EFD + drwxr-xr-x 18 awips fxalpha EMESO-1 + drwxr-xr-x 18 awips fxalpha EMESO-2 @@ -85,7 +85,7 @@ Decoded GOES-R satellite images are stored in `/awips2/edex/data/hdf5/satellite/ Level 2+ products are described as derived environmental variables which will be created and disseminated when GOES-16 is used operationally (compared to **Level 0**, described as unprocessed instrument data at full resolution, and **Level 1b** products, described as radiometric and geometric correction applied to produce parameters in physical units). -Unidata does not currently have access to these products, but EDEX 17.1.1 can support their ingest if made available. +Unidata does not currently have access to these products, but EDEX can support their ingest if a feed is available. [Read more about GOES-R data levels...](http://www.goes-r.gov/ground/overview.html) diff --git a/docs/cave/hazard-services-alert.md b/docs/cave/hazard-services-alert.md new file mode 100644 index 0000000000..d288ca481a --- /dev/null +++ b/docs/cave/hazard-services-alert.md @@ -0,0 +1,3 @@ + + +## Alerts diff --git a/docs/cave/hazard-services-create.md b/docs/cave/hazard-services-create.md new file mode 100644 index 0000000000..30bf2606c3 --- /dev/null +++ b/docs/cave/hazard-services-create.md @@ -0,0 +1,58 @@ + +## Hazard Creation Methods + +### Recommender Execution + +### Recommender Output + +#### River Flood Recommender + +#### Flash Flood Recommender + +#### Storm Track Recommender + +#### Dam/Levee Break Flood Recommender + +#### Burn Scar Recommender + +#### Creating a Hazard from a River Gauge + +## Selection Tools + +### Select By Area + +### Freehand Drawing + +## Manipulating Hazards + +### Adjusting a Hazard Polygon + +### Moving a Polygon Vertex + +### Deleting a Polygon Vertex + +### Adding a Polygon Vertex + +#### Moving a Hazard Geometry + +#### Hazard Information Dialog + +#### Hazard Type + +#### Time Range + +#### Details (Metadata) + +## Hazard Status + +### #Propose + +#### Preview + +##### Product Staging Dialog + +##### Product Editor + +#### Issue + +#### Ending and Ended diff --git a/docs/cave/hazard-services-display.md b/docs/cave/hazard-services-display.md new file mode 100644 index 0000000000..ea9ee5d379 --- /dev/null +++ b/docs/cave/hazard-services-display.md @@ -0,0 +1,137 @@ +# AWIPS Hazard Service Display + +Hazard Services is a collection of AWIPS applications used by forecasters to create, update, and manage hazards, replacing and unifying hazard generation capabilities. + +* WarnGen +* RiverPro +* GHG +* etc. + +In addition to providing a seamless forecast process for generating short-fused, long-fused, and hydrologic hazards, Hazard Services allows the forecaster to focus on the meteorology of the hazard situation, letting the system take on more of the responsibility for the generation and dissemination of products. 
+
+## Launching Hazard Services
+
+Hazard Services can be launched from the various CAVE perspectives by selecting the toolbar item "Hazards".
+
+When Hazard Services is first started, the Console and the Spatial Display are visible.
+
+## Spatial Display and Console
+
+The **Spatial Display** is the Hazard Services drawing layer which is loaded into the CAVE Map Editor when Hazard Services is started. It is the Hazard Services map, displaying hazard areas relative to geopolitical boundaries and handling hazard drawing and editing. Its presence is indicated by the 'Hazard Services (Editable)' line in the CAVE Map Legend, and it supports operations common to other AWIPS drawing layers.
+
+![image](../images/hazard-services_1.png)
+
+The **Console** is the main control panel for Hazard Services. It is always displayed if Hazard Services is running; closing it closes Hazard Services as well.
+
+![image](../images/hazard-services_2.png)
+
+The Console is a CAVE View, by default docked within the main window. The Console includes a toolbar and a drop-down ("view") menu to the right of or just under its title tab. Below these is the table of hazard events.
+
+## Hazard Services Toolbar
+
+![image](../images/hazard-services-toolbar.png)
+
+### Hydro ![image](../images/hazard-services-hydro-button.png)
+
+The leftmost icon on the toolbar indicates whether Hydro hazards are being worked (it will turn yellow if any active hazards are hidden from view by a filter).
+
+### Setup (Settings)
+
+Allows you to filter displayed hazard information to focus on the meteorological situation of concern. For example, you may want to focus only on hydrological hazards in a particular time scale and over a particular area.
+
+The Settings drop-down menu allows you to select an existing Setting or a recently-used Setting, create a new Setting, edit the current Setting, or delete the current (User) Setting. As new Settings are created, they are added to this drop-down list. The Console’s title tab shows the name of the currently loaded Setting.
+
+> Settings can also be viewed and edited in the [Localization Perspective](localization-perspective).
+
+#### Filters
+
+Allows quick modification of the filters being used by the current Setting. Events may be filtered by Hazard Type, Site ID, and/or Status. As the filters are altered, the Hazard Event Table contents change to include only those hazards that pass the filters.
+
+For example, with a number of potential events possible, you can select a couple of interest, move them to pending state, and propose one. To reduce clutter in the Console you can hide potentials using the Filters menu, so that all potential events are still present but hidden in both the Console and the Spatial Display.
+
+### Recommenders (Tools) ![image](../images/hazard-services-button-recs.png)
+
+The Tools button reveals a drop-down menu listing all the recommenders and other tools available in the current Setting. Recommenders may be run from this menu. When you select a Setting, this menu is populated with appropriate content.
+
+### Products
+
+* **Generate RVS**
+
+    With an FL.x hazard selected in the Console, select this item to bring up a dialog to write an RVS text product.
+
+* **Correct Product**
+
+    Selecting the Correct Product option provides a list of products that may be corrected. The dialog includes seven columns: Product Category, Issue Time, Event IDs, Hazard Type, VTEC, Expiration Time, and User Name.
You can click a column header to sort by that column, or type in the Search box at the bottom to filter the list. Upon selecting an item from the list, the Hazard Information Dialog launches.
+
+* **View Product**
+
+    This option allows you to review issued products, selecting from a list in a *Select Product to View* dialog. Use the dialog to select the product type (using click, Ctrl-click, Shift-click), then click and select View Product, or double-click, to see the legacy text.
+
+    A similar dialog will be produced by selecting the *View Products for Selected Events* item from the Console pop-up. In this case, the Filter/Query section is not needed, so you’ll see just the lower portion of the illustrated dialog.
+
+
+### Spatial Display Modes
+
+When Hazard Services is in Editable state, three buttons set the mode of the Spatial Display, governing how it interprets mouse clicks.
+
+#### Drawing Tools
+
+This menu has six choices:
+
+* Draw Polygon
+
+    When set, mouse clicks on the Spatial Display draw polygons, one click per node (MB1 click to place a node, MB3 click to complete the polygon).
+
+* AddTo Polygon
+
+    If a polygon is active (hazard selected), this choice allows you to augment the area or create a new separate area that will be logically joined with the current polygon. In the latter case, the single hazard will comprise two polygons. (When you select Preview, these will be joined into a single polygon for issuance.)
+
+* Draw Freehand Polygon
+
+    When set, mouse clicks on the Spatial Display draw freehand polygons (MB1 press, drag, and release to draw the polygon's outline). Note that issued text products will conform to current rules limiting polygon vertices to 20 or snapping areas to counties or zones. The freehand, many-vertex shapes will be modified at some point during the hazard-issuance workflow.
+
+* AddTo Freehand Polygon
+
+    Similar to AddTo Polygon, but drawing is freehand. Note that you can augment both “segments” and freehand polygons with either of the AddTo tools.
+
+* Remove Polygon Vertices
+
+    When a polygon has many vertices, it can be very difficult to modify a boundary. This tool removes a section of vertices to make the problem more tractable. With the tool selected, drag with MB1 to enclose a segment of the polygon. When you release, those vertices will be removed.
+
+* Remove Polygon Area
+
+    This tool provides a way to remove sections of a geometry. Press MB1 and drag out an area that intersects your geometry. Upon release, the intersection area will be removed, with the new boundary along the curve you drew.
+
+If more than one hazard is selected in the Console, only Draw Polygon and Draw Freehand Polygon are available. The others are invalid and dimmed.
+
+#### Select Event
+
+This radio button sets the mode to event selection. When set, mouse clicks on the Spatial Display select hazard events, and drags cause panning. This is the default mode choice of this set of radio buttons.
+
+#### Pan
+
+This radio button sets the mode to panning. When clicked, you can pan the map without inadvertently moving or selecting polygons.
+
+### Maps for Select by Area
+
+The Maps for Select by Area button reveals a drop-down menu allowing the selection of maps that may be used for selecting by area within the Spatial Display. If the button is disabled, no maps that allow select-by-area are currently loaded. If the button is enabled, but a map menu item within the drop-down menu is disabled, that map is loaded but is currently invisible.
+ +### Temporal Controls + +There are two buttons used to control the Timeline view at the right side of the Hazard Table. You can also zoom and pan the Timeline using the mouse. + +* Selected Time Mode + + This options menu allows you to select the time mode, either a single time or range of times. + +* Show Current Time + + ​This button moves the Timeline so that the current time is visible toward its left end. + +## View Menu + +The View menu is a drop-down menu holding menu items for functions that in general are less frequently used than those available via the toolbar. diff --git a/docs/cave/hazard-services-example.md b/docs/cave/hazard-services-example.md new file mode 100644 index 0000000000..1a36a03919 --- /dev/null +++ b/docs/cave/hazard-services-example.md @@ -0,0 +1,6 @@ + +## Hazard Life Cycle + +### Transition from Product Centric toward Information Centric + +### Examples of Creating, Continuing, and Ending Hazards diff --git a/docs/cave/hazard-services-settings.md b/docs/cave/hazard-services-settings.md new file mode 100644 index 0000000000..7c3f0593b2 --- /dev/null +++ b/docs/cave/hazard-services-settings.md @@ -0,0 +1,49 @@ +# Hazard Settings + + +## Change Site + +## Check Hazard Conflicts + +## Auto Check Hazard Conflicts + +## Add To Selected + +## Show Hatched Areas + +## Change VTEC Mode + +## Reset Events + + +## Hazard Event Table + +### Column Headers + +* Non-Timeline Headers + +* Timeline Header + +### Table Rows + +### Hazard History + + + +## Settings Overview + +### Settings Menu + +### Settings Dialog + +#### Hazards Filter Tab + +#### Console Tab + +#### Console Coloring Tab + +#### HID/Spatial Tab + +#### Recommenders Tab + +#### Maps/Overlays Tab diff --git a/docs/cave/warngen.md b/docs/cave/warngen.md index a5b2350544..163ab17404 100644 --- a/docs/cave/warngen.md +++ b/docs/cave/warngen.md @@ -2,7 +2,7 @@ WarnGen is an AWIPS graphics application for creating and issuing warnings as is !!! warning "In order to select a feature it must be within your *CAVE localization* coverage (load **Maps** > **County Warning Areas** to see coverages)" -Steps involved in using WarnGen in Unidata AWIPS CAVE 17.1.1 +Steps involved in using WarnGen in Unidata AWIPS CAVE * Load NEXRAD Display from the Radar menu. * Choose a WFO site with active severe weather (LKZ is used in the video). diff --git a/docs/dev/awips-development-environment.md b/docs/dev/awips-development-environment.md index e51592c53f..d6e59b4486 100644 --- a/docs/dev/awips-development-environment.md +++ b/docs/dev/awips-development-environment.md @@ -13,12 +13,12 @@ Quick instructions on how to deploy CAVE from Eclipse. 2. `yum clean all && yum groupinstall awips2-ade` - This will install Eclipse (4.5), Java (1.8), Ant, Maven, Python 2.7 and its modules (Numpy, Matplotlib, Shapely, others). + This will install Eclipse (4.6.1), Java (1.8), Ant (1.9.6), Maven, Python 2.7 and its modules (Numpy, Matplotlib, Shapely, others). 3. `git clone https://github.com/Unidata/awips2.git` - The full list of repositories required as of release 17.1.1: + The full list of repositories required: git clone https://github.com/Unidata/awips2.git git clone https://github.com/Unidata/awips2-core.git diff --git a/docs/dev/build-nsharp-macos.md b/docs/dev/build-nsharp-macos.md deleted file mode 100644 index 78bcfa3136..0000000000 --- a/docs/dev/build-nsharp-macos.md +++ /dev/null @@ -1,181 +0,0 @@ -A little known fact in the world of AWIPS(II) is just how dependent the system still is on NAWIPS-GEMPAK. 
The entire National Centers Perspective is dependent on pre-built shared object files for 64-bit Linux, which means that all of the D2D plugins which extend NSHARP (for bufr obs, NPP profiles, forecast models, etc.) also depend on these libraries. - -This dependency has prevented use of the NSHARP plugin in the first release (15.1.1) of the [OS X CAVE client](https://www.unidata.ucar.edu/downloads/awips2/awips2-cave.dmg). These are the steps taken to build NSHARP and GEMPAK libraries for OS X AWIPS 16.2.2. - -You will need the [https://github.com/Unidata/awips2-gemlibs](https://github.com/Unidata/awips2-gemlibs) repository on your Mac, as well as gcc and gfortran (from XCode). Pay attention to any version-specific include path or linked files, such as `/usr/local/Cellar/gcc/4.9.2_1/lib/gcc/4.9/`, always account for the correct versions and locations on your own system. - -## NSHARP pre-built libraries - -> libbignsharp.dylib - -Using the script below, the NSHARP dynamic library is built from C and FORTRAN source files (and their required include files supplied by the `awips2-gemlibs` repository, and as linked against `$GEMINC`, meaning that GEMPAK for OS X must be built and installed). - - git clone https://github.com/Unidata/awips2-gemlibs.git - cd awips2-gemlibs/nsharp/ - -An optional step, which can be performed in a separate script or within the build script below, is to create *ld-style* *.a files in `$OS_LIB` which can then be referenced with `-l` flags (e.g. `-lgemlib`): - - libs=(snlist sflist nxmlib gemlib gplt cgemlib rsl device xwp xw ps gn nsharp netcdf textlib) - for file in ${libs[@]} - do - if [ ! -f $OS_LIB/lib$file.a ]; then - echo "$OS_LIB/lib$file.a does not exist" - if [ -f $OS_LIB/$file.a ]; then - cp $OS_LIB/$file.a $OS_LIB/lib$file.a - echo "copied OS_LIB/$file.a to OS_LIB/lib$file.a for linking" - fi - fi - done - - -Build libbignsharp.dylib with the following script (Note the GEMPAK includes and links `-I$NSHARP`, `-I$GEMPAK/include`, `-L$OS_LIB`, etc.). - - #!/bin/bash - cd ~/awips2-gemlibs/nsharp/ - . $NAWIPS/Gemenviron.profile - CC=gcc - FC=gfortran - - export NSHARP=$GEMPAK/source/programs/gui/nsharp - export NWX=$GEMPAK/source/programs/gui/nwx - - myLibs="$OS_LIB/ginitp_alt.o $OS_LIB/gendp_alt.o" - - myCflags="$CFLAGS -I. -I./Sndglib -I$NSHARP -I$GEMPAK/include -I$OS_INC -I$NWX \ - -I/opt/X11/include/X11 -I/usr/include/Xm -I/opt/local/include -I/usr/include/malloc -Wcomment -Wno-return-type -Wincompatible-pointer-types -DUNDERSCORE -fPIC -DDEBUG -c" - - myFflags="-I. -I$OS_INC -I$GEMPAK/include -I$NSHARP -fPIC -g -c -fno-second-underscore -fmax-errors=200 -std=f95" - - myLinkflags="-L/usr/local/Cellar/gcc/4.9.2_1/lib/gcc/4.9/ -L/opt/local/lib -L$OS_LIB -L. -L./Sndglib -L/usr/X11R6/lib \ - -shared -Wl -Wcomment -Wincompatible-pointer-types -Wimplicit-function-declaration -Wno-return-type,-install_name,libbignsharp.dylib -o libbignsharp.dylib" - - myLibsInc="$OS_LIB/ginitp_alt.o $OS_LIB/gendp_alt.o $OS_LIB/libnxmlib.a $OS_LIB/libsnlist.a \ - $OS_LIB/libsflist.a $OS_LIB/libgemlib.a $OS_LIB/libcgemlib.a $OS_LIB/libgplt.a $OS_LIB/libdevice.a \ - $OS_LIB/libxwp.a $OS_LIB/libxw.a $OS_LIB/libps.a $OS_LIB/libgn.a $OS_LIB/libcgemlib.a $OS_LIB/libgemlib.a \ - $OS_LIB/libnetcdf.a $OS_LIB/libtextlib.a $OS_LIB/libxml2.a $OS_LIB/libxslt.a \ - $OS_LIB/libgemlib.a $OS_LIB/libcgemlib.a $OS_LIB/librsl.a $OS_LIB/libbz2.a" - - myLinktail="-I$OS_INC \ - -I$GEMPAK/include -I$NWX -I$NSHARP -I. 
-I./Sndglib -I/opt/X11/include/X11 -I/usr/include -I/usr/include/Xm -I/opt/local/include/ -I/opt/local/include -lhdf5 -lgfortran -ljasper -lpng -liconv -lc -lXt -lX11 -lz -lm -lXm" - - $CC $myCflags *.c Sndglib/*.c - $FC $myFflags *.f - $CC $myLinkflags *.o $myLibsInc $myLinktail - - cp libbignsharp.dylib ~/awips2-ncep/viz/gov.noaa.nws.ncep.ui.nsharp.macosx/ - - -## GEMPAK pre-built libraries - -> libgempak.dylib - -libgempak.dylib is built in a similar way as libbignsharp.dylib: - - #!/bin/bash - cd ~/awips2-gemlibs/gempak/ - . $NAWIPS/Gemenviron.profile - CC=gcc - FC=gfortran - - myCflags="$CFLAGS -I. -I$GEMPAK/source/diaglib/dg -I$GEMPAK/source/gemlib/er \ - -I/opt/X11/include/X11 -I/usr/include/Xm -I/opt/local/include -I/usr/include/malloc -fPIC -DDEBUG -c" - - myFflags="-I. -I$OS_INC -I$GEMPAK/include -fPIC -g -c -Wtabs -fno-second-underscore" - - myLinkflags="-L/usr/local/Cellar/gcc/4.9.2_1/lib/gcc/4.9/ -L/opt/local/lib -L$OS_LIB -L. \ - -shared -Wl -Wno-return-type,-install_name,libgempak.dylib -o libgempak.dylib" - - myLibs="$OS_LIB/ginitp_alt.o $OS_LIB/gendp_alt.o $OS_LIB/libcgemlib.a \ - $OS_LIB/libsflist.a $OS_LIB/gdlist.a $OS_LIB/libcgemlib.a $OS_LIB/libgemlib.a \ - $OS_LIB/libcgemlib.a $OS_LIB/libgplt.a $OS_LIB/libdevice.a $OS_LIB/libcgemlib.a \ - $OS_LIB/libgn.a $OS_LIB/libgemlib.a $OS_LIB/libcgemlib.a $OS_LIB/libnetcdf.a \ - $OS_LIB/libcgemlib.a $OS_LIB/libtextlib.a $OS_LIB/libxml2.a $OS_LIB/libxslt.a \ - $OS_LIB/libcgemlib.a $OS_LIB/libgemlib.a $OS_LIB/libcgemlib.a $OS_LIB/libcgemlib.a \ - $OS_LIB/librsl.a $OS_LIB/libcgemlib.a $OS_LIB/libbz2.a" - - myLinktail="-I$OS_INC -I$GEMPAK/include -I. -I/opt/X11/include/X11 -I/usr/include \ - -I/usr/include/Xm -I/opt/local/include/ -I/opt/local/include \ - -lhdf5 -lgfortran -ljasper -lpng -liconv -lc -lXt -lX11 -lz -lm -lXm" - - $CC $myCflags *.c - $FC $myFflags *.f - $CC $myLinkflags *.o $myLibs $myLinktail - - cp libgempak.dylib ~/awips2-ncep/viz/gov.noaa.nws.ncep.viz.gempak.nativelib.macosx/ - - -> libcnflib.dylib - - - #!/bin/bash - cd ~/awips2-gemlibs/cnflib/ - . $NAWIPS/Gemenviron.profile - CC=gcc - FC=gfortran - - myCflags="$CFLAGS -I/opt/X11/include/X11 -I/usr/include/Xm -I/opt/local/include \ - -I/usr/include/malloc -Wno-return-type -DUNDERSCORE -fPIC -DDEBUG -g -c" - - myLinkflags="-L/usr/local/Cellar/gcc/4.9.2_1/lib/gcc/4.9/ -L/opt/local/lib \ - -shared -Wl -Wno-return-type,-install_name,libcnflib.dylib -o libcnflib.dylib" - - myLinktail="-lgfortran -lc" - - myLibs="$OS_LIB/ginitp_alt.o $OS_LIB/gendp_alt.o $OS_LIB/gdlist.a $OS_LIB/gdcfil.a \ - $OS_LIB/libgemlib.a $OS_LIB/libgplt.a $OS_LIB/libdevice.a $OS_LIB/libgn.a \ - $OS_LIB/libcgemlib.a $OS_LIB/libgemlib.a $OS_LIB/libnetcdf.a $OS_LIB/libtextlib.a \ - $OS_LIB/libxslt.a $OS_LIB/libxml2.a -liconv \ - $OS_LIB/libz.a $OS_LIB/librsl.a -lbz2" - - $CC $myCflags *.c - $CC $myLinkflags *.o $myLibs $myLinktail - - cp libcnflib.dylib ~/awips2-ncep/viz/gov.noaa.nws.ncep.viz.gempak.nativelib.macosx/ - - -> libaodtv64.dylib - - #!/bin/bash - CC=gcc - FC=gfortran - - cd ~/awips2-gemlibs/aodt/AODTLIB/ - - gcc -fPIC -g -c -Wall *.c *.h - gcc -shared -Wl,-Wno-return-type,-install_name,libaodtv64.dylib -o libaodtv64.dylib *.o -lc - - cp libaodtv64.dylib ~/awips2-ncep/viz/gov.noaa.nws.ncep.viz.gempak.nativelib.macosx/ - - - -> libg2g.dylib - - #!/bin/bash - cd ~/awips2-gemlibs/g2g/ - . $NAWIPS/Gemenviron.profile - CC=gcc - FC=gfortran - - myCflags="$CFLAGS -I$GEMPAK/include -I. 
-I$GEMPAK/source/diaglib/dg \ - -I$GEMPAK/source/gemlib/er -I/opt/X11/include/X11 -I/usr/include/Xm \ - -I/opt/local/include -I/usr/include/malloc -Wno-return-type -DUNDERSCORE \ - -fPIC -DDEBUG -c" - - myFflags="-I. -I$OS_INC -I$GEMPAK/include -fPIC -g -c -Wtabs -fno-second-underscore" - - myLinkflags="-L/usr/local/Cellar/gcc/4.9.2_1/lib/gcc/4.9/ -L/opt/local/lib \ - -L/usr/X11R6/lib -shared -Wl -Wno-return-type,-install_name,libg2g.dylib -o libg2g.dylib" - - myLinktail="-lgfortran $OS_LIB/libjasper.a -lpng -lc" - - myLibs="$OS_LIB/ginitp_alt.o $OS_LIB/gendp_alt.o $OS_LIB/gdlist.a \ - $OS_LIB/gdcfil.a $OS_LIB/libgemlib.a $OS_LIB/libgplt.a $OS_LIB/libdevice.a \ - $OS_LIB/libgn.a $OS_LIB/libcgemlib.a $OS_LIB/libgemlib.a $OS_LIB/libnetcdf.a \ - $OS_LIB/libtextlib.a $OS_LIB/libxslt.a $OS_LIB/libxml2.a \ - -liconv $OS_LIB/libz.a $OS_LIB/librsl.a -lbz2" - - $CC $myCflags *.c - $FC $myFflags *.f - $CC $myLinkflags *.o $myLibs $myLinktail - - cp libg2g.dylib ~/awips2-ncep/viz/gov.noaa.nws.ncep.viz.gempak.nativelib.macosx/ diff --git a/docs/edex/distributed-computing.md b/docs/edex/distributed-computing.md index 9185674624..6490147d5a 100644 --- a/docs/edex/distributed-computing.md +++ b/docs/edex/distributed-computing.md @@ -61,32 +61,15 @@ Note the line **`-A INPUT -s 10.0.0.7 -j EDEX`** as well as the following **`-A In the file `/awips2/database/data/pg_hba.conf` you define remote connections for all postgres tables with as `/32`, after the block of IPv4 local connections: vi /awips2/database/data/pg_hba.conf - - # IPv4 local connections: - host fxatext all 127.0.0.1/32 trust - host hd_ob92oax all 127.0.0.1/32 trust - host dc_ob7oax all 127.0.0.1/32 trust - host hmdb all 127.0.0.1/32 trust - host metadata all 127.0.0.1/32 md5 - host maps all 127.0.0.1/32 md5 - host postgres all 127.0.0.1/32 md5 - host ncep all 127.0.0.1/32 md5 - host ebxml all 127.0.0.1/32 trust - host replication replication 127.0.0.1/32 md5 - # Remote connections - host fxatext all 10.0.0.7/32 md5 - host hd_ob92oax all 10.0.0.7/32 md5 - host dc_ob7oax all 10.0.0.7/32 md5 - host hmdb all 10.0.0.7/32 md5 - host metadata all 10.0.0.7/32 md5 - host maps all 10.0.0.7/32 md5 - host postgres all 10.0.0.7/32 md5 - host ncep all 10.0.0.7/32 md5 - host ebxml all 10.0.0.7/32 md5 - host replication replication 10.0.0.7/32 md5 - # IPv6 local connections: - host all all ::1/128 md5 - host replication replication ::1/128 md5 + + # "local" is for Unix domain socket connections only + local all all trust + hostssl all all 10.0.0.7/32 cert clientcert=1 + hostssl all all 162.0.0.0/8 cert clientcert=1 + hostssl all all 127.0.0.1/32 cert clientcert=1 + # IPv6 local connections: + hostssl all all ::1/128 cert clientcert=1 + hostnossl all all ::1/128 md5 ### 4. Start EDEX diff --git a/docs/edex/edex-ingest-docker-container.md b/docs/edex/edex-ingest-docker-container.md new file mode 100644 index 0000000000..dc82e96415 --- /dev/null +++ b/docs/edex/edex-ingest-docker-container.md @@ -0,0 +1,118 @@ +Project home: [https://github.com/Unidata/edex-docker](https://github.com/Unidata/edex-docker) + +--- + +EDEX can be run inside a docker container, which allows you to process data into an AWIPS system without requiring a full CentOS installation and configuration. + +The [EDEX Docker Image](https://github.com/Unidata/edex-docker) is built on CentOS 7 and contains the latest Unidata AWIPS release. + +This container is an *ingest-only* install, meaning there is *no database or request server*. 
This example requires that a Database/Request server be configured and remotely accessible. See the [Distributed EDEX](https://unidata.github.io/awips2/edex/distributed-computing/) document for more. + +--- + +## Download and Install Docker + +Download and install Docker and Docker Compose: + +* [Docker for CentOS 7 Linux](https://docs.docker.com/install/linux/docker-ce/centos/) +* [Docker for Mac](https://docs.docker.com/docker-for-mac/) +* [Docker for Windows](https://docs.docker.com/docker-for-windows/install/) +* [docker-compose](https://docs.docker.com/compose/) (it should be bundled with Docker by default on Mac and Windows) + +## Run the EDEX Ingest Container + +Clone this repository: + + git clone https://github.com/Unidata/edex-docker.git + cd edex-docker + +Run the container with docker-compose: + + docker-compose up -d edex-ingest + +Confirm the container is running: + + docker ps -a + +Enter the container: + + docker exec -it edex-ingest bash + +Stop the container: + + docker-compose stop + +Delete the container (keep the image): + + docker-compose rm -f + +Run commands inside the container, such as + + docker exec edex-ingest edex + +which should return something like + + [edex status] + qpid :: running :: pid 22474 + EDEXingest :: running :: pid 21860 31513 + EDEXgrib :: not running + ldmadmin :: running :: pid 22483 + + edex (status|start|stop|setup|log|purge|qpid|users) + +To update to the latest version and restart: + +```bash +docker pull unidata/edex-ingest:latest +docker-compose stop +docker-compose up -d edex-ingest +``` + +## Configuration and Customization + +The file `docker-compose.yml` defines files to mount to the container and which ports to open: + + edex-ingest: + image: unidata/edex-ingest:latest + container_name: edex-ingest + volumes: + - ./etc/ldmd.conf:/awips2/ldm/etc/ldmd.conf + - ./etc/pqact.conf:/awips2/ldm/etc/pqact.conf + - ./bin/setup.env:/awips2/edex/bin/setup.env + - ./bin/runedex.sh:/awips2/edex/bin/runedex.sh + ports: + - "388:388" + ulimits: + nofile: + soft: 1024 + hard: 1024 + +## Mounted Files + +- `etc/ldmd.conf` + + Defines which data feeds to receive. By default there is only one active request line (`REQUEST IDS|DDPLUS ".*" idd.unidata.ucar.edu`) so as not to overwhelm small EDEX containers ingesting large volumes of radar and gridded data files. Any updates to the file `etc/ldmd.conf` will be read the next time you restart the container. + +- `etc/pqact.conf` + + Defines how products are processed and where they are written on the filesystem. This is the full set of pattern actions used in Unidata AWIPS, and generally you do not need to edit this file. Instead, control which data feeds are requested in `ldmd.conf` (above). + +- `bin/setup.env` + + Defines the remote EDEX Database/Request server: + + ### EDEX localization related variables ### + export AW_SITE_IDENTIFIER=OAX + export EXT_ADDR=js-157-198.jetstream-cloud.org + + **EXT_ADDR** must be set to an allowed EDEX Database/Request Server. In this example we are using a JetStream Cloud instance, which controls our *edex-ingest* access with IPtables, SSL certificates, and PostgreSQL **pg_hba.conf** rules (this server is used in software training workshop environments and will not allow outside connections).
+ +- `bin/runedex.sh` + + The default script run when the container is started. It acts as a service manager of sorts for EDEX and the LDM (see `ENTRYPOINT ["/awips2/edex/bin/runedex.sh"]` in *Dockerfile.edex*), essentially running: + + /awips2/qpid/bin/qpid-wrapper & + /awips2/edex/bin/start.sh -noConsole ingest & + ldmadmin mkqueue + ldmadmin start + diff --git a/docs/edex/settings.md b/docs/edex/settings.md new file mode 100644 index 0000000000..ec5b52a3a7 --- /dev/null +++ b/docs/edex/settings.md @@ -0,0 +1,106 @@ + + +## Plugin Configuration + +The directory `/awips2/edex/conf/resources` contains configuration text files for specific plugins, which allow for user-defined values which are read by AWIPS plugins on EDEX start: + + com.raytheon.edex.plugin.gfe.properties + com.raytheon.edex.plugin.grib.properties + com.raytheon.edex.plugin.radar.properties + com.raytheon.edex.text.properties + com.raytheon.uf.edex.archive.cron.properties + com.raytheon.uf.edex.database.properties + com.raytheon.uf.edex.registry.ebxml.properties + distribution.properties + edex-localization-http.properties + edex-ogc.properties + edex-requestsrv.properties + goesr.properties + purge.properties + warning.properties + +Look at *purge.properties* for example: + + # Master switch to enable and disable purging + purge.enabled=true + + # Interval at which the purge job kicks off + purge.cron=0+0/15+*+*+*+? + + # Interval at which the outgoing files are purged + purge.outgoing.cron=0+30+*+*+*+? + + # Interval at which the logs are purged + purge.logs.cron=0+30+0+*+*+? + + # Interval at which hdf5 orphans are purged + purge.orphan.period=24h + + # Number of days older than the earliest known data to delete. + purge.orphan.buffer=7 + +In *com.raytheon.edex.plugin.grib.properties* and *com.raytheon.edex.plugin.radar.properties* you can adjust the number of decoder threads for each plugin. + + cat com.raytheon.edex.plugin.radar.properties + + # Number threads for radar products ingested from the SBN + radar-decode.sbn.threads=5 + +--- + +## Ingest Modes + +By default, EDEX starts three "modes": *ingest*, *ingestGrib*, and *request* (each as its own JVM). + +The file `/awips2/edex/conf/modes/modes.xml` contains all available mode definitions, including some specific modes for Hydro Server Applications, ebXML Registries, and Data Delivery. + +EDEX services are registered through Spring, and by including or excluding specific Spring files (usually by datatype plugin name) we can finely customize EDEX startup. + +In `/awips2/edex/conf/modes/modes.xml` there are a number of unused plugin decoders excluded because the data are not available outside of the SBN: + + ... + <mode name="ingest"> + <exclude>.*request.*</exclude> + <exclude>edex-security.xml</exclude> + <exclude>ebxml.*\.xml</exclude> + <exclude>grib-decode.xml</exclude> + <exclude>grid-staticdata-process.xml</exclude> + <exclude>.*(taf|nctext).*</exclude> + <exclude>webservices.xml</exclude> + <exclude>ebxml.*\.xml</exclude> + <exclude>.*datadelivery.*</exclude> + <exclude>.*bandwidth.*</exclude> + <exclude>.*sbn-simulator.*</exclude> + <exclude>grid-metadata.xml</exclude> + <exclude>.*ogc.*</exclude> + </mode> + ... + +In this example, OGC, Data Delivery, request, ebXML, and grib plugins are excluded because they are included in their own mode/JVM. + +> TAF and NCTEXT plugins are disabled here due to performance issues.
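As a quick way to audit these definitions, a short shell sketch like the one below (an illustration only, assuming the `<mode name="...">` / `<exclude>` layout shown above) prints the spring files excluded from a single mode:

```bash
#!/bin/bash
# Illustrative sketch, not part of the AWIPS distribution: list the
# <exclude> patterns defined for one mode in modes.xml.
MODE="${1:-ingest}"
sed -n "/<mode name=\"$MODE\"/,/<\/mode>/p" /awips2/edex/conf/modes/modes.xml \
    | grep '<exclude>' \
    | sed -e 's/.*<exclude>//' -e 's|</exclude>.*||'
```

Comparing that output against the running JVMs (`edex status`) is one way to confirm that a decoder really was excluded from startup.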
+ +--- + +## JVM Memory + +The directory `/awips2/edex/etc/` contains files which define the amount of memory used for each of the three EDEX JVMs (ingest, ingestGrib, request): + + ls -al /awips2/edex/etc/ + -rw-r--r-- 1 awips fxalpha 1287 Jul 24 18:41 centralRegistry.sh + -rw-r--r-- 1 awips fxalpha 1155 Jul 24 18:42 default.sh + -rw-r--r-- 1 awips fxalpha 1956 Jul 24 18:41 ingestGrib.sh + -rw-r--r-- 1 awips fxalpha 337 Jul 24 18:36 ingest.sh + -rw-r--r-- 1 awips fxalpha 848 Jul 24 18:42 profiler.sh + -rw-r--r-- 1 awips fxalpha 1188 Jul 24 18:41 registry.sh + -rw-r--r-- 1 awips fxalpha 601 Jul 24 18:36 request.sh + -rw-r--r-- 1 awips fxalpha 1124 Jul 23 17:22 sbnSimulator.sh + +Each file contains the **Xmx** definition for maximum memory: + + export INIT_MEM=512 # in Meg + export MAX_MEM=4096 # in Meg + +After editing these files, you must restart edex (`service edex_camel restart`). + +--- diff --git a/docs/images/gempak-d2d.png b/docs/images/gempak-d2d.png new file mode 100644 index 0000000000..8511ca0db7 Binary files /dev/null and b/docs/images/gempak-d2d.png differ diff --git a/docs/images/hazard-services-button-recs.png b/docs/images/hazard-services-button-recs.png new file mode 100644 index 0000000000..e8741c97fa Binary files /dev/null and b/docs/images/hazard-services-button-recs.png differ diff --git a/docs/images/hazard-services-hydro-button.png b/docs/images/hazard-services-hydro-button.png new file mode 100644 index 0000000000..991867b085 Binary files /dev/null and b/docs/images/hazard-services-hydro-button.png differ diff --git a/docs/images/hazard-services-toolbar.png b/docs/images/hazard-services-toolbar.png new file mode 100644 index 0000000000..5278c32014 Binary files /dev/null and b/docs/images/hazard-services-toolbar.png differ diff --git a/docs/images/windows_envvars.png b/docs/images/windows_envvars.png new file mode 100644 index 0000000000..b842781cda Binary files /dev/null and b/docs/images/windows_envvars.png differ diff --git a/docs/index.md b/docs/index.md index f9cbdcee72..18c8af9fae 100644 --- a/docs/index.md +++ b/docs/index.md @@ -23,46 +23,21 @@ AWIPS takes a unified approach to data ingest, and most data types follow a path Unidata supports two visualization frameworks for rendering data: [CAVE](install/install-cave), and the Python Data Access Framework ([python-awips](http://python-awips.readthedocs.io)). ---- - -## Download and Install CAVE - -> [Release 17.1.1-6, June 13, 2018](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) - -| | | -|:----------------------------------------:|:--| -|

Linux

|

[install.sh --cave ](https://www.unidata.ucar.edu/software/awips2/install.sh)

For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData.

chmod 755 install.sh
sudo ./install.sh --cave

Run CAVE from the Linux Desktop menu Applications > Internet > AWIPS CAVE, or from the command line as simply `cave`.

System Requirements

  • x86_64 CentOS/RHEL 6 or 7
  • OpenGL 2.0 capable device
  • 4GB RAM
  • Latest NVIDIA driver
  • approx. 2GB disk space for data caching (~/caveData)

You can reset CAVE at any time by removing the **~/caveData** directory (on macOS **~/Library/caveData**) and reconnecting to an EDEX server.

| - - -| | | -|:----------------------------------------:|:--| -|

macOS

|

Download and install both
[awips2-cave-17.1.1-6.dmg ](https://www.unidata.ucar.edu/downloads/awips2/awips2-cave-17.1.1-6.dmg)
[awips-python.pkg ](https://www.unidata.ucar.edu/downloads/awips2/awips-python.pkg)

Writes and syncs files to ~/Library/caveData.

**awips-python.pkg** is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, jep must be installed in some way (it is assumed in /Library/Python/2.7/site-packages/jep/)

| - -| | | -|:----------------------------------------:|:--| -|

32-bit Windows

|

[awips-cave.msi ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave.msi)

Windows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.

Writes files to caveData in the user's home directory.

| -|

64-bit Windows

|

[awips-cave.amd64.msi ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave.amd64.msi)

Windows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.

Writes files to caveData in the user's home directory.

Beta status

Client-side Python scripts (including Derived Parameters) do not work on Windows

| - - -> [Read full CAVE install instructions](install/install-cave) +![CAVE](https://www.unidata.ucar.edu/software/awips2/images/Unidata_AWIPS2_CAVE.png) --- -## Download and Install EDEX +## [Download and Install CAVE](install/install-cave) -> [Release 17.1.1-6, June 13, 2018](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) +--- -| | | -|:----------------------------------------:|:--| -|

Linux

|

[install.sh --edex ](https://www.unidata.ucar.edu/software/awips2/install.sh)

Installs to /awips2/ directories.

chmod 755 install.sh
sudo ./install.sh --edex

Start and Stop:

edex start
edex stop

System Requirements

  • x86_64 CentOS/RHEL 6 or 7
  • 16+ CPU cores (each CPU core is one more decoder which can run in parallel)
  • 24GB RAM
  • 700GB+ disk space
  • | - -> [Read full EDEX install instructions](install/install-edex) +## [Download and Install EDEX](install/install-edex) --- ## License -Unidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). Unidata AWIPS contains no proprietery content and is therefore not subject to export controls as stated in the [Master Rights](https://github.com/Unidata/awips2/blob/unidata_16.2.2/rpms/legal/Master_Rights_File.pdf) licensing file. +Unidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). Unidata AWIPS contains no proprietary content and is therefore not subject to export controls as stated in the [Master Rights](https://github.com/Unidata/awips2/blob/unidata_18.1.1/rpms/legal/Master_Rights_File.pdf) licensing file. --- @@ -106,7 +81,6 @@ Common AWIPS Visualization Environment. The data rendering and visualization too * [Read More: How to Install CAVE](install/install-cave) -![CAVE](https://www.unidata.ucar.edu/software/awips2/images/Unidata_AWIPS2_CAVE.png) ### LDM diff --git a/docs/install/install-azure.md b/docs/install/install-azure.md index d9a0800e66..971d31052b 100644 --- a/docs/install/install-azure.md +++ b/docs/install/install-azure.md @@ -67,11 +67,11 @@ and after install # Linux Download -For 64-bit RHEL/CentOS 6 and 7, download and run the script [install.sh --edex](https://www.unidata.ucar.edu/software/awips2/install.sh): +For 64-bit RHEL/CentOS 6 and 7, download and run the script [awips_install.sh --edex](https://www.unidata.ucar.edu/software/awips2/awips_install.sh): - wget https://www.unidata.ucar.edu/software/awips2/install.sh - chmod 755 ./install.sh - sudo ./install.sh --edex + wget https://www.unidata.ucar.edu/software/awips2/awips_install.sh + chmod 755 ./awips_install.sh + sudo ./awips_install.sh --edex This will install to `/awips2/edex`, `/awips2/database/data` and other directories. @@ -83,7 +83,7 @@ This will install to `/awips2/edex`, `/awips2/database/data` and other directori - Security Limits - **/etc/security/limits.conf** - Qpid is known to crash on systems without a high security limit for user processes and files. The file `/etc/security/limits.conf` defines the number of each for the awips user (This is automatically configured by the `install.sh --edex` script). + Qpid is known to crash on systems without a high security limit for user processes and files. The file `/etc/security/limits.conf` defines the number of each for the awips user (This is automatically configured by the `awips_install.sh --edex` script). awips soft nproc 65536 awips soft nofile 65536 @@ -97,7 +97,7 @@ LDM config ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 edex-cloud.westus.cloudapp.azure.com -# What does `install.sh --edex` do? +# What does `awips_install.sh --edex` do? 1. Downloads [https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo](https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo) to `/etc/yum.repos.d/awips2.repo` 2.
Runs `yum clean all` diff --git a/docs/install/install-cave.md b/docs/install/install-cave.md index 1cbab338da..69f57147d4 100644 --- a/docs/install/install-cave.md +++ b/docs/install/install-cave.md @@ -1,20 +1,32 @@ ## Download and Install CAVE -[Latest Release 17.1.1-6 (June 13, 2018)](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) +> [Release 18.1.1-4, December 26, 2018](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) +## Linux | | | |:----------------------------------------:|:--| -|

    Linux

    |

    [install.sh --cave ](https://www.unidata.ucar.edu/software/awips2/install.sh)

    For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData.

    chmod 755 install.sh
    sudo ./install.sh --cave

    Run CAVE from the Linux Desktop menu Applications > Internet > AWIPS CAVE, or from the command line as simply `cave`.

    System Requirements

    • x86_64 CentOS/RHEL 6 or 7
    • OpenGL 2.0 capable device
    • 4GB RAM
    • Latest NVIDIA driver
    • approx. 2GB disk space for data caching (~/caveData)

    You can reset CAVE at any time by removing the **~/caveData** directory (on macOS **~/Library/caveData**) and reconnecting to an EDEX server.

    | - +|

    |

    [awips_install.sh --cave ](https://www.unidata.ucar.edu/software/awips2/awips_install.sh)

    For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData.

    chmod 755 awips_install.sh
    sudo ./awips_install.sh --cave

    Run CAVE from the Linux Desktop menu Applications > Internet > AWIPS CAVE, or from the command line as simply `cave`.

    System Requirements

    • x86_64 CentOS/RHEL 6 or 7
    • OpenGL 2.0 capable device
    • 4GB RAM
    • Latest NVIDIA driver
    • approx. 2GB disk space for data caching (~/caveData)

    You can reset CAVE at any time by removing the **~/caveData** directory (on macOS **~/Library/caveData**) and reconnecting to an EDEX server.

    | +## macOS | | | |:----------------------------------------:|:--| -|

    macOS

    |

    Download and install both
    [awips2-cave-17.1.1-6.dmg ](https://www.unidata.ucar.edu/downloads/awips2/awips2-cave-17.1.1-6.dmg)
    [awips-python.pkg ](https://www.unidata.ucar.edu/downloads/awips2/awips-python.pkg)

    Writes and syncs files to ~/Library/caveData.

    **awips-python.pkg** is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, jep must be installed in some way (it is assumed in /Library/Python/2.7/site-packages/jep/)

    | +|

    |

    Download and install both
    [awips-cave-18.1.1-4.dmg ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave-18.1.1-4.dmg)
    [awips-python.pkg ](https://www.unidata.ucar.edu/downloads/awips2/awips-python.pkg)

    **Supported Graphics Devices for macOS**

  • Intel HD Graphics
  • Intel Iris
  • NVIDIA GeForce

    **Unsupported Graphics Devices for macOS**

  • AMD Radeon R9
  • AMD Radeon Pro
  • AMD FirePro D300

    Writes and syncs files to ~/Library/caveData.

    **awips-python.pkg** is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, jep must be installed in some way (it is assumed in /Library/Python/2.7/site-packages/jep/)

    | + +## Windows | | | |:----------------------------------------:|:--| -|

    32-bit Windows

    |

    [awips-cave.msi ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave.msi)

    Windows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.

    Writes files to caveData in the user's home directory.

    | -|

    64-bit Windows

    |

    [awips-cave.amd64.msi ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave.amd64.msi)

    Windows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.

    Writes files to caveData in the user's home directory.

    Beta status

    Client-side Python scripts (including Derived Parameters) do not work on Windows

    | +|

    |

    [awips-cave-18.1.1-4.msi ](https://www.unidata.ucar.edu/downloads/awips2/awips-cave-18.1.1-4.msi)

    Writes files to **~/caveData** (in your user home directory)

    Requires Python 3, Numpy, and Jep be installed

    Requires **PYTHONHOME** be defined

    In addition to the application directory, the MSI installer will attempt to copy the *[gridslice](https://github.com/mjames-upc/gridslice)* shared library to `$PYTHONHOME/Dlls`. If the `$PYTHONHOME` environment variable is not defined, *gridslice* will not be installed. You can always rerun the installer after defining `$PYTHONHOME` and then check that the file `gridslice.pyd` is installed in `$PYTHONHOME/Dlls`.

    CAVE will still run without gridslice, but certain bundles which use derived parameters, such as [isentropic analyses](../cave/d2d-grids/#isentopic-analysis-270k-320k), will not load.

    | + + + +
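Because the gridslice step is the one most often missed, a quick check along these lines (an illustrative sketch for Git Bash or another POSIX shell on Windows; `PYTHONHOME` and the `Dlls` path are the ones referenced above) confirms whether the shared library landed where the installer targets:

```bash
# Illustrative sketch: verify PYTHONHOME is defined and that the MSI
# installer copied gridslice.pyd into $PYTHONHOME/Dlls.
if [ -z "$PYTHONHOME" ]; then
    echo "PYTHONHOME is not defined - gridslice was not installed"
elif [ -f "$PYTHONHOME/Dlls/gridslice.pyd" ]; then
    echo "gridslice is installed"
else
    echo "gridslice is missing - define PYTHONHOME and rerun the installer"
fi
```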

    Windows-Specific Instructions

    + + + +
    +

    1) Download and install Miniconda Python 3.7 for Windows

    • Allow Miniconda3 to set PATH and other environment variables.
    • Ensure that PYTHONHOME is set to the Miniconda3 location.

      If PYTHONHOME is not set, the gridslice Python module will not be installed or available.

    2) Install dependent Python packages

    • pip install numpy==1.15.1 jep==3.8.2

    3) Run awips-cave-18.1.1-4.msi

    +
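After step 3, a one-line sanity check (assuming `python` resolves to the Miniconda install that **PYTHONHOME** points to) confirms the two required packages import cleanly before you start CAVE:

```bash
# Illustrative sketch: confirm the numpy and jep packages installed in step 2.
python -c "import numpy, jep; print('numpy', numpy.__version__)"
```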
    --- @@ -26,12 +38,13 @@ Unidata and XSEDE Jetstream have partnered to offer a EDEX data server in the cloud --- -## Troubleshooting +## caveData Directory -**Localization Preferences Error** +After connecting to an EDEX server, you will have a local directory named **caveData** which contains files synced from EDEX as well as a client-side cache for data and map resources. -You can reset CAVE by removing the **~/caveData** directory (on macOS **~/Library/caveData**) and then run `cave` again to connect to an EDEX server. Your local files have been removed, but if you are re-connecting to an EDEX server you have used before, the remote files will sync again to your local **~/caveData** (bundles, colormaps, etc.). +You can reset CAVE by removing the **caveData** directory and reconnecting to an EDEX server. Your local files have been removed, but if you are re-connecting to an EDEX server you have used before, the remote files will sync again to your local **~/caveData** (bundles, colormaps, etc.). -**No Images Displayed** +* Linux: `/home/<username>/caveData` +* macOS: `/Users/<username>/Library/caveData` +* Windows: `C:\Users\<username>\caveData` -If you are able to load wire-frame contours but not images, [update your video driver](http://www.nvidia.com/Download/index.aspx?lang=en-us). diff --git a/docs/install/install-distributed.md b/docs/install/install-distributed.md index b0a53ebd1e..831b5a428b 100644 --- a/docs/install/install-distributed.md +++ b/docs/install/install-distributed.md @@ -99,11 +99,11 @@ In the [Azure portal](https://portal.azure.com): 13. Finally, install the EDEX server - `wget https://www.unidata.ucar.edu/software/awips2/install.sh` + `wget https://www.unidata.ucar.edu/software/awips2/awips_install.sh` - `chmod 755 ./install.sh` + `chmod 755 ./awips_install.sh` - `sudo ./install.sh --edex` + `sudo ./awips_install.sh --edex` --- diff --git a/docs/install/install-edex.md b/docs/install/install-edex.md index 978e910252..1dd1cfaa9c 100644 --- a/docs/install/install-edex.md +++ b/docs/install/install-edex.md @@ -1,11 +1,11 @@ ## Download and Install EDEX -[Latest Release 17.1.1-6 (June 13, 2018)](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) +> [Release 18.1.1-4, December 26, 2018](https://www.unidata.ucar.edu/blogs/news/category/AWIPS) | | | |:----------------------------------------:|:--| -|

    Linux

    |

    [install.sh --edex ](https://www.unidata.ucar.edu/software/awips2/install.sh)

    Installs to /awips2/ directories.

    chmod 755 install.sh
    sudo ./install.sh --edex

    Start and Stop:

    edex start
    edex stop

    System Requirements

    • x86_64 CentOS/RHEL 6 or 7
    • 16+ CPU cores (each CPU core is one more decoder which can run in parallel)
    • 24GB RAM
    • 700GB+ disk space
    • A **Solid State Drive (SSD)** is highly recommended

    An **SSD** should be mounted either to `/awips2` (to contain the entire EDEX system) or to `/awips2/edex/data/hdf5` (to contain the large files in the decoded data store). EDEX can scale to any system by adjusting the incoming LDM data feeds or adjusting the resources (CPU threads) allocated to each data type.

    **64-bit CentOS/RHEL 6 and 7** are the only supported operating systems for EDEX. You may have luck with Fedora Core 12 to 14 and Scientific Linux.

    EDEX is not supported on Debian, Ubuntu, SUSE, Solaris, OS X, or Windows.

    | +|

    Linux

    |

    [awips_install.sh --edex ](https://www.unidata.ucar.edu/software/awips2/awips_install.sh)

    Installs to /awips2/ directories.

    chmod 755 awips_install.sh
    sudo ./awips_install.sh --edex

    Start and Stop:

    edex start
    edex stop

    System Requirements

    • x86_64 CentOS/RHEL 6 or 7
    • 16+ CPU cores (each CPU core is one more decoder which can run in parallel)
    • 24GB RAM
    • 700GB+ disk space
    • A **Solid State Drive (SSD)** is highly recommended

    An **SSD** should be mounted either to `/awips2` (to contain the entire EDEX system) or to `/awips2/edex/data/hdf5` (to contain the large files in the decoded data store). EDEX can scale to any system by adjusting the incoming LDM data feeds or adjusting the resources (CPU threads) allocated to each data type.

    **64-bit CentOS/RHEL 6 and 7** are the only supported operating systems for EDEX. You may have luck with Fedora Core 12 to 14 and Scientific Linux.

    EDEX is not supported on Debian, Ubuntu, SUSE, Solaris, OS X, or Windows.

    | > ### [Read More: Distributed EDEX, Installing Across Multiple Machines](/edex/distributed-computing/) @@ -27,15 +27,16 @@ All of these command should be run as **root** ### 2. Install EDEX > ->Download and run [install.sh --edex ](https://www.unidata.ucar.edu/software/awips2/install.sh) +>Download and run [sudo ./awips_install.sh --edex ](https://www.unidata.ucar.edu/software/awips2/awips_install.sh) > -> wget https://www.unidata.ucar.edu/software/awips2/install.sh -> chmod 755 install.sh -> sudo ./install.sh --edex +> wget https://www.unidata.ucar.edu/software/awips2/awips_install.sh +> chmod 755 awips_install.sh +> sudo ./awips_install.sh --edex +> /usr/bin/edex setup > > > ->!!! note "**install.sh --edex** will perform the following steps (it's always a good idea to review downloaded shell scripts):" +>!!! note "**awips_install.sh --edex** will perform the following steps (it's always a good idea to review downloaded shell scripts):" > > 1. Saves the appropriate Yum repo file to `/etc/yum.repos.d/awips2.repo` > 2. Increases process and file limits for the the *awips* account in `/etc/security/limits.conf` @@ -87,6 +88,8 @@ All of these command should be run as **root** > >- **To open ports to specific IP addresses** > +> In this example, the IP range `128.117.140.0/24` will match all 128.117.140.* addresses, while `128.117.156.0/24` will match 128.117.156.*. +> > vi /etc/sysconfig/iptables > > *filter @@ -108,8 +111,6 @@ All of these command should be run as **root** > #-A EDEX -m state --state NEW -p tcp --dport 9588 -j ACCEPT # for registry/dd > -A EDEX -j REJECT > COMMIT -> -> In this example, the IP range `128.117.140.0/24` will match all 128.117.140.* addresses, while `128.117.156.0/24` will match 128.117.156.*. > >**Restart iptables** > @@ -127,7 +128,6 @@ All of these command should be run as **root** ### 5. Start EDEX > -> edex setup > edex start > >To manually start, stop, and restart: @@ -156,7 +156,7 @@ All of these command should be run as **root** ### /etc/security/limits.conf -**/etc/security/limits.conf** defines the number of user processes and files (this step is automatically performed by `install.sh --edex`). Without these definitions, Qpid is known to crash during periods of high ingest. +**/etc/security/limits.conf** defines the number of user processes and files (this step is automatically performed by `./awips_install.sh --edex`). Without these definitions, Qpid is known to crash during periods of high ingest. awips soft nproc 65536 awips soft nofile 65536 diff --git a/docs/install/start-edex.md b/docs/install/start-edex.md index ec3358361a..7b28e097df 100644 --- a/docs/install/start-edex.md +++ b/docs/install/start-edex.md @@ -90,64 +90,4 @@ To see a list of clients connecting to your EDEX server, use the `edex users [YY ### edex purge -To view any stuck purge jobs in PortgreSQL (a rare but serious problem if your disk fills up). The solution to this is to run `edex purge reset`. 
- --- - -## EDEX Memory Configuration - -The directory `/awips2/edex/etc/` contains files which define the amount of memory used for each of the three EDEX JVMs (ingest, ingestGrib, request): - - ls -al /awips2/edex/etc/ - -rw-r--r-- 1 awips fxalpha 1501 Dec 7 00:37 default.sh - -rw-r--r-- 1 awips fxalpha 1655 Dec 12 19:47 ingestGrib.sh - -rw-r--r-- 1 awips fxalpha 937 Dec 12 19:46 ingest.sh - -rw-r--r-- 1 awips fxalpha 1231 Dec 12 19:47 request.sh - -Each file contains the **Xmx** definition for maximum memory: - - export INIT_MEM=512 # in Meg - export MAX_MEM=4096 # in Meg - -After editing these files, you must restart edex (`service edex_camel restart`). - ---- - -## EDEX Plugin Configuration - -The directory `/awips2/edex/conf/modes` contains XML files with rules defining which plugins are included or excluded with each JVM (ingest, ingestGrid, request): - - ls -la /awips2/edex/conf/modes - -rw-r--r-- 1 awips fxalpha 1982 Dec 6 21:26 grid-modes.xml - -rw-r--r-- 1 awips fxalpha 928 Dec 6 21:24 ingest-modes.xml - -rw-r--r-- 1 awips fxalpha 1689 Dec 6 21:24 request-modes.xml - -EDEX services are all registered through spring. By including or excluding specific spring files we can determine at startup which services the EDEX instance should start. - -All mode files are merged at startup. Modes files with modes that have the same name are combined so the end result is an aggregate of patterns in all files. Include and exclude tags should have regular expressions that are compatible with Java's Pattern class. If you provide no `<include>` tag for a particular mode, the include defaults to `.*`. - -An example of `/awips2/edex/conf/modes/ingest-modes.xml`, with a number of unused plugin decoders excluded because the data are not available outside of the SBN: - - <edexModes> - <mode name="ingest"> - <exclude>.*request.*</exclude> - <exclude>edex-security.xml</exclude> - <exclude>taf.*</exclude> - <exclude>modis.*</exclude> - <exclude>shef.*</exclude> - <exclude>idft.*</exclude> - <exclude>ffmp.*</exclude> - <exclude>stormtrack.*</exclude> - <exclude>retrieval.*</exclude> - <exclude>regionalsat.*</exclude> - <exclude>pointset-netcdf.*</exclude> - <exclude>ncscat.*</exclude> - <exclude>bufrobs.*</exclude> - <exclude>bufrmthdw.*</exclude> - <exclude>sgwh.*</exclude> - </mode> - </edexModes> - +Views any stuck purge jobs in PostgreSQL (a rare but serious problem if your disk fills up). The solution to a stuck purge job is to run `edex purge reset`.
\ No newline at end of file diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 120000 index 0000000000..b5fcf41e93 --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1 @@ +../mkdocs.yml \ No newline at end of file diff --git a/pdf/A2_SVD_OB17.1.1.pdf b/docs/pdf/A2_SVD_OB17.1.1.pdf similarity index 100% rename from pdf/A2_SVD_OB17.1.1.pdf rename to docs/pdf/A2_SVD_OB17.1.1.pdf diff --git a/pdf/AWP.FT.SWCTR.ADE-16.00.pdf b/docs/pdf/AWP.FT.SWCTR.ADE-16.00.pdf similarity index 100% rename from pdf/AWP.FT.SWCTR.ADE-16.00.pdf rename to docs/pdf/AWP.FT.SWCTR.ADE-16.00.pdf diff --git a/pdf/AWP.RLSN.OB17.1.1 - FINAL.pdf b/docs/pdf/AWP.RLSN.OB17.1.1 - FINAL.pdf similarity index 100% rename from pdf/AWP.RLSN.OB17.1.1 - FINAL.pdf rename to docs/pdf/AWP.RLSN.OB17.1.1 - FINAL.pdf diff --git a/pdf/NWS_Documentation/A2_SVD_OB17.1.1.xls b/docs/pdf/NWS_Documentation/A2_SVD_OB17.1.1.xls similarity index 100% rename from pdf/NWS_Documentation/A2_SVD_OB17.1.1.xls rename to docs/pdf/NWS_Documentation/A2_SVD_OB17.1.1.xls diff --git a/pdf/NWS_Documentation/AWP.DSN.A2.SSDD-08.00.zip b/docs/pdf/NWS_Documentation/AWP.DSN.A2.SSDD-08.00.zip similarity index 100% rename from pdf/NWS_Documentation/AWP.DSN.A2.SSDD-08.00.zip rename to docs/pdf/NWS_Documentation/AWP.DSN.A2.SSDD-08.00.zip diff --git a/pdf/NWS_Documentation/D2D Procedure Conversion/D2D Bundle Conversion.docx b/docs/pdf/NWS_Documentation/D2D Procedure Conversion/D2D Bundle Conversion.docx similarity index 100% rename from pdf/NWS_Documentation/D2D Procedure Conversion/D2D Bundle Conversion.docx rename to docs/pdf/NWS_Documentation/D2D Procedure Conversion/D2D Bundle Conversion.docx diff --git a/pdf/NWS_Documentation/GFE Migration/D2DGridsinGFE.docx b/docs/pdf/NWS_Documentation/GFE Migration/D2DGridsinGFE.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/D2DGridsinGFE.docx rename to docs/pdf/NWS_Documentation/GFE Migration/D2DGridsinGFE.docx diff --git a/pdf/NWS_Documentation/GFE Migration/GfeClient/PublishingGfeGrids.docx b/docs/pdf/NWS_Documentation/GFE Migration/GfeClient/PublishingGfeGrids.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/GfeClient/PublishingGfeGrids.docx rename to docs/pdf/NWS_Documentation/GFE Migration/GfeClient/PublishingGfeGrids.docx diff --git a/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient PngWriter.docx b/docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient PngWriter.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient PngWriter.docx rename to docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient PngWriter.docx diff --git a/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient TextProductTest.docx b/docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient TextProductTest.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient TextProductTest.docx rename to docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient TextProductTest.docx diff --git a/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient runProcedure.docx b/docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient runProcedure.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient runProcedure.docx rename to docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient runProcedure.docx diff --git a/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient.docx b/docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient.docx similarity index 100% rename from 
pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient.docx rename to docs/pdf/NWS_Documentation/GFE Migration/GfeClient/gfeClient.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/AppCheckForPointTools.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/AppCheckForPointTools.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/AppCheckForPointTools.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/AppCheckForPointTools.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/AppConvertEditAreas.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/AppConvertEditAreas.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/AppConvertEditAreas.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/AppConvertEditAreas.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/AppGfeAfpsProxy.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/AppGfeAfpsProxy.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/AppGfeAfpsProxy.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/AppGfeAfpsProxy.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/BaseSiteUserDirs.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/BaseSiteUserDirs.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/BaseSiteUserDirs.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/BaseSiteUserDirs.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/CommonSmartToolChanges.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/CommonSmartToolChanges.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/CommonSmartToolChanges.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/CommonSmartToolChanges.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/GFErename modules.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/GFErename modules.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/GFErename modules.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/GFErename modules.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/GfeActivateSite.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/GfeActivateSite.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/GfeActivateSite.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/GfeActivateSite.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/GfeConfigMaps.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/GfeConfigMaps.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/GfeConfigMaps.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/GfeConfigMaps.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/GfeModuleInstaller.sh.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/GfeModuleInstaller.sh.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/GfeModuleInstaller.sh.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/GfeModuleInstaller.sh.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/HowToGfePortingNotes.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/HowToGfePortingNotes.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/HowToGfePortingNotes.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/HowToGfePortingNotes.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/HowToImportShapefile.docx 
b/docs/pdf/NWS_Documentation/GFE Migration/Links/HowToImportShapefile.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/HowToImportShapefile.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/HowToImportShapefile.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/MultiModelSmartInits.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/MultiModelSmartInits.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/MultiModelSmartInits.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/MultiModelSmartInits.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/NCLADT Repository.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/NCLADT Repository.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/NCLADT Repository.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/NCLADT Repository.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/NumericToNumpy.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/NumericToNumpy.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/NumericToNumpy.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/NumericToNumpy.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptDifferences.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptDifferences.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/SmartScriptDifferences.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptDifferences.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptIssues.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptIssues.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/SmartScriptIssues.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptIssues.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptUsageAnalysis.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptUsageAnalysis.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/SmartScriptUsageAnalysis.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/SmartScriptUsageAnalysis.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/checkForPointTools doc.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/checkForPointTools doc.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/checkForPointTools doc.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/checkForPointTools doc.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/findAdamGfeFile.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/findAdamGfeFile.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/findAdamGfeFile.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/findAdamGfeFile.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/gfeConfigFileChanges.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/gfeConfigFileChanges.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/gfeConfigFileChanges.docx rename to docs/pdf/NWS_Documentation/GFE Migration/Links/gfeConfigFileChanges.docx diff --git a/pdf/NWS_Documentation/GFE Migration/Links/gfePorter.docx b/docs/pdf/NWS_Documentation/GFE Migration/Links/gfePorter.docx similarity index 100% rename from pdf/NWS_Documentation/GFE Migration/Links/gfePorter.docx rename to 
docs/pdf/NWS_Documentation/GFE Migration/Links/gfePorter.docx diff --git a/pdf/NWS_Documentation/LDM Ingest/Ingest Filter Configuration.docx b/docs/pdf/NWS_Documentation/LDM Ingest/Ingest Filter Configuration.docx similarity index 100% rename from pdf/NWS_Documentation/LDM Ingest/Ingest Filter Configuration.docx rename to docs/pdf/NWS_Documentation/LDM Ingest/Ingest Filter Configuration.docx diff --git a/pdf/NWS_Documentation/LDM Ingest/LDM Ingest Checklist.docx b/docs/pdf/NWS_Documentation/LDM Ingest/LDM Ingest Checklist.docx similarity index 100% rename from pdf/NWS_Documentation/LDM Ingest/LDM Ingest Checklist.docx rename to docs/pdf/NWS_Documentation/LDM Ingest/LDM Ingest Checklist.docx diff --git a/pdf/NWS_Documentation/Local Applications/Adding Local Grids_v2.pdf b/docs/pdf/NWS_Documentation/Local Applications/Adding Local Grids_v2.pdf similarity index 100% rename from pdf/NWS_Documentation/Local Applications/Adding Local Grids_v2.pdf rename to docs/pdf/NWS_Documentation/Local Applications/Adding Local Grids_v2.pdf diff --git a/pdf/NWS_Documentation/Local Applications/Local Applications Guide.docx b/docs/pdf/NWS_Documentation/Local Applications/Local Applications Guide.docx similarity index 100% rename from pdf/NWS_Documentation/Local Applications/Local Applications Guide.docx rename to docs/pdf/NWS_Documentation/Local Applications/Local Applications Guide.docx diff --git a/pdf/NWS_Documentation/Local Applications/Local Apps working session - migration.docx b/docs/pdf/NWS_Documentation/Local Applications/Local Apps working session - migration.docx similarity index 100% rename from pdf/NWS_Documentation/Local Applications/Local Apps working session - migration.docx rename to docs/pdf/NWS_Documentation/Local Applications/Local Apps working session - migration.docx diff --git a/pdf/NWS_Documentation/Localization Doc References/A2_SDC_Localization_Guide.pdf b/docs/pdf/NWS_Documentation/Localization Doc References/A2_SDC_Localization_Guide.pdf similarity index 100% rename from pdf/NWS_Documentation/Localization Doc References/A2_SDC_Localization_Guide.pdf rename to docs/pdf/NWS_Documentation/Localization Doc References/A2_SDC_Localization_Guide.pdf diff --git a/pdf/NWS_Documentation/Localization Doc References/ADAM SDC AutoTool functions.docx b/docs/pdf/NWS_Documentation/Localization Doc References/ADAM SDC AutoTool functions.docx similarity index 100% rename from pdf/NWS_Documentation/Localization Doc References/ADAM SDC AutoTool functions.docx rename to docs/pdf/NWS_Documentation/Localization Doc References/ADAM SDC AutoTool functions.docx diff --git a/pdf/NWS_Documentation/Localization Doc References/ADAM SDC Autotool Log File.docx b/docs/pdf/NWS_Documentation/Localization Doc References/ADAM SDC Autotool Log File.docx similarity index 100% rename from pdf/NWS_Documentation/Localization Doc References/ADAM SDC Autotool Log File.docx rename to docs/pdf/NWS_Documentation/Localization Doc References/ADAM SDC Autotool Log File.docx diff --git a/pdf/NWS_Documentation/Localization Doc References/Table 5.3-1.docx b/docs/pdf/NWS_Documentation/Localization Doc References/Table 5.3-1.docx similarity index 100% rename from pdf/NWS_Documentation/Localization Doc References/Table 5.3-1.docx rename to docs/pdf/NWS_Documentation/Localization Doc References/Table 5.3-1.docx diff --git a/pdf/NWS_Documentation/Organization Aids/A2 Site Mig Org Aids_v8.xlsm b/docs/pdf/NWS_Documentation/Organization Aids/A2 Site Mig Org Aids_v8.xlsm similarity index 100% rename from pdf/NWS_Documentation/Organization 
Aids/A2 Site Mig Org Aids_v8.xlsm rename to docs/pdf/NWS_Documentation/Organization Aids/A2 Site Mig Org Aids_v8.xlsm diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV07BE~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV07BE~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV07BE~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV07BE~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV089E~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV089E~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV089E~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV089E~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D1C~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D1C~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D1C~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D1C~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D49~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D49~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D49~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV0D49~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV189E~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV189E~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV189E~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV189E~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV1994~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV1994~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV1994~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV1994~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2148~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2148~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2148~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2148~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV22AD~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV22AD~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV22AD~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV22AD~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV23EF~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV23EF~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV23EF~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV23EF~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2C3D~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2C3D~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/SV2C3D~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV2C3D~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV3CB8~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV3CB8~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV3CB8~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV3CB8~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV4037~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV4037~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV4037~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV4037~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV465B~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV465B~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV465B~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV465B~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV5804~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV5804~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV5804~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV5804~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV616A~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV616A~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV616A~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV616A~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV6F55~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV6F55~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV6F55~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV6F55~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8006~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8006~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8006~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8006~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV834D~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV834D~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV834D~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV834D~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8427~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8427~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8427~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8427~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8672~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8672~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/SV8672~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8672~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8756~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8756~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8756~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8756~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8B33~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8B33~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8B33~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV8B33~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV9313~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV9313~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV9313~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV9313~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV931C~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV931C~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV931C~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV931C~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV941A~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV941A~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV941A~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SV941A~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA79B~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA79B~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA79B~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA79B~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA7FC~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA7FC~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA7FC~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVA7FC~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVAF17~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVAF17~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVAF17~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVAF17~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB637~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB637~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB637~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB637~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB69B~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB69B~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/SVB69B~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVB69B~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBC8A~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBC8A~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBC8A~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBC8A~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBCB3~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBCB3~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBCB3~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBCB3~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBFC2~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBFC2~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBFC2~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVBFC2~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC20E~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC20E~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC20E~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC20E~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC285~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC285~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC285~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC285~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC32E~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC32E~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC32E~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC32E~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC3C0~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC3C0~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC3C0~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC3C0~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC429~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC429~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC429~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC429~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC7FB~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC7FB~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC7FB~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVC7FB~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD4AC~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD4AC~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/SVD4AC~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD4AC~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD50B~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD50B~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD50B~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD50B~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD88A~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD88A~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD88A~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVD88A~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDB01~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDB01~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDB01~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDB01~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDE91~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDE91~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDE91~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVDE91~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE48F~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE48F~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE48F~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE48F~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE57C~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE57C~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE57C~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVE57C~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVEC7A~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVEC7A~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVEC7A~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVEC7A~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF22F~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF22F~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF22F~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF22F~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF2ED~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF2ED~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF2ED~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF2ED~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF7ED~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF7ED~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/SVF7ED~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVF7ED~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFB3E~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFB3E~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFB3E~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFB3E~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFE0C~1.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFE0C~1.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFE0C~1.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVFE0C~1.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNBAS~4.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNBAS~4.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNBAS~4.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNBAS~4.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNCUS~2.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNCUS~2.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNCUS~2.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNCUS~2.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNSER~4.HTM b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNSER~4.HTM similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNSER~4.HTM rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/SVNSER~4.HTM diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch01dia1.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch01dia1.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch01dia1.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch01dia1.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia1.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia1.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia1.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia1.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia2.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia2.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia2.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia2.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia3.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia3.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia3.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia3.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia4.png b/docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/images/ch02dia4.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia4.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia4.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia5.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia5.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia5.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia5.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia6.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia6.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia6.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia6.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia7.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia7.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia7.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch02dia7.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia1.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia1.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia1.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia1.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia2.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia2.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia2.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia2.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia3.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia3.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia3.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia3.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia4.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia4.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia4.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch04dia4.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia1.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia1.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia1.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia1.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia2.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia2.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/images/ch08dia2.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/ch08dia2.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/draft.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/draft.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/draft.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/draft.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/note.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/note.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/note.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/note.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/tip.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/tip.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/tip.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/tip.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/warning.png b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/warning.png similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/warning.png rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/images/warning.png diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/index.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/index.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/index.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/index.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/styles.css b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/styles.css similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/styles.css rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/styles.css diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.confarea.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.confarea.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.confarea.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.confarea.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.externals.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.externals.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.externals.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.externals.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.html diff --git a/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.advanced.l10n.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.l10n.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.l10n.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.l10n.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.locking.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.locking.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.locking.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.locking.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.pegrevs.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.pegrevs.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.pegrevs.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.pegrevs.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.props.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.props.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.props.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.props.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.sparsedirs.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.sparsedirs.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.sparsedirs.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.sparsedirs.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.summary.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.summary.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.summary.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.vendorbr.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.vendorbr.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.vendorbr.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.advanced.vendorbr.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.in-action.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.in-action.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.in-action.html rename to docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.basic.in-action.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.summary.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.summary.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.basic.summary.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.maint.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.maint.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.maint.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.maint.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.summary.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.summary.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.summary.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.tags.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.tags.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.tags.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.tags.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.using.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.using.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.using.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.using.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.whatis.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.whatis.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.whatis.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.branchmerge.whatis.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.copyright.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.copyright.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.copyright.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.copyright.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.customization.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.customization.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.customization.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.customization.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.insidewc.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.insidewc.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.insidewc.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.insidewc.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.layerlib.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.layerlib.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.layerlib.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.layerlib.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.summary.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.summary.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.summary.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.usingapi.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.usingapi.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.usingapi.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.developer.usingapi.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.auth.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.auth.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.auth.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.auth.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.conflicts.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.conflicts.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.conflicts.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.conflicts.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.convert.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.convert.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.convert.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.convert.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.directories.html b/docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.forcvs.directories.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.directories.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.directories.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.disconnected.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.disconnected.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.disconnected.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.disconnected.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.modules.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.modules.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.modules.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.modules.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.properties.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.properties.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.properties.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.properties.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.revnums.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.revnums.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.revnums.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.forcvs.revnums.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.foreword.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.foreword.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.foreword.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.foreword.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.index.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.index.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.index.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.index.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.install.html b/docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.intro.install.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.install.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.install.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.quickstart.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.quickstart.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.quickstart.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.quickstart.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.whatis.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.whatis.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.whatis.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.intro.whatis.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.acks.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.acks.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.acks.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.acks.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.audience.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.audience.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.audience.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.audience.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.free.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.free.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.free.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.free.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.howread.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.howread.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.howread.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.howread.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.preface.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_authz_svn.html b/docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.mod_authz_svn.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_authz_svn.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_authz_svn.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_dav_svn.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_dav_svn.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_dav_svn.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.mod_dav_svn.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.properties.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.properties.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.properties.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.properties.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.reposhooks.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.reposhooks.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.reposhooks.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.reposhooks.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.add.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.add.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.add.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.add.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.blame.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.blame.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.blame.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.blame.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cat.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cat.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cat.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cat.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.checkout.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.checkout.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.checkout.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.checkout.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cleanup.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cleanup.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cleanup.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.cleanup.html diff --git a/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svn.c.commit.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.commit.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.commit.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.commit.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.copy.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.copy.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.copy.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.copy.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.delete.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.delete.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.delete.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.delete.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.diff.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.diff.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.diff.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.diff.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.export.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.export.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.export.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.export.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.help.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.help.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.help.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.import.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.import.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.import.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.import.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.info.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.info.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.info.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.info.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.list.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.list.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.list.html rename to docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svn.c.list.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.lock.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.lock.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.lock.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.lock.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.log.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.log.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.log.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.log.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.merge.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.merge.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.merge.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.merge.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mergeinfo.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mergeinfo.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mergeinfo.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mergeinfo.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mkdir.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mkdir.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mkdir.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.mkdir.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.move.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.move.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.move.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.move.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propdel.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propdel.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propdel.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propdel.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propedit.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propedit.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propedit.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propedit.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propget.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propget.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svn.c.propget.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propget.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.proplist.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.proplist.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.proplist.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.proplist.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propset.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propset.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propset.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.propset.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.relocate.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.relocate.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.relocate.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.relocate.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolve.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolve.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolve.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolve.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolved.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolved.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolved.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.resolved.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.revert.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.revert.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.revert.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.revert.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.status.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.status.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.status.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.status.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.switch.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.switch.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.switch.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.switch.html diff --git a/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svn.c.unlock.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.unlock.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.unlock.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.unlock.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.update.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.update.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.update.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.update.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.upgrade.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.upgrade.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.upgrade.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.c.upgrade.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svn.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.dump.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.dump.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.dump.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.dump.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.help.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.help.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.help.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.load.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.load.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.load.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.load.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.pack.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.pack.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.pack.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.c.pack.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnadmin.html rename to docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svnadmin.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svndumpfilter.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svndumpfilter.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svndumpfilter.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svndumpfilter.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.cat.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.cat.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.cat.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.cat.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.date.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.date.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.date.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.date.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.diff.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.diff.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.diff.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.diff.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.help.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.help.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.help.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.info.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.info.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.info.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.info.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.lock.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.lock.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.lock.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.lock.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.log.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.log.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.log.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.log.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.tree.html b/docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svnlook.c.tree.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.tree.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.tree.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.uuid.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.uuid.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.uuid.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.c.uuid.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnlook.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.dump.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.dump.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.dump.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.dump.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.help.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.help.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.help.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.load.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.load.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.load.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.c.load.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnrdump.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnserve.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnserve.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnserve.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnserve.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.help.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.help.html rename to docs/pdf/NWS_Documentation/Subversion/1. 
7 svn-book-html-chunk/svn.ref.svnsync.c.help.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.info.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.info.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.info.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.info.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.init.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.init.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.init.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.init.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.sync.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.sync.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.sync.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.c.sync.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnsync.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.re.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.re.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.re.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.ref.svnversion.re.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.basics.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.basics.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.basics.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.basics.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.create.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.create.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.create.html rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.create.html diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.html similarity index 100% rename from pdf/NWS_Documentation/Subversion/1. 
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.maint.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.maint.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.maint.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.maint.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.planning.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.planning.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.planning.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.planning.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.summary.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.summary.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.reposadmin.summary.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.httpd.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.httpd.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.httpd.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.serverconfig.httpd.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cleanup.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cleanup.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cleanup.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cleanup.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cycle.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cycle.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cycle.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.cycle.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.help.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.help.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.help.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.help.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.history.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.history.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.history.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.history.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.importing.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.importing.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.importing.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.importing.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.initial.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.initial.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.initial.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.initial.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.summary.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.summary.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.summary.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.summary.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.treeconflicts.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.treeconflicts.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.treeconflicts.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.tour.treeconflicts.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.basic.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.basic.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.basic.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.basic.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.clients.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.clients.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.clients.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.clients.html
diff --git a/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.html b/docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.html
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.html
rename to docs/pdf/NWS_Documentation/Subversion/1. 7 svn-book-html-chunk/svn.webdav.html
diff --git a/pdf/NWS_Documentation/Subversion/1.7 svn-book-html-chunk.tar.bz2 b/docs/pdf/NWS_Documentation/Subversion/1.7 svn-book-html-chunk.tar.bz2
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1.7 svn-book-html-chunk.tar.bz2
rename to docs/pdf/NWS_Documentation/Subversion/1.7 svn-book-html-chunk.tar.bz2
diff --git a/pdf/NWS_Documentation/Subversion/1.7 svn-book.pdf b/docs/pdf/NWS_Documentation/Subversion/1.7 svn-book.pdf
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/1.7 svn-book.pdf
rename to docs/pdf/NWS_Documentation/Subversion/1.7 svn-book.pdf
diff --git a/pdf/NWS_Documentation/Subversion/AppSvnTagRelease.docx b/docs/pdf/NWS_Documentation/Subversion/AppSvnTagRelease.docx
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/AppSvnTagRelease.docx
rename to docs/pdf/NWS_Documentation/Subversion/AppSvnTagRelease.docx
diff --git a/pdf/NWS_Documentation/Subversion/HowToSubversion.docx b/docs/pdf/NWS_Documentation/Subversion/HowToSubversion.docx
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/HowToSubversion.docx
rename to docs/pdf/NWS_Documentation/Subversion/HowToSubversion.docx
diff --git a/pdf/NWS_Documentation/Subversion/HowToSubversionNewApp.docx b/docs/pdf/NWS_Documentation/Subversion/HowToSubversionNewApp.docx
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/HowToSubversionNewApp.docx
rename to docs/pdf/NWS_Documentation/Subversion/HowToSubversionNewApp.docx
diff --git a/pdf/NWS_Documentation/Subversion/RepoLayout.docx b/docs/pdf/NWS_Documentation/Subversion/RepoLayout.docx
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/RepoLayout.docx
rename to docs/pdf/NWS_Documentation/Subversion/RepoLayout.docx
diff --git a/pdf/NWS_Documentation/Subversion/Subversion Tips.docx b/docs/pdf/NWS_Documentation/Subversion/Subversion Tips.docx
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/Subversion Tips.docx
rename to docs/pdf/NWS_Documentation/Subversion/Subversion Tips.docx
diff --git a/pdf/NWS_Documentation/Subversion/SubversionTutorPt1-Erh-itproject.pdf b/docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt1-Erh-itproject.pdf
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/SubversionTutorPt1-Erh-itproject.pdf
rename to docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt1-Erh-itproject.pdf
diff --git a/pdf/NWS_Documentation/Subversion/SubversionTutorPt2-Erh-itproject.pdf b/docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt2-Erh-itproject.pdf
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/SubversionTutorPt2-Erh-itproject.pdf
rename to docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt2-Erh-itproject.pdf
diff --git a/pdf/NWS_Documentation/Subversion/SubversionTutorPt3-Erh-itproject.pdf b/docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt3-Erh-itproject.pdf
similarity index 100%
rename from pdf/NWS_Documentation/Subversion/SubversionTutorPt3-Erh-itproject.pdf
rename to docs/pdf/NWS_Documentation/Subversion/SubversionTutorPt3-Erh-itproject.pdf
diff --git a/pdf/NWS_Documentation/System Installation/A2_Rollback_doc.a2.rlbk-02.pdf b/docs/pdf/NWS_Documentation/System Installation/A2_Rollback_doc.a2.rlbk-02.pdf
similarity index 100%
rename from pdf/NWS_Documentation/System Installation/A2_Rollback_doc.a2.rlbk-02.pdf
rename to docs/pdf/NWS_Documentation/System Installation/A2_Rollback_doc.a2.rlbk-02.pdf
diff --git a/pdf/NWS_Documentation/System Installation/AWIPS II Installation Status Check.doc b/docs/pdf/NWS_Documentation/System Installation/AWIPS II Installation Status Check.doc
similarity index 100%
rename from pdf/NWS_Documentation/System Installation/AWIPS II Installation Status Check.doc
rename to docs/pdf/NWS_Documentation/System Installation/AWIPS II Installation Status Check.doc
diff --git a/pdf/NWS_Documentation/System Installation/Service Backup.pptx b/docs/pdf/NWS_Documentation/System Installation/Service Backup.pptx
similarity index 100%
rename from pdf/NWS_Documentation/System Installation/Service Backup.pptx
rename to docs/pdf/NWS_Documentation/System Installation/Service Backup.pptx
diff --git a/pdf/NWS_Documentation/System Installation/Thumbs.db b/docs/pdf/NWS_Documentation/System Installation/Thumbs.db
similarity index 100%
rename from pdf/NWS_Documentation/System Installation/Thumbs.db
rename to docs/pdf/NWS_Documentation/System Installation/Thumbs.db
diff --git a/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Combine Marine Zones.docx b/docs/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Combine Marine Zones.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Combine Marine Zones.docx
rename to docs/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Combine Marine Zones.docx
diff --git a/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Create Site Specific Dams.docx b/docs/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Create Site Specific Dams.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Create Site Specific Dams.docx
rename to docs/pdf/NWS_Documentation/WarnGen/A2Warngen Localization_Create Site Specific Dams.docx
diff --git a/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Documentation 1.3.1.doc b/docs/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Documentation 1.3.1.doc
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Documentation 1.3.1.doc
rename to docs/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Documentation 1.3.1.doc
diff --git a/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Localization Tutorial.docx b/docs/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Localization Tutorial.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Localization Tutorial.docx
rename to docs/pdf/NWS_Documentation/WarnGen/AWIPS 2 WarnGen Localization Tutorial.docx
diff --git a/pdf/NWS_Documentation/WarnGen/Adding Mile Markers in AWIPS 2.docx b/docs/pdf/NWS_Documentation/WarnGen/Adding Mile Markers in AWIPS 2.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/Adding Mile Markers in AWIPS 2.docx
rename to docs/pdf/NWS_Documentation/WarnGen/Adding Mile Markers in AWIPS 2.docx
diff --git a/pdf/NWS_Documentation/WarnGen/CountyInclusion.docx b/docs/pdf/NWS_Documentation/WarnGen/CountyInclusion.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/CountyInclusion.docx
rename to docs/pdf/NWS_Documentation/WarnGen/CountyInclusion.docx
diff --git a/pdf/NWS_Documentation/WarnGen/Main Page for AWIPS Migration WarnGen Info.docx b/docs/pdf/NWS_Documentation/WarnGen/Main Page for AWIPS Migration WarnGen Info.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/Main Page for AWIPS Migration WarnGen Info.docx
rename to docs/pdf/NWS_Documentation/WarnGen/Main Page for AWIPS Migration WarnGen Info.docx
diff --git a/pdf/NWS_Documentation/WarnGen/WarngenDoc.docx b/docs/pdf/NWS_Documentation/WarnGen/WarngenDoc.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/WarngenDoc.docx
rename to docs/pdf/NWS_Documentation/WarnGen/WarngenDoc.docx
diff --git a/pdf/NWS_Documentation/WarnGen/WarngenTemplateTeam.docx b/docs/pdf/NWS_Documentation/WarnGen/WarngenTemplateTeam.docx
similarity index 100%
rename from pdf/NWS_Documentation/WarnGen/WarngenTemplateTeam.docx
rename to docs/pdf/NWS_Documentation/WarnGen/WarngenTemplateTeam.docx
diff --git a/pdf/NWS_Documentation/Wiki/AWIPS Migration Site Configuration INDEX.docx b/docs/pdf/NWS_Documentation/Wiki/AWIPS Migration Site Configuration INDEX.docx
similarity index 100%
rename from pdf/NWS_Documentation/Wiki/AWIPS Migration Site Configuration INDEX.docx
rename to docs/pdf/NWS_Documentation/Wiki/AWIPS Migration Site Configuration INDEX.docx
diff --git a/pdf/NWS_Documentation/Wiki/NCLADT Wiki INDEX.docx b/docs/pdf/NWS_Documentation/Wiki/NCLADT Wiki INDEX.docx
similarity index 100%
rename from pdf/NWS_Documentation/Wiki/NCLADT Wiki INDEX.docx
rename to docs/pdf/NWS_Documentation/Wiki/NCLADT Wiki INDEX.docx
diff --git a/pdf/OB17.1.1_SMM_WithoutRedLines.pdf b/docs/pdf/OB17.1.1_SMM_WithoutRedLines.pdf
similarity index 100%
rename from pdf/OB17.1.1_SMM_WithoutRedLines.pdf
rename to docs/pdf/OB17.1.1_SMM_WithoutRedLines.pdf
diff --git a/pdf/index.md b/docs/pdf/index.md
similarity index 100%
rename from pdf/index.md
rename to docs/pdf/index.md
diff --git a/edexOsgi/build.edex/build.xml b/edexOsgi/build.edex/build.xml
index f990407503..c07b0d2cf6 100644
--- a/edexOsgi/build.edex/build.xml
+++ b/edexOsgi/build.edex/build.xml
@@ -31,64 +31,53 @@
+ - + - + - + - + - + - + + + + - + +
diff --git a/edexOsgi/build.edex/edex/build.properties b/edexOsgi/build.edex/edex/build.properties
index 1fb5028285..eb712ccd72 100644
--- a/edexOsgi/build.edex/edex/build.properties
+++ b/edexOsgi/build.edex/edex/build.properties
@@ -183,7 +183,7 @@ skipFetch=false
 #J2SE-1.3=
 #J2SE-1.4=
 #J2SE-1.5=
-#JavaSE-1.6=
+#JavaSE-1.8=
 #PersonalJava-1.1=
 #PersonalJava-1.2=
 #CDC-1.0/PersonalBasis-1.0=
diff --git a/edexOsgi/build.edex/opt/db/ddl/setup/pg_hba.conf b/edexOsgi/build.edex/opt/db/ddl/setup/pg_hba.conf
index 37b28c3d89..f64203cefe 100644
--- a/edexOsgi/build.edex/opt/db/ddl/setup/pg_hba.conf
+++ b/edexOsgi/build.edex/opt/db/ddl/setup/pg_hba.conf
@@ -65,8 +65,6 @@
 # "local" is for Unix domain socket connections only
 local all all trust
-hostssl all all 147.18.136.0/24 cert clientcert=1
-hostssl all all 147.18.139.0/24 cert clientcert=1
 hostssl all all 162.0.0.0/8 cert clientcert=1
 hostssl all all 127.0.0.1/32 cert clientcert=1
 # IPv6 local connections:
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.gfe/META-INF/MANIFEST.MF
index 194033e9ff..347dfa2aa1 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/META-INF/MANIFEST.MF
@@ -45,7 +45,6 @@ Export-Package: com.raytheon.edex.plugin.gfe,
  com.raytheon.edex.plugin.gfe.server.database,
  com.raytheon.edex.plugin.gfe.server.lock,
  com.raytheon.edex.plugin.gfe.smartinit,
- com.raytheon.edex.plugin.gfe.svcbackup,
  com.raytheon.edex.plugin.gfe.textproducts,
  com.raytheon.edex.plugin.gfe.util
 Import-Package: com.raytheon.edex.db.dao,
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml
index 3907f84515..780c587c21 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/res/spring/gfe-request.xml
@@ -87,9 +87,6 @@
- - -
@@ -150,72 +147,8 @@
- - - - - - - - - - - java.lang.Throwable - - - - - - - - - - - - - - - java.lang.Throwable - - - - - - - - - - - - - - - java.lang.Throwable - - - - - - - - - - - - - - - java.lang.Throwable - - - - -
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/ClearGfeOrphanedLocks.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/ClearGfeOrphanedLocks.java
deleted file mode 100644
index 51874e97dd..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/lock/ClearGfeOrphanedLocks.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name: Raytheon Company
- * Contractor Address: 6825 Pine Street, Suite 340
- * Mail Stop B8
- * Omaha, NE 68106
- * 402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.edex.plugin.gfe.server.lock;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import com.raytheon.edex.plugin.gfe.server.IFPServer;
-import com.raytheon.edex.plugin.gfe.util.SendNotifications;
-import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
-import com.raytheon.uf.common.dataplugin.gfe.server.lock.Lock;
-import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable;
-import com.raytheon.uf.common.dataplugin.gfe.server.lock.LockTable.LockMode;
-import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
-import com.raytheon.uf.common.dataplugin.gfe.server.notify.GfeNotification;
-import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
-import com.raytheon.uf.common.dataplugin.gfe.server.request.LockRequest;
-import com.raytheon.uf.common.message.WsId;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.edex.esb.camel.jms.IBrokerRestProvider;
-
-/**
- * GFE task to clear orphaned locks from the database table. Orphaned locks are
- * locks whose session ID is not in the list of current Qpid sessions.
- *
- *
    - *
    - * SOFTWARE HISTORY
    - *
    - * Date          Ticket#  Engineer        Description
    - * ------------- -------- --------------- --------------------------------------
    - * Mar 03, 2015  629      mgamazaychikov  Initial creation
    - * Jan 25, 2017  6092     randerso        Renamed interface to
    - *                                        IBrokerRestProvider
    - *
    - * 
- *
- * @author mgamazaychikov
- */
-
-public class ClearGfeOrphanedLocks {
-    private static IBrokerRestProvider provider;
-
-    private static final transient IUFStatusHandler statusHandler = UFStatus
-            .getHandler(ClearGfeOrphanedLocks.class);
-
-    public static void setProvider(IBrokerRestProvider provider) {
-        ClearGfeOrphanedLocks.provider = provider;
-    }
-
-    private void breakLocks(Set<String> clients, List<Lock> lockList,
-            LockManager lockMgr, String siteId) {
-        boolean foundOrpanedLocks = false;
-        List<LockRequest> lreq = new ArrayList<>();
-        StringBuilder sb = new StringBuilder();
-        for (Lock lock : lockList) {
-            String lockWsid = lock.getWsId().toString();
-            if (!clients.contains(lockWsid)) {
-                foundOrpanedLocks = true;
-                List<Lock> lst = new ArrayList<>();
-                lst.add(lock);
-                // Inactive clients found
-                lreq.add(new LockRequest(lock.getParmId(), lock.getTimeRange(),
-                        LockMode.BREAK_LOCK));
-                sb.append(" Breaking orphaned lock for site " + siteId + " on "
-                        + lock.getParmId().toString() + " owned by "
-                        + lock.getWsId().toPrettyString() + ".");
-
-            }
-        }
-        if (foundOrpanedLocks) {
-            statusHandler.info(sb.toString());
-            WsId requestor = new WsId(null, null, "ClearGfeOrphanedLocks");
-            ServerResponse<List<LockTable>> sr = lockMgr.requestLockChange(lreq,
-                    requestor);
-            if (sr.isOkay()) {
-                try {
-                    List<LockTable> lockTables = sr.getPayload();
-                    List<GfeNotification> notes = new ArrayList<>(
-                            lockTables.size());
-
-                    for (LockTable table : lockTables) {
-                        notes.add(new LockNotification(table, siteId));
-                    }
-                    ServerResponse notifyResponse = SendNotifications
-                            .send(notes);
-                    if (!notifyResponse.isOkay()) {
-                        statusHandler.error(notifyResponse.message());
-                    }
-
-                    // send out grid update notifications
-                    notifyResponse = SendNotifications
-                            .send(sr.getNotifications());
-                    if (!notifyResponse.isOkay()) {
-                        statusHandler.error(notifyResponse.message());
-                    }
-                } catch (Exception e) {
-                    statusHandler.error("Error sending lock notification", e);
-                }
-            } else {
-                statusHandler.error(sr.message());
-            }
-            return;
-        } else {
-            statusHandler
-                    .info(" No orphaned locks found for site " + siteId + ".");
-            return;
-        }
-    }
-
-    public void clearLocksCron() throws Exception {
-        statusHandler
-                .info("Started at " + new Date(System.currentTimeMillis()));
-        if (IFPServer.getActiveServers().size() == 0) {
-            statusHandler.info("No active IFPServer found.");
-            return;
-        }
-
-        Set<String> clients = new HashSet<>(provider.getConnections());
-        for (IFPServer ifps : IFPServer.getActiveServers()) {
-            LockManager lockMgr = ifps.getLockMgr();
-            String siteId = ifps.getSiteId();
-            try {
-                List<Lock> lockList = lockMgr.getAllLocks(siteId);
-
-                // find orphaned locks and break them
-                breakLocks(clients, lockList, lockMgr, siteId);
-            } catch (GfeException e) {
-                statusHandler.error("Error retrieving all locks", e);
-            }
-        }
-        return;
-    }
-}
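The deleted cron task above reduces to a single membership test: a lock is orphaned when the WsId (session ID) that owns it no longer appears in the broker's current connection list, and every such lock is queued for a BREAK_LOCK request. A minimal, self-contained sketch of just that detection step, assuming hypothetical names (OrphanedLockSketch and findOrphanedOwners are illustrative stand-ins, not AWIPS APIs), kept Java 8-compatible to match the JavaSE-1.8 execution environment set in build.properties above:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    /** Sketch of the orphan-detection step from ClearGfeOrphanedLocks. */
    public class OrphanedLockSketch {

        /** Returns the lock owners whose sessions are no longer connected. */
        static List<String> findOrphanedOwners(Set<String> activeSessions,
                List<String> lockOwners) {
            List<String> orphans = new ArrayList<>();
            for (String owner : lockOwners) {
                // Mirrors !clients.contains(lockWsid) in breakLocks() above:
                // the owning session is gone, so the lock should be broken.
                if (!activeSessions.contains(owner)) {
                    orphans.add(owner);
                }
            }
            return orphans;
        }

        public static void main(String[] args) {
            Set<String> active = new HashSet<>(Arrays.asList("sess-1", "sess-2"));
            List<String> owners = Arrays.asList("sess-1", "sess-9");
            System.out.println(findOrphanedOwners(active, owners)); // [sess-9]
        }
    }

In the real task, each orphaned owner's locks would then be submitted to LockManager.requestLockChange() with LockMode.BREAK_LOCK, as the deleted code shows.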
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/svcbackup/SvcBackupUtil.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/svcbackup/SvcBackupUtil.java
deleted file mode 100644
index f29d51c8a1..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/svcbackup/SvcBackupUtil.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name: Raytheon Company
- * Contractor Address: 6825 Pine Street, Suite 340
- * Mail Stop B8
- * Omaha, NE 68106
- * 402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-
-package com.raytheon.edex.plugin.gfe.svcbackup;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.HashSet;
-import java.util.Properties;
-import java.util.Set;
-
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Sets;
-import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
-import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
-import com.raytheon.uf.common.localization.LocalizationFile;
-import com.raytheon.uf.common.localization.LocalizationUtil;
-import com.raytheon.uf.common.localization.PathManagerFactory;
-import com.raytheon.uf.common.localization.exception.LocalizationException;
-import com.raytheon.uf.common.util.RunProcess;
-import com.raytheon.uf.edex.core.EDEXUtil;
-import com.raytheon.uf.edex.site.SiteAwareRegistry;
-
-/**
- *
- * Utility class for Service Backup
- *
- *
    - *
    - * SOFTWARE HISTORY
    - *
    - * Date          Ticket#  Engineer     Description
    - * ------------- -------- ------------ -----------------------------------------
    - * Oct 09, 2009           bphillip     Initial creation
    - * Sep 19, 2011  10955    rferrel      make sure process destroy is called.
    - * Jun 12, 2012  609      djohnson     Use EDEXUtil for EDEX_HOME.
    - * Nov 15, 2012  15614    jdynina      Added check for national center
    - * May 02, 2013  1762     dgilling     Remove check for national center, add
    - *                                     method to retrieve list of svcbu sites.
    - * May 28, 2014  3211     njensen      Use IAuthorizer instead of IRoleStorage
    - * Jul 10, 2014  2914     garmendariz  Remove EnvProperties
    - * Feb 17, 2015  4103     dgilling     Add getLockDir for specific site, code
    - *                                     cleanup.
    - * Mar 27, 2015  4103     dgilling     Support new location for
    - *                                     svcbu.properties.
    - * Dec 15, 2015  5166     kbisanz      Update logging to use SLF4J
    - * Jul 15, 2016  5747     dgilling     Move edex_static to common_static.
    - * Oct 24, 2016  5951     dgilling     Cleanup logging in executeProcess.
    - * May 23, 2017  6285     randerso     Removed authorizeWithLocalization.
    - *                                     Code cleanup.
    - *
    - * 
    - * - * @author bphillip - */ -public class SvcBackupUtil { - - private static final String SVCBU_PROPS_PATH = LocalizationUtil.join("gfe", - "config", "svcbu.properties"); - - /** The logger instance */ - protected static transient Logger logger = LoggerFactory - .getLogger(SvcBackupUtil.class); - - /** - * A private constructor so that Java does not attempt to create one for us. - * As this class should not be instantiated, do not attempt to ever call - * this constructor; it will simply throw an AssertionError. - * - */ - private SvcBackupUtil() { - throw new AssertionError(); - } - - /** - * Execute command defined by args - * - * @param args - * command arguments - * @return The output of the process - * @throws Exception - */ - public static String execute(String... args) throws Exception { - String[] newArgs = new String[args.length + 1]; - newArgs[0] = "sh"; - System.arraycopy(args, 0, newArgs, 1, newArgs.length - 1); - return executeProcess(newArgs); - } - - /** - * Executes a process using the java.lang.ProcessBuilder. - *

    - * The first argument is the command to execute. The proceeding arguments - * are the arguments to pass to the command for execution - * - * @param args - * First argument is the command. The proceeding arguments are - * the arguments to pass to the command for execution - * @return The output of the process - * @throws GfeException - * If errors occur while executing the process - */ - private static String executeProcess(String... args) throws GfeException { - RunProcess proc = RunProcess.getRunProcess(); - ProcessBuilder pBuilder = new ProcessBuilder(); - pBuilder.environment().put("LOCALIZATION_PATH", - EDEXUtil.getEdexUtility()); - pBuilder.environment().put("AWIPS_HOME", "/awips2/"); - pBuilder.redirectErrorStream(true); - pBuilder.command(args); - try { - proc.setProcess(pBuilder.start()); - } catch (IOException e) { - throw new GfeException("Error executing process", e); - } - - int exitValue = 0; - String processOutput = ""; - - exitValue = proc.waitFor(); - if (proc.isProcessInterrupted()) { - throw new GfeException("Process interrupted"); - } - processOutput = proc.getStdout(); - if (exitValue != 0) { - if (StringUtils.isNotBlank(processOutput)) { - String msg = String.format( - "Command [%s] terminated abnormally: \n%s", - StringUtils.join(args, ' '), processOutput); - logger.error(msg); - } - throw new GfeException("Process terminated abnormally"); - } - return processOutput; - } - - /** - * Examines the InputStream of a process and extracts any output into a - * String - * - * @param p - * The process to get the output from - * @return The output - * @throws GfeException - * If problems occur reading the process output - */ - public static String getProcessOutput(Process p) throws GfeException { - - String retVal = null; - InputStream in = p.getInputStream(); - StringBuilder out = new StringBuilder(); - int read = 0; - final byte[] buffer = new byte[0x10000]; - try { - do { - read = in.read(buffer, 0, buffer.length); - if (read > 0) { - out.append(new String(buffer), 0, read); - } - } while (read >= 0); - } catch (IOException e) { - throw new GfeException("Error reading process output", e); - } finally { - if (in != null) { - try { - in.close(); - } catch (IOException e) { - logger.error("Unable to close process input stream!", e); - } - } - try { - p.getOutputStream().close(); - } catch (IOException e1) { - logger.error("Unable to close process output stream!", e1); - } - - try { - p.getErrorStream().close(); - } catch (IOException e1) { - logger.error("Unable to close process error stream!", e1); - } - } - - retVal = out.toString(); - if (retVal.endsWith("\n")) { - retVal = retVal.substring(0, retVal.length() - 1); - } - return retVal; - } - - /** - * Get contents of svcbu.properties - * - * @return the svcbu properties - */ - public static Properties getSvcBackupProperties() { - Properties svcbuProperties = new Properties(); - - IPathManager pathMgr = PathManagerFactory.getPathManager(); - - LocalizationFile basePropsFile = pathMgr.getLocalizationFile( - pathMgr.getContext(LocalizationType.COMMON_STATIC, - LocalizationLevel.BASE), - SVCBU_PROPS_PATH); - try (InputStream input = basePropsFile.openInputStream()) { - svcbuProperties.load(input); - } catch (IOException | LocalizationException e) { - logger.error("Unable to load BASE level svcbu.properties file.", e); - } - - LocalizationFile sitePropsFile = pathMgr.getLocalizationFile( - pathMgr.getContextForSite(LocalizationType.COMMON_STATIC, - EDEXUtil.getEdexSite()), - SVCBU_PROPS_PATH); - if (sitePropsFile.exists()) 
{ - try (InputStream input = sitePropsFile.openInputStream()) { - svcbuProperties.load(input); - } catch (IOException | LocalizationException e) { - logger.error("Unable to load SITE level svcbu.properties file.", - e); - } - } - - return svcbuProperties; - } - - /** - * Returns the base lock directory for service backup. All site specific - * lock directories will be children to this directory. - * - * @return The {@code Path} that represents the base directory for service - * backup locks. - */ - public static Path getLockDir() { - String lockDir = SvcBackupUtil.getSvcBackupProperties() - .getProperty("LOCK_DIR"); - return Paths.get(lockDir); - } - - /** - * Returns the site-specific lock directory for service backup. - * - * @param siteID - * The 3-character site identifier. - * @return he {@code Path} that represents the site-specific directory for - * service backup locks. - */ - public static Path getLockDir(final String siteID) { - return getLockDir().resolve(siteID.toUpperCase()); - } - - /** - * Get the set of primary sites - * - * @return the primary sites - */ - public static Set getPrimarySites() { - Properties svcbuProps = SvcBackupUtil.getSvcBackupProperties(); - String siteList = EDEXUtil.getEdexSite(); - if (svcbuProps != null) { - String propVal = svcbuProps.getProperty("PRIMARY_SITES", "").trim(); - if (!propVal.isEmpty()) { - siteList = propVal; - } - } - - String[] sites = siteList.split(","); - Set retVal = new HashSet<>(sites.length, 1.0f); - Set validSites = Sets - .newHashSet(SiteAwareRegistry.getInstance().getActiveSites()); - for (String site : sites) { - String siteId = site.trim().toUpperCase(); - if (!siteId.isEmpty()) { - if (validSites.contains(siteId)) { - retVal.add(siteId); - } else { - final String msg = "Service backup primary site " + site - + " is not a currently activated site. Service backup and export grids tasks cannot be run for this site. Check the PRIMARY_SITES setting in svcbu.properties."; - logger.warn(msg); - } - } - } - - return retVal; - } -} diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py index 6ef8c6d45a..c4ddf674e7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py @@ -1,3479 +1,3480 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# serverConfig -- base GFE server configuration file -# -# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED. INSTEAD REFER TO THE -# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE. 
-# -# Baseline GFE server configuration -# -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/09/2013 #1571 randerso Changed projections to use the Java -# ProjectionType enumeration -# 10/03/2013 #2418 dgilling Update for new pSurge 2.0 data. -# 10/03/2013 #2424 randerso Change localTC to use dateutil instead of pytz -# to get correct offsets for Alaska -# 01/17/2014 #2719 randerso Added NHA domain -# 02/20/2014 #2824 randerso Added log message when local override files are not found -# 03/11/2014 #2897 dgilling Add new MHWM databases to default configuration. -# 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH. -# 04/17/2014 #2934 dgilling Remove alias for TPCSurgeProb D2D database. -# 05/09/2014 #3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP -# 06/20/2014 #3230 rferrel Added URMA25. -# 05/29/2014 #3224 randerso Added "SPC":8 to D2DDBVERSIONS -# 07/09/2014 #3146 randerso Removed unused import -# 07/10/2014 swhite Add surge and tropical threat WEs and their dependencies -# 01/08/2015 #15035 lshi add site WNJ -# 12/03/2014 #3866 rferrel Added GFS20 -# 01/13/2015 #3955 randerso Added definitions for NewTerrain database -# Added Topo to ISCPARMS -# 01/19/2015 #4014 dgilling Added ETSS. -# 02/11/2015 #4053 rferrel Added GLWN and moved GLERL to display only for Great Lakes sites.. -# 01/19/2015 #4014 dgilling Added ETSS. -# 02/24/2015 #16692 byin Added RTMA. Removed gfsLR and WaveWatch -# 03/19/2015 #4300 randerso Remove GUMa as it is obsolete (per Shannon White) -# 03/30/2015 #17288 bhunder Added Guam-RTMA to D2D models -# 03/30/2015 #17206 yteng Changed some parameters that are not rate parameters -# 03/31/2015 #17288 bhunder Added Weather Params for RTMA -# 04/03/2015 #4367 dgilling Change WindGust's time constraints back to TC1 -# for Fcst/Official. -# 04/08/2015 #4383 dgilling Define FireWX ISC configuration parameters. -# 04/15/2015 #17383 yteng Change localTC to fix error that time constraints -# being off -# Apr 25, 2015 4952 njensen Updated for new JEP API -# 04/20/2015 #4414 dgilling Add missing NWPSTrkngCG0 weather elements. -# 05/12/2015 #17144 bhunder Added RTMA model -# 05/29/2015 17496 ryu Changed parm definitions for Wave1-10 and Period1-10. -# -# 05/29/2015 #17144 bhunder Added weather Params for URMA25 and OCONUS RTMA -# 09/02/2015 #4819 rferrel Added HWRF. -# 09/09/2015 16287 amoore Additional validation of user input -# 10/07/2015 #4958 dgilling Added support for NationalBlend D2D data. -# 10/13/2015 #4961 randerso Updated NewTerrain/BaseTerrain database definitions -# 10/30/2015 #17940 jendrowski Responded to Code Review. Mostly syntactical changes. -# 11/05/2015 #18182 ryu Change D2DDBVERSIONS value for HPCERP to 24 -# 12/22/2015 #14152 jwatson Added Sky, Wind to GFSLAMPGrid parms -# 1/28/2016 #13910 amoore Wave model data should be available in 3-hrly timesteps -# 02/09/2016 #5283 nabowle Remove NGM support. -# 02/22/2016 #18161 wkwock Add NationalBlend model for AK, PR, HW -# 02/23/2016 #14845 jwatson Changed NamDNG5 to NamDNG for all sources and params. -# Changed D2DModels for CONUS and Alaska to -# namdng25 and AK-NamDNG3 -# 04/01/2016 18777 ryu Replace NCF ip addresses. 
-# 04/22/2016 #18896 wkwock Add more nationalBlend Model -# 06/01/2016 JCM removed tc3ng from officialdbs for wave/period elements; -# removed Wave_XX and Period_XX; removed Wave10, Period10; -# added databases for all sites to baseline -# 08/08/2016 #5747 randerso Support removal of wrapper.py -# 10/05/2016 19293 randerso Fixed units on Tropical and a few other weather elements -# 12/12/2016 #19596 bhunder Added "tp" to NationalBlend model D2DAccumulativeElements -# 02/20/2017 DCS18966 mdavis/pjendr. NIC adjustment: name changes and removal of obsolete -# smart inits(DCS 19490). Fixed addOptionalParms. -# 03/17/2017 19673 jmaloney Added Rip Current Probabilities (RipProb). -# 06/29/2017 6323 randerso Added P-ETSS model -# 07/19/2017 DCS19490 gpetrescu Removed AKwave10, Wave10 and Period10. -# 07/12/2017 6324 randerso Added TPCWindProb_Prelim model -# 07/12/2017 6253 randerso Updated for Standard Terrain -# 08/03/2017 #20054 bhunder Added changes for ETSS model and for ETSS-HiRes model. -# 10/03/2017 DR20432 arivera Replace GFS40 with GFS in SnowRatioGFS and remove -# GLOBHwave from SJU model databases. -# 11/28/2017 6539 randerso Made P-ETSS and TPCSurgeProb elements D2DAccumulativeElements -# 12/06/2017 DCS20267 psantos Add NWPS Rip Current Guidance -# 12/20/2017 20510 ryu changes to StormTotalSnow parameter -# 02/23/2018 #20395 wkwock Added NBM3.1 elements. -# 04/03/2018 DR20656 arivera Missing comma: "Dune Erosion Probability" in optionalParmsDict['marine'] -# 05/09/2018 DR20715 arivera Missing comma: groups['marineSites'] after 'AVAK' -# 06/18/2018 16729 ryu Remove tpHPC element from RFCQPF model and the smart init for the model. -# -#################################################################################################### - -## -# This is an incremental override file, indicating that the files at different -# localization levels will be combined. Incremental overrides are achieved by -# creating a localConfig file at a higher priority localization level that -# imports this base file. -# -# See the Configuration Guides->Server Configuration->Syntax for localConfig.py -# section of the GFE Online Help for more information. -## - - - -#---------------------------------------------------------------------------- -# USEFUL DEFINES -#---------------------------------------------------------------------------- - -import siteConfig,imp -import pprint -import re -import sys -import LogStream -from collections import defaultdict -BASELINE = getattr(siteConfig, 'BASELINE', 0) - -#D scfp=open('/localapps/logs/scdebug.log','w') -class dbConfig(object): - """Class to create GFE databases from modelDict""" - def __init__(self,modelDict): - self.modelDict=modelDict - self.dbs=[] - self.D2DMODELS=[] - self.D2DDBVERSIONS={} - self.D2DAccumulativeElements={} - self.INITMODULES={} - self.INITSKIPS={} - - def addConfiguredModels(self,ignoreList=[]): - """Setup model databases defined in dbConfigDict. - ignoreList can be used to filter out specific models - """ - for m in self.modelDict: - if m in ignoreList: - continue - # Don't allow BC model if regular is in ignore list - if m[-2:] == 'BC' and m[:-2] in ignoreList: - continue - self.addGfeDB(m,self.modelDict[m]) - return - def addGfeDB(self,modelname,dbConfigDict): - """Does all the work needed for adding a model to GFE from entries - in dbConfigDict. This populates dbs and sets various self - variables. 
- """ - if "DB" in dbConfigDict and "Parms" in dbConfigDict: - self.dbs.append((dbConfigDict["DB"],dbConfigDict["Parms"])) - if "D2DAccumulativeElements" in dbConfigDict: - self.D2DAccumulativeElements[modelname]=dbConfigDict["D2DAccumulativeElements"] - if "D2DDBVERSIONS" in dbConfigDict: - self.D2DDBVERSIONS[modelname]=dbConfigDict["D2DDBVERSIONS"] - if "D2DMODELS" in dbConfigDict: - self.D2DMODELS.append((dbConfigDict["D2DMODELS"],modelname)) - if "INITMODULES" in dbConfigDict: - if type(dbConfigDict["INITMODULES"]) is tuple: - self.INITMODULES[dbConfigDict["INITMODULES"][0]] = dbConfigDict["INITMODULES"][1] - else: - self.INITMODULES[dbConfigDict["INITMODULES"]]=[modelname] - if "INITSKIPS" in dbConfigDict: - self.INITSKIPS[modelname]=dbConfigDict["INITSKIPS"] - -#=============================================================================== -# Utility methods to manage GFE configuration -#=============================================================================== -def mergeModelDicts(baseDict,addDict): - """Combine serverConfig model dict and regional modelDict into one modelDict. - Settings in baseDict are maintained unless overridden in addDict. The merging - is done on a key by key basis of a specific model's dictionary (baseDict and - addDict are dictionaries of dictionaries) - This changes baseDict in place so the object passed in as baseDict is modified - in the caller's scope. - """ - for m,v in addDict.items(): - if m not in baseDict: - baseDict[m]=v - else: - for key,val in v.items(): - baseDict[m][key]=val - -def updateModelDict(modelDict,model,key,value): - """Udates a specific entry for a model in modelDict. model and key are dictionary - keys into modelDict and modelDict[model] respectively. If model is not defined - in modelDict, then a new entry is created. Otherwise, value replaces any existing - value in modelDict[model][key]. - This changes modelDict in place so the object passed in as modelDict is modified - in the caller's scope. - """ - if model in modelDict: - modelDict[model][key]=value - else: - modelDict[model]= {key : value} - -def alterModelDef(dbTuple, name=None, format=None, dbType=None, single=None, - official=None, numver=None, purgeAge=None): - """Alter GFE database definition. The definition is used in the dbs setting - and has form: - (name, format, type, single, official, numVer, purgeAge) - i.e., Practice = ("Fcst", GRID, "Prac", YES, NO, 1, 24) - - Won't use these exact names since some might conflict with builtins - Only supply what you want to change. To clone a model definition, just - supply name='newname' - """ - n,f,t,s,o,v,p=dbTuple - l=[] - for old,new in [(n,name),(f,format),(t,dbType),(s,single),(o,official), - (v,numver),(p,purgeAge)]: - if new is None: - l.append(old) - else: - l.append(new) - return tuple(l) - -def createModelDict(localsDict,dbs,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements, - INITMODULES,INITSKIPS): - """Convert serverConfig model configuration to a dictionary. This allows - legacy serverConfig settings in dbs,D2DMODELS,INITMODULES, etc. to be - maintained and then converted into a single dictionary where all settings - for a model are together. - - WARNING: There can only be one version of a model in the dbs list. Fcst - practice and test databases have to be handled separately. This is ok - because these databases are defined after any localConfig customizations - of the normal Fcst database. - - modelDict contains the following keys. 
Only define what is needed, i.e., - it is not required to have every key defined - "DB": Definition of the database, i.e., the first value in a dbs entry: - ("wrfems", GRID, "", NO, NO, 3, 0). This must be a tuple. The name - in the DB entry must be the same as the model name used as the key - into the modelDict variable. - - "Parms" : Definition of the weather element parameters in the database, - i.e., the second part of the dbs entry. This is a list of tuples. - - "D2DMODELS" : D2D metadata database name for the source model. - - "INITMODULES': Name of the SmartInit module. This should be just the module - name as a string, not a list. - - "D2DAccumulativeElements" : List of parms that are accumulative - - "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather - Element Browser. Defaults to 2 if not supplied. - - "INITSKIPS" : Used to skip model cycles. - - Example for a model: - - modelDict["CMCreg"]={ - "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0), - "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF, - PoP, SnowAmt, SnowRatio], TC3), - ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG), - ([QPF12, PoP12],TC12NG), - ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), ([MinT], MinTTC), - ], - "D2DMODELS": "Canadian-Reg", - "INITMODULES": "Local_CMCreg", - "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"], - "D2DDBVERSIONS": 3, - } - """ - # Create self initializing dictionary via collections.defaultdict - modelDict=defaultdict(dict) - parmsDict={} - tcDict={} - - for n,v in sorted(localsDict.items()): - if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]: - parmsDict[n]=v - elif type(v) is tuple and len(v)==3 and type(v[0]) is int: - tcDict[n]=v - - # Process dbs entries, i.e., model database definition - tcDict={} - for item in sorted(dbs): - plist=[] - parmTmpDict={} - pDict={} - for pt in item[1]: - parmsList=[] - # Try to find named parm setting - for p in pt[0]: - pname=p[0] - pDict[pname]=p - parmsList.append(pname) - - # Try to get a named time constraint - name=next((name for name,v in tcDict.items() if v == pt[1]), None) - if name is None: - name = repr(pt[1]) - tcDict[name]=pt[1] - if name in parmTmpDict: - parmTmpDict[name]+=parmsList - else: - parmTmpDict[name]=parmsList - - # This consolidates parms by time constraint and sorts parm names. - for tc in sorted(parmTmpDict.keys()): - theParms=[] - for p in sorted(parmTmpDict[tc]): - theParms.append(pDict[p]) - plist.append((theParms, tcDict[tc])) - - modelDict[item[0][0]]={'DB':item[0],'Parms':plist} - - for si,ml in list(INITMODULES.items()): - m=ml[0] - modelDict[m]['INITMODULES']=si - for m,v in list(D2DDBVERSIONS.items()): - modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m] - - for m,v in list(D2DAccumulativeElements.items()): - modelDict[m]['D2DAccumulativeElements']=v - for m,v in list(INITSKIPS.items()): - modelDict[m]['INITSKIPS']=v - for item in D2DMODELS: - if type(item) is str: - m=item - v=item - else: - v,m=item - if m in modelDict: - modelDict[m]['D2DMODELS']=v - else: - modelDict[m]={'D2DMODELS':v} - return modelDict - -def changeParm(modelDict,pname,value,modelList=['Fcst']): - """Alter a parm that is defined in modelDict Parm setting. - - pname: name of parm. This is a string not the parm definition - value: the parm definition tuple. If the None object, then the parm - will be deleted. - modelList: List of model names to check. An empty list will check all - models in modelDict. - Return: Nothing. modelDict is altered in place. 
- """ - if not modelList: - modelList=list(modelDict.keys()) - for m in modelList: - if m not in modelDict or 'Parms' not in modelDict[m] or \ - not checkForParm(modelDict[m]['Parms'],pname): - continue - - newpt=[] - # parms is tuple (parmList,TC) - for pList,tc in modelDict[m]['Parms']: - # This makes a copy of the list of parms, not a reference - # this is needed because we are changing the list in place. - theParms= list(pList) - match=False - for matchParm in (p for p in theParms if p[0] == pname): - match=True - theParms.remove(matchParm) - if match and value is not None: - theParms.append(value) - if theParms: - newpt.append((theParms,tc)) - if newpt != modelDict[m]['Parms']: - modelDict[m]['Parms'] = newpt - -def changeParmTC(modelDict,pname,newTC,modelList=['Fcst']): - """Alter a parm in that is defined in modelDict Parm setting. - - pname: name of parm. This is a string not the parm definition - newTC: the new Time Contraint (tuple) - modelList: List of model names to check. An empty list will check all - models in modelDict. - Return: Nothing. modelDict is altered in place. - """ - if not modelList: - modelList=list(modelDict.keys()) - for m in sorted(modelList): - if m not in modelDict or 'Parms' not in modelDict[m]: - continue -#d print m,"checkForParm=",checkForParm(modelDict[m]['Parms'],pname) - if not checkForParm(modelDict[m]['Parms'],pname): - continue - - newpt=[] - # Parms is tuple (parmList,TC) - for pList,tc in modelDict[m]['Parms']: - # This makes a copy of the list of parms, not a reference - # this is needed because we are changing the list in place. - theParms= list(pList) - matchParm=next((p for p in theParms if p[0] == pname),None) -#d print m,matchParm,tc,newTC,len(theParms) - if matchParm: - theParms.remove(matchParm) - newpt.append(([matchParm],newTC)) -#d print "Added",matchParm,newTC - if theParms: -#d print "restored",theParms," to",tc - newpt.append((theParms,tc)) - if newpt != modelDict[m]['Parms']: -#d print 'Updated model',m - modelDict[m]['Parms'] = newpt -#d print modelDict[m]['Parms'],'\n' - -def checkForParm(parmDef,pname): - """Check a model parm definition if a parm named pname is in it. - - parmDef: list of tuples, each tuple is a list of parms and a time - contraint. Call with modelDict[modelname]['Parms]. - pname: Name of parm (string). - Returns: Boolean True if found, or False - """ - for item in parmDef: - t=next((pt for pt in item[0] if pt[0] == pname),None) - if t is not None: - return True - return False - -def getParmNames(parmsDef): - """Return a list of parm names in a model parm definition - - parmsDef: list of tuples, each tuple is a list of parms and a time - constraint. Call with modelDict[modelname]['Parms]. - Returns: List of string parameter names - - Here's an example of how to remove unused parms from Fcst, this can - run in localConfig: - - parmsToRemove=[] - for p in getParmNames(modelDict['Fcst']): - pl=p.lower() - for t in ['period','swell','wave','surf', 'surge']: - if t in pl: - parmsToRemove.append(p) - break - removeParms(modelDict,'Fcst',parmsToRemove) - """ - result=[] - for pList,tc in parmsDef: - # p is the parmDef tuple where first item is the parm name - newParms=[p[0] for p in pList] - result+=newParms - return sorted(result) - -def printServerConfig(moduleObj,localsDict, logFile="/awips2/edex/logs/localConfig.log"): - """Dump out ServerConfig final settings. localsDict is a dictionary of - local variables in localConfig, normally locals(). 
- """ - # serverConfig log text - scText="" - try: - with open(logFile,"w") as fp: - # Print out dbs entries, i.e., model database definition - fp.write("Configuration for %s\n" % localsDict['SID']) - dbs=DATABASES - for item in sorted(dbs): - scText += "\ndbs[%s]: %s\n" % (item[0][0], str(item[0])) - scText += _dumpParms(item[1]) - - # Dump out serverConfig settings likely to be modified by localConfig - scvars=["D2DMODELS", "INITMODULES", - "D2DDBVERSIONS", "D2DAccumulativeElements", - "REQUEST_ISC", "SEND_ISC_ON_SAVE", - "SEND_ISC_ON_PUBLISH", "REQUESTED_ISC_PARMS", - "ExtraWEPrecision", "INITSKIPS", - "HazardKeys", - "MAX_USER_BACKGROUND_PROCESSES", - "AdditionalISCRouting", - "ignoreDatabases", - ] - - for item in scvars: - scText += "\n%s:\n" % item - obj=getattr(moduleObj,item,None) - if type(obj) is list: - obj.sort() - scText += pprint.pformat(obj) +'\n' - - # This prints out all variables named parms*, i.e., parmsNAM12 - for k in sorted(localsDict.keys()): - if k == "OFFICIALDBS" or re.match("parms[A-Z]+",k) is not None or \ - k == "extraISCparms": - scText += "\n%s:\n" % k - scText += _dumpParms(localsDict[k]) - scText += printModelDict(localsDict) - fp.write(scText) - except IOError as e: - LogStream.logProblem("printServerConfig open file problem "+logFile+" - log not created\n" +LogStream.exc(e)) - -def printModelDict(localsDict): - """Convert serverConfig model configuration to a dictionary. This writes - the dictionary as text. This does not create a usable modelDict, just one to - use to print out the dictionary as python code.""" - - modelDict={} - parmsDict={} - tcDict={} - dbs=DATABASES - scText="" - for n,v in list(localsDict.items()): - if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]: - parmsDict[n]=v - elif type(v) is tuple and len(v)==3 and type(v[0]) is int: - tcDict[n]=v - - scText += '\n' - for n in sorted(parmsDict): - scText += 'parmVar: %s = %s\n' % (n,repr(parmsDict[n])) - scText += '\n' - for n in sorted(tcDict): - scText += 'TC: %s = %s\n' % (n,repr(tcDict[n])) - scText += '\n' - - # Print out dbs entries, i.e., model database definition - for item in sorted(dbs): - plist=[] - parmTmpDict={} - for pt in item[1]: - parmsList=[] - # Try to find named parm setting - for p in pt[0]: - name=next((name for name,v in parmsDict.items() if v == p), None) - if name is not None: - parmsList.append(name) - else: - parmsList.append(p[0]) - theParms='&nlq(['+', '.join(parmsList)+'], ' - # Try to get a named time constraint - name=next((name for name,v in tcDict.items() if v == pt[1]), None) - if name is None: - name = repr(pt[1]) - if name in parmTmpDict: - parmTmpDict[name]+=parmsList - else: - parmTmpDict[name]=parmsList - # This consolidates parms by time constraint and sorts parm names. - for tc in sorted(parmTmpDict.keys()): - parmTmpDict[tc]=sorted(parmTmpDict[tc]) - theParms='&nlq(['+', '.join(parmTmpDict[tc])+'], ' - plist.append(theParms + tc +')&nrq') - - modelDict[item[0][0]]={'DB':item[0],'Parms':plist} - for si,ml in list(INITMODULES.items()): - m=ml[0] - entry=si - if len(ml) > 1: - # Multiple d2d models for smartinit - # Try to get model from si name - if si.find('Local_') == 0: - m=si[6:] - entry=(si,ml) - if m in modelDict: - # If a model has multiple SmartInit modules, try to best match which - # Smartinit module to assign to the model. 
- if 'INITMODULES' not in modelDict[m] or m in si: - modelDict[m]['INITMODULES']=entry - else: - modelDict[m]={'INITMODULES':entry} - - for m,v in list(D2DDBVERSIONS.items()): - if m in modelDict: - modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m] - else: - modelDict[m]={'D2DDBVERSIONS':D2DDBVERSIONS[m]} - - for m,v in list(D2DAccumulativeElements.items()): - if m in modelDict: - modelDict[m]['D2DAccumulativeElements']=v - else: - modelDict[m]={'D2DAccumulativeElements':v} - for m,v in list(INITSKIPS.items()): - if m in modelDict: - modelDict[m]['INITSKIPS']=v - else: - modelDict[m]={'INITSKIPS':v} - for item in D2DMODELS: - if type(item) is str: - m=item - v=item - else: - v,m=item - if m in modelDict: - modelDict[m]['D2DMODELS']=v - else: - modelDict[m]={'D2DMODELS':v} - - for m in sorted(modelDict): - text=pprint.pformat(modelDict[m],width=80,indent=0) - text=text.replace("'&nlq",'') - text=text.replace("&nrq'",'') - text=text.replace('"&nlq','') - text=text.replace('&nrq"','') - text=text.replace(", 'INITMODULES':",",\n'INITMODULES':") - text=text.replace(')]}','),\n ]\n}') - text=text.replace('\n','\n ') - scText += "modelDict['%s'] = {\n %s\n\n" % (m,text[1:]) - return scText - -def _dumpParms(parms): - """Pretty prints parms.""" - pDict={} - result="" - for item in parms: - if type(item) is not tuple: - # Not a parm definition! - return - pList,tc = item - for p in pList: - pDict[p[0]]=(p,tc) - for k in sorted(pDict.keys()): - result += " %s\n" % repr(pDict[k]) - return result - -def addOptionalParms(defaultTC,tcParmDict,parmDict,modelDict): - """Adds parms from optionalParmsDict to the Fcst database. - This is a convience function if most parms use the default time constraint. - Otherwise, its just as easy to hard code what needs to be added for a - optionalParmsDict entry. - - defaultTC: Default time constraint to if a parameter specific TC is not - defined in tcParmDict. - tcParmDict: Dictionary with keys of time constraints. Value is a list of - parameter names to be added with that time constraint. Empty - dictionary ok if everything should use the default. Example: - tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]} - parmDict: Parameter dictionary with keys of parameter name and value is - the parameter definition tuple. Keys must match keys in tcParmDict. - modelDict: The serverConfig modelDict dictionary. Must already have Fcst - defined. Changed in place. - Returns: The parameter definition added to Fcst - """ - - tcParms={defaultTC:[]} - for tc in tcParmDict: - tcParms[tc]=[] - if len(tcParmDict) == 0: - tcParmDict['dummyTC']=['dummyParm'] - for pname,value in parmDict.items(): - # Find the time constrait to use for this parm - theTC=defaultTC - for tc in tcParmDict: - if pname in tcParmDict[tc]: - theTC=tc - break - tcParms[theTC].append(value) - - theParms=[] - for tc in tcParms: - theParms.append((tcParms[tc],tc)) - modelDict['Fcst']['Parms'] += theParms - return theParms - -def addPowt(modelDict): - """This sets up PoWT parameters for in Fcst database. - """ - defaultTC=TC1 - # Use value of time constraint and string name of parm in tcParmDict - tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',] - } - return addOptionalParms(defaultTC,tcParmDict, - optionalParmsDict['powt'],modelDict) - -def addWinterWeatherProbs(modelDict): - """This sets up ER Winter Weather Probability parameters in the Fcst database. 
-def addWinterWeatherProbs(modelDict):
-    """Sets up the ER Winter Weather Probability parameters in the Fcst
-    database.
-    """
-    defaultTC=TC1
-    # Use the value of the time constraint and the string name of the parm in tcParmDict
-    tcParmDict={}
-    return addOptionalParms(defaultTC,tcParmDict,
-                            optionalParmsDict['winterProbs'],modelDict)
-
-def addRainfallProbs(modelDict):
-    """Sets up the WPC rainfall probability parameters in the Fcst database.
-    """
-    defaultTC=TC1
-    # Use the value of the time constraint and the string name of the parm in tcParmDict
-    tcParmDict={}
-    return addOptionalParms(defaultTC,tcParmDict,
-                            optionalParmsDict['rainfallProb'],modelDict)
-
-# Local-time based time constraints. These do not automatically account for
-# daylight saving time. The dst flag is 0 for standard time and manually
-# set to 1 for daylight time (if desired). The start is specified in
-# seconds local time, e.g., 6*HOUR would indicate 6am.
-def localTC(start,repeat,duration,dst):
-    timezone = SITES[GFESUITE_SITEID][3]
-    import dateutil.tz, datetime
-    tz = dateutil.tz.gettz(timezone)
-    local = datetime.datetime.now(tz)
-    delta = tz.utcoffset(local) - tz.dst(local)
-    offset = delta.days*86400 + delta.seconds
-    start = start - offset
-    if dst == 1:
-        start = start - 3600    # daylight saving flag
-    if start >= 3600 * 24:
-        start = start - 3600 * 24
-    elif start < 0:
-        start = start + 3600 * 24
-    return (start, repeat, duration)
-
-# Imports the named module. If the module does not exist, it is ignored.
-# If it exists but raises an error, the exception is propagated.
-# Returns 1 if the module was imported, 0 otherwise.
-def siteImport(modName):
-    try:
-        fp, path, des = imp.find_module(modName)
-        if fp:
-            fp.close()
-    except ImportError:
-        LogStream.logEvent("No " + modName + " file found, using baseline settings.")
-        return 0
-    globals()[modName] = __import__(modName)
-    return 1
-
-def doIt():
-    # Import the local site configuration file (if it exists)
-    import doConfig
-    import VTECPartners
-    (models, projections, vis, wx, desDef, allSites, domain, siteId, timeZone, officeTypes) = \
-        doConfig.parse(GFESUITE_SITEID, DATABASES, types, visibilities, SITES,
-                       allProjections)
-    IFPConfigServer.models = models
-    IFPConfigServer.projectionData = projections
-    IFPConfigServer.weatherVisibilities = vis
-    IFPConfigServer.weatherTypes = wx
-    IFPConfigServer.discreteDefinitions = desDef
-    IFPConfigServer.allSites = allSites
-    IFPConfigServer.officeTypes = officeTypes
-    IFPConfigServer.siteID = siteId
-    IFPConfigServer.timeZone = timeZone
-    IFPConfigServer.d2dModels = doConfig.d2dParse(D2DMODELS)
-    IFPConfigServer.netCDFDirs = doConfig.netcdfParse(NETCDFDIRS)
-    IFPConfigServer.satData = doConfig.parseSat(SATDATA)
-    IFPConfigServer.domain = domain
-
-    (serverHost, mhsid, \
-     rpcPort, \
-     initMethods, accumulativeD2DElements, \
-     initSkips, d2dVersions, \
-     logFilePurgeAfter, \
-     prdDir, baseDir, \
-     extraWEPrecision, \
-     tableFetchTime, \
-     autoConfigureNotifyTextProd, \
-     iscRoutingTableAddress, \
-     requestedISCsites, requestISC, \
-     sendiscOnSave, sendiscOnPublish, \
-     requestedISCparms, \
-     transmitScript) \
-        = doConfig.otherParse(list(SITES.keys()), \
-          GFESUITE_SERVER, GFESUITE_MHSID, \
-          GFESUITE_PORT, INITMODULES,
-          D2DAccumulativeElements,
-          INITSKIPS, D2DDBVERSIONS, LOG_FILE_PURGE_AFTER,
-          GFESUITE_PRDDIR, GFESUITE_HOME,
-          ExtraWEPrecision, VTECPartners.VTEC_REMOTE_TABLE_FETCH_TIME,
-          AUTO_CONFIGURE_NOTIFYTEXTPROD, ISC_ROUTING_TABLE_ADDRESS,
-          REQUESTED_ISC_SITES, REQUEST_ISC, SEND_ISC_ON_SAVE, SEND_ISC_ON_PUBLISH,
-          REQUESTED_ISC_PARMS, TRANSMIT_SCRIPT)
-    IFPConfigServer.serverHost = serverHost
-    IFPConfigServer.mhsid = mhsid
-    IFPConfigServer.rpcPort = rpcPort
-    IFPConfigServer.initMethods = initMethods
-    IFPConfigServer.accumulativeD2DElements = accumulativeD2DElements
-    IFPConfigServer.initSkips = initSkips
-    IFPConfigServer.d2dVersions = d2dVersions
-    IFPConfigServer.logFilePurgeAfter = logFilePurgeAfter
-    IFPConfigServer.prdDir = prdDir
-    IFPConfigServer.baseDir = baseDir
-    IFPConfigServer.extraWEPrecision = extraWEPrecision
-    IFPConfigServer.tableFetchTime = tableFetchTime
-    IFPConfigServer.autoConfigureNotifyTextProd = autoConfigureNotifyTextProd
-    IFPConfigServer.iscRoutingTableAddress = iscRoutingTableAddress
-    IFPConfigServer.requestedISCsites = requestedISCsites
-    IFPConfigServer.requestISC = requestISC
-    IFPConfigServer.sendiscOnSave = sendiscOnSave
-    IFPConfigServer.sendiscOnPublish = sendiscOnPublish
-    IFPConfigServer.requestedISCparms = requestedISCparms
-    IFPConfigServer.transmitScript = transmitScript
-    IFPConfigServer.iscRoutingConfig = doConfig.parseAdditionalISCRouting(AdditionalISCRouting)
-
-def getSimpleConfig():
-    return IFPConfigServer
-
-GFESUITE_SITEID = siteConfig.GFESUITE_SITEID
-GFESUITE_MHSID = siteConfig.GFESUITE_MHSID
-GFESUITE_SERVER = siteConfig.GFESUITE_SERVER
-GFESUITE_HOME = siteConfig.GFESUITE_HOME
-GFESUITE_PORT = int(siteConfig.GFESUITE_PORT)
-#GFESUITE_DATDIR = siteConfig.GFESUITE_DATDIR
-GFESUITE_LOGDIR = siteConfig.GFESUITE_LOGDIR
-GFESUITE_PRDDIR = siteConfig.GFESUITE_PRDDIR
-#GFESUITE_SHPDIR = siteConfig.GFESUITE_SHPDIR
-#GFESUITE_TOPODIR = siteConfig.GFESUITE_TOPODIR
-#GFESUITE_VTECDIR = siteConfig.GFESUITE_VTECDIR
-
-SID = GFESUITE_SITEID
-
-# modelDict is a master configuration dictionary for all GFE databases.
-# Create a self-initializing dictionary via collections.defaultdict.
-modelDict=defaultdict(dict)
-
-# ignoreDatabases is used when executing the final configuration to ignore
-# certain models. The new paradigm with modelDict is to have one master
-# modelDict and ignore datasets for specific regions or groups. Sites can
-# add to or remove from ignoreDatabases in their localConfig.
-ignoreDatabases=[]
-
-# Groups are a way of setting up lists of sites that share special or
-# optionally used methodology, for example, the Probability of Weather Type
-# methodology.
-groups={}
-groups['ALASKA_SITES'] = ['AFG', 'AJK', 'ALU', 'AER', 'ACR', 'AFC', 'VRH', 'AAWU', 'AVAK']
-groups['GreatLake_SITES'] = ['LOT', 'MKX', 'GRB', 'DLH', 'MQT', 'APX', 'GRR', 'DTX',
-                             'IWX', 'CLE', 'BUF', 'PBZ', 'ILN', 'IND', 'ILX', 'MPX', 'FGF']
-groups['CONUS_EAST_SITES'] = ['ALY', 'AKQ', 'APX', 'BGM', 'BMX', 'BOX', 'BTV', 'BUF',
-                              'CAE', 'CAR', 'CHS', 'CLE', 'CTP', 'DTX', 'FFC', 'GRR',
-                              'GSP', 'GYX', 'ILM', 'ILN', 'IND', 'JAN', 'JAX', 'JKL',
-                              'LCH', 'LMK', 'LWX', 'MEG', 'MFL', 'MHX', 'MLB', 'MOB',
-                              'MQT', 'MRX', 'OKX', 'PAH', 'PBZ', 'PHI', 'RAH', 'RLX',
-                              'RNK', 'TAE', 'TBW', 'ALR', 'RHA', 'TAR', 'TIR']
-groups['RFC_SITES'] = ["ACR", "ALR", "FWR", "KRF", "MSR", "ORN", "PTR",
-                       "RHA", "RSA", "STR", "TAR", "TIR", "TUA"]
-
-siteRegion={}
-# need to account for RFCs?
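# A quick, illustrative sanity check of the localTC() arithmetic above,
# assuming a hypothetical site in US/Central (UTC-6 standard time); HOUR is
# defined locally here just for the sketch. A 6am local standard-time start
# converts to 12Z.
#
#     import datetime
#     import dateutil.tz
#
#     HOUR = 3600
#     tz = dateutil.tz.gettz("US/Central")
#     now = datetime.datetime.now(tz)
#     delta = tz.utcoffset(now) - tz.dst(now)      # standard-time UTC offset: -6h
#     offset = delta.days * 86400 + delta.seconds  # -21600 seconds
#     start = 6 * HOUR - offset                    # 21600 - (-21600) = 43200
#     print(start // HOUR)                         # -> 12 (i.e., 12Z)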
-siteRegion['AR'] = groups['ALASKA_SITES'] -siteRegion['CR'] = ['ABR','APX','ARX','BIS','BOU','CYS','DDC','DLH','DMX','DTX', - 'DVN','EAX','FGF','FSD','GID','GJT','GLD','GRB','GRR','ICT', - 'ILX','IND','IWX','JKL','LBF','LMK','LOT','LSX','MKX','MPX', - 'MQT','OAX','PAH','PUB','RIW','SGF','TOP','UNR'] -siteRegion['ER'] = ['AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE', - 'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI', - 'RAH','RLX','RNK'] -siteRegion['PR'] = ['GUM','HFO','PBP','PPG'] -siteRegion['SR'] = ['ABQ','AMA','BMX','BRO','CRP','EPZ','EWX','FFC','FWD','HGX', - 'HUN','JAN','JAX','KEY','LCH','LIX','LUB','LZK','MAF','MEG', - 'MFL','MLB','MOB','MRX','OHX','OUN','SHV','SJT','SJU','TAE', - 'TBW','TSA'] -siteRegion['WR'] = ['BOI','BYZ','EKA','FGZ','GGW','HNX','LKN','LOX','MFR','MSO', - 'MTR','OTX','PDT','PIH','PQR','PSR','REV','SEW','SGX','SLC', - 'STO','TFX','TWC','VEF'] - -groups['OCONUS_SITES'] = groups['ALASKA_SITES'] + siteRegion['PR'] + ['SJU'] - -myRegion='ALL' -for r in siteRegion: - if SID in siteRegion[r]: - myRegion=r - break - -groups['powt']=list(groups['OCONUS_SITES']+ siteRegion['CR'] + siteRegion['ER'] + siteRegion['SR'] + siteRegion['WR']) -groups['marineSites']=[ - # CONUS WFOs - "CAR","GYX","BOX","OKX","PHI","LWX","AKQ","MHX","ILM","CHS", - "BRO","CRP","HGX","LCH","LIX","MOB","TAE","TBW","KEY","MFL", - "MLB","JAX","SJU", - "SEW","PQR","MFR","EKA","MTR","LOX","SGX", - # AR sites - 'AFC', 'AFG', 'AJK', 'AER', 'ALU', 'VRH', 'AVAK', - # OPC Atlantic and Pacific - 'ONA', 'ONP', - # NHC/TAFB Pacific and Atlantic, Storm Surge - 'NH1', 'NH2', 'NHA', - # HFO Marine, GUM - 'HFO', 'HPA', 'GUM', - ] - -groups['winterProbs']= [ - # ER sites - 'AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE', - 'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI', - 'RAH','RLX','RNK', - #CR sites - 'ABR','BIS','BOU','CYS','DDC','DMX','FGF','FSD','GLD','GRB', - 'ICT','IND','IWX','JKL','LMK','LOT','MKX','MPX','MQT','OAX', - 'PAH','PUB','SGF','GJT', - #SR sites - 'FFC','LUB','MRX','OUN','TSA', - #WR sites - 'FGZ','GGW','HNX','LKN','MFR','MSO','OTX','PDT','REV','SEW', - 'SGX','SLC','STO' - ] - -groups['rainfallProbs'] = ["BOX"] - -#--------------------------------------------------------------------------- -# -# Weather Element configuration section. 
-# -#--------------------------------------------------------------------------- - -SCALAR = 'Scalar' -VECTOR = 'Vector' -WEATHER = 'Weather' -DISCRETE = 'Discrete' -YES = 1 -NO = 0 - -#SCALAR, VECTOR -# name/type/units/description/max/min/precision/rateParm/ -#WEATHER -# name/WEATHER/units/description/ -#DISCRETE -# keyDef = [(keySym, keyDesc), (keySym, keyDesc)] -# name/DISCRETE/units/description/overlapCapable/keyDef/ - -# Standard Public Weather Elements -SID = GFESUITE_SITEID - -maxTempVal=140.0 -minTempVal=-100.0 -maxTdVal=140.0 -minTdVal=-100.0 -maxQpfVal=10.0 -maxIceVal=5.0 -Temp = ("T", SCALAR, "F", "Surface Temperature", maxTempVal, minTempVal, 0, NO) -Td = ("Td", SCALAR, "F", "Dewpoint", maxTdVal, minTdVal, 0, NO) -MaxT = ("MaxT", SCALAR, "F", "Maximum Temperature", maxTempVal, minTempVal, 0, NO) -MinT = ("MinT", SCALAR, "F", "Minimum Temperature", maxTempVal, minTempVal, 0, NO) -HeatIndex = ("HeatIndex", SCALAR, "F", "Heat Index", maxTempVal, -80.0, 0, NO) -WindChill = ("WindChill", SCALAR, "F", "Wind Chill", 120.0, -120.0, 0, NO) -QPF = ("QPF", SCALAR, "in", "QPF", maxQpfVal, 0.0, 2, YES) -Wind = ("Wind", VECTOR, "kts", "Surface Wind", 125.0, 0.0, 0, NO) -WindGust = ("WindGust", SCALAR, "kts", "Wind Gust", 125.0, 0.0, 0, NO) -# special for TPC hurricane winds -HiWind = ("Wind", VECTOR, "kts", "Surface Wind", 200.0, 0.0, 0, NO) -Weather = ("Wx", WEATHER, "wx", "Weather") -IceAcc = ("IceAccum", SCALAR, "in", "Ice Accumulation", maxIceVal, 0.0, 2, YES) -StormTotalIce = ('StormTotalIce', SCALAR, 'in', 'Storm Total Ice', maxIceVal, 0.0, 2, YES) -SnowAmt = ("SnowAmt", SCALAR, "in", "Snowfall amount", 20.0, 0.0, 1, YES) -StormTotalSnow = ("StormTotalSnow", SCALAR, "in","Storm Total Snow", 180.0, 0.0, 1, NO) -PoP = ("PoP", SCALAR, "%", "Prob of Precip", 100.0, 0.0, 0, NO) -PoP6 = ("PoP6", SCALAR, "%", "Prob of Precip (6hr)", 100.0, 0.0, 0, NO) -PoP12 = ("PoP12", SCALAR, "%", "Prob of Precip (12hr)", 100.0, 0.0, 0, NO) -TstmPrb3 = ("TstmPrb3", SCALAR, "%", "Prob of Tstorm (3hr)", 100.0, 0.0, 0, NO) -TstmPrb6 = ("TstmPrb6", SCALAR, "%", "Prob of Tstorm (6hr)", 100.0, 0.0, 0, NO) -TstmPrb12 = ("TstmPrb12", SCALAR, "%", "Prob of Tstorm (12hr)", 100.0, 0.0, 0, NO) -Sky = ("Sky", SCALAR, "%", "Sky Condition", 100.0, 0.0, 0, NO) -FzLevel = ("FzLevel", SCALAR, "ft", "Freezing level", 30000.0, 0.0, 0, NO) -SnowLevel = ("SnowLevel", SCALAR, "ft", "Snow Level", 18000.0, 0.0, 0, NO) -RH = ("RH", SCALAR, "%", "Relative Humidity", 100.0, 0.0, 0, NO) - -# DR20541 and 20482 - add collaborate PoP, SnowAmt, QPF and ndfd QPF tools -PoP12hr = ("PoP12hr", SCALAR, "%", "12 hr Chance of Precip", 100.0, 0.0, 0, NO) -QPF6hr = ("QPF6hr", SCALAR, "in", "6 hr Precipitation (in)", maxQpfVal, 0.0, 2, YES) -SnowAmt6hr = ("SnowAmt6hr", SCALAR, "in", "6 hr Snowfall", 30.0, 0.0, 1, YES) - -# Cobb SnowTool included. 
-SnowRatio = ('SnowRatio', SCALAR, 'none', 'Snow Ratio', 40.0, 0.0, 1, NO)
-#totalVV = ('totalVV', SCALAR, 'ubar/s', 'Total VV', 400.0, 0.0, 0, YES)
-cape = ("cape", SCALAR, "1unit", "CAPE", 8000.0, 0.0, 1, NO)
-ApparentT = ("ApparentT", SCALAR, "F", "Apparent Temperature", maxTempVal, -120.0, 0, NO)
-LkSfcT = ("LkSfcT", SCALAR, "C", "Lake Surface T", 40.0, -2.0, 1, NO)
-SnowMap = ("SnowMap", SCALAR, "in", "Snowfall Map", 20.0, 0.0, 1, NO)
-StormTotalQPF = ('StormTotalQPF', SCALAR, 'in', 'Storm Total QPF (in)', 36.0, 0.0, 2, NO)
-SeasonTotalSnow = ('SeasonTotalSnow', SCALAR, 'in', 'Season Total Snow (in)', 150.0, 0.0, 2, NO)
-
-# Fire Weather Weather Elements
-LAL = ("LAL", SCALAR, "cat", "Lightning Activity Level", 6.0, 1.0, 0, NO)
-CWR = ("CWR", SCALAR, "%", "Chance of Wetting Rain", 100.0, 0.0, 0, NO)
-Haines = ("Haines", SCALAR, "cat", "Haines Index", 6.0, 2.0, 0, NO)
-MixHgt = ("MixHgt", SCALAR, "ft", "Mixing Height", 20000.0, 0.0, 0, NO)
-Wind20ft = ("Wind20ft", VECTOR, "kts", "20ft. Wind", 125.0, 0.0, 0, NO)
-FreeWind = ("FreeWind", VECTOR, "kts", "Free Air Wind", 125.0, 0.0, 0, NO)
-TransWind = ("TransWind", VECTOR, "kts", "Transport Wind", 125.0, 0.0, 0, NO)
-Stability = ("Stability", SCALAR, "cat", "Stability", 6.0, 1.0, 0, NO)
-HrsOfSun = ("HrsOfSun", SCALAR, "hrs", "Hours of Sun", 24.0, 0.0, 1, NO)
-MarineLayer = ("MarineLayer", SCALAR, "ft", "Depth of Marine Layer", 20000.0, 0.0, 0, NO)
-InvBurnOffTemp = ("InvBurnOffTemp", SCALAR, "F", "Inversion Burn-off Temperature", 120.0, -30.0, 0, NO)
-VentRate = ("VentRate", SCALAR, "kt*ft", "VentRate", 500000.0, 0.0, 0, NO)
-DSI = ("DSI", SCALAR, "index", "DSI", 6.0, 0.0, 0, NO)
-MaxRH = ("MaxRH", SCALAR, "%", "Maximum Relative Humidity", 100.0, 0.0, 0, NO)
-MinRH = ("MinRH", SCALAR, "%", "Minimum Relative Humidity", 100.0, 0.0, 0, NO)
-Wetflag = ("Wetflag", SCALAR, "yn", "1300LT WetFlag", 1.0, 0.0, 0, NO)
-Ttrend = ("Ttrend", SCALAR, "F", "24hr Temperature Trend", 50.0, -50.0, 0, NO)
-RHtrend = ("RHtrend", SCALAR, "F", "24hr Relative Humidity Trend", 100.0, -100.0, 0, NO)
-
-# HPC Delta weather elements
-DeltaMinT = ('DeltaMinT', SCALAR, 'F', 'DeltaMinT', 130.0, -80.0, 0, NO)
-DeltaMaxT = ('DeltaMaxT', SCALAR, 'F', 'DeltaMaxT', 130.0, -80.0, 0, NO)
-DeltaWind = ("DeltaWind", VECTOR, "kts", "Surface Delta Wind", 125.0, 0.0, 0, NO)
-DeltaSky = ("DeltaSky", SCALAR, "%", "Delta Sky Condition", 100.0, -100.0, 0, NO)
-DeltaPoP = ("DeltaPoP", SCALAR, "%", "Delta Prob of Precip", 100.0, -100.0, 0, NO)
-
-# Special LAPS parms
-Radar = ("Radar", SCALAR, "dbz", "Radar Reflectivity", 80.0, -20.0, 0, NO)
-
-# RTMA parms
-QPE = ("QPE", SCALAR, "in", "QPE", maxQpfVal, 0.0, 2, YES)
-#if SID in groups['ALASKA_SITES']:  # not sure if this needs to be like that
-if SID in groups['OCONUS_SITES']:
-    TUnc = ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 20.0, 0.0, 0, NO)
-    TdUnc = ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 25.0, 0.0, 0, NO)
-else:
-    TUnc = ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 10.0, 0.0, 0, NO)
-    TdUnc = ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 15.0, 0.0, 0, NO)
-# DR17144
-SkyUnc = ("SkyUnc", SCALAR, "%", "Sky Condition Uncertainty", 100.0, 0.0, 0, NO)
-WSpdUnc = ("WSpdUnc", SCALAR, "kts", "WSpd Anl Uncertainty", 12.0, 0.0, 0, NO)
-WDirUnc = ("WDirUnc", SCALAR, "deg", "WDir Anl Uncertainty", 10.0, 0.0, 0, NO)
-VisUnc = ("VisUnc", SCALAR, "SM", "Vsby Anl Uncertainty", 10.0, 0.0, 2, NO)
-# DCS 17288
-PressUnc = ("PressUnc", SCALAR, "Pa", "Press Anl Uncertainty", 110000.0, 0.0, 2, NO)
-Pressure = ("Pressure", SCALAR, "Pa", "Pressure", 110000.0, 0.0, 2, NO)
-WGustUnc = ("WGustUnc", SCALAR, "kts", "WGust Anl Uncertainty", 12.0, 0.0, 0, NO)
-
-# NamDNG parms
-QPF3 = ("QPF3", SCALAR, "in", "3HR QPF", maxQpfVal, 0.0, 2, YES)
-QPF6 = ("QPF6", SCALAR, "in", "6HR QPF", maxQpfVal, 0.0, 2, YES)
-QPF12 = ("QPF12", SCALAR, "in", "12HR QPF", maxQpfVal, 0.0, 2, YES)
-Vis = ("Vis", SCALAR, "SM", "Visibility", 10.0, 0.0, 2, NO)
-SnowAmt6 = ("SnowAmt6", SCALAR, "in", "Snowfall amount (6hr)", 20.0, 0.0, 1, YES)
-
-MaxT3 = ("MaxT3", SCALAR, "F", "3hr Maximum Temperature", maxTempVal, minTempVal, 0, NO)
-MinT3 = ("MinT3", SCALAR, "F", "3hr Minimum Temperature", maxTempVal, minTempVal, 0, NO)
-MaxRH3 = ("MaxRH3", SCALAR, "%", "3hr Maximum Relative Humidity", 100.0, 0.0, 0, NO)
-
-# Parms for 'SAT' Satellite data
-SatVisE = ("VisibleE", SCALAR, "count", "Satellite Albedo %", 255.0, 0.0, 0, NO)
-SatIR11E = ("IR11E", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)
-SatIR13E = ("IR13E", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)
-SatIR39E = ("IR39E", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)
-SatWVE = ("WaterVaporE", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)
-SatFogE = ("FogE", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)
-
-SatVisW = ("VisibleW", SCALAR, "count", "Satellite Albedo %", 255.0, 0.0, 0, NO)
-SatIR11W = ("IR11W", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)
-SatIR13W = ("IR13W", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)
-SatIR39W = ("IR39W", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)
-SatWVW = ("WaterVaporW", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)
-SatFogW = ("FogW", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)
-
-# TPC Wind Probability parms
-prob34 = ("prob34", SCALAR, "%", "WS34 CPROB", 100.0, 0.0, 0, NO)
-prob50 = ("prob50", SCALAR, "%", "WS50 CPROB", 100.0, 0.0, 0, NO)
-prob64 = ("prob64", SCALAR, "%", "WS64 CPROB", 100.0, 0.0, 0, NO)
-pws34 = ("pws34", SCALAR, "%", "34WSIPROB", 100.0, 0.0, 0, NO)
-pws50 = ("pws50", SCALAR, "%", "50WSIPROB", 100.0, 0.0, 0, NO)
-pws64 = ("pws64", SCALAR, "%", "64WSIPROB", 100.0, 0.0, 0, NO)
-pwsD34 = ("pwsD34", SCALAR, "%", "Day34WSIPROB", 100.0, 0.0, 0, NO)
-pwsN34 = ("pwsN34", SCALAR, "%", "Night34WSIPROB", 100.0, 0.0, 0, NO)
-pwsD64 = ("pwsD64", SCALAR, "%", "Day64WSIPROB", 100.0, 0.0, 0, NO)
-pwsN64 = ("pwsN64", SCALAR, "%", "Night64WSIPROB", 100.0, 0.0, 0, NO)
-pws34int = ("pws34int", SCALAR, "%", "34WSIntPROB", 100.0, 0.0, 0, NO)
-pws64int = ("pws64int", SCALAR, "%", "64WSIntPROB", 100.0, 0.0, 0, NO)
-
-# Surge parms for HLS/TCV
-InundationMax = ("InundationMax", SCALAR, "ft", "Max Inundation", 30.0, -100.0, 1, NO)
-InundationTiming = ("InundationTiming", SCALAR, "ft", "Incremental Inundation", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMSL = ("SurgeHtPlusTideMSL", SCALAR, "ft", "Surge above MSL", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMLLW = ("SurgeHtPlusTideMLLW", SCALAR, "ft", "Surge above MLLW", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMHHW = ("SurgeHtPlusTideMHHW", SCALAR, "ft", "Surge above MHHW", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideNAVD = ("SurgeHtPlusTideNAVD", SCALAR, "ft", "Surge above NAVD88", 30.0, -100.0, 1, NO)
-
-# parms for storm surge collaboration
-SShazardKeys = [("",""), ("SS.A", "STORM SURGE WATCH"), ("SS.W", "STORM SURGE WARNING")]
-ProposedSS = ("ProposedSS", DISCRETE, "wwa", "Proposed StormSurge Hazards", YES, SShazardKeys, 7)
-tempProposedSS = ("tempProposedSS", DISCRETE, "wwa", "Temp Proposed StormSurge Hazards",
-                  YES, SShazardKeys, 4)
-InitialSS = ("InitialSS", DISCRETE, "wwa", "Initial StormSurge Hazards",
-             YES, SShazardKeys, 4)
-DiffSS = ("DiffSS", SCALAR, "None", "Difference StormSurge Hazards", 2.0, -1.0, 0, NO)
-
-# parms for tropical cyclone threat graphics
-Threat4Keys = [("None","None to Little"), ("Elevated","Elevated"), ("Mod", "Moderate"), ("High", "High"), ("Extreme","Extreme"),]
-
-FloodingRainThreat = ("FloodingRainThreat", DISCRETE, "cat", "Flooding Rain Threat", NO, Threat4Keys,2)
-StormSurgeThreat = ("StormSurgeThreat", DISCRETE, "cat", "Storm Surge Threat", NO, Threat4Keys,2)
-WindThreat = ("WindThreat", DISCRETE, "cat", "Wind Threat", NO, Threat4Keys,2)
-TornadoThreat = ("TornadoThreat", DISCRETE, "cat", "Tornado Threat", NO, Threat4Keys,2)
-# 09/13/2016 JCM changed precision of QPFtoFFGRatio to 2, max from 8 to 1000
-QPFtoFFGRatio = ("QPFtoFFGRatio", SCALAR, "1", "QPF to FFG Ratio", 1000.0, 0.0, 2, NO)
-
-# Hazards
-HazardKeys = []
-HazardKeys.append(("", ""))  # 1st one must be None
-import VTECTable
-kys = list(VTECTable.VTECTable.keys())
-kys.sort()
-for k in kys:
-    HazardKeys.append((k, VTECTable.VTECTable[k]['hdln']))
-
-# H-VTEC keys - will someday add these back in
-#("hydroER", "Hydro - Excessive Rainfall"),
-#("hydroSM", "Hydro - Snow melt"),
-#("hydroRS", "Rain and Snow melt"),
-#("hydroDM", "Dam or Levee Failure"),
-#("hydroGO", "Glacier-Dammed Lake Outburst"),
-#("hydroIJ", "Ice Jam"),
-#("hydroIC", "Rain and/or Snow melt and/or Ice Jam"),
-
-Hazards = ("Hazards", DISCRETE, "wwa", "Hazards", YES, HazardKeys, 4)
-
-# Scalar/Vector Weather Elements that Require Extra Precision (due to their
-# use in calculations). Either form may be used.
-ExtraWEPrecision = []
-
-# Parms for ESTOFS
-AstroTide = ("AstroTide", SCALAR, "ft", "Astro Tide", 20.0, -8.0, 1, NO)
-StormSurge = ("StormSurge", SCALAR, "ft", "Storm Surge", 30.0, -5.0, 1, NO)
-
-# Parms for ETSS and ETSSHiRes
-SurgeTide = ("SurgeTide", SCALAR, "ft", "Surge Tide", 20.0, -8.0, 1, NO)
-
-# Parm for Aviation/GFSLAMPGrid
-CigHgt = ("CigHgt", SCALAR, "ft", "Ceiling Height", 25000.0, -100.0, 0, NO)
-
-# Parms for NationalBlend
-QPF1 = ("QPF1", SCALAR, "in", "1HR QPF", maxQpfVal, 0.0, 2, YES)
-PPI01 = ('PPI01', SCALAR, '%', '1-H Precip Potential Index', 100.0, 0.0, 0, NO)
-PPI06 = ('PPI06', SCALAR, '%', '6-H Precip Potential Index', 100.0, 0.0, 0, NO)
-PositiveEnergyAloft = ("PositiveEnergyAloft", SCALAR, "j/kg", "Positive energy aloft", 500.0, 0.0, 1, NO)
-NegativeEnergyLowLevel = ("NegativeEnergyLowLevel", SCALAR, "j/kg", "Negative energy in the low levels", 0.0, -500.0, 1, NO)
-SnowAmt01 = ("SnowAmt01", SCALAR, "in", "1-h Snow Accumulation", 20.0, 0.0, 1, YES)
-IceAccum01 = ("IceAccum01", SCALAR, "inch", "1-h Ice Accumulation", maxIceVal, 0.0, 3, NO)
-IceAccum = ("IceAccum", SCALAR, "inch", "6-h Ice Accumulation", 13.0, 0.0, 3, NO)
-TstmPrb1 = ("TstmPrb1", SCALAR, "%", "1-h SREF-based Prob. of a Thunderstorm", 100.0, 0.0, 0, NO)
-DryTstmPrb = ("DryTstmPrb", SCALAR, "%", "3-h SREF-based Prob. of a Dry Thunderstorm", 100.0, 0.0, 0, NO)
-WGS50pct = ("WGS50pct", SCALAR, "kts", "10-m Wind Gust", 125.0, 0.0, 0, NO)
-WS50Prcntl30m = ("WS50Prcntl30m", SCALAR, "kts", "30-m Wind Speed", 125.0, 0.0, 0, NO)
-WS50Prcntl80m = ("WS50Prcntl80m", SCALAR, "kts", "80-m Wind Speed", 125.0, 0.0, 0, NO)
-Vis50pct = ("Vis50pct", SCALAR, "SM", "Visibility", 10.0, 0.0, 3, NO)
-T50pct = ("T50pct", SCALAR, "F", "Air Temperature", maxTempVal, minTempVal, 1, NO)
-PMSL10pct = ("PMSL10pct", SCALAR, "mb", "10th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-PMSL50pct = ("PMSL50pct", SCALAR, "mb", "50th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-PMSL90pct = ("PMSL90pct", SCALAR, "mb", "90th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-FosBerg = ("FosBerg", SCALAR, "none", "Fosberg Fire Weather Index", 100.0, 0.0, 0, NO)
-
-
-#---------------------------------------------------------------------------
-#
-# Weather configuration section
-#
-#---------------------------------------------------------------------------
-
-# list of possible visibilities
-visibilities = ['', '0SM', '1/4SM', '1/2SM', '3/4SM', '1SM', '11/2SM',
-                '2SM', '21/2SM', '3SM', '4SM', '5SM', '6SM', 'P6SM']
-
-# list of possible coverages and probabilities
-NOCOV = ('', 'No Coverage')
-ISOD = ('Iso', 'Isolated')
-SCT = ('Sct', 'Scattered')
-NUM = ('Num', 'Numerous')
-WIDE = ('Wide', 'Widespread')
-OCNL = ('Ocnl', 'Occasional')
-SCHC = ('SChc', 'Slight Chance Of')
-CHC = ('Chc', 'Chance Of')
-LKLY = ('Lkly', 'Likely')
-DEFN = ('Def', 'Definite')
-PATCHY = ('Patchy', 'Patchy')
-AREAS = ('Areas', 'Areas of')
-FQT = ('Frq', 'Frequent')
-BRIEF = ('Brf', 'Brief')
-PERIODS = ('Pds', 'Periods of')
-INTM = ('Inter', 'Intermittent')
-
-# list of possible intensities
-INTEN_NONE = ('', 'No intensity')
-INTEN_VERYLIGHT = ('--', 'Very Light')
-INTEN_LIGHT = ('-', 'Light')
-INTEN_MOD = ('m', 'Moderate')
-INTEN_HEAVY = ('+', 'Heavy')
-INTEN_SEVERE = ('+', 'Severe')
-INTEN_DENSE = ('+', 'Dense')
-
-# list of optional attributes
-FQTLTG = ('FL', 'Frequent Lightning')
-GUSTS = ('GW', 'Gusty Winds')
-HVYRAFL = ('HvyRn', 'Heavy Rainfall')
-DMGWND = ('DmgW', 'Damaging Winds')
-SMALLH = ('SmA', 'Small Hail')
-LARGEH = ('LgA', 'Large Hail')
-OUTLYNG = ('OLA', 'in the outlying areas')
-GRASSY = ('OGA', 'on grassy areas')
-OVRPASS = ('OBO', 'on bridges and overpasses')
-OR = ('OR', 'or')
-DRY = ('Dry', 'dry')
-PRIMARY = ('Primary', 'Highest Ranking')
-MENTION = ('Mention', 'Include Unconditionally')
-TORNADO = ('TOR', 'Tornadoes')
-
-# list of weather types
-NOWX = ('', 'No Weather',
-        [NOCOV],
-        [INTEN_NONE],
-        [])
-THUNDER = ('T', 'Thunderstorms',
-           [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,
-            PERIODS, INTM],
-           [INTEN_NONE, INTEN_SEVERE],
-           [PRIMARY, MENTION, FQTLTG, HVYRAFL, GUSTS, DMGWND, DRY,
-            LARGEH, SMALLH, TORNADO])
-RAIN = ('R', 'Rain',
-        [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-        [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-        [PRIMARY, MENTION, OR])
-RAINSHOWERS = ('RW', 'Rain Showers',
-               [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,
-                PERIODS, INTM],
-               [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-               [PRIMARY, MENTION, OR])
-DRIZZLE = ('L', 'Drizzle',
-           [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,
-            BRIEF, PERIODS, INTM],
-           [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-           [PRIMARY, MENTION, OR])
-FZRAIN = ('ZR', 'Freezing Rain',
-          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT,
INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION, OR]) -FZDRIZZLE = ('ZL', 'Freezing Drizzle', - [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, - BRIEF, PERIODS, INTM], - [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION, OR]) -SNOW = ('S', 'Snow', - [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], - [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION, OR]) -SNOWSHOWERS = ('SW', 'Snow Showers', - [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, - BRIEF, PERIODS, INTM], - [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION, OR]) -SLEET = ('IP', 'Sleet', - [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], - [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION, OR]) -FOG = ('F', 'Fog', - [PATCHY, AREAS, WIDE], - [INTEN_NONE, INTEN_DENSE], - [PRIMARY, MENTION]) -FREEZEFOG = ('ZF', 'Freezing Fog', - [PATCHY, AREAS, WIDE], - [INTEN_NONE, INTEN_DENSE], - [PRIMARY, MENTION]) -ICEFOG = ('IF', 'Ice Fog', - [PATCHY, AREAS, WIDE], - [INTEN_NONE], - [PRIMARY, MENTION]) -ICECRYSTAL = ('IC', 'Ice Crystals', - [PATCHY, AREAS, WIDE], - [INTEN_NONE], - [PRIMARY, MENTION]) -HAZE = ('H', 'Haze', - [DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) -BLWGSNOW = ('BS', 'Blowing Snow', - [PATCHY, AREAS, DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) -BLWGSAND = ('BN', 'Blowing Sand', - [PATCHY, AREAS, DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) -SMOKE = ('K', 'Smoke', - [PATCHY, AREAS, DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) -BLWGDUST = ('BD', 'Blowing Dust', - [PATCHY, AREAS, DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) -FROST = ('FR','Frost', - [PATCHY, AREAS, WIDE], - [INTEN_NONE], - [PRIMARY, MENTION, OUTLYNG]) -FRZSPRAY = ('ZY','Freezing Spray', - [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL], - [INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], - [PRIMARY, MENTION]) -VOLASH = ('VA','Volcanic Ash', - [NOCOV], - [INTEN_NONE], - [PRIMARY, MENTION]) -WATERSPOUT = ('WP','Waterspouts', - [ISOD, SCHC, CHC, LKLY, DEFN], - [INTEN_NONE], - [PRIMARY, MENTION]) - - -types = [NOWX, THUNDER, WATERSPOUT, RAIN, RAINSHOWERS, - DRIZZLE, FZRAIN, FZDRIZZLE, SNOW, SNOWSHOWERS, - SLEET, FOG, FREEZEFOG, ICEFOG, ICECRYSTAL ,HAZE, BLWGSNOW, - BLWGSAND, SMOKE, BLWGDUST, FROST, FRZSPRAY, VOLASH] - - -# PARMS FROM NwsInitsConfig -#------------------------------------------------------------------------------- -# Discrete Keys -#------------------------------------------------------------------------------- -# -AirKeys = [("","None"), ("Watch","Watch"), ("Advisory","Advisory"),("Warning", "Warning"),] -ThreatKeys=[('', 'None'), ('Very Low', 'Very Low'), ('Low', 'Low'), - ('Moderate', 'Moderate'), ('High', 'High'), ('Extreme','Extreme')] -# -SevereKeys = [('NONE', '0'), ('TSTM', '2'), ('MRGL', '3'), ('SLGT', '4'), ('ENH', '5'), ('MOD', '6'), ('HIGH', '8')] - -AirQuality = ('AirQuality', DISCRETE, 'cat', 'Air Quality', NO, AirKeys) -BasinFFP = ('BasinFFP', DISCRETE, 'none', 'Basin Flash Flood Potential', NO, - [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')]) -CLRIndx = ('CLRIndx', SCALAR, 'none', 'Clearing Index', 1050.0, 0.0, 0, NO) -CQPF1 = ('CQPF1', SCALAR, 'in', '6hr Cont QPF', maxQpfVal, 0.0, 2, NO) -Ceiling = ('Ceiling', SCALAR, 'ft', 'Lowest Cloud Base Height', 25000.0, -30000.0, 0, NO) -CigHgtCat = ('CigHgtCat', SCALAR, 'index', 'Cloud Ceiling Height Category', 6.0, 0.0, 0, NO) -CloudBaseConditional = ('CloudBaseConditional', 
SCALAR, '100ft', 'Conditional Cloud Base Height', 250.0, 0.0, 0, NO) -CloudBasePrimary = ('CloudBasePrimary', SCALAR, '100ft', 'Primary Cloud Base Height', 250.0, 0.0, 0, NO) -CloudBaseSecondary = ('CloudBaseSecondary', SCALAR, '100ft', 'Secondary Cloud Base Height', 250.0, 0.0, 0, NO) -ClimoET = ('ClimoET', SCALAR, 'in', 'ClimoET', 0.75, 0.0, 2, NO) -ClimoETAprA = ('ClimoETAprA', SCALAR, 'in', 'ClimoET AprA', 0.75, 0.0, 2, NO) -ClimoETAprB = ('ClimoETAprB', SCALAR, 'in', 'ClimoET AprB', 0.75, 0.0, 2, NO) -ClimoETAugA = ('ClimoETAugA', SCALAR, 'in', 'ClimoET AugA', 0.75, 0.0, 2, NO) -ClimoETAugB = ('ClimoETAugB', SCALAR, 'in', 'ClimoET AugB', 0.75, 0.0, 2, NO) -ClimoETDecA = ('ClimoETDecA', SCALAR, 'in', 'ClimoET DecA', 0.75, 0.0, 2, NO) -ClimoETDecB = ('ClimoETDecB', SCALAR, 'in', 'ClimoET DecB', 0.75, 0.0, 2, NO) -ClimoETFebA = ('ClimoETFebA', SCALAR, 'in', 'ClimoET FebA', 0.75, 0.0, 2, NO) -ClimoETFebB = ('ClimoETFebB', SCALAR, 'in', 'ClimoET FebB', 0.75, 0.0, 2, NO) -ClimoETJanA = ('ClimoETJanA', SCALAR, 'in', 'ClimoET JanA', 0.75, 0.0, 2, NO) -ClimoETJanB = ('ClimoETJanB', SCALAR, 'in', 'ClimoET JanB', 0.75, 0.0, 2, NO) -ClimoETJulA = ('ClimoETJulA', SCALAR, 'in', 'ClimoET JulA', 0.75, 0.0, 2, NO) -ClimoETJulB = ('ClimoETJulB', SCALAR, 'in', 'ClimoET JulB', 0.75, 0.0, 2, NO) -ClimoETJunA = ('ClimoETJunA', SCALAR, 'in', 'ClimoET JunA', 0.75, 0.0, 2, NO) -ClimoETJunB = ('ClimoETJunB', SCALAR, 'in', 'ClimoET JunB', 0.75, 0.0, 2, NO) -ClimoETMarA = ('ClimoETMarA', SCALAR, 'in', 'ClimoET MarA', 0.75, 0.0, 2, NO) -ClimoETMarB = ('ClimoETMarB', SCALAR, 'in', 'ClimoET MarB', 0.75, 0.0, 2, NO) -ClimoETMayA = ('ClimoETMayA', SCALAR, 'in', 'ClimoET MayA', 0.75, 0.0, 2, NO) -ClimoETMayB = ('ClimoETMayB', SCALAR, 'in', 'ClimoET MayB', 0.75, 0.0, 2, NO) -ClimoETNovA = ('ClimoETNovA', SCALAR, 'in', 'ClimoET NovA', 0.75, 0.0, 2, NO) -ClimoETNovB = ('ClimoETNovB', SCALAR, 'in', 'ClimoET NovB', 0.75, 0.0, 2, NO) -ClimoETOctA = ('ClimoETOctA', SCALAR, 'in', 'ClimoET OctA', 0.75, 0.0, 2, NO) -ClimoETOctB = ('ClimoETOctB', SCALAR, 'in', 'ClimoET OctB', 0.75, 0.0, 2, NO) -ClimoETSepA = ('ClimoETSepA', SCALAR, 'in', 'ClimoET SepA', 0.75, 0.0, 2, NO) -ClimoETSepB = ('ClimoETSepB', SCALAR, 'in', 'ClimoET SepB', 0.75, 0.0, 2, NO) -ClimoPoP = ('ClimoPoP', SCALAR, '%', 'ClimoPoP', 100.0, 0.0, 0, NO) -ClimoPoPAprA = ('ClimoPoPAprA', SCALAR, '%', 'ClimoPoP AprA', 100.0, 0.0, 0, NO) -ClimoPoPAprB = ('ClimoPoPAprB', SCALAR, '%', 'ClimoPoP AprB', 100.0, 0.0, 0, NO) -ClimoPoPAugA = ('ClimoPoPAugA', SCALAR, '%', 'ClimoPoP AugA', 100.0, 0.0, 0, NO) -ClimoPoPAugB = ('ClimoPoPAugB', SCALAR, '%', 'ClimoPoP AugB', 100.0, 0.0, 0, NO) -ClimoPoPDecA = ('ClimoPoPDecA', SCALAR, '%', 'ClimoPoP DecA', 100.0, 0.0, 0, NO) -ClimoPoPDecB = ('ClimoPoPDecB', SCALAR, '%', 'ClimoPoP DecB', 100.0, 0.0, 0, NO) -ClimoPoPFG = ('ClimoPoPFG', SCALAR, '%', 'ClimoPoP First Guess', 100.0, 0.0, 0, NO) -ClimoPoPFebA = ('ClimoPoPFebA', SCALAR, '%', 'ClimoPoP FebA', 100.0, 0.0, 0, NO) -ClimoPoPFebB = ('ClimoPoPFebB', SCALAR, '%', 'ClimoPoP FebB', 100.0, 0.0, 0, NO) -ClimoPoPJanA = ('ClimoPoPJanA', SCALAR, '%', 'ClimoPoP JanA', 100.0, 0.0, 0, NO) -ClimoPoPJanB = ('ClimoPoPJanB', SCALAR, '%', 'ClimoPoP JanB', 100.0, 0.0, 0, NO) -ClimoPoPJulA = ('ClimoPoPJulA', SCALAR, '%', 'ClimoPoP JulA', 100.0, 0.0, 0, NO) -ClimoPoPJulB = ('ClimoPoPJulB', SCALAR, '%', 'ClimoPoP JulB', 100.0, 0.0, 0, NO) -ClimoPoPJunA = ('ClimoPoPJunA', SCALAR, '%', 'ClimoPoP JunA', 100.0, 0.0, 0, NO) -ClimoPoPJunB = ('ClimoPoPJunB', SCALAR, '%', 'ClimoPoP JunB', 100.0, 0.0, 0, NO) 
-ClimoPoPMarA = ('ClimoPoPMarA', SCALAR, '%', 'ClimoPoP MarA', 100.0, 0.0, 0, NO)
-ClimoPoPMarB = ('ClimoPoPMarB', SCALAR, '%', 'ClimoPoP MarB', 100.0, 0.0, 0, NO)
-ClimoPoPMayA = ('ClimoPoPMayA', SCALAR, '%', 'ClimoPoP MayA', 100.0, 0.0, 0, NO)
-ClimoPoPMayB = ('ClimoPoPMayB', SCALAR, '%', 'ClimoPoP MayB', 100.0, 0.0, 0, NO)
-ClimoPoPNovA = ('ClimoPoPNovA', SCALAR, '%', 'ClimoPoP NovA', 100.0, 0.0, 0, NO)
-ClimoPoPNovB = ('ClimoPoPNovB', SCALAR, '%', 'ClimoPoP NovB', 100.0, 0.0, 0, NO)
-ClimoPoPOctA = ('ClimoPoPOctA', SCALAR, '%', 'ClimoPoP OctA', 100.0, 0.0, 0, NO)
-ClimoPoPOctB = ('ClimoPoPOctB', SCALAR, '%', 'ClimoPoP OctB', 100.0, 0.0, 0, NO)
-ClimoPoPSepA = ('ClimoPoPSepA', SCALAR, '%', 'ClimoPoP SepA', 100.0, 0.0, 0, NO)
-ClimoPoPSepB = ('ClimoPoPSepB', SCALAR, '%', 'ClimoPoP SepB', 100.0, 0.0, 0, NO)
-CoastalFlood = ('CoastalFlood', DISCRETE, 'cat', 'Coastal Flood', NO, ThreatKeys)
-CondPredHgt = ('CondPredHgt', SCALAR, '100ft', 'Conditional Predominant Cloud Height', 250.0, 0.0, 0, NO)
-CondPredVsby = ('CondPredVsby', SCALAR, 'mi', 'Conditional Predominant Visibility', 10.0, 0.0, 2, NO)
-DenseFogSmoke = ('DenseFogSmoke', DISCRETE, 'cat', 'Dense Fog', NO, ThreatKeys)
-DepartNormFRET = ('DepartNormFRET', SCALAR, 'in', 'DepartNormFRET', 0.35, -0.35, 2, NO)
-Dryness = ('Dryness', DISCRETE, 'none', 'EGB Fuel Dryness', NO,
-           [('NoData', 'NoData'), ('Moist', 'Moist'), ('Dry', 'Dry'), ('VeryDry', 'VeryDry')])
-ExcessiveCold = ('ExcessiveCold', DISCRETE, 'cat', 'Extreme Cold', NO, ThreatKeys)
-ExcessiveHeat = ('ExcessiveHeat', DISCRETE, 'cat', 'Excessive Heat', NO, ThreatKeys)
-FFP = ('FFP', DISCRETE, 'none', 'Flash Flood Potential', NO,
-       [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')])
-FFPI = ('FFPI', SCALAR, 'index', 'Flash Flood Potential Index', 10.0, 0.0, 2, NO)
-FRET = ('FRET', SCALAR, 'in', 'Forecast Reference ET', 0.75, 0.0, 2, NO)
-FRET7Day = ('FRET7Day', SCALAR, 'in/week', 'Weekly Forecast Reference ET', 5.0, 0.0, 2, NO)
-FireWeather = ('FireWeather', DISCRETE, 'cat', 'Wild Fire', NO, ThreatKeys)
-FlashFlood = ('FlashFlood', DISCRETE, 'cat', 'Flash Flood', NO, ThreatKeys)
-Flood = ('Flood', DISCRETE, 'cat', 'River Flood', NO, ThreatKeys)
-FrostFreeze = ('FrostFreeze', DISCRETE, 'cat', 'Frost/Freeze', NO, ThreatKeys)
-FuelMstr = ('FuelMstr', SCALAR, 'none', '10 Hour Fuel Moisture', 40.0, 1.0, 0, NO)
-HainesMid = ('HainesMid', SCALAR, 'cat', 'Mid Level Haines Index', 6.0, 2.0, 0, NO)
-HeatImpactLevels = ('HeatImpactLevels', SCALAR, 'none', 'HeatImpactLevels', 4.0, 0.0, 0, NO)
-HeatImpactLevelsMaxT = ('HeatImpactLevelsMaxT', SCALAR, 'none', 'HeatImpactLevelsMaxT', 4.0, 0.0, 0, NO)
-HeatImpactLevelsMinT = ('HeatImpactLevelsMinT', SCALAR, 'none', 'HeatImpactLevelsMinT', 4.0, 0.0, 0, NO)
-HeatOrangeMaxT = ('HeatOrangeMaxT', SCALAR, 'F', 'Heat Orange MaxT', maxTempVal, minTempVal, 0, NO)
-HeatOrangeMinT = ('HeatOrangeMinT', SCALAR, 'F', 'Heat Orange MinT', maxTempVal, minTempVal, 0, NO)
-HeatRedMaxT = ('HeatRedMaxT', SCALAR, 'F', 'Heat Red MaxT', maxTempVal, minTempVal, 0, NO)
-HeatRedMinT = ('HeatRedMinT', SCALAR, 'F', 'Heat Red MinT', maxTempVal, minTempVal, 0, NO)
-HeatYellowMaxT = ('HeatYellowMaxT', SCALAR, 'F', 'Heat Yellow MaxT', maxTempVal, minTempVal, 0, NO)
-HeatYellowMinT = ('HeatYellowMinT', SCALAR, 'F', 'Heat Yellow MinT', maxTempVal, minTempVal, 0, NO)
-HighWind = ('HighWind', DISCRETE, 'cat', 'High Wind', NO, ThreatKeys)
-IceAccum6hr = ('IceAccum6hr', SCALAR, 'in', '6-hr Ice Accumulation', 2.0, 0.0, 2, NO)
-LLWS = ('LLWS', VECTOR, 'kts', 'Low Level Wind Shear', 125.0, 0.0, 0, NO)
-LLWSHgt = ('LLWSHgt', SCALAR, '100 ft', 'Wind Shear Height', 20.0, 0.0, 0, NO)
-LTG = ('LTG', SCALAR, 'CNT', 'LTG', 100.0, 0.0, 0, NO)
-LTG12 = ('LTG12', SCALAR, 'CNT', 'LTG12', 100.0, 0.0, 0, NO)
-LTG24 = ('LTG24', SCALAR, 'CNT', 'LTG24', 100.0, 0.0, 0, NO)
-Lightning = ('Lightning', DISCRETE, 'cat', 'Lightning', NO, ThreatKeys)
-Max3 = ('Max3', SCALAR, 'F', '3hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-Max6 = ('Max6', SCALAR, 'F', '6hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxApT = ('MaxApT', SCALAR, 'F', 'Max Apparent Temperature', maxTempVal, -120.0, 0, NO)
-MaxRHError = ('MaxRHError', SCALAR, '%', 'Maximum Relative Humidity Error', 100.0, -100.0, 0, NO)
-MaxRHFcst = ('MaxRHFcst', SCALAR, '%', 'Forecast Maximum Relative Humidity', 100.0, 0.0, 0, NO)
-MaxRHOb = ('MaxRHOb', SCALAR, '%', 'Observed Maximum Relative Humidity', 100.0, 0.0, 0, NO)
-MaxRHObs = ('MaxRHObs', SCALAR, '%', 'Maximum Observed RH', 100.0, 0.0, 0, NO)
-MaxT10 = ('MaxT10', SCALAR, 'F', '10th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxT50 = ('MaxT50', SCALAR, 'F', '50th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxT90 = ('MaxT90', SCALAR, 'F', '90th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxTAloft = ('MaxTAloft', SCALAR, 'C', 'Max Temp in Warm Nose', 40.0, -20.0, 1, NO)
-MaxTError = ('MaxTError', SCALAR, 'F', 'Maximum Temperature Error', 120.0, -120.0, 0, NO)
-MaxTFcst = ('MaxTFcst', SCALAR, 'F', 'Forecast Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxTOb = ('MaxTOb', SCALAR, 'F', 'Observed Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxTObs = ('MaxTObs', SCALAR, 'F', 'Maximum Temperature Obs', maxTempVal, minTempVal, 0, NO)
-Min3 = ('Min3', SCALAR, 'F', '3hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-Min6 = ('Min6', SCALAR, 'F', '6hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinApT = ('MinApT', SCALAR, 'F', 'Min Apparent Temperature', maxTempVal, -120.0, 0, NO)
-MinRH3 = ('MinRH3', SCALAR, '%', '3hr Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHError = ('MinRHError', SCALAR, '%', 'Minimum Relative Humidity Error', 100.0, -100.0, 0, NO)
-MinRHFcst = ('MinRHFcst', SCALAR, '%', 'Forecast Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHOb = ('MinRHOb', SCALAR, '%', 'Observed Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHObs = ('MinRHObs', SCALAR, '%', 'Minimum Observed RH', 100.0, 0.0, 0, NO)
-MinT10 = ('MinT10', SCALAR, 'F', '10th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinT50 = ('MinT50', SCALAR, 'F', '50th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinT6 = ('MinT6', SCALAR, 'F', 'Minimum Temperature 6Hr', maxTempVal, minTempVal, 0, NO)
-MinT90 = ('MinT90', SCALAR, 'F', '90th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinTError = ('MinTError', SCALAR, 'F', 'Minimum Temperature Error', 120.0, -120.0, 0, NO)
-MinTFcst = ('MinTFcst', SCALAR, 'F', 'Forecast Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinTOb = ('MinTOb', SCALAR, 'F', 'Observed Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinTObs = ('MinTObs', SCALAR, 'F', 'Minimum Temperature Obs', maxTempVal, minTempVal, 0, NO)
-MixHgtAve = ('MixHgtAve', SCALAR, 'ft', 'Mixing Hgt Average', 20000.0, 0.0, 0, NO)
-MixHgtMSL = ('MixHgtMSL', SCALAR, 'ft', 'Mixing Height above sea level', 30000.0, 0.0, 0, NO)
-MixT1700 = ('MixT1700', SCALAR, 'F', '1700Foot MixingTemp', 110.0, -10.0, 0, NO)
-P95MaxT = ('P95MaxT', SCALAR, 'F',
'P95MaxT', maxTempVal, minTempVal, 0, NO) -P95MinT = ('P95MinT', SCALAR, 'F', 'P95MinT', maxTempVal, minTempVal, 0, NO) - # EKDMOS -PQPF06001 = ('PQPF06001', SCALAR, '%', '6hr Prob QPF > 0.01', 100.0, 0.0, 0, NO) -PQPF06005 = ('PQPF06005', SCALAR, '%', '6hr Prob QPF > 0.05', 100.0, 0.0, 0, NO) -PQPF06010 = ('PQPF06010', SCALAR, '%', '6hr Prob QPF > 0.10', 100.0, 0.0, 0, NO) -PQPF06015 = ('PQPF06015', SCALAR, '%', '6hr Prob QPF > 0.15', 100.0, 0.0, 0, NO) -PQPF06020 = ('PQPF06020', SCALAR, '%', '6hr Prob QPF > 0.20', 100.0, 0.0, 0, NO) -PQPF06025 = ('PQPF06025', SCALAR, '%', '6hr Prob QPF > 0.25', 100.0, 0.0, 0, NO) -PQPF06030 = ('PQPF06030', SCALAR, '%', '6hr Prob QPF > 0.30', 100.0, 0.0, 0, NO) -PQPF06040 = ('PQPF06040', SCALAR, '%', '6hr Prob QPF > 0.40', 100.0, 0.0, 0, NO) -PQPF06050 = ('PQPF06050', SCALAR, '%', '6hr Prob QPF > 0.50', 100.0, 0.0, 0, NO) -PQPF06075 = ('PQPF06075', SCALAR, '%', '6hr Prob QPF > 0.75', 100.0, 0.0, 0, NO) -PQPF06100 = ('PQPF06100', SCALAR, '%', '6hr Prob QPF > 1.00', 100.0, 0.0, 0, NO) -PQPF06150 = ('PQPF06150', SCALAR, '%', '6hr Prob QPF > 1.50', 100.0, 0.0, 0, NO) -PoP12Fcst = ('PoP12Fcst', SCALAR, '%', 'Forecast Prob. of Precip.', 100.0, 0.0, 0, NO) -PoP3 = ('PoP3', SCALAR, '%', 'PoP3', 100.0, 0.0, 0, NO) -PoPPCECMWF = ('PoPPatternClimoECMWF', SCALAR, '%', 'PatternClimoECMWF', 100.0, 0.0, 0, NO) -PoPPCFIM = ('PoPPatternClimoFIM', SCALAR, '%', 'PatternClimoFIM', 100.0, 0.0, 0, NO) -PoPPCGEM = ('PoPPatternClimoGEM', SCALAR, '%', 'PatternClimoGEM', 100.0, 0.0, 0, NO) -PoPPCGFS = ('PoPPatternClimoGFS', SCALAR, '%', 'PatternClimoGFS', 100.0, 0.0, 0, NO) -PoPPattern1 = ('PoPNortherlyFlow', SCALAR, '%', 'NortherlyFlow', 100.0, 0.0, 0, NO) -PoPPattern10 = ('PoPRockiesRidge', SCALAR, '%', 'RockiesRidge', 100.0, 0.0, 0, NO) -PoPPattern11 = ('PoPSouthernFirehose', SCALAR, '%', 'SouthernFirehose', 100.0, 0.0, 0, NO) -PoPPattern12 = ('PoPNorthernFirehose', SCALAR, '%', 'NorthernFirehose', 100.0, 0.0, 0, NO) -PoPPattern2 = ('PoPGreatBasinLow', SCALAR, '%', 'GreatBasinLow', 100.0, 0.0, 0, NO) -PoPPattern3 = ('PoPBroadCyclonicFlow', SCALAR, '%', 'BroadCyclonicFlow', 100.0, 0.0, 0, NO) -PoPPattern4 = ('PoPCoastalRidge', SCALAR, '%', 'CoastalRidge', 100.0, 0.0, 0, NO) -PoPPattern5 = ('PoPNorthwestFlow', SCALAR, '%', 'NorthwestFlow', 100.0, 0.0, 0, NO) -PoPPattern6 = ('PoPZonalFlow', SCALAR, '%', 'ZonalFlow', 100.0, 0.0, 0, NO) -PoPPattern7 = ('PoPBroadAntiCyclonicFlow', SCALAR, '%', 'BroadAntiCyclonicFlow', 100.0, 0.0, 0, NO) -PoPPattern8 = ('PoPDiffluentOnshoreFlow', SCALAR, '%', 'DiffluentOnshoreFlow', 100.0, 0.0, 0, NO) -PoPPattern9 = ('PoPSouthwestFlow', SCALAR, '%', 'SouthwestFlow', 100.0, 0.0, 0, NO) -PoPWG = ('PoPWG', SCALAR, '%', 'Climo PoP Work Grid', 30.0, -30.0, 0, NO) -PPFFG = ("PPFFG", SCALAR, "%", "Prob of Excessive Rain in %", 100.0, 0.0 ,0, NO) -PrecipDur = ('PrecipDur', SCALAR, 'hrs', 'Precipitation Duration', 12.0, 0.0, 1, YES) -PredHgt = ('PredHgt', SCALAR, '100ft', 'Predominant Cloud Height', 250.0, 0.0, 0, NO) -PredHgtCat = ('PredHgtCat', SCALAR, 'index', 'Predominant Cloud Height Category', 6.0, 0.0, 0, NO) -PredHgtRH = ('PredHgtRH', SCALAR, '100ft', 'Pred Cloud Height from RH', 250.0, 1.0, 0, NO) -PredHgtTempo = ('PredHgtTempo', SCALAR, '100ft', 'Predominant Cloud Height Tempo', 250.0, 0.0, 0, NO) -PredVsby = ('PredVsby', SCALAR, 'mi', 'Predominant Visibility', 10.0, 0.0, 2, NO) -Pres = ('Pres', SCALAR, 'mb', 'Pressure', 1100.0, 900.0, 2, NO) -ProbDmgWind = ('ProbDmgWind', SCALAR, '%', 'Probability of Damaging Wind', 100.0, 0.0, 0, NO) 
-ProbExtrmDmgWind = ('ProbExtrmDmgWind', SCALAR, '%', 'Probability of Extreme Damaging Wind', 100.0, 0.0, 0, NO)
-ProbExtrmHail = ('ProbExtrmHail', SCALAR, '%', 'Probability of Extreme Hail', 100.0, 0.0, 0, NO)
-ProbExtrmSvr = ('ProbExtrmSvr', SCALAR, '%', 'Probability of Extreme Severe', 100.0, 0.0, 0, NO)
-ProbExtrmTor = ('ProbExtrmTor', SCALAR, '%', 'Probability of Extreme Tornado', 100.0, 0.0, 0, NO)
-ProbSvrHail = ('ProbSvrHail', SCALAR, '%', 'Probability of Severe Hail', 100.0, 0.0, 0, NO)
-ProbTor = ('ProbTor', SCALAR, '%', 'Probability of Tornado', 100.0, 0.0, 0, NO)
-ProbTotSvr = ('ProbTotSvr', SCALAR, '%', 'Probability of Severe', 100.0, 0.0, 0, NO)
-ProbSnowGTT = ("ProbSnowGTT", SCALAR, "%", "Prob. snow > trace", 100.0, 0.0, 0, NO)
-ProbSnowGT1 = ("ProbSnowGT1", SCALAR, "%", "Prob. snow > 1 inch", 100.0, 0.0, 0, NO)
-ProbSnowGT2 = ("ProbSnowGT2", SCALAR, "%", "Prob. snow > 2 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT4 = ("ProbSnowGT4", SCALAR, "%", "Prob. snow > 4 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT6 = ("ProbSnowGT6", SCALAR, "%", "Prob. snow > 6 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT8 = ("ProbSnowGT8", SCALAR, "%", "Prob. snow > 8 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT12 = ("ProbSnowGT12", SCALAR, "%", "Prob. snow > 12 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT18 = ("ProbSnowGT18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)
-ProbSnowRT1 = ("ProbSnowRT1", SCALAR, "%", "Prob. snow T-1 inch", 100.0, 0.0, 0, NO)
-ProbSnowR12 = ("ProbSnowR12", SCALAR, "%", "Prob. snow 1-2 inches", 100.0, 0.0, 0, NO)
-ProbSnowR24 = ("ProbSnowR24", SCALAR, "%", "Prob. snow 2-4 inches", 100.0, 0.0, 0, NO)
-ProbSnowR48 = ("ProbSnowR48", SCALAR, "%", "Prob. snow 4-8 inches", 100.0, 0.0, 0, NO)
-ProbSnowR812 = ("ProbSnowR812", SCALAR, "%", "Prob. snow 8-12 inches", 100.0, 0.0, 0, NO)
-ProbSnowR1218 = ("ProbSnowR1218", SCALAR, "%", "Prob. snow 12-18 inches", 100.0, 0.0, 0, NO)
-ProbSnowR18 = ("ProbSnowR18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)
-QPE06 = ('QPE06', SCALAR, 'in', 'QPE06', maxQpfVal, 0.0, 2, YES)
-QPE06Ob = ('QPE06Ob', SCALAR, 'in', 'Observed Precip', 20.0, 0.0, 2, NO)
-QPE12 = ('QPE12', SCALAR, 'in', 'QPE12', 15.0, 0.0, 2, YES)
-QPE24 = ('QPE24', SCALAR, 'in', 'QPE24', 15.0, 0.0, 2, YES)
-QPFDS = ('QPFDS', SCALAR, 'in', 'QPFDS', maxQpfVal, 0.0, 2, YES)
-QPFFcst = ('QPFFcst', SCALAR, 'in', 'Forecast Precip.', 10.0, 0.0, 2, NO)
-QPFPCECMWF = ('QPFPatternClimoECMWF', SCALAR, 'in', 'PatternClimoECMWF', maxQpfVal, 0.0, 2, NO)
-QPFPCFIM = ('QPFPatternClimoFIM', SCALAR, 'in', 'PatternClimoFIM', maxQpfVal, 0.0, 2, NO)
-QPFPCGEM = ('QPFPatternClimoGEM', SCALAR, 'in', 'PatternClimoGEM', maxQpfVal, 0.0, 2, NO)
-QPFPCGFS = ('QPFPatternClimoGFS', SCALAR, 'in', 'PatternClimoGFS', maxQpfVal, 0.0, 2, NO)
-QPFPattern1 = ('QPFNortherlyFlow', SCALAR, 'in', 'NortherlyFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern10 = ('QPFRockiesRidge', SCALAR, 'in', 'RockiesRidge', maxQpfVal, 0.0, 2, NO)
-QPFPattern11 = ('QPFSouthernFirehose', SCALAR, 'in', 'SouthernFirehose', maxQpfVal, 0.0, 2, NO)
-QPFPattern12 = ('QPFNorthernFirehose', SCALAR, 'in', 'NorthernFirehose', maxQpfVal, 0.0, 2, NO)
-QPFPattern2 = ('QPFGreatBasinLow', SCALAR, 'in', 'GreatBasinLow', maxQpfVal, 0.0, 2, NO)
-QPFPattern3 = ('QPFBroadCyclonicFlow', SCALAR, 'in', 'BroadCyclonicFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern4 = ('QPFCoastalRidge', SCALAR, 'in', 'CoastalRidge', maxQpfVal, 0.0, 2, NO)
-QPFPattern5 = ('QPFNorthwestFlow', SCALAR, 'in', 'NorthwestFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern6 = ('QPFZonalFlow', SCALAR, 'in', 'ZonalFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern7 = ('QPFBroadAntiCyclonicFlow', SCALAR, 'in', 'BroadAntiCyclonicFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern8 = ('QPFDiffluentOnshoreFlow', SCALAR, 'in', 'DiffluentOnshoreFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern9 = ('QPFSouthwestFlow', SCALAR, 'in', 'SouthwestFlow', maxQpfVal, 0.0, 2, NO)
-QPFPct = ('QPFPct', SCALAR, '%', 'QPFPct', 300.0, 0.0, 1, YES)
-QPFPctMonthlyClimo = ('QPFPctMonthlyClimo', SCALAR, '%', 'QPF Pct Monthly PRISMClimo', 200.0, 0.0, 0, NO)
-QPFRaw = ('QPFRaw', SCALAR, 'in', 'QPFRaw', maxQpfVal, 0.0, 2, YES)
-QSE06 = ('QSE06', SCALAR, 'in', 'QSE06', 100.0, 0.0, 1, YES)
-RipCurrent = ('RipCurrent', DISCRETE, 'cat', 'Rip Current', NO, ThreatKeys)
-RipCurrentIndex = ('RipCurrentIndex', SCALAR, 'ft', 'Rip Current Index', 16.0, -1.0, 1, NO)
-RipRisk = ("RipRisk", SCALAR, "none", "Rip Current Risk", 3.0, 0.0, 0, NO)
-SPC12hrLP1 = ('SPC12hrLP1', SCALAR, '%', 'SPC 12HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC12hrLP10 = ('SPC12hrLP10', SCALAR, '%', 'SPC 12HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC12hrLP100 = ('SPC12hrLP100', SCALAR, '%', 'SPC 12HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SPC24hrLP1 = ('SPC24hrLP1', SCALAR, '%', 'SPC 24HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC24hrLP10 = ('SPC24hrLP10', SCALAR, '%', 'SPC 24HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC24hrLP100 = ('SPC24hrLP100', SCALAR, '%', 'SPC 24HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SPC3hrLP1 = ('SPC3hrLP1', SCALAR, '%', 'SPC 3HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC3hrLP10 = ('SPC3hrLP10', SCALAR, '%', 'SPC 3HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC3hrLP100 = ('SPC3hrLP100', SCALAR, '%', 'SPC 3HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SevereHail = ('SevereHail', DISCRETE, 'cat', 'Severe Hail', NO, ThreatKeys)
-SevereTstmWind = ('SevereTstmWind', DISCRETE, 'cat', 'SevereTstmWind', NO, ThreatKeys)
-SnowAmt10Prcntl = ('SnowAmt10Prcntl', SCALAR, 'in', 'min case', 50.0, 0.0, 1, NO) -SnowAmt50Prcntl = ('SnowAmt50Prcntl', SCALAR, 'in', 'avg case', 50.0, 0.0, 1, NO) -SnowAmt90Prcntl = ('SnowAmt90Prcntl', SCALAR, 'in', 'max case', 50.0, 0.0, 1, NO) -SnowDepth = ('SnowDepth', SCALAR, 'in', 'Snow Depth', 50.0, 0.0, 0, NO) -SnowRatioCLIMO = ('SnowRatioCLIMO', SCALAR, '%', 'Snow Ratio Climatology SON-DJF-MAM', 40.0, 0.0, 1, YES) -SnowRatioGFS = ('SnowRatioGFS', SCALAR, '%', 'Snow Ratio from GFS', 40.0, 0.0, 1, YES) -SnowRatioHPCMEAN = ('SnowRatioHPCMEAN', SCALAR, '%', 'Snow Ratio from HPC MEAN', 40.0, 0.0, 1, YES) -SnowRatioNAM = ('SnowRatioNAM', SCALAR, '%', 'Snow Ratio from NAM40', 40.0, 0.0, 1, YES) -T10 = ('T10', SCALAR, 'F', '10th Percentile for T', maxTempVal, minTempVal, 0, NO) -T50 = ('T50', SCALAR, 'F', '50th Percentile for T', maxTempVal, minTempVal, 0, NO) -T90 = ('T90', SCALAR, 'F', '90th Percentile for T', maxTempVal, minTempVal, 0, NO) -TAloft = ('TAloft', SCALAR, 'F', 'Temperature Aloft', 120.0, -50.0, 1, NO) -Td10 = ('Td10', SCALAR, 'F', '10th Percentile for DpT', maxTdVal, minTdVal, 0, NO) -Td50 = ('Td50', SCALAR, 'F', '50th Percentile for DpT', maxTdVal, minTdVal, 0, NO) -Td90 = ('Td90', SCALAR, 'F', '90th Percentile for DpT', maxTdVal, minTdVal, 0, NO) -TdAft = ('TdAft', SCALAR, 'F', 'Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) -TdAftError = ('TdAftError', SCALAR, 'F', 'Afternoon Dewpoint Error', 120.0, -120.0, 0, NO) -TdAftFcst = ('TdAftFcst', SCALAR, 'F', 'Forecast Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) -TdAftOb = ('TdAftOb', SCALAR, 'F', 'Observed Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) -TdAftObs = ('TdAftObs', SCALAR, 'F', 'Afternoon Dewpoint Obs', maxTdVal, minTdVal, 0, NO) -TdMrn = ('TdMrn', SCALAR, 'F', 'Morning Dewpoint', maxTdVal, minTdVal, 0, NO) -TdMrnError = ('TdMrnError', SCALAR, 'F', 'Morning Dewpoint Error', 120.0, -120.0, 0, NO) -TdMrnFcst = ('TdMrnFcst', SCALAR, 'F', 'Forecast Morning Dewpoint', maxTdVal, minTdVal, 0, NO) -TdMrnOb = ('TdMrnOb', SCALAR, 'F', 'Observed Morning Dewpoint', maxTdVal, minTdVal, 0, NO) -TdMrnObs = ('TdMrnObs', SCALAR, 'F', 'Morning Dewpoint Obs', maxTdVal, minTdVal, 0, NO) -Tornado = ('Tornado', DISCRETE, 'cat', 'Tornado', NO, ThreatKeys) -TransWindAve = ('TransWindAve', VECTOR, 'mph', 'Transport Wind Average', 125.0, 0.0, 0, NO) -Tw = ('Tw', SCALAR, 'F', 'Surface Wet Bulb Temp', 80.0, -50.0, 0, NO) -VentRateAve = ('VentRateAve', SCALAR, 'mph-ft', 'Vent Rate Average', 500000.0, 0.0, 0, NO) -Visibility = ('Visibility', SCALAR, 'SM', 'Visibility', 10.0, 0.0, 2, NO) -VisibilityConditional = ('VisibilityConditional', SCALAR, 'SM', 'Conditional Visibility', 10.0, 0.0, 2, NO) -Vsby = ('Vsby', SCALAR, 'mi', 'Visibility', 10.0, 0.0, 2, NO) -WG1 = ('WG1', SCALAR, 'none', 'WorkGrid1', 100.0, -100.0, 0, NO) -WinterWx = ('WinterWx', DISCRETE, 'cat', 'Winter Weather', NO, ThreatKeys) - -#** Parameter sets for specific functionality -optionalParmsDict = {} - -# Marine Weather Elements -optionalParmsDict['marine']={ - 'WaveDir' : ("WaveDir", VECTOR, "m/s", "Wave Direction", 5.0, 0.0, 2, NO), - 'WindWaveHeight' : ("WindWaveHgt", SCALAR, "ft", "Wind Wave Height", 100.0, 0.0, 0, NO), - 'WaveHeight' : ("WaveHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO), - 'Swell' : ("Swell", VECTOR, "ft", "Primary Swell", 100.0, 0.0, 0, NO), - 'Swell2' : ("Swell2", VECTOR, "ft", "Secondary Swell", 100.0, 0.0, 0, NO), - 'Period' : ("Period", SCALAR, "sec", "Primary Period", 30.0, 0.0, 0, NO), - 'IceCoverage' : ("IceCoverage", 
SCALAR, "%", "Ice Coverage Amount", 100.0, 0.0, 0, NO), - 'SurfHeight' : ("SurfHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO), - ##########DCS3499 - 'SigWaveHgt' : ("SigWaveHgt", SCALAR, "ft", - "Significant wave height of combined wind waves and swells", - 30.0, 0.0, 0, NO), - 'PeakWaveDir' : ("PeakWaveDir", VECTOR, "degree", "Direction of peak wave", 100.0, 0.0, 0, NO), - 'WindWaveHgt' : ("WindWaveHgt", SCALAR, "ft", "Significant wave height of wind waves", 30.0, 0.0, 0, NO), - 'WindWavePeriod' : ("WindWavePeriod", SCALAR, "sec.", "Wind wave peak period", 20.0, 0.0, 0, NO), - 'WindWaveDir' : ("WindWaveDir", VECTOR, "degree", "Direction of wind waves", 100.0, 0.0, 0, NO), - 'NWPSwind' : ("NWPSwind", VECTOR, "kts", "NWPSwind", 150.0, 0.0, 0, NO), - 'UWaveDir' : ("UWaveDir", SCALAR, "m/s", "U WaveDir Comp", 0.50, -0.50, 3, NO), - 'VWaveDir' : ("VWaveDir", SCALAR, "m/s", "V WaveDir Comp", 0.50, -0.50, 3, NO), - 'SwanSwell' : ("SwanSwell", SCALAR, "ft", "Total Significant Swell Height", 40.0, 0.0, 2, NO), - 'SST' : ("SST", SCALAR, "F", "Sea Sfc Temp", 100.0, 25.0, 0, NO), - 'StormTide' : ('StormTide', SCALAR, 'ft', 'Storm Tide', 30.0, -8.0, 1, NO), - #Fcst Grids - for partitioned wave groups - 'Wave1' : ("Wave1", VECTOR, "ft", "WAVE1", 50.0, 0.0, 0, NO), - 'Wave2' : ("Wave2", VECTOR, "ft", "WAVE2", 50.0, 0.0, 0, NO), - 'Wave3' : ("Wave3", VECTOR, "ft", "WAVE3", 50.0, 0.0, 0, NO), - 'Wave4' : ("Wave4", VECTOR, "ft", "WAVE4", 50.0, 0.0, 0, NO), - 'Wave5' : ("Wave5", VECTOR, "ft", "WAVE5", 50.0, 0.0, 0, NO), - 'Wave6' : ("Wave6", VECTOR, "ft", "WAVE6", 50.0, 0.0, 0, NO), - 'Wave7' : ("Wave7", VECTOR, "ft", "Wave7", 50.0, 0.0, 0, NO), - 'Wave8' : ("Wave8", VECTOR, "ft", "Wave8", 50.0, 0.0, 0, NO), - 'Wave9' : ("Wave9", VECTOR, "ft", "Wave9", 50.0, 0.0, 0, NO), - #Fcst Grids - for partitioned wave groups - 'Period1' : ("Period1", SCALAR, "sec", "Period1", 30.0, 0.0, 0, NO), - 'Period2' : ("Period2", SCALAR, "sec", "Period2", 30.0, 0.0, 0, NO), - 'Period3' : ("Period3", SCALAR, "sec", "Period3", 30.0, 0.0, 0, NO), - 'Period4' : ("Period4", SCALAR, "sec", "Period4", 30.0, 0.0, 0, NO), - 'Period5' : ("Period5", SCALAR, "sec", "Period5", 30.0, 0.0, 0, NO), - 'Period6' : ("Period6", SCALAR, "sec", "Period6", 30.0, 0.0, 0, NO), - 'Period7' : ("Period7", SCALAR, "sec", "Period7", 30.0, 0.0, 0, NO), - 'Period8' : ("Period8", SCALAR, "sec", "Period8", 30.0, 0.0, 0, NO), - 'Period9' : ("Period9", SCALAR, "sec", "Period9", 30.0, 0.0, 0, NO), - 'RipProb' : ("RipProb", SCALAR, "%", "Rip Current Probability", 100.0, 0.0, 0, NO), - 'ErosionProb' : ("ErosionProb", SCALAR, "%", "Dune Erosion Probability", 100.0, 0.0, 0, NO), - 'OverwashProb' : ("OverwashProb", SCALAR, "%", "Dune Overwash Probability", 100.0, 0.0, 0, NO) -} -if SID in groups['GreatLake_SITES']: - # Redefine the WaveHeight field to include a decimal point - optionalParmsDict['marine'].update({'WaveHeight' : - ("WaveHeight", SCALAR, "ft", "Wave Height", 40.0, 0.0, 1, NO)}) - -# Parameter set for Probability of weather type, Optional for sites. 
-optionalParmsDict['powt']={ - 'PoTBD': ('PotBlowingDust', SCALAR, '%', 'Prob of Blowing Dust', 100.0, 0.0, 0, NO), - 'PoTBN': ('PotBlowingSand', SCALAR, '%', 'Prob of Blowing Sand', 100.0, 0.0, 0, NO), - 'PoTBS': ('PotBlowingSnow', SCALAR, '%', 'Prob of Blowing Snow', 100.0, 0.0, 0, NO), - 'PoTF': ('PotFog', SCALAR, '%', 'Prob of Fog', 100.0, 0.0, 0, NO), - 'PoTFR': ('PotFrost', SCALAR, '%', 'Prob of Frost', 100.0, 0.0, 0, NO), - 'PoTFl': ('PotFlurries', SCALAR, '%', 'Prob of Flurries', 100.0, 0.0, 0, NO), - 'PoTH': ('PotHaze', SCALAR, '%', 'Prob of Haze', 100.0, 0.0, 0, NO), - 'PoTIC': ('PotIceCrystals', SCALAR, '%', 'Prob of Ice Crystals', 100.0, 0.0, 0, NO), - 'PoTIF': ('PotIceFog', SCALAR, '%', 'Prob of Ice Fog', 100.0, 0.0, 0, NO), - 'PoTIP': ('PotSleet', SCALAR, '%', 'Prob of Sleet', 100.0, 0.0, 0, NO), - 'PoTK': ('PotSmoke', SCALAR, '%', 'Prob of Smoke', 100.0, 0.0, 0, NO), - 'PoTL': ('PotDrizzle', SCALAR, '%', 'Prob of Drizzle', 100.0, 0.0, 0, NO), - 'PoTR': ('PotRain', SCALAR, '%', 'Prob of Rain', 100.0, 0.0, 0, NO), - 'PoTRW': ('PotRainShowers', SCALAR, '%', 'Prob of Rain Showers', 100.0, 0.0, 0, NO), - 'PoTS': ('PotSnow', SCALAR, '%', 'Prob of Snow', 100.0, 0.0, 0, NO), - 'PoTSW': ('PotSnowShowers', SCALAR, '%', 'Prob of Snow Showers', 100.0, 0.0, 0, NO), - 'PoTSp': ('PotSprinkles', SCALAR, '%', 'Prob of Sprinkles', 100.0, 0.0, 0, NO), - 'PoTSvr': ('PotSevere', SCALAR, '%', 'Prob of Severe Storms', 100.0, 0.0, 0, NO), - 'PoTT': ('PotThunder', SCALAR, '%', 'Prob of Thunder', 100.0, 0.0, 0, NO), - 'PoTVA': ('PotVolcanicAsh', SCALAR, '%', 'Prob of Volcanic Ash', 100.0, 0.0, 0, NO), - 'PoTWP': ('PotWaterspout', SCALAR, '%', 'Prob of Waterspout', 100.0, 0.0, 0, NO), - 'PoTZF': ('PotFreezingFog', SCALAR, '%', 'Prob of Freezing Fog', 100.0, 0.0, 0, NO), - 'PoTZL': ('PotFreezingDrizzle', SCALAR, '%', 'Prob of Freezing Drizzle', 100.0, 0.0, 0, NO), - 'PoTZR': ('PotFreezingRain', SCALAR, '%', 'Prob of Freezing Rain', 100.0, 0.0, 0, NO), - 'PoTZY': ('PotFreezingSpray', SCALAR, '%', 'Prob of Freezing Spray', 100.0, 0.0, 0, NO), - 'PoTHZY': ('PotHeavyFreezingSpray', SCALAR, '%', 'Prob of Heavy Freezing Spray', 100.0, 0.0, 0, NO), - 'RoadTemp' : ("RoadTemp", SCALAR, "F", "Road Temperature", 120.0, -50.0, 0, NO), - 'MaxTwAloft' : ("MaxTwAloft", SCALAR, 'C', 'Max Wet-Bulb Temp in Warm Nose', 40.0, -20.0, 1, NO), - 'ProbIcePresent': ("ProbIcePresent", SCALAR, "%", "Prob of Ice Present", 100.0, 0.0, 0, NO), - 'ProbRefreezeSleet': ("ProbRefreezeSleet", SCALAR, "%", "Prob of Refreeze into Sleet", 100.0, 0.0, 0, NO), - 'SleetAmt': ("SleetAmt", SCALAR, "in", "Sleet Accumulation", 5.0, 0.0, 1, YES), - 'IceFlatAcc': ('IceFlatAccum', SCALAR, 'in', 'Flat Ice Accumulation', maxIceVal, 0.0, 2, YES), - 'IceLineAcc': ('IceLineAccum', SCALAR, 'in', 'Line Ice Accumulation', maxIceVal, 0.0, 2, YES), -} - -# Parameter set for Winter Weather probabilities, Optional for sites. 
-#****** Winter 2017 changes -optionalParmsDict['winterProbs']={ - # Storm Total Snow related - 'StormTotalSnowWPC' : ("StormTotalSnowWPC", SCALAR, "in","WPC Storm Total Snow", 50.0, 0.0, 1, NO), - - # Snow Percentiles - 'SnowAmt5Prcntl' : ("SnowAmt5Prcntl", SCALAR, "in","5 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt10Prcntl' : ("SnowAmt10Prcntl", SCALAR, "in","10 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt25Prcntl' : ("SnowAmt25Prcntl", SCALAR, "in","25 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt50Prcntl' : ("SnowAmt50Prcntl", SCALAR, "in","50 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt75Prcntl' : ("SnowAmt75Prcntl", SCALAR, "in","75 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt90Prcntl' : ("SnowAmt90Prcntl", SCALAR, "in","90 percentile", 100.0, -40.0, 1, NO), - 'SnowAmt95Prcntl' : ("SnowAmt95Prcntl", SCALAR, "in","95 percentile", 100.0, -40.0, 1, NO), - - # Snow Exceedance Probabilities (Add others as needed) - 'ProbSnowGET' : ("ProbSnowGET", SCALAR, "%", "Prob. snow >= trace", 100.0, 0.0, 0, NO), - 'ProbSnowGE1' : ("ProbSnowGE1", SCALAR, "%", "Prob. snow >= 1 inch", 100.0, 0.0, 0, NO), - 'ProbSnowGE2' : ("ProbSnowGE2", SCALAR, "%", "Prob. snow >= 2 inches", 100.0, 0.0, 0, NO), - 'ProbSnowGE4' : ("ProbSnowGE4", SCALAR, "%", "Prob. snow >= 4 inches", 100.0, 0.0, 0, NO), - 'ProbSnowGE6' : ("ProbSnowGE6", SCALAR, "%", "Prob. snow >= 6 inches", 100.0, 0.0, 0, NO), - 'ProbSnowGE8' : ("ProbSnowGE8", SCALAR, "%", "Prob. snow >= 8 inches", 100.0, 0.0, 0, NO), - 'ProbSnowGE12' : ("ProbSnowGE12", SCALAR, "%", "Prob. snow >= 12 inches", 100.0, 0.0, 0, NO), - 'ProbSnowGE18' : ("ProbSnowGE18", SCALAR, "%", "Prob. snow >= 18 inches", 100.0, 0.0, 0, NO), - - # Freezing Rain Percentiles - 'IceAccum5Prcntl' : ("IceAccum5Prcntl", SCALAR, "in","5 percentile", 5.0, -4.0, 2, NO), - 'IceAccum10Prcntl' : ("IceAccum10Prcntl", SCALAR, "in","10 percentile", 5.0, -4.0, 2, NO), - 'IceAccum25Prcntl' : ("IceAccum25Prcntl", SCALAR, "in","25 percentile", 5.0, -4.0, 2, NO), - 'IceAccum50Prcntl' : ("IceAccum50Prcntl", SCALAR, "in","50 percentile", 5.0, -4.0, 2, NO), - 'IceAccum75Prcntl' : ("IceAccum75Prcntl", SCALAR, "in","75 percentile", 5.0, -4.0, 2, NO), - 'IceAccum90Prcntl' : ("IceAccum90Prcntl", SCALAR, "in","90 percentile", 5.0, -4.0, 2, NO), - 'IceAccum95Prcntl' : ("IceAccum95Prcntl", SCALAR, "in","95 percentile", 5.0, -4.0, 2, NO), - - # Freezing rain accretion probabilities - 'ProbIceGE001' : ("ProbIceGE001", SCALAR, "%", "Prob. ice >= 0.01", 100.0, 0.0, 0, NO), - 'ProbIceGE010' : ("ProbIceGE010", SCALAR, "%", "Prob. ice >= 0.10", 100.0, 0.0, 0, NO), - 'ProbIceGE025' : ("ProbIceGE025", SCALAR, "%", "Prob. ice >= 0.25", 100.0, 0.0, 0, NO), - 'ProbIceGE050' : ("ProbIceGE050", SCALAR, "%", "Prob. 
ice >= 0.50", 100.0, 0.0, 0, NO), - -# Persist WPC snow prob grids - 'SnowAmt5PrcntlWPC' : ("SnowAmt5PrcntlWPC", SCALAR, "in","WPC 5th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt10PrcntlWPC' : ("SnowAmt10PrcntlWPC", SCALAR, "in","WPC 10th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt25PrcntlWPC' : ("SnowAmt25PrcntlWPC", SCALAR, "in","WPC 25th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt50PrcntlWPC' : ("SnowAmt50PrcntlWPC", SCALAR, "in","WPC 50th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt75PrcntlWPC' : ("SnowAmt75PrcntlWPC", SCALAR, "in","WPC 75th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt90PrcntlWPC' : ("SnowAmt90PrcntlWPC", SCALAR, "in","WPC 90th percentile snow amount", 100.0, -40.0, 1, NO), - 'SnowAmt95PrcntlWPC' : ("SnowAmt95PrcntlWPC", SCALAR, "in","WPC 95th percentile snow amount", 100.0, -40.0, 1, NO), - 'ProbSnowGETWPC' : ("ProbSnowGETWPC", SCALAR, "%", "WPC Prob. snow >= trace", 100.0, 0.0, 0, NO), - 'ProbSnowGE1WPC' : ("ProbSnowGE1WPC", SCALAR, "%", "WPC Prob. snow >= 1 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE2WPC' : ("ProbSnowGE2WPC", SCALAR, "%", "WPC Prob. snow >= 2 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE4WPC' : ("ProbSnowGE4WPC", SCALAR, "%", "WPC Prob. snow >= 4 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE6WPC' : ("ProbSnowGE6WPC", SCALAR, "%", "WPC Prob. snow >= 6 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE8WPC' : ("ProbSnowGE8WPC", SCALAR, "%", "WPC Prob. snow >= 8 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE12WPC' : ("ProbSnowGE12WPC", SCALAR, "%", "WPC Prob. snow >= 12 in", 100.0, 0.0, 0, NO), - 'ProbSnowGE18WPC' : ("ProbSnowGE18WPC", SCALAR, "%", "WPC Prob. snow >= 18 in", 100.0, 0.0, 0, NO), -} - -# Add rainfall probability definitions -optionalParmsDict['rainfallProb']={ - # Rain Percentiles - 'QPF5Prcntl' : ("QPF5Prcntl", SCALAR, "in","5 percentile", 36.0, -24.0, 2, NO), - 'QPF10Prcntl' : ("QPF10Prcntl", SCALAR, "in","10 percentile", 36.0, -24.0, 2, NO), - 'QPF25Prcntl' : ("QPF25Prcntl", SCALAR, "in","25 percentile", 36.0, -24.0, 2, NO), - 'QPF50Prcntl' : ("QPF50Prcntl", SCALAR, "in","50 percentile", 36.0, -24.0, 2, NO), - 'QPF75Prcntl' : ("QPF75Prcntl", SCALAR, "in","75 percentile", 36.0, -24.0, 2, NO), - 'QPF90Prcntl' : ("QPF90Prcntl", SCALAR, "in","90 percentile", 36.0, -24.0, 2, NO), - 'QPF95Prcntl' : ("QPF95Prcntl", SCALAR, "in","95 percentile", 36.0, -24.0, 2, NO), - - # Rain Exceedance Probabilities (Add others as needed) - 'ProbRainGE001' : ("ProbRainGE001", SCALAR, "%", "Prob. Rain >= 0.01 in", 100.0, 0.0, 0, NO), - 'ProbRainGE010' : ("ProbRainGE010", SCALAR, "%", "Prob. Rain >= 0.10 in", 100.0, 0.0, 0, NO), - 'ProbRainGE025' : ("ProbRainGE025", SCALAR, "%", "Prob. Rain >= 0.25 in", 100.0, 0.0, 0, NO), - 'ProbRainGE050' : ("ProbRainGE050", SCALAR, "%", "Prob. Rain >= 0.50 in", 100.0, 0.0, 0, NO), - 'ProbRainGE075' : ("ProbRainGE075", SCALAR, "%", "Prob. Rain >= 0.75 in", 100.0, 0.0, 0, NO), - 'ProbRainGE100' : ("ProbRainGE100", SCALAR, "%", "Prob. Rain >= 1.00 in", 100.0, 0.0, 0, NO), - 'ProbRainGE150' : ("ProbRainGE150", SCALAR, "%", "Prob. Rain >= 1.50 in", 100.0, 0.0, 0, NO), - 'ProbRainGE200' : ("ProbRainGE200", SCALAR, "%", "Prob. Rain >= 2.00 in", 100.0, 0.0, 0, NO), - 'ProbRainGE250' : ("ProbRainGE250", SCALAR, "%", "Prob. Rain >= 2.50 in", 100.0, 0.0, 0, NO), - 'ProbRainGE300' : ("ProbRainGE300", SCALAR, "%", "Prob. Rain >= 3.00 in", 100.0, 0.0, 0, NO), -} - - -# Make all optional parms available as variables. 
-for optionalParmKey in optionalParmsDict: - for pname,parm in optionalParmsDict[optionalParmKey].items(): - setattr(sys.modules[__name__],pname,parm) - -#----------------------------------- -# DO NOT CHANGE THE FOLLOWING SECTION -#------------------------------------ -if not BASELINE and siteImport('localWxConfig'): - types = localWxConfig.types - - -#--------------------------------------------------------------------------- -# -# Projection Configuration section. -# -#--------------------------------------------------------------------------- -from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData -ProjectionType = ProjectionData.ProjectionType -NONE = ProjectionType.NONE -LAMBERT_CONFORMAL = ProjectionType.LAMBERT_CONFORMAL -MERCATOR = ProjectionType.MERCATOR -POLAR_STEREOGRAPHIC = ProjectionType.POLAR_STEREOGRAPHIC -LATLON = ProjectionType.LATLON - -# projectionID / projectionType / latLonLL / latLonUR / -# latLonOrigin / stdParallelOne / stdParallelTwo / gridPointLL / gridPointUR -# latIntersect / lonCenter / lonOrigin - -Grid201 = ('Grid201',POLAR_STEREOGRAPHIC, - (-150.00, -20.826), (-20.90846, 30.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 65), 0.0, 0.0, -105.0) - -Grid202 = ('Grid202', POLAR_STEREOGRAPHIC, - (-141.028, 7.838), (-18.576, 35.617), - (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 43), 0.0, 0.0, -105.0) - -Grid203 = ('Grid203', POLAR_STEREOGRAPHIC, - (-185.837, 19.132), (-53.660, 57.634), - (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -150.0) - -Grid204 = ('Grid204', MERCATOR, - (-250.0, -25.0), (-109.129, 60.644), - (0.0, 0.0), 0.0, 0.0, (1, 1), (93, 68), 0.0, -179.564, 0.0) - -Grid205 = ('Grid205', POLAR_STEREOGRAPHIC, - (-84.904, 0.616), (-15.000, 45.620), - (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -60.0) - -Grid206 = ('Grid206', LAMBERT_CONFORMAL, - (-117.991, 22.289), (-73.182, 51.072), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (51, 41), 0.0, 0.0, 0.0) - -Grid207 = ('Grid207', POLAR_STEREOGRAPHIC, - (-175.641, 42.085), (-93.689, 63.976), - (0.0, 0.0), 0.0, 0.0, (1, 1), (49, 35), 0.0, 0.0, -150.0) - -Grid208 = ('Grid208', MERCATOR, - (-166.219, 10.656), (-147.844, 27.917), - (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -157.082, 0.0) - -Grid209 = ('Grid209', LAMBERT_CONFORMAL, - (-117.991, 22.289), (-73.182, 51.072), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (101, 81), 0.0, 0.0, 0.0) - -Grid210 = ('Grid210', MERCATOR, - (-77.000, 9.000), (-58.625, 26.422), - (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -67.812, 0.0) - -Grid211 = ('Grid211', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (93, 65), 0.0, 0.0, 0.0) - -Grid212 = ('Grid212', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (185, 129), 0.0, 0.0, 0.0) - -Grid213 = ('Grid213', POLAR_STEREOGRAPHIC, - (-141.028, 7.838), (-18.577, 35.617), - (0.0, 0.0), 0.0, 0.0, (1, 1), (129, 85), 0.0, 0.0, -105.0) - -Grid214 = ('Grid214', POLAR_STEREOGRAPHIC, - (-175.641, 42.085), (-93.689, 63.975), - (0.0, 0.0), 0.0, 0.0, (1, 1), (97, 69), 0.0, 0.0, -150.0) - -# (new alaska grid) -Grid214AK = ('Grid214AK', POLAR_STEREOGRAPHIC, - (-178.571, 40.5301), (-93.689, 63.975), - (0.0, 0.0), 0.0, 0.0, (1,1), (104, 70), 0.0, 0.0, -150.0) - -Grid215 = ('Grid215', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (369, 257), 0.0, 0.0, 0.0) - -Grid216 = ('Grid216', POLAR_STEREOGRAPHIC, - (-173.000, 30.000), (-62.850, 70.111), - (0.0, 0.0), 0.0, 0.0, (1, 1), (139, 107), 0.0, 0.0, -135.0) 
- -Grid217 = ('Grid217', POLAR_STEREOGRAPHIC, - (-173.000, 30.000), (-62.850, 70.111), - (0.0, 0.0), 0.0, 0.0, (1, 1), (277, 213), 0.0, 0.0, -135.0) - -Grid218 = ('Grid218', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (614, 428), 0.0, 0.0, 0.0) - -Grid219 = ('Grid219', POLAR_STEREOGRAPHIC, - (-119.559, 25.008), (60.339, 24.028), - (0.0, 0.0), 0.0, 0.0, (1, 1), (385, 465), 0.0, 0.0, -80.0) - -Grid221 = ('Grid221', LAMBERT_CONFORMAL, - (-145.500, 1.000), (-2.566, 46.352), - (-107.0, 50.0), 50.0, 50.0, (1, 1), (349, 277), 0.0, 0.0, 0.0) - -Grid222 = ('Grid222', LAMBERT_CONFORMAL, - (-145.500, 1.000), (-2.566, 46.352), - (-107.0, 50.0), 50.0, 50.0, (1, 1), (59, 47), 0.0, 0.0, 0.0) - -Grid225 = ('Grid225', MERCATOR, - (-250.0, -25.0), (-109.129, 60.644), - (0.0, 0.0), 0.0, 0.0, (1, 1), (185, 135), 0.0, -179.564, 0.0) - -Grid226 = ('Grid226', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (737, 513), 0.0, 0.0, 0.0) - -Grid227 = ('Grid227', LAMBERT_CONFORMAL, - (-133.459, 12.190), (-49.385, 57.290), - (-95.0, 25.0), 25.0, 25.0, (1, 1), (1473, 1025), 0.0, 0.0, 0.0) - -Grid228 = ('Grid228', LATLON, - (0.0, 90.0), (359.0, -90.0), (0.0, 0.0), 0.0, 0.0, - (1, 1), (144, 73), 0.0, 0.0, 0.0) - -Grid229 = ('Grid229', LATLON, - (0.0, 90.0), (359.0, -90.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 181), 0.0, 0.0, 0.0) - -Grid230 = ('Grid230', LATLON, - (0.0, 90.0), (359.5, -90.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 361), 0.0, 0.0, 0.0) - -Grid231 = ('Grid231', LATLON, - (0.0, 0.0), (359.5, 90.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 181), 0.0, 0.0, 0.0) - -Grid232 = ('Grid232', LATLON, - (0.0, 0.0), (359.0, 90.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 91), 0.0, 0.0, 0.0) - -Grid233 = ('Grid233', LATLON, - (0.0, -78.0), (358.750, 78.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (288, 157), 0.0, 0.0, 0.0) - -Grid234 = ('Grid234', LATLON, - (-98.000, 15.0), (-65.000, -45.0), - (0.0, 0.0), 0.0, 0.0, (1, 1), (133, 121), 0.0, 0.0, 0.0) - -Grid235 = ('Grid235', LATLON, - (0.250, 89.750), (359.750, -89.750), - (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 360), 0.0, 0.0, 0.0) - -HRAP = ('HRAP', POLAR_STEREOGRAPHIC, - (-119.036, 23.097), (-75.945396, 53.480095), - (0.0, 0.0), 0.0, 0.0, (1, 1), (801, 881), 0.0, 0.0, -105.0) - -NDFD_Oceanic_10K = ('NDFD_Oceanic_10km', MERCATOR, - (-230.094, -30.4192), (10.71, 80.01), - (0.0, 0.0), 0.0, 0.0, (1, 1), (2517, 1793), 0.0, -109.962, 0.0) - -# Add a new domain for NHC purposes -GridForNHA = ('GridForNHA', LAMBERT_CONFORMAL, - (-103.929, 20.164), (-50.8894, 42.9545), - (-95.0, 35.0), 35.0, 35.0, (1, 1), (1833,1241), 0.0, 0.0, 0.0) - -# list of all projections -allProjections = [Grid201, Grid202, Grid203, Grid204, Grid205, Grid206, - Grid207, Grid208, Grid209, Grid210, Grid211, Grid212, Grid213, Grid214, - Grid214AK, Grid215, Grid216, Grid217, Grid218, Grid219, Grid221, Grid222, - Grid225, Grid226, Grid227, Grid228, Grid229, Grid230, Grid231, Grid232, - Grid233, Grid234, Grid235, HRAP, NDFD_Oceanic_10K, GridForNHA] - -#--------------------------------------------------------------------------- -# -# Grid Domain configuration section -# -#--------------------------------------------------------------------------- -# -# xdim/ydim: Defines the dimensions of the grids. (GFE grid size) -# -# origin: Defines the lower-left corner of the grid (point 0,0) in -# world coordinates. -# -# extent: Defines the "size" of the grid in world coordinates. The upper -# right corner is the origin+extent. 
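As a quick arithmetic check of the origin/extent convention just described, using the AKQ numbers from the SITES table below (a standalone sketch, not part of the file):

    origin = (68.00, 25.00)   # lower-left corner of the domain in world coordinates
    extent = (9.0, 9.0)       # "size" of the domain
    upper_right = (origin[0] + extent[0], origin[1] + extent[1])
    assert upper_right == (77.0, 34.0)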
-# -# TimeZone: Defines the timezone used by this site in standard TZ format. -# Refer to /usr/share/zoneinfo/zone.tab for the correct settings. -# -# Projection: Defines the projection identifier to be used for this domain. - -# Note that all parameters for an existing database must use the same -# projection, though not necessarily the same grid size and location. - -# These values are set up for AWIPS. There is a script at the end -# of this section that adjusts the resolution for the RPP sites. - -# [xdim, ydim] / (origin) /( extent) / TimeZone / Projection / OfficeType - -SITES = { -#WFOs - # Experimental combined AFC site - 'AFC' : ([1057, 449], (1.0, 19.00), (66.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"), - 'ABQ' : ([145, 145], (36.00, 22.00), (9.0, 9.0), 'MST7MDT', Grid211,"wfo"), - 'ABR' : ([145, 145], (45.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211,"wfo"), - 'AER' : ([369, 337], (44.00, 23.00), (23.0, 21.0), 'America/Anchorage', Grid214AK, "wfo"), - 'AFG' : ([641, 497], (27.0, 38.0), (40.0, 31.0), 'America/Anchorage', Grid214AK, "wfo"), - 'AJK' : ([337, 241], (62.0, 23.0), (21.0, 15.0), 'America/Juneau', Grid214AK, "wfo"), - 'AKQ' : ([145, 145], (68.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'ALU' : ([865, 449], (1.0, 19.0), (54.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"), - 'ALY' : ([145, 145], (70.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'AMA' : ([145, 145], (41.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'APX' : ([145, 145], (58.00, 34.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'ARX' : ([145, 145], (52.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'BGM' : ([145, 145], (68.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'BIS' : ([145, 145], (43.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'BMX' : ([145, 145], (58.00, 19.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'BOI' : ([177, 177], (25.00, 34.00), (11.0, 11.0), 'MST7MDT', Grid211, "wfo"), - 'BOU' : ([145, 145], (38.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'BOX' : ([187, 154], (75.375,34.59375), (5.8125,4.78125), "EST5EDT", Grid211, "wfo"), - 'BRO' : ([145, 145], (44.00, 10.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'BTV' : ([193, 157], (72.00, 37.15), (6.0, 4.875), 'EST5EDT', Grid211, "wfo"), - 'BUF' : ([145, 145], (66.00, 32.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'BYZ' : ([145, 145], (36.00, 37.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'CAE' : ([145, 145], (65.00, 20.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'CAR' : ([145, 145], (75.00, 39.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'CHS' : ([145, 145], (65.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'CLE' : ([145, 145], (62.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'CRP' : ([145, 145], (45.00, 11.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'CTP' : ([145, 145], (67.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'CYS' : ([145, 145], (37.00, 31.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'DDC' : ([145, 145], (43.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'DLH' : ([145, 145], (50.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'DMX' : ([145, 145], (49.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'DTX' : ([161, 161], (57.00, 34.00), (10.0, 10.0), 'EST5EDT', Grid211, "wfo"), - 'DVN' : ([145, 145], (52.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'EAX' : ([145, 145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'EKA' : ([145, 145], (20.00, 31.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 
'EPZ' : ([145, 145], (36.00, 16.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'EWX' : ([145, 145], (44.00, 12.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'FFC' : ([145, 145], (61.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'FGF' : ([145, 145], (45.00, 39.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'FGZ' : ([145, 145], (29.00, 23.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), - 'FSD' : ([177, 177], (43.00, 32.00), (11.0, 11.0), 'CST6CDT', Grid211, "wfo"), - 'FWD' : ([145, 145], (45.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'GGW' : ([145, 145], (36.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'GID' : ([145, 145], (44.00, 28.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'GJT' : ([145, 145], (34.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'GLD' : ([145, 145], (41.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'GRB' : ([145, 145], (54.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'GRR' : ([145, 145], (58.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'GSP' : ([145, 145], (63.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'GUM' : ([193, 193], (23.0, 26.0), (3.0, 3.0), 'Pacific/Guam', Grid204, "wfo"), - 'GYX' : ([193,209], (76.00, 37.375), (6.0, 6.5), 'EST5EDT', Grid211, "wfo"), - 'HFO' : ([321, 225], (58.78125,29.875),(5.0,3.5), 'Pacific/Honolulu', Grid204, 'wfo'), - 'HGX' : ([145, 145], (48.00, 13.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'HNX' : ([145, 145], (22.00, 24.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'HUN' : ([161, 161], (60.0, 22.0), (5.0, 5.0), 'CST6CDT', Grid211, "wfo"), - 'ICT' : ([145, 145], (45.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'ILM' : ([145, 145], (67.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'ILN' : ([145, 145], (60.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'ILX' : ([145, 145], (55.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'IND' : ([145, 145], (58.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'IWX' : ([145, 145], (58.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'JAN' : ([145, 145], (54.00, 18.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'JAX' : ([145, 145], (64.00, 14.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'JKL' : ([145, 145], (61.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'KEY' : ([145, 145], (66.00, 8.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'LBF' : ([145, 145], (43.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LCH' : ([145, 145], (52.00, 15.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LIX' : ([145, 145], (54.00, 14.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LKN' : ([145, 145], (25.00, 30.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'LMK' : ([145, 145], (59.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'LOT' : ([145, 145], (55.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LOX' : ([145, 145], (21.00, 23.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'LSX' : ([145, 145], (52.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LUB' : ([145, 145], (39.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'LWX' : ([145, 145], (67.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'LZK' : ([145, 145], (51.00, 20.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'MAF' : ([205,247], (40.375, 16.8125), (6.375, 7.6875), 'CST6CDT', Grid211, "wfo"), - 'MEG' : ([145, 145], (54.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'MFL' : ([145, 145], (66.00, 9.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'MFR' : ([145, 145], (20.00, 34.00), (9.0, 9.0), 
'PST8PDT', Grid211, "wfo"), - 'MHX' : ([145, 145], (68.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'MKX' : ([145, 145], (55.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'MLB' : ([145, 145], (66.00, 12.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'MOB' : ([145, 145], (57.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'MPX' : ([145, 145], (50.00, 34.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'MQT' : ([145, 145], (56.00, 36.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'MRX' : ([145, 145], (61.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'MSO' : ([145, 145], (29.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'MTR' : ([145, 145], (20.00, 26.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'OAX' : ([145, 145], (45.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'OHX' : ([145, 145], (58.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'OKX' : ([145, 145], (71.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'OTX' : ([145, 145], (25.00, 40.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'OUN' : ([145, 145], (44.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'PAH' : ([145, 145], (56.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'PBZ' : ([145, 145], (65.00, 29.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'PDT' : ([145, 145], (23.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'PHI' : ([145, 145], (70.00, 28.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'PIH' : ([145, 145], (30.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'PQR' : ([145, 145], (19.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'PSR' : ([145, 145], (28.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), - 'PUB' : ([145, 145], (38.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'RAH' : ([145, 145], (66.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'REV' : ([145, 145], (23.00, 29.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'RIW' : ([145, 145], (35.00, 33.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'RLX' : ([145, 145], (63.00, 26.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'RNK' : ([161, 161], (67.0, 26.00), (5.0, 5.0), 'EST5EDT', Grid211, 'wfo'), - 'SEW' : ([145, 145], (21.00, 42.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'SGF' : ([145, 145], (51.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'SGX' : ([145, 145], (24.00, 21.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'SHV' : ([145, 145], (50.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'SJT' : ([145, 145], (43.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'SJU' : ([32, 28], (10.0, 10.0), (8.0, 7.0), 'America/Puerto_Rico',Grid210, "wfo"), - 'SLC' : ([161, 161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "wfo"), - 'STO' : ([145, 145], (20.00, 28.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), - 'TAE' : ([145, 145], (60.00, 15.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'TBW' : ([145, 145], (64.00, 11.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), - 'TFX' : ([145, 145], (32.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'TOP' : ([145, 145], (47.00, 26.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'TSA' : ([145, 145], (48.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), - 'TWC' : ([145, 145], (29.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), - 'UNR' : ([145, 145], (40.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), - 'VEF' : ([145, 145], (26.00, 25.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), -#RFCs - 'ACR' : ([565, 415], (26.0, 19.0), (60.0, 44.0), 'America/Anchorage', Grid214AK, "rfc"), - 'ALR' : ([299, 
278], (59.0, 11.0), (17.0, 19.0), 'CST6CDT', Grid211, "rfc"), - 'FWR' : ([362, 334], (36.0, 11.0), (20.0, 20.0), 'CST6CDT', Grid211, "rfc"), - 'KRF' : ([408, 356], (33.0, 27.0), (26.0, 22.0), 'CST6CDT', Grid211, "rfc"), - 'MSR' : ([381, 304], (43.0, 28.0), (24.0, 20.0), 'CST6CDT', Grid211, "rfc"), - 'ORN' : ([303, 216], (51.0, 16.0), (18.0, 14.0), 'CST6CDT', Grid211, "rfc"), - 'PTR' : ([218, 308], (21.0, 35.0), (17.0, 19.0), 'PST8PDT', Grid211, "rfc"), - 'RHA' : ([132, 140], (69.0, 28.0), (7.0, 10.0), 'EST5EDT', Grid211, "rfc"), - 'RSA' : ([140, 296], (21.0, 23.0), (12.0, 17.0), 'PST8PDT', Grid211, "rfc"), - 'STR' : ([171, 307], (29.0, 20.0), (13.0, 18.0), 'MST7MDT', Grid211, "rfc"), - 'TAR' : ([226, 164], (69.0, 34.0), (13.0, 13.0), 'EST5EDT', Grid211, "rfc"), - 'TIR' : ([220, 171], (59.0, 25.0), (13.0, 12.0), 'EST5EDT', Grid211, "rfc"), - 'TUA' : ([281, 168], (39.0, 22.0), (18.0, 10.0), 'CST6CDT', Grid211, "rfc"), - -#Special Sites - Added Hawaiian High Seas domain - 'US' : ([267, 159], (18.0, 9.5), (67.0, 40.0), 'EDT5EDT', Grid211, "other"), - 'FSL' : ([161, 145], (38.50, 27.00), (10.0, 9.0), 'MST7MDT', Grid211, "other"), - 'NH1' : ([838, 577], (887.0, 121.0), (837.0, 576.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"), - 'NH2' : ([1188, 363], (1328.0, 365.0), (1187.0, 362.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"), - 'ONA' : ([244, 383], (68.9375, 19.5625), (15.1875, 23.875), 'EST5EDT', Grid211, "wfo"), - 'ONP' : ([396, 415], (8.1875, 21.5625), (24.6875, 25.875), 'PST8PDT', Grid211, "wfo"), - 'HPA' : ([899, 671], (284.0, 30.0), (898.0, 670.0), 'Pacific/Honolulu', NDFD_Oceanic_10K, "wfo"), - 'WNJ' : ([301, 346], (1000.0, 475.0), (300.0, 345.0), 'CST6CDT', NDFD_Oceanic_10K, "wfo"), - -#Aviation Domains for AAWU - 'AAWU' : ([705, 457], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'), - 'AVAK' : ([465, 417], (8.0, 12.0), (29.0, 26.0), 'America/Anchorage', Grid203, 'nc'), - -#Regional Offices - 'VUY' : ([337,449], (62.00, 19.00), (21.0, 28.0), 'EST5EDT', Grid211, "ro"), - 'BCQ' : ([145,145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "ro"), - 'EHU' : ([657,321], (36.00, 9.50), (41.0, 20.0), 'CST6CDT', Grid211, "ro"), - 'VHW' : ([161,161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "ro"), - 'PBP' : ([321,225], (7.00, 11.00), (10.0, 7.0), 'Pacific/Honolulu', Grid208, "ro"), - 'VRH' : ([1409, 913], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'), - -#National Centers - 'HAK' : ( [825,553], ( 1.0, 1.0), (103.0, 69.0), 'EST5EDT', Grid214AK, "nc"), - 'HUS' : ([1073,689], (19.0, 8.0), ( 67.0, 43.0), 'EST5EDT', Grid211, "nc"), - 'NHA' : ([1873,1361], (35.5, 3.5), (58.5, 42.5), 'EST5EDT', Grid211, "nc"), - -} - -# Get list of valid office types, for validation. -VALID_OFFICE_TYPES = [] -# List of all values of all sites. 
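The loop below derives VALID_OFFICE_TYPES from SITES; it relies on the office type sitting at index 5 of each value tuple. A standalone sketch of that indexing (the two entries are copied from the table above, with the projection reduced to a plain string for self-containment):

    # [xdim, ydim] / (origin) / (extent) / TimeZone / Projection / OfficeType
    demo_sites = {
        'ABQ': ([145, 145], (36.00, 22.00), (9.0, 9.0), 'MST7MDT', 'Grid211', 'wfo'),
        'ACR': ([565, 415], (26.0, 19.0), (60.0, 44.0), 'America/Anchorage', 'Grid214AK', 'rfc'),
    }
    valid_office_types = []
    for site_values in demo_sites.values():
        office_type = site_values[5]   # office type is element index 5
        if office_type not in valid_office_types:
            valid_office_types.append(office_type)
    assert sorted(valid_office_types) == ['rfc', 'wfo']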
-for siteValues in list(SITES.values()): - # Office type is the 5th element of each site's values - officeType = siteValues[5] - if officeType not in VALID_OFFICE_TYPES: - # A new office type - VALID_OFFICE_TYPES.append(officeType) - -#--------------------------------------------------------------------------- -# -# Time Constraint configuration section -# -#--------------------------------------------------------------------------- -HOUR = 3600 -DAY = 24 * HOUR - -# Start: is the number of seconds since 0000z for the first grid of the day -# Repeat: is the number of seconds from start until the next grid starts -# Duration: is the length of the grid in number of seconds - -# Examples of constraints: -# Hourly temperatures -# HrTemp = (0, HOUR, HOUR) -# QPF that is 6 hours long, aligned on 0000z, exists for every 6 hours -# Q = (0, HOUR*6, HOUR*6) -# - -# fixed time constraints: start / repeat / duration -TC_1M = (0, 60, 60) # 1 minute -TC1 = (0, HOUR, HOUR) -TC3 = (0, 3 * HOUR, HOUR) -TC6 = (0, 6 * HOUR, HOUR) -TC12 = (0, 12 * HOUR, HOUR) -TC3NG = (0, 3 * HOUR, 3 * HOUR) -TC6NG = (0, 6 * HOUR, 6 * HOUR) -TC12NG = (0, 12 * HOUR, 12 * HOUR) -TC24NG = (0, 24 * HOUR, 24 * HOUR) -TC061212 = (6 * HOUR, 12 * HOUR, 12 * HOUR) -Persistent = (0, 0, 0) # special time constraint - - -# The following time constraints are based on local standard time. -# Change the last parameter from 0 to 1 to force daylight savings time -# always. -# PWS TCs changed in OB9.3 for new 6 hour data from NHC -MaxTTC = localTC(7*HOUR, 24*HOUR, 13*HOUR, 0) -MinTTC = localTC(19*HOUR, 24*HOUR, 14*HOUR, 0) -MaxRHTC = localTC(15*HOUR, 24*HOUR, 18*HOUR, 0) -MinRHTC = localTC(3*HOUR, 24*HOUR, 18*HOUR, 0) -LT3NG = localTC(0*HOUR, 3*HOUR, 3*HOUR, 0) -LT6NG = localTC(0*HOUR, 6*HOUR, 6*HOUR, 0) -LT12NG = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0) -LTMOS = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0) #special MOS local time -MaxTTCMOS = localTC(6*HOUR, 24*HOUR, 12*HOUR, 0) #special MOS maxT -MinTTCMOS = localTC(18*HOUR, 24*HOUR, 12*HOUR, 0) #special MOS minT -LT24 = localTC(0*HOUR, 24*HOUR, 24*HOUR, 0) -FireWx1300TC = localTC(13*HOUR, 24*HOUR, 1*HOUR, 0) #special FireWx 1pm snap -#DR3511 DeltaMaxTTC = localTC(7*HOUR, 24*HOUR, 16*HOUR, 0) # just for HPCdeltaMaxT -PWSDTC = localTC(11*HOUR, 24*HOUR, 12*HOUR, 0) -PWSNTC = localTC(23*HOUR, 24*HOUR, 12*HOUR, 0) -# Alaska OCONUS -if SID in siteRegion['AR']: - MaxTTC = localTC(5*HOUR, 24*HOUR, 15*HOUR, 0) - MinTTC = localTC(17*HOUR, 24*HOUR, 18*HOUR, 0) - -# From NwsInitsConfig -LT24APT = localTC(7*HOUR, 24*HOUR, 24*HOUR, 0) -FireWxAvgTC = localTC( 12*HOUR, 24*HOUR, 6*HOUR, 0) -LT4HH = localTC(11*HOUR, 24*HOUR, 4*HOUR, 0) -SPC24 = (12*HOUR, 24*HOUR, 24*HOUR) -# For WR -TC0624NG=(6*HOUR,24*HOUR,24*HOUR) -TC12NG6=(6*HOUR,12*HOUR,12*HOUR) -# HIL Time Constraint -HILTC=(6*HOUR,24*HOUR,24*HOUR) - -#--------------------------------------------------------------------------- -# -# Database/(Model) Attribute Configuration -# -#--------------------------------------------------------------------------- -# -# name: The model name of the database -# -# format: Either 'GRID' or 'DFM' -# -# type: Optional type of the database -# -# single: YES or NO. YES if this database always exists and is not -# based on model-times. NO if this database is created/destroyed and -# is based on model-runs. When created, the names of these databases have -# time stamps. -# -# official: YES or NO. YES if this is an official database from which -# products can be generated. NO if this is a conventional database. 
-# -# numVer: Number of versions of this database to retain. -# -# purgeAge: Number of hours in the past before grids will be automatically -# purged from the database. If 0, then purging is disabled. -# - -YES = 1 -NO = 0 -GRID = 'GRID' -# name / format / type / single / official / numVer / purgeAge - -Fcst = ('Fcst', GRID, '', YES, NO, 1, 24) -Practice = ('Fcst', GRID, 'Prac', YES, NO, 1, 24) -TestFcst = ('Fcst', GRID, 'Test', YES, NO, 1, 24) -Restore = ('Restore', GRID, '', YES, NO, 1, 24) -Test = ('Test', GRID, 'test', NO, NO, 1, 0) -Official = ('Official', GRID, '', YES, YES, 1, 24) -ISC = ('ISC', GRID, '', YES, NO, 1, 12) - - -#--------------------------------------------------------------------------- -# -# Search path for netCDF data files. -# NOTE: This feature was implemented only for backward compatibility with existing A1 datasets. -# New datasets should be generated in a form that can be ingested by A2. -# It should only be used for static datasets. -# New files will not be recognized without a server restart. -# -#--------------------------------------------------------------------------- -# Alaska OCONUS -if SID in groups['ALASKA_SITES']: - NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISMAK'), - ('/awips2/edex/data/gfe/climo/PRISMAK800'), - ] - -# Hawaii OCONUS -elif SID == "HFO": - NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), - ('/awips2/edex/data/gfe/topo/StdTerrain/Hawaii', 'StdTerrain'), - ] - -# San Juan OCONUS -elif SID == "SJU": - NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), - ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), - ('/awips2/edex/data/gfe/topo/StdTerrain/PuertoRico', 'StdTerrain') - ] - -# Guam OCONUS -elif SID == "GUM": - NETCDFDIRS = [] - -#CONUS sites -elif SID in groups['CONUS_EAST_SITES']: - NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'), - ('/awips2/edex/data/gfe/climo/NCDC'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), - ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), - ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'), - ] - -else: #######DCS3501 WEST_CONUS - NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'), - ('/awips2/edex/data/gfe/climo/NCDC'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), - ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), - ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), - ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'), - ] - -#--------------------------------------------------------------------------- -# -# Where to find (and what to call) satellite data. -# -#--------------------------------------------------------------------------- -# - -# This table contains product ID and weather element names for satellite data -# -# A product ID consists of the sector ID and physical element of the -# satellite product.
-# -# Examples: -# -# "East CONUS/Imager Visible" -# "East CONUS/Imager 11 micron IR" -# "East CONUS/Imager 13 micron (IR)" -# "East CONUS/Imager 3.9 micron IR" -# - -# Alaska OCONUS -if SID in groups['ALASKA_SITES']: - SATDATA = [] - -# Hawaii OCONUS -elif SID == "HFO": - SATDATA = [] - -# San Juan OCONUS -elif SID == "SJU": - SATDATA = [("East CONUS/Imager Visible", "visibleEast"), - ("East CONUS/Imager 11 micron IR", "ir11East"), - ("East CONUS/Imager 13 micron (IR)", "ir13East"), - ("East CONUS/Imager 3.9 micron IR", "ir39East"), - ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")] - -# Guam OCONUS -elif SID == "GUM": - SATDATA = [] - -#CONUS sites -else: - SATDATA = [("West CONUS/Imager Visible", "visibleWest"), - ("West CONUS/Imager 11 micron IR", "ir11West"), - ("West CONUS/Imager 13 micron (IR)", "ir13West"), - ("West CONUS/Imager 3.9 micron IR", "ir39West"), - ("West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"), - ("East CONUS/Imager Visible", "visibleEast"), - ("East CONUS/Imager 11 micron IR", "ir11East"), - ("East CONUS/Imager 13 micron (IR)", "ir13East"), - ("East CONUS/Imager 3.9 micron IR", "ir39East"), - ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")] - -#--------------------------------------------------------------------------- -# -# Intersite Coordination Configurations -# -#--------------------------------------------------------------------------- -# base URLs for the ISC Routing Table -ISC_ROUTING_TABLE_ADDRESS = { - "ANCF" : "http://svcbu-ancf.er.awips.noaa.gov:8080/irt", - "BNCF" : "http://svcbu-bncf.er.awips.noaa.gov:8080/irt" - } - - -# list of sites from which you want ISC data (If None, ifpServer will -# automatically calculate the list.) Should always include your own site. -REQUESTED_ISC_SITES = None - -# Overall ISC request flag. Must be set to 1 in order to request and receive -# ISC data. Must be 1 to register with the IRT. -REQUEST_ISC = 0 - -# Sending control flag. Set to 1 to send ISC when data is saved. -SEND_ISC_ON_SAVE = 0 - -# Sending control flag. Set to 1 to send ISC when data is published. -SEND_ISC_ON_PUBLISH = 0 - -# List of weather elements to request for ISC. If set to None, it defaults -# to the list of all weather elements in the Fcst database. -REQUESTED_ISC_PARMS = None - -# Transmission script for sending data. This is the script that iscExtract -# and other routines (e.g., vtec table sharing) will call to perform the -# actual transmission of data. -TRANSMIT_SCRIPT = GFESUITE_HOME + '/bin/gfe_msg_send -s %SUBJECT -a %ADDRESSES -i %WMOID -c 11 -p 0 -e %ATTACHMENTS' - - -# Extra ISC parms (weather elements). This is a list of the baseline -# weather elements to be added as extra parms to the ISC database. This -# is necessary when receiving ISC grids from a site that is a different -# office type than your own. You never need to add weather elements -# of your own office type to the ISC database. The format of this -# entry is a list of tuples. Each tuple is a list of weather element -# objects (such as Temp and not "T"), and an office type, such as "rfc".
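Schematically, each entry pairs a list of weather element objects with an office type string. A minimal sketch of the format (the single-entry value is purely illustrative; the shipped baseline assignment follows):

    # hypothetical: a wfo that receives ISC grids from a neighboring rfc
    # adds the rfc-only elements it does not carry itself
    EXTRA_ISC_PARMS = [([QPF, FloodingRainThreat], 'rfc')]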
-EXTRA_ISC_PARMS = [([QPF,FloodingRainThreat], 'rfc'), ([QPF,FloodingRainThreat], 'wfo'), ([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'nc'),([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'wfo')] - -#--------------------------------------------------------------------------- -# -# Misc. Configurations -# -#--------------------------------------------------------------------------- -# defines the number of days to keep log files -LOG_FILE_PURGE_AFTER = 28 - -# auto configure NotifyTextProd -- set after OB6 -AUTO_CONFIGURE_NOTIFYTEXTPROD = 1 #0=off,1=on - - -#----------------------------------- -# DO NOT CHANGE THE FOLLOWING SECTION -#------------------------------------ -# import the local config file - -myOfficeType = SITES[GFESUITE_SITEID][5] - -AdditionalISCRouting = [ - # Configure by adding entries to this list in the form of: - # ([WeatherElements], ModelName, EditAreaPrefix) - # Example: - # ([Hazards, LAL, CWR], "ISCFire", "FireWxAOR_"), -] - -#--------------------------------------------------------------------------- -# Parm groups. Combine parms with time constraints -# list of ([parms], timeConstraints) -#--------------------------------------------------------------------------- - -#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -# There is nothing special about these variables. They are just used as a -# convenience to set up multiple models in modelDict with the same parameter -# set. However, model parms are no longer as generic as they once were and -# it's just as easy to set the parms explicitly in modelDict. - -STD6_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC6), - ([Haines, MixHgt, FreeWind, TransWind, VentRate], TC6), - ([DSI, Stability, Ttrend, RHtrend], TC6), - ([SnowAmt, PoP, CWR], TC6NG), ([QPF, Weather, IceAcc, LAL], TC6NG), - ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), - ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), ([MinT], MinTTC), - ([Wetflag], FireWx1300TC)] - -# hourly -STD1_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC1), - ([Haines, MixHgt, FreeWind, TransWind], TC1), - ([DSI, Stability, VentRate, Ttrend, RHtrend], TC1), - ([SnowAmt, PoP, CWR], TC1), ([QPF, Weather, IceAcc, LAL], TC1), - ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), - ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), ([MinT], MinTTC), - ([Wetflag], FireWx1300TC)] - -# 3 hourly -STD3_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC3), - ([Haines, MixHgt, FreeWind, TransWind], TC3), - ([DSI, Stability, VentRate, Ttrend, RHtrend], TC3), - ([SnowAmt, PoP, CWR], TC3NG), ([QPF, IceAcc, Weather, LAL], TC3NG), - ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), - ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), ([MinT], MinTTC), - ([Wetflag], FireWx1300TC)] - -# Fcst and official database parameter groupings -OFFICIALDBS = [([Temp, Td, Wind, Weather, Sky, FzLevel, SnowLevel], TC1), - ([HeatIndex, WindChill, RH, SnowAmt, CWR, QPF], TC1), - ([PoP, Ttrend, RHtrend, Wind20ft, WindGust], TC1), - ([MinT], MinTTC), ([MaxT], MaxTTC), - ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), - ([VentRate, LAL, Haines, MixHgt, FreeWind, TransWind], TC1), - ([DSI, Stability, MarineLayer], TC1), - ([HrsOfSun, InvBurnOffTemp], LT24), - ([IceAcc, IceCoverage, Hazards], TC1), - ([Wetflag], FireWx1300TC), -
([StormTotalSnow], TC1), - # Tropical parms - ([prob34, prob50, prob64,pws34,pws50,pws64,], TC1), - ([InundationMax,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], TC1), - ([ProposedSS,DiffSS,tempProposedSS,InitialSS], TC1), - ([WindThreat,StormSurgeThreat,FloodingRainThreat,TornadoThreat], TC1), - ([pwsD34,pwsD64], PWSDTC), - ([pwsN34,pwsN64], PWSNTC), - ([pws34int,pws64int,InundationTiming,QPFtoFFGRatio], TC6NG), - # DR20541 and 20482 - ([PoP12hr], TC12NG), - ([QPF6hr, SnowAmt6hr], TC6NG), - ([cape], LT6NG), - ([ApparentT, HeatIndex, WindChill, LkSfcT, SnowMap, SnowRatio, StormTotalQPF], TC1), - ] - -## JCM Change wave and period (and swanswell) to TC1 for all marine sites -if SID in groups['marineSites'] or SID in groups['GreatLake_SITES']: - OFFICIALDBS.append(([WaveHeight, PeakWaveDir, WindWaveHeight, SurfHeight, Swell, Swell2, Period, Period2], TC1)) - OFFICIALDBS.append(([SwanSwell, Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, - Period1, Period3, Period4, Period5, Period6, Period7, Period8, Period9], TC1)) - OFFICIALDBS.append(([NWPSwind, UWaveDir, VWaveDir, WaveDir, RipProb, ErosionProb, OverwashProb],TC1)) - -# NWPS -nwpsCG1_MODEL = [([SwanSwell, Period, WaveHeight, PeakWaveDir, WindWaveHeight, Wind, RipProb, ErosionProb, OverwashProb], TC1)] -nwpsTrkngCG0_MODEL = [([Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, Period1, Period2, Period3, Period4, Period5, Period6,Period7, Period8, Period9], TC1)] - -# OPC TAF parameters (for NW, SW, and E) -OPCTAFBPARMS = [([WindWaveHeight, WaveHeight], TC1)] - -# SAT database parameter groupings -SATPARMS = [([SatVisE, SatIR11E, SatIR13E, SatIR39E, SatWVE, SatFogE], TC_1M), - ([SatVisW, SatIR11W, SatIR13W, SatIR39W, SatWVW, SatFogW], TC_1M)] - -# RTMA database parameter groupings -# DCS17288/DR17144 -if SID in groups['OCONUS_SITES']: - RTMAPARMS = [([Temp,Td,RH,Wind,Vis,Pressure,WindGust],TC1), - ([MinT],MinTTC), ([MaxT],MaxTTC), - ([MinRH],MinRHTC), ([MaxRH],MaxRHTC), - ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc],TC1)] -else: - RTMAPARMS = [([Temp,Td,RH,Wind,QPE,Sky,Vis,Pressure,WindGust],TC1), - ([MinT],MinTTC), ([MaxT],MaxTTC), - ([MinRH],MinRHTC), ([MaxRH],MaxRHTC), - ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc,SkyUnc],TC1)] - -#--------------------------------------------------------------------------- -# Databases for a site. -# list of (Database, [parms]) -# Official, Practice, TestFcst, Test are all set after Fcst is defined. 
-#--------------------------------------------------------------------------- - -# Intersite coordination database parameter groupings, based on -# OFFICIALDBS, but time constraint is always TC1 -ISCPARMS = [] -if type(officeType) != str: - raise TypeError("Office type not a str: " + repr(officeType)) -else: - if officeType not in VALID_OFFICE_TYPES: - raise ValueError("Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]") - - -# -# new parameters for NewTerrain -# -NewTopo = ("NewTopo", SCALAR, "ft", "New Topo", 50000.0, -32000.0, 1, NO) -PrevTopo = ("PrevTopo", SCALAR, "ft", "Previous Topo", 50000.0, -32000.0, 1, NO) -StdTopo = ("StdTopo", SCALAR, "ft", "Standard Topo", 50000.0, -32000.0, 1, NO) -GTOPO = ("GTOPO", SCALAR, "ft", "GTOPO30", 50000.0, -32000.0, 1, NO) -Topo = ("Topo", SCALAR, "ft", "Topography", 50000.0, -32000.0, 1, NO) - -# Add Topo to ISC parms for NewTerrain -if type(REQUESTED_ISC_PARMS) is list and not "NewTopo" in REQUESTED_ISC_PARMS: - REQUESTED_ISC_PARMS.append("NewTopo") -ISCPARMS.append(([NewTopo], Persistent)) - - -#--------------------------------------------------------------------------- -# -# General server configuration section -# -#--------------------------------------------------------------------------- - -#---------------------------------------------------------------------------- -# Server settings DO NOT CHANGE THESE DEFINITIONS -#---------------------------------------------------------------------------- -from com.raytheon.edex.plugin.gfe.config import SimpleServerConfig -IFPConfigServer = SimpleServerConfig() -#IFPConfigServer.allowedNodes = [] -IFPConfigServer.allowTopoBelowZero = 1 - -#------------------------------------------------------------------------------ -# serverConfig model configuration is now done in the modelDict dictionary. -# variables D2DMODELS, D2DDBVERSIONS,D2DAccumulativeElements,INITMODULES, -# INITSKIPS, DATABASES are no longer explicitly set and are not valid -# to be referenced in localConfig.py. - -# WARNING: There can only be one version of a model in modelDict. Fcst, -# practice and test databases have to be handled separately because there -# are databases with the same name but different types. This is ok -# because these databases are defined after any localConfig customizations -# of the normal Fcst database. - -# modelDict contains the following keys. Only define what is needed, i.e., -# it is not required to have every key defined -# "DB": Definition of the database, i.e., the first value in a dbs entry: -# ("wrfems", GRID, "", NO, NO, 3, 0). This must be a tuple. The name -# in the DB entry must be the same as the model name used as the key -# into the modelDict variable. -# -# "Parms" : Definition of the weather element parameters in the database, -# i.e., the second part of the dbs entry. This is a list of tuples. -# -# "D2DMODELS" : D2D metadata database name for the source model. -# -# "INITMODULES': Name of the SmartInit module. It is usually just the -# name as a string. If the init requires multiple models, use a tuple -# of ('smartInit name',[list of model names]) -# 'INITMODULES': ('Local_WPCGuide', ["HPCGuide","HPCERP","HPCWWD"]), -# -# "D2DAccumulativeElements" : List of parm names that are accumulative -# -# "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather -# Element Browser. Defaults to 2 if not supplied. -# -# "INITSKIPS" : Used to skip specific model cycles. 
-# -# Example for a model: -# -# modelDict["CMCreg"]={ -# "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0), -# "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF, -# PoP, SnowAmt, SnowRatio], TC3), -# ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG), -# ([QPF12, PoP12],TC12NG), -# ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), -# ([MaxT], MaxTTC), ([MinT], MinTTC), -# ], -# "D2DMODELS": "Canadian-Reg", -# "INITMODULES": "Local_CMCreg", -# "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"], -# "D2DDBVERSIONS": 3, -# } -# - -# Official, Practice, TestFcst, Test, Restore are all derivations of Fcst and -# are setup after localConfig is processed. -modelDict['Fcst'] = {'DB': Fcst, 'Parms': OFFICIALDBS} - -# Model Databases -waveParms=[Period, Period2, SurfHeight, Swell, Swell2, WaveHeight, - Wind, WindWaveHeight, ] - -modelDict['BaseTerrain'] = { - 'DB': ('BaseTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0), - 'Parms': [([StdTopo, GTOPO, PrevTopo], Persistent), - ], - } - -modelDict['CRMTopo'] = { - 'D2DDBVERSIONS': 1} - -modelDict['ECMWFHiRes'] = { - 'D2DMODELS': 'ECMWF-HiRes',} - -modelDict['ENPwave'] = { - 'D2DMODELS': 'ENPWAVE253', - 'DB': ('ENPwave', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC6), - ], - } - -modelDict['ESTOFS'] = { - 'D2DMODELS': 'estofsEP', - 'DB': ('ESTOFS', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'ESTOFS', - 'Parms': [([AstroTide, StormSurge], TC1), - ], - } - -modelDict['ETSS'] = { - 'D2DMODELS': 'ETSS', - 'DB': ('ETSS', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'ETSS', - 'Parms': [([StormSurge, SurgeTide], TC1), - ], - } - -modelDict['ETSSHiRes'] = { - 'D2DMODELS': 'ETSS-HiRes', - 'DB': ('ETSSHiRes', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'ETSSHiRes', - 'Parms': [([AstroTide, SurgeTide], TC1), - ], - } - -for s in ['ALR', 'FWR', 'KRF', 'MSR', 'ORN', 'PTR', 'RHA', 'RSA', 'STR', 'TAR', - 'TIR', 'TUA',]: - modelDict['FFG'+s] = {'D2DMODELS': 'FFG-'+s} - -modelDict['GFS20'] = { - 'D2DMODELS': 'GFS20', - 'D2DAccumulativeElements': ['tp3hr','tp6hr', 'tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'], - 'DB': ('GFS20', 'GRID', '', NO, NO, 2, 0), - 'Parms': [([Wetflag], FireWx1300TC), - ([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), - ([MinRH], MinRHTC), - ([MinT], MinTTC), - ([HrsOfSun, InvBurnOffTemp, MarineLayer], LT24), - ([DSI, FreeWind, FzLevel, Haines, MixHgt, RH, RHtrend, Sky, - SnowLevel, Stability, Td, Temp, TransWind, Ttrend, VentRate, - Wind, Wind20ft], TC6), - ([CWR, IceAcc, LAL, PoP, QPF, SnowAmt, Weather], TC6NG), - ], - } - -modelDict['GFS80'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'AVN211', - 'DB': ('GFS80', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'GFS80', - 'Parms': STD6_MODEL, - } - -modelDict['GFSLAMPGrid'] = { - 'D2DMODELS': 'GFSLAMPGrid', - 'DB': ('GFSLAMPGrid', 'GRID', '', NO, NO, 3, 0), - 'INITMODULES': 'GFSLAMPGrid', - 'Parms': [([CigHgt, Sky, Td, Temp, Vis, Wind], TC1), - ], - } - -modelDict['GWW'] = { - 'DB': ('GWW', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC6), - ], - } - -modelDict['WaveWatch'] = { - 'D2DMODELS': 'WaveWatch',} - -modelDict['GlobalWave'] = { - 'D2DMODELS': 'GlobalWave', - 'DB': ('GlobalWave', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC3), - ], - } - -modelDict['HIRESWarw'] = { - 'D2DAccumulativeElements': ['tp'], - 'D2DMODELS': 'HiResW-ARW-West', - 'DB': ('HIRESWarw', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'HIRESWarw', - 'Parms': STD3_MODEL, - } - -modelDict['HIRESWnmm'] = { - 'D2DAccumulativeElements': ['tp'], - 'D2DMODELS': 'HiResW-NMM-West', - 'DB': ('HIRESWnmm', 
'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'HIRESWnmm', - 'Parms': STD3_MODEL, - } - -modelDict['HPCERP'] = { - 'D2DAccumulativeElements': ['tpHPCndfd'], - 'D2DDBVERSIONS': 24, - 'D2DMODELS': 'HPCqpfNDFD',} - -modelDict['HPCGRID'] = { - 'DB': ('HPCGRID', 'GRID', '', NO, NO, 2, 0), - 'Parms': [([PoP, SnowAmt], LTMOS), - ([MaxT], MaxTTCMOS), - ([MinT], MinTTCMOS), - ([Sky, Td, Temp, Weather, Wind], TC1), - ([QPF], TC6NG), - ], - } - -modelDict['HPCGuide'] = { - 'D2DAccumulativeElements': ['pop'], - 'D2DMODELS': 'HPCGuide', - 'DB': ('HPCGuide', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'HPCGuide', - 'Parms': [([MaxT], MaxTTC), - ([MinT], MinTTC), - ([PoP], TC12NG), - ([Sky, Td, Wind], TC6), - ], - } - -modelDict['HPCQPF'] = { - 'D2DAccumulativeElements': ['tpHPC'], - 'D2DMODELS': 'HPCqpf', - 'DB': ('HPCQPF', 'GRID', '', NO, NO, 4, 0), - 'INITMODULES': 'HPCQPF', - 'Parms': [([QPF], TC6NG), - ], - } - -modelDict['HRRR'] = { - 'D2DAccumulativeElements': ['tp', 'crain', 'csnow', 'cfrzr', 'cicep'], - 'D2DMODELS': 'HRRR', - 'DB': ('HRRR', 'GRID', '', NO, NO, 3, 0), - 'INITMODULES': 'HRRR', - 'Parms': [([QPF, RH, Sky, Td, Temp, Wind, WindGust], TC1), - ], - } - -modelDict['HWRF'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'HWRF',} - -modelDict['LAPS'] = { - 'D2DAccumulativeElements': ['pc'], - 'D2DDBVERSIONS': 6, - 'D2DMODELS': 'LAPS', - 'DB': ('LAPS', 'GRID', '', YES, NO, 1, 30), - 'INITMODULES': 'LAPS', - 'Parms': [([QPF, Radar, Sky, SnowAmt, Td, Temp, Weather, Wind], TC1), - ], - } - -modelDict['MOSGuide'] = { - 'D2DAccumulativeElements': ['pop12hr', 'pop6hr', 'thp12hr', 'thp3hr', - 'thp6hr', 'tcc', 'tp6hr', 'tp12hr', 'wgs'], - 'D2DMODELS': 'MOSGuide', - 'DB': ('MOSGuide', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'MOSGuide', - 'Parms': [([MaxT], MaxTTC), - ([MinT], MinTTC), - ([RH, Td, Temp, Wind], TC1), - ([PoP, PoP12, QPF, QPF12, TstmPrb12], TC12NG), - ([TstmPrb3], TC3NG), - ([PoP6, QPF6, Sky, TstmPrb6, WindGust], TC6NG), - ], - } - -modelDict['MSAS'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DDBVERSIONS': 6, - 'D2DMODELS': 'MSAS', - 'DB': ('MSAS', 'GRID', '', YES, NO, 1, 36), - 'INITMODULES': 'MSAS', - 'Parms': [([Td, Temp, Wind], TC1), - ], - } - -modelDict['NAHwave4'] = { - 'D2DMODELS': 'NAHwave4',} - -modelDict['NAM12'] = { - 'D2DAccumulativeElements': ['tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'], - 'D2DMODELS': 'NAM12', - 'DB': ('NAM12', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'NAM12', - 'Parms': STD3_MODEL, - } - -modelDict['NAM20'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'NAM20',} - -modelDict['NAM40'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'NAM40', - 'DB': ('NAM40', 'GRID', '', NO, NO, 2, 0), - 'Parms': STD3_MODEL, - } - -modelDict['NAM80'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'ETA', - 'DB': ('NAM80', 'GRID', '', NO, NO, 2, 0), - 'Parms': STD6_MODEL, - } - -modelDict['NED'] = { - 'D2DDBVERSIONS': 1} - -modelDict['NamDNG'] = { - 'D2DMODELS': 'namdng25', - 'DB': ('NamDNG', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'NamDNG', - 'Parms': [([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), - ([MinRH], MinRHTC), - ([MinT], MinTTC), - ([PoP12, QPF12], TC12NG), - ([MixHgt, RH, Sky, SnowLevel, Td, Temp, TransWind, Vis, - Wind, WindGust], TC3), - ([MaxRH3, MaxT3, MinT3, PoP, QPF3, SnowAmt], TC3NG), - ([PoP6, QPF6, SnowAmt6], TC6NG), - ], - } - -modelDict['NationalBlend'] = { - 'D2DAccumulativeElements': ["pop12hr", "pop", "pop6hr", "tp", "ppi1hr", "ppi6hr", - "tp1hr", "tp6hr", "thp3hr", 
"thp6hr", - "totsn1hr", "totsn6hr", "ficeac1hr", "ficeac6hr"], - 'D2DMODELS': 'NationalBlend', - 'DB': ('NationalBlend', 'GRID', '', NO, NO, 7, 0), - 'INITMODULES': 'NationalBlend', - 'Parms': [([Temp, Td, RH, Sky, Wind, WindGust, ApparentT], TC1), - ([QPF1,PPI01,CloudBasePrimary,Ceiling,Visibility],TC1), - ([PoTIP, PoTR, PoTRW, PoTS, PoTSW, PoTZR,],TC1), - ([SnowLevel,MaxTwAloft,ProbIcePresent, ProbRefreezeSleet,SnowRatio],TC1), - ([PositiveEnergyAloft, NegativeEnergyLowLevel],TC1), - ([MixHgt, TransWind, LLWS, VentRate, LLWSHgt, Radar, - SigWaveHgt, Weather, Haines, FosBerg, - SnowAmt01, IceAccum01, TstmPrb1],TC1), - ([TstmPrb3, DryTstmPrb],TC3NG), - ([TstmPrb6, QPF, PoP6, PPI06, SnowAmt, IceAccum, - QPF10Prcntl, QPF50Prcntl, QPF90Prcntl],TC6NG), - ([MaxT], MaxTTC), ([MinT], MinTTC), - ([MaxRH], MaxRHTC), ([MinRH], MinRHTC),([PoP, TstmPrb12],TC12NG), - ], - } - -modelDict['NationalBlendOC'] = { - 'D2DMODELS': 'NationalBlendOC', - 'DB': ('NationalBlend', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'NationalBlendOC', - 'Parms': [([WGS50pct, WS50Prcntl30m, WS50Prcntl80m, Vis50pct, T50pct, - PMSL10pct, PMSL50pct, PMSL90pct], TC1), - ], - } - -modelDict['NewTerrain'] = { - 'DB': ('NewTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0), - 'Parms': [([NewTopo], Persistent), - ], - } - -modelDict['PWPF'] = { - 'D2DMODELS': 'PWPF',} - -modelDict['RFCQPF'] = { - 'D2DMODELS': 'RFCqpf', - 'DB': ('RFCQPF', 'GRID', '', NO, NO, 4, 0), - 'Parms': [([QPF], TC6NG), - ], - } - -modelDict['RTMA'] = { - 'D2DAccumulativeElements': ['tp'], - 'D2DMODELS': 'RTMA25', - 'DB': ('RTMA', 'GRID', '', YES, NO, 1, 36), - 'INITMODULES': 'RTMA', - 'Parms': RTMAPARMS, - } - -modelDict['RAP13'] = { - 'D2DAccumulativeElements': ['tp', 'cp'], - 'D2DMODELS': 'RAP13', - 'DB': ('RAP13', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'RAP13', - 'INITSKIPS': [1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23], - 'Parms': STD1_MODEL, - } - -modelDict['SAT'] = { - 'DB': ('SAT', 'GRID', '', YES, NO, 1, 12), - 'Parms': [([SatFogE, SatFogW, SatIR11E, SatIR11W, SatIR13E, SatIR13W, - SatIR39E, SatIR39W, SatVisE, SatVisW, SatWVE, SatWVW], - TC_1M), - ], - } - -modelDict['SPC'] = { - 'D2DDBVERSIONS': 8, 'D2DMODELS': 'SPCGuide',} - -modelDict['SREF'] = { - 'D2DMODELS': 'SREF212', - 'DB': ('SREF', 'GRID', '', NO, NO, 3, 0), - 'INITMODULES': 'SREF', - 'Parms': [([Td, Temp, Wind], TC1), - ], - } - -modelDict['Satellite'] = { - 'D2DDBVERSIONS': 6,} -# Turn on satellite smartInit only if SATDATA has some entries. 
-if SATDATA: - modelDict['Satellite']['INITMODULES'] = 'SAT' - -modelDict['TPCProb'] = { - 'D2DDBVERSIONS': 30, - 'D2DMODELS': 'TPCWindProb', - 'DB': ('TPCProb', 'GRID', '', NO, NO, 30, 0), - 'Parms': [([pwsD34, pwsD64], PWSDTC), - ([pwsN34, pwsN64], PWSNTC), - ([prob34, prob50, prob64, pws34, pws50, pws64], TC1), - ], - } - -modelDict['TPCProbPrelim'] = { - 'D2DDBVERSIONS': 30, - 'D2DMODELS': 'TPCWindProb_Prelim', - 'DB': ('TPCProbPrelim', 'GRID', '', NO, NO, 30, 0), - 'Parms': [([pwsD34, pwsD64], PWSDTC), - ([pwsN34, pwsN64], PWSNTC), - ([prob34, prob50, prob64, pws34, pws50, pws64], TC1), - ], - } - -modelDict['TPCStormSurge'] = { - 'D2DDBVERSIONS': 1} - -modelDict['TPCSurgeProb'] = { - 'D2DMODELS': 'TPCSurgeProb', - 'D2DAccumulativeElements': [ - 'Surge10Pct', - 'Surge20Pct', - 'Surge30Pct', - 'Surge40Pct', - 'Surge50Pct', - 'Surge90Pct', - 'PSurge25Ft', - 'PSurge24Ft', - 'PSurge23Ft', - 'PSurge22Ft', - 'PSurge21Ft', - 'PSurge20Ft', - 'PSurge19Ft', - 'PSurge18Ft', - 'PSurge17Ft', - 'PSurge16Ft', - 'PSurge15Ft', - 'PSurge14Ft', - 'PSurge13Ft', - 'PSurge12Ft', - 'PSurge11Ft', - 'PSurge10Ft', - 'PSurge9Ft', - 'PSurge8Ft', - 'PSurge7Ft', - 'PSurge6Ft', - 'PSurge5Ft', - 'PSurge4Ft', - 'PSurge3Ft', - 'PSurge2Ft', - 'PSurge1Ft', - 'PSurge0Ft', - 'Surge10Pctincr', - 'Surge20Pctincr', - 'Surge30Pctincr', - 'Surge40Pctincr', - 'Surge50Pctincr', - 'Surge90Pctincr', - 'PSurge20Ftincr', - 'PSurge19Ftincr', - 'PSurge18Ftincr', - 'PSurge17Ftincr', - 'PSurge16Ftincr', - 'PSurge15Ftincr', - 'PSurge14Ftincr', - 'PSurge13Ftincr', - 'PSurge12Ftincr', - 'PSurge11Ftincr', - 'PSurge10Ftincr', - 'PSurge9Ftincr', - 'PSurge8Ftincr', - 'PSurge7Ftincr', - 'PSurge6Ftincr', - 'PSurge5Ftincr', - 'PSurge4Ftincr', - 'PSurge3Ftincr', - 'PSurge2Ftincr', - 'PSurge1Ftincr', - 'PSurge0Ftincr', - ], - } - -modelDict['PETSS'] = { - 'D2DMODELS': 'P-ETSS', - 'D2DAccumulativeElements': [ - 'Surge10Pct', - 'Surge20Pct', - 'Surge30Pct', - 'Surge40Pct', - 'Surge50Pct', - 'Surge90Pct', - 'Surge10Pctincr', - 'Surge20Pctincr', - 'Surge30Pctincr', - 'Surge40Pctincr', - 'Surge50Pctincr', - 'Surge90Pctincr', - 'PSurge0Ftincr', - 'PSurge1Ftincr', - 'PSurge2Ftincr', - 'PSurge3Ftincr', - 'PSurge4Ftincr', - 'PSurge5Ftincr', - 'PSurge6Ftincr', - 'PSurge7Ftincr', - 'PSurge8Ftincr', - 'PSurge9Ftincr', - 'PSurge10Ftincr', - 'PSurge13Ftincr', - 'PSurge16Ftincr', - 'PSurge0Ft', - 'PSurge1Ft', - 'PSurge2Ft', - 'PSurge3Ft', - 'PSurge4Ft', - 'PSurge5Ft', - 'PSurge6Ft', - 'PSurge7Ft', - 'PSurge8Ft', - 'PSurge9Ft', - 'PSurge10Ft', - 'PSurge13Ft', - 'PSurge16Ft', - 'PSurgeMaxincr', - 'PSurgeMeanincr', - 'PSurgeMinincr', - 'PSurgeMax', - 'PSurgeMean', - 'PSurgeMin', - ], - } - -modelDict['TPCtcm'] = { - 'DB': ('TPCtcm', 'GRID', '', NO, NO, 2, 0), - 'Parms': [([HiWind], TC3), - ], - } - -modelDict['URMA25'] = { - 'D2DAccumulativeElements': ['tp'], - 'D2DMODELS': 'URMA25', - 'DB': ('URMA25', 'GRID', '', YES, NO, 1, 36), - 'INITMODULES': 'URMA25', - 'Parms': [([MaxRH], MaxRHTC), - ([MaxT], MaxTTC), - ([MinRH], MinRHTC), - ([MinT], MinTTC), - ([PressUnc, Pressure, QPE, RH, Sky, SkyUnc, TUnc, Td, TdUnc, - Temp, Vis, VisUnc, WDirUnc, WGustUnc, WSpdUnc, Wind, - WindGust], TC1), - ], - } - -modelDict['WCwave10'] = { - 'D2DMODELS': 'WCwave10', - 'DB': ('WCwave10', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC3), - ], - } - -modelDict['WCwave4'] = { - 'D2DMODELS': 'WCwave4', - 'DB': ('WCwave4', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC3), - ], - } - -modelDict['WNAWAVE'] = { - 'DB': ('WNAWAVE', 'GRID', '', NO, NO, 2, 0), - 'Parms': 
[(waveParms, TC6), - ], - } - -modelDict['WNAWAVE238'] = { - 'D2DMODELS': 'WNAWAVE238',} - -modelDict['WNAwave10'] = { - 'D2DMODELS': 'WNAwave10', - 'DB': ('WNAwave10', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC3), - ], - } - -modelDict['WNAwave4'] = { - 'D2DMODELS': 'WNAwave4', - 'DB': ('WNAwave4', 'GRID', '', NO, NO, 2, 0), - 'Parms': [(waveParms, TC3), - ], - } - -# This list will be used to set up a default ignoreDatabases list. This is shorter than -# listing all models to ignore. -includeOnly=[] -if SID in groups['ALASKA_SITES']: - modelDict['AKwave4'] = { - 'D2DMODELS': 'AKwave4', - 'D2DDBVERSIONS': 2, - 'DB': ('AKwave4', 'GRID', '', NO, NO, 2, 0), - 'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, - WindWaveHgt, WindWavePeriod], TC3), - ], - } - - modelDict['AKwave10'] = { - 'D2DMODELS': 'AKwave10', - 'D2DDBVERSIONS': 2, - 'DB': ('AKwave10', 'GRID', '', NO, NO, 2, 0), - 'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, - WindWaveHgt, WindWavePeriod], TC3), - ], - } - - updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsAK') - updateModelDict(modelDict,'ETSS','D2DMODELS', 'ETSS-AK') - updateModelDict(modelDict,'GFS20','D2DMODELS', 'AK-GFS22') - updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-AK') - updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-AK') - updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-AK') - updateModelDict(modelDict,'NAM12','D2DMODELS', 'AK-NAM11') - updateModelDict(modelDict,'NamDNG','D2DMODELS', 'AK-NamDNG3') - updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendAK') - updateModelDict(modelDict,'RTMA','D2DMODELS', 'AK-RTMA3') - updateModelDict(modelDict,'SREF','D2DMODELS', 'SREF216') - updateModelDict(modelDict,'URMA','D2DMODELS', 'AK-URMA') - updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska') - updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska') - updateModelDict(modelDict,'RTOFS-Arctic','D2DMODELS', 'RTOFS-Arctic') - updateModelDict(modelDict,'RTOFS-Bering','D2DMODELS', 'RTOFS-Bering') - updateModelDict(modelDict,'RTOFS-GulfAlaska','D2DMODELS', 'RTOFS-GulfAlaska') - updateModelDict(modelDict,'PETSS','D2DMODELS', 'P-ETSS-AK') - # Model databases for Alaska - includeOnly = ['AKwave4', 'AKwave10', 'BaseTerrain', 'CRMTopo', 'ECMWFHiRes', 'ESTOFS', - 'ETSS', 'GFS20', 'GWW', 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NAM12', - 'NamDNG', 'NationalBlend', 'NED', 'NewTerrain', 'RTMA', 'RTOFS-Alaska', - 'RTOFS-Arctic', 'RTOFS-Bering', 'RTOFS-GulfAlaska', 'SAT', 'SREF', 'URMA', - 'nwpsCG1AER', 'nwpsCG1AFG', 'nwpsCG1AJK', 'nwpsCG1ALU', 'nwpsTrkngCG0AER', - 'nwpsTrkngCG0AFG', 'nwpsTrkngCG0AJK', 'nwpsTrkngCG0ALU', 'PETSS', - ] - -# Hawaii OCONUS -elif SID == "HFO": - modelDict['GFS75'] = { - 'D2DMODELS': 'AVN225', - 'D2DAccumulativeElements': ['tp', 'cp'], - 'DB': ('GFS75', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'GFS75', - 'Parms': STD6_MODEL, - } - - updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch') - updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') - updateModelDict(modelDict,'RTMA','D2DMODELS', 'HI-RTMA') - updateModelDict(modelDict,'NamDNG','D2DMODELS', 'HI-NamDNG5') - updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-HI') - updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-HI') - updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide') - updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') - updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') - 
updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes') - updateModelDict(modelDict,'RTOFS-Honolulu','D2DMODELS', 'RTOFS-Honolulu') - updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsHI') - updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-HI') - updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendHI') - # Model databases for HFO - includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS75', 'WaveWatch', 'GlobalWave', - 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NamDNG', 'NationalBlend', - 'RTMA', 'RTOFS-Honolulu', 'SPC', 'TPCProb', 'TPCProbPrelim', 'nwpsCG1GUM', - 'nwpsCG1HFO', 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO', - ] - -# Guam OCONUS -elif SID == "GUM": - modelDict['GFS75'] = { - 'D2DMODELS': 'AVN225', - 'D2DAccumulativeElements': ['tp', 'cp'], - 'DB': ('GFS75', 'GRID', '', NO, NO, 2, 0), - 'INITMODULES': 'GFS75', - 'Parms': STD6_MODEL, - } - - updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') - updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') - updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') - updateModelDict(modelDict,'RTOFS-Guam','D2DMODELS', 'RTOFS-Guam') - updateModelDict(modelDict,'RTMA','D2DMODELS', 'Guam-RTMA') - # Model databases for GUM - includeOnly = ['GFS75', 'GlobalWave', 'RTMA', 'RTOFS-Guam', 'TPCProb', - 'TPCProbPrelim', 'nwpsCG1GUM', 'nwpsCG1HFO', - 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO', - ] - -# San Juan OCONUS -elif SID == "SJU": - updateModelDict(modelDict,'GFS80','D2DMODELS', 'AVN211') - updateModelDict(modelDict,'NAM80','D2DMODELS', 'ETA') - updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch') - updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') - updateModelDict(modelDict,'WNAwave10','D2DMODELS', 'WNAwave10') - updateModelDict(modelDict,'WNAwave4','D2DMODELS', 'WNAwave4') - updateModelDict(modelDict,'RTMA','D2DMODELS', 'PR-RTMA') - updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-SJU') - updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-SJU') - updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide') - updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') - updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') - updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes') - updateModelDict(modelDict,'RTOFS-Atlantic','D2DMODELS', 'RTOFS-Atlantic') - updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsPR') - updateModelDict(modelDict,'NAHwave4','D2DMODELS', 'NAHwave4') - updateModelDict(modelDict,'GFS20','D2DMODELS', 'PR-GFS') - updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendPR') - # Model databases for SJU - includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS20', 'GFS80', 'WaveWatch', - 'GlobalWave', 'HIRESWarw', 'HIRESWnmm', 'NAHwave4', 'NAM80', - 'NationalBlend', 'RTMA', 'RTOFS-Atlantic', 'SPC', 'TPCProb', - 'TPCProbPrelim', 'WNAwave10', 'WNAwave4', - 'nwpsCG1JAX', 'nwpsCG1KEY', 'nwpsCG1MFL', 'nwpsCG1MLB', 'nwpsCG1SJU', - 'nwpsTrkngCG0JAX', 'nwpsTrkngCG0KEY', 'nwpsTrkngCG0MFL', - 'nwpsTrkngCG0MLB', 'nwpsTrkngCG0SJU', - ] - -# East CONUS changes from default modelDict -elif SID in groups['CONUS_EAST_SITES']: - updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsUS') - updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-East') - updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-East') - -if SID in groups['GreatLake_SITES']: - modelDict['GLERL'] = { - 'D2DMODELS': 'GLERL', - 'DB': ('GLERL', 'GRID', '', 0, 0, 2, 0), - 'Parms': [([Period, Swell, 
WaveHeight], TC1), - ] - } - - modelDict['GLWN'] = {'D2DMODELS': 'GLWN'} - -# NWPS configuration. -if SID in ['AFC', 'AER', 'AFG', 'AJK', 'ALU', 'AVAK']: - nwpsSites = ['AER', 'AFG', 'AJK', 'ALU',] -elif SID in ['GUM', 'HFO',]: - nwpsSites = ['GUM', 'HFO',] -elif SID == "SJU": - nwpsSites = ['SJU', 'MFL', 'KEY', 'MLB', 'JAX'] -elif SID in ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS', - 'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL', - 'MLB', 'JAX',]: - nwpsSites = ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS', - 'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL', - 'MLB', 'JAX', 'SJU',] -elif SID in ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]: - nwpsSites = ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',] -else: - nwpsSites = [] - -for s in nwpsSites: - name='nwpsCG1%s' % s - modelDict[name] = { - 'DB': (name, 'GRID', '', NO, NO, 2, 0), - 'D2DMODELS': name, - 'INITMODULES': name, - 'Parms': nwpsCG1_MODEL, - } - name='nwpsTrkngCG0%s' % s - modelDict[name] = { - 'DB': (name, 'GRID', '', NO, NO, 2, 0), - 'D2DMODELS': name, - 'INITMODULES': name, - 'Parms': nwpsTrkngCG0_MODEL, - } -# This list will be used to set up a default ignoreDatabases list. This is shorter than -# listing all models to ignore. Usually only set up for sites that aren't CONUS WFOs -# includeOnly is not designed to be changed by localConfig. -if includeOnly: - for m in sorted(modelDict.keys()): - if m not in includeOnly and 'D2DMODELS' in modelDict[m]: - ignoreDatabases.append(m) - -# END modelDict initial set up -#------------------------------------------------------------------------------ -# Add in optional parms to Fcst parm def -if SID in groups['powt']: - addPowt(modelDict) - -if SID in groups['winterProbs']: - addWinterWeatherProbs(modelDict) - -if SID in groups['rainfallProbs']: - addRainfallProbs(modelDict) - -D2DMODELS=[] -D2DDBVERSIONS={} -D2DAccumulativeElements={} -INITMODULES={} -INITSKIPS={} - -localParms = [] -localISCParms = [] -localISCExtraParms = [] -localLogFile = '' - -if not BASELINE and siteImport('localConfig'): - localParms = getattr(localConfig, 'parms', []) - localISCParms = getattr(localConfig, 'parmsISC', []) - localISCExtraParms = getattr(localConfig, 'extraISCparms', []) - localLogFile = getattr(localConfig, 'logFile', '') - modelDict['Fcst']['Parms'] += localParms - #ensure office type is set properly in localConfig SITES[] - if len(SITES[GFESUITE_SITEID]) == 5: - a = list(SITES[GFESUITE_SITEID]) - a.append(myOfficeType) - SITES[GFESUITE_SITEID] = tuple(a) - else: - myOfficeType = SITES[GFESUITE_SITEID][5] #probably from localConfig - -# Instantiate settings from modelDict -db=dbConfig(modelDict) -db.addConfiguredModels(ignoreDatabases) -DATABASES = db.dbs -D2DMODELS = db.D2DMODELS -D2DDBVERSIONS = db.D2DDBVERSIONS -D2DAccumulativeElements = db.D2DAccumulativeElements -INITMODULES = db.INITMODULES -INITSKIPS = db.INITSKIPS -OFFICIALDBS=list(modelDict['Fcst']['Parms']) - -# Create Practice and test databases from Fcst -DATABASES.append((Official, modelDict['Fcst']['Parms'])), -DATABASES.append((Practice, modelDict['Fcst']['Parms'])), -DATABASES.append((TestFcst, modelDict['Fcst']['Parms'])), -DATABASES.append((Test, modelDict['Fcst']['Parms'])), - -for entry in AdditionalISCRouting: - (parmList, dbName, editAreaPrefix) = entry - parmList = list(parmList) - addedIscDbDefinition = (dbName, ) + ISC[1:] - addedIscParms = [(parmList, TC1)] - DATABASES.append((addedIscDbDefinition, addedIscParms)) - 
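-# Illustrative sketch of the AdditionalISCRouting expansion above (the entry
-# values here are hypothetical): an entry ([Hazards], 'ISCHZ', 'hz') yields
-# the database definition ('ISCHZ',) + ISC[1:] and the parms [([Hazards], TC1)].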
-# Intersite coordination database parameter groupings, based on -# OFFICIALDBS, but time constraint is always TC1 -for wes, tc in (OFFICIALDBS + localISCParms): - ISCPARMS.append((wes, TC1)) - -# We also add in any extraISCparms as needed, but only for office -# types other than our own. -for wes, officeType in (EXTRA_ISC_PARMS + localISCExtraParms): - if myOfficeType == officeType: - continue - if type(officeType) != str: - raise TypeError("Office type not a str: " + repr(officeType)) - else: - if officeType not in VALID_OFFICE_TYPES: - raise ValueError("Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]") - for we in wes: - wecopy = list(we) - wecopy[0] = wecopy[0] + officeType #rename the weather element - wecopy = tuple(wecopy) - ISCPARMS.append(([wecopy], TC1)) - -# Restore database parameter groupings (based on OFFICIALDBS, but TC1) -RESTOREPARMS = [] -for wes, tc in modelDict['Fcst']['Parms']: - RESTOREPARMS.append((wes, TC1)) - -# Now add the ISC and Restore databases to the DATABASES groupings -DATABASES.append((Restore, RESTOREPARMS)) -DATABASES.append((ISC, ISCPARMS)) - - -#D logfp=open('/localapps/logs/serverConfig2.log','w') -#D logfp.write('DATABASE names:\n') -#D for m in sorted(DATABASES): -#D logfp.write('%s\n' % m[0][0]) -#D logfp.write('\n\nDATABASES\n') -#D pprint.pprint(sorted(DATABASES),logfp,width=130) -#D logfp.write('\n\nINITMODULES\n') -#D pprint.pprint(INITMODULES,logfp,width=130) -#D logfp.write('\n\nD2DMODELS\n') -#D pprint.pprint(D2DMODELS,logfp,width=130) -#D logfp.close() - -doIt() - -#D logfp=open('/localapps/logs/SC_MD2.py','w') -#D modelDict=createModelDict(locals(),DATABASES,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements, -#D INITMODULES,INITSKIPS,logfp) -#D logfp.close() -if localLogFile: - printServerConfig(sys.modules[__name__],vars(localConfig),localLogFile) -#D scfp.close() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# serverConfig -- base GFE server configuration file +# +# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED. INSTEAD REFER TO THE +# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE. +# +# Baseline GFE server configuration +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/09/2013 #1571 randerso Changed projections to use the Java +# ProjectionType enumeration +# 10/03/2013 #2418 dgilling Update for new pSurge 2.0 data. +# 10/03/2013 #2424 randerso Change localTC to use dateutil instead of pytz +# to get correct offsets for Alaska +# 01/17/2014 #2719 randerso Added NHA domain +# 02/20/2014 #2824 randerso Added log message when local override files are not found +# 03/11/2014 #2897 dgilling Add new MHWM databases to default configuration. 
+# 03/20/2014 #2418 dgilling Remove unneeded D2D source PHISH. +# 04/17/2014 #2934 dgilling Remove alias for TPCSurgeProb D2D database. +# 05/09/2014 #3148 randerso Add tpHPCndfd to D2DAccumulativeElements for HPCERP +# 06/20/2014 #3230 rferrel Added URMA25. +# 05/29/2014 #3224 randerso Added "SPC":8 to D2DDBVERSIONS +# 07/09/2014 #3146 randerso Removed unused import +# 07/10/2014 swhite Add surge and tropical threat WEs and their dependencies +# 01/08/2015 #15035 lshi add site WNJ +# 12/03/2014 #3866 rferrel Added GFS20 +# 01/13/2015 #3955 randerso Added definitions for NewTerrain database +# Added Topo to ISCPARMS +# 01/19/2015 #4014 dgilling Added ETSS. +# 02/11/2015 #4053 rferrel Added GLWN and moved GLERL to display only for Great Lakes sites.. +# 01/19/2015 #4014 dgilling Added ETSS. +# 02/24/2015 #16692 byin Added RTMA. Removed gfsLR and WaveWatch +# 03/19/2015 #4300 randerso Remove GUMa as it is obsolete (per Shannon White) +# 03/30/2015 #17288 bhunder Added Guam-RTMA to D2D models +# 03/30/2015 #17206 yteng Changed some parameters that are not rate parameters +# 03/31/2015 #17288 bhunder Added Weather Params for RTMA +# 04/03/2015 #4367 dgilling Change WindGust's time constraints back to TC1 +# for Fcst/Official. +# 04/08/2015 #4383 dgilling Define FireWX ISC configuration parameters. +# 04/15/2015 #17383 yteng Change localTC to fix error that time constraints +# being off +# Apr 25, 2015 4952 njensen Updated for new JEP API +# 04/20/2015 #4414 dgilling Add missing NWPSTrkngCG0 weather elements. +# 05/12/2015 #17144 bhunder Added RTMA model +# 05/29/2015 17496 ryu Changed parm definitions for Wave1-10 and Period1-10. +# +# 05/29/2015 #17144 bhunder Added weather Params for URMA25 and OCONUS RTMA +# 09/02/2015 #4819 rferrel Added HWRF. +# 09/09/2015 16287 amoore Additional validation of user input +# 10/07/2015 #4958 dgilling Added support for NationalBlend D2D data. +# 10/13/2015 #4961 randerso Updated NewTerrain/BaseTerrain database definitions +# 10/30/2015 #17940 jendrowski Responded to Code Review. Mostly syntactical changes. +# 11/05/2015 #18182 ryu Change D2DDBVERSIONS value for HPCERP to 24 +# 12/22/2015 #14152 jwatson Added Sky, Wind to GFSLAMPGrid parms +# 1/28/2016 #13910 amoore Wave model data should be available in 3-hrly timesteps +# 02/09/2016 #5283 nabowle Remove NGM support. +# 02/22/2016 #18161 wkwock Add NationalBlend model for AK, PR, HW +# 02/23/2016 #14845 jwatson Changed NamDNG5 to NamDNG for all sources and params. +# Changed D2DModels for CONUS and Alaska to +# namdng25 and AK-NamDNG3 +# 04/01/2016 18777 ryu Replace NCF ip addresses. +# 04/22/2016 #18896 wkwock Add more nationalBlend Model +# 06/01/2016 JCM removed tc3ng from officialdbs for wave/period elements; +# removed Wave_XX and Period_XX; removed Wave10, Period10; +# added databases for all sites to baseline +# 08/08/2016 #5747 randerso Support removal of wrapper.py +# 10/05/2016 19293 randerso Fixed units on Tropical and a few other weather elements +# 12/12/2016 #19596 bhunder Added "tp" to NationalBlend model D2DAccumulativeElements +# 02/20/2017 DCS18966 mdavis/pjendr. NIC adjustment: name changes and removal of obsolete +# smart inits(DCS 19490). Fixed addOptionalParms. +# 03/17/2017 19673 jmaloney Added Rip Current Probabilities (RipProb). +# 06/29/2017 6323 randerso Added P-ETSS model +# 07/19/2017 DCS19490 gpetrescu Removed AKwave10, Wave10 and Period10. 
+# 07/12/2017 6324 randerso Added TPCWindProb_Prelim model +# 07/12/2017 6253 randerso Updated for Standard Terrain +# 08/03/2017 #20054 bhunder Added changes for ETSS model and for ETSS-HiRes model. +# 10/03/2017 DR20432 arivera Replace GFS40 with GFS in SnowRatioGFS and remove +# GLOBHwave from SJU model databases. +# 11/28/2017 6539 randerso Made P-ETSS and TPCSurgeProb elements D2DAccumulativeElements +# 12/06/2017 DCS20267 psantos Add NWPS Rip Current Guidance +# 12/20/2017 20510 ryu changes to StormTotalSnow parameter +# 02/23/2018 #20395 wkwock Added NBM3.1 elements. +# 04/03/2018 DR20656 arivera Missing comma: "Dune Erosion Probability" in optionalParmsDict['marine'] +# 05/09/2018 DR20715 arivera Missing comma: groups['marineSites'] after 'AVAK' +# 06/18/2018 16729 ryu Remove tpHPC element from RFCQPF model and the smart init for the model. +# 09/04/2018 20874 wkwock Haines and Fosberg are 6 hours accumulative. +# +#################################################################################################### + +## +# This is an incremental override file, indicating that the files at different +# localization levels will be combined. Incremental overrides are achieved by +# creating a localConfig file at a higher priority localization level that +# imports this base file. +# +# See the Configuration Guides->Server Configuration->Syntax for localConfig.py +# section of the GFE Online Help for more information. +## + + + +#---------------------------------------------------------------------------- +# USEFUL DEFINES +#---------------------------------------------------------------------------- + +import siteConfig,imp +import pprint +import re +import sys +import LogStream +from collections import defaultdict +BASELINE = getattr(siteConfig, 'BASELINE', 0) + +#D scfp=open('/localapps/logs/scdebug.log','w') +class dbConfig(object): + """Class to create GFE databases from modelDict""" + def __init__(self,modelDict): + self.modelDict=modelDict + self.dbs=[] + self.D2DMODELS=[] + self.D2DDBVERSIONS={} + self.D2DAccumulativeElements={} + self.INITMODULES={} + self.INITSKIPS={} + + def addConfiguredModels(self,ignoreList=[]): + """Setup model databases defined in dbConfigDict. + ignoreList can be used to filter out specific models + """ + for m in self.modelDict: + if m in ignoreList: + continue + # Don't allow BC model if regular is in ignore list + if m[-2:] == 'BC' and m[:-2] in ignoreList: + continue + self.addGfeDB(m,self.modelDict[m]) + return + def addGfeDB(self,modelname,dbConfigDict): + """Does all the work needed for adding a model to GFE from entries + in dbConfigDict. This populates dbs and sets various self + variables. 
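+
+        Illustrative dbConfigDict entry (values mirror the baseline RAP13
+        model defined in this file):
+            {'DB': ('RAP13', 'GRID', '', NO, NO, 2, 0),
+             'Parms': STD1_MODEL,
+             'D2DMODELS': 'RAP13',
+             'INITMODULES': 'RAP13'}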
+        """
+        if "DB" in dbConfigDict and "Parms" in dbConfigDict:
+            self.dbs.append((dbConfigDict["DB"],dbConfigDict["Parms"]))
+        if "D2DAccumulativeElements" in dbConfigDict:
+            self.D2DAccumulativeElements[modelname]=dbConfigDict["D2DAccumulativeElements"]
+        if "D2DDBVERSIONS" in dbConfigDict:
+            self.D2DDBVERSIONS[modelname]=dbConfigDict["D2DDBVERSIONS"]
+        if "D2DMODELS" in dbConfigDict:
+            self.D2DMODELS.append((dbConfigDict["D2DMODELS"],modelname))
+        if "INITMODULES" in dbConfigDict:
+            if type(dbConfigDict["INITMODULES"]) is tuple:
+                self.INITMODULES[dbConfigDict["INITMODULES"][0]] = dbConfigDict["INITMODULES"][1]
+            else:
+                self.INITMODULES[dbConfigDict["INITMODULES"]]=[modelname]
+        if "INITSKIPS" in dbConfigDict:
+            self.INITSKIPS[modelname]=dbConfigDict["INITSKIPS"]
+
+#===============================================================================
+#  Utility methods to manage GFE configuration
+#===============================================================================
+def mergeModelDicts(baseDict,addDict):
+    """Combine serverConfig model dict and regional modelDict into one modelDict.
+    Settings in baseDict are maintained unless overridden in addDict. The merging
+    is done on a key-by-key basis of a specific model's dictionary (baseDict and
+    addDict are dictionaries of dictionaries).
+    This changes baseDict in place so the object passed in as baseDict is modified
+    in the caller's scope.
+    """
+    for m,v in addDict.iteritems():
+        if m not in baseDict:
+            baseDict[m]=v
+        else:
+            for key,val in v.iteritems():
+                baseDict[m][key]=val
+
+def updateModelDict(modelDict,model,key,value):
+    """Updates a specific entry for a model in modelDict. model and key are dictionary
+    keys into modelDict and modelDict[model] respectively. If model is not defined
+    in modelDict, then a new entry is created. Otherwise, value replaces any existing
+    value in modelDict[model][key].
+    This changes modelDict in place so the object passed in as modelDict is modified
+    in the caller's scope.
+    """
+    if model in modelDict:
+        modelDict[model][key]=value
+    else:
+        modelDict[model]= {key : value}
+
+def alterModelDef(dbTuple, name=None, format=None, dbType=None, single=None,
+                  official=None, numver=None, purgeAge=None):
+    """Alter GFE database definition. The definition is used in the dbs setting
+    and has form:
+    (name, format, type, single, official, numVer, purgeAge)
+    i.e., Practice = ("Fcst", GRID, "Prac", YES, NO, 1, 24)
+
+    Won't use these exact names since some might conflict with builtins.
+    Only supply what you want to change. To clone a model definition, just
+    supply name='newname'
+    """
+    n,f,t,s,o,v,p=dbTuple
+    l=[]
+    for old,new in [(n,name),(f,format),(t,dbType),(s,single),(o,official),
+                    (v,numver),(p,purgeAge)]:
+        if new is None:
+            l.append(old)
+        else:
+            l.append(new)
+    return tuple(l)
+
+def createModelDict(localsDict,dbs,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements,
+                    INITMODULES,INITSKIPS):
+    """Convert serverConfig model configuration to a dictionary. This allows
+    legacy serverConfig settings in dbs,D2DMODELS,INITMODULES, etc. to be
+    maintained and then converted into a single dictionary where all settings
+    for a model are together.
+
+    WARNING: There can only be one version of a model in the dbs list. Fcst
+    practice and test databases have to be handled separately. This is ok
+    because these databases are defined after any localConfig customizations
+    of the normal Fcst database.
+
+    modelDict contains the following keys.
Only define what is needed, i.e., + it is not required to have every key defined + "DB": Definition of the database, i.e., the first value in a dbs entry: + ("wrfems", GRID, "", NO, NO, 3, 0). This must be a tuple. The name + in the DB entry must be the same as the model name used as the key + into the modelDict variable. + + "Parms" : Definition of the weather element parameters in the database, + i.e., the second part of the dbs entry. This is a list of tuples. + + "D2DMODELS" : D2D metadata database name for the source model. + + "INITMODULES': Name of the SmartInit module. This should be just the module + name as a string, not a list. + + "D2DAccumulativeElements" : List of parms that are accumulative + + "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather + Element Browser. Defaults to 2 if not supplied. + + "INITSKIPS" : Used to skip model cycles. + + Example for a model: + + modelDict["CMCreg"]={ + "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0), + "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF, + PoP, SnowAmt, SnowRatio], TC3), + ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG), + ([QPF12, PoP12],TC12NG), + ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), ([MinT], MinTTC), + ], + "D2DMODELS": "Canadian-Reg", + "INITMODULES": "Local_CMCreg", + "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"], + "D2DDBVERSIONS": 3, + } + """ + # Create self initializing dictionary via collections.defaultdict + modelDict=defaultdict(dict) + parmsDict={} + tcDict={} + + for n,v in sorted(localsDict.items()): + if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]: + parmsDict[n]=v + elif type(v) is tuple and len(v)==3 and type(v[0]) is int: + tcDict[n]=v + + # Process dbs entries, i.e., model database definition + tcDict={} + for item in sorted(dbs): + plist=[] + parmTmpDict={} + pDict={} + for pt in item[1]: + parmsList=[] + # Try to find named parm setting + for p in pt[0]: + pname=p[0] + pDict[pname]=p + parmsList.append(pname) + + # Try to get a named time constraint + name=next((name for name,v in tcDict.iteritems() if v == pt[1]), None) + if name is None: + name = `pt[1]` + tcDict[name]=pt[1] + if name in parmTmpDict: + parmTmpDict[name]+=parmsList + else: + parmTmpDict[name]=parmsList + + # This consolidates parms by time constraint and sorts parm names. + for tc in sorted(parmTmpDict.keys()): + theParms=[] + for p in sorted(parmTmpDict[tc]): + theParms.append(pDict[p]) + plist.append((theParms, tcDict[tc])) + + modelDict[item[0][0]]={'DB':item[0],'Parms':plist} + + for si,ml in INITMODULES.items(): + m=ml[0] + modelDict[m]['INITMODULES']=si + for m,v in D2DDBVERSIONS.items(): + modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m] + + for m,v in D2DAccumulativeElements.items(): + modelDict[m]['D2DAccumulativeElements']=v + for m,v in INITSKIPS.items(): + modelDict[m]['INITSKIPS']=v + for item in D2DMODELS: + if type(item) is str: + m=item + v=item + else: + v,m=item + if m in modelDict: + modelDict[m]['D2DMODELS']=v + else: + modelDict[m]={'D2DMODELS':v} + return modelDict + +def changeParm(modelDict,pname,value,modelList=['Fcst']): + """Alter a parm that is defined in modelDict Parm setting. + + pname: name of parm. This is a string not the parm definition + value: the parm definition tuple. If the None object, then the parm + will be deleted. + modelList: List of model names to check. An empty list will check all + models in modelDict. + Return: Nothing. modelDict is altered in place. 
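+
+    Illustrative example (hypothetical precision change, using the default
+    Fcst-only modelList; QPF's baseline definition appears later in this file):
+        changeParm(modelDict, 'QPF',
+                   ("QPF", SCALAR, "in", "QPF", maxQpfVal, 0.0, 3, YES))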
+    """
+    if not modelList:
+        modelList=modelDict.keys()
+    for m in modelList:
+        if m not in modelDict or 'Parms' not in modelDict[m] or \
+           not checkForParm(modelDict[m]['Parms'],pname):
+            continue
+
+        newpt=[]
+        # parms is tuple (parmList,TC)
+        for pList,tc in modelDict[m]['Parms']:
+            # This makes a copy of the list of parms, not a reference
+            # this is needed because we are changing the list in place.
+            theParms= list(pList)
+            match=False
+            for matchParm in (p for p in theParms if p[0] == pname):
+                match=True
+                theParms.remove(matchParm)
+            if match and value is not None:
+                theParms.append(value)
+            if theParms:
+                newpt.append((theParms,tc))
+        if newpt != modelDict[m]['Parms']:
+            modelDict[m]['Parms'] = newpt
+
+def changeParmTC(modelDict,pname,newTC,modelList=['Fcst']):
+    """Alter a parm that is defined in the modelDict Parm setting.
+
+    pname: name of parm. This is a string not the parm definition
+    newTC: the new Time Constraint (tuple)
+    modelList: List of model names to check. An empty list will check all
+               models in modelDict.
+    Return: Nothing. modelDict is altered in place.
+    """
+    if not modelList:
+        modelList=modelDict.keys()
+    for m in sorted(modelList):
+        if m not in modelDict or 'Parms' not in modelDict[m]:
+            continue
+#d      print m,"checkForParm=",checkForParm(modelDict[m]['Parms'],pname)
+        if not checkForParm(modelDict[m]['Parms'],pname):
+            continue
+
+        newpt=[]
+        # Parms is tuple (parmList,TC)
+        for pList,tc in modelDict[m]['Parms']:
+            # This makes a copy of the list of parms, not a reference
+            # this is needed because we are changing the list in place.
+            theParms= list(pList)
+            matchParm=next((p for p in theParms if p[0] == pname),None)
+#d          print m,matchParm,tc,newTC,len(theParms)
+            if matchParm:
+                theParms.remove(matchParm)
+                newpt.append(([matchParm],newTC))
+#d              print "Added",matchParm,newTC
+            if theParms:
+#d              print "restored",theParms," to",tc
+                newpt.append((theParms,tc))
+        if newpt != modelDict[m]['Parms']:
+#d          print 'Updated model',m
+            modelDict[m]['Parms'] = newpt
+#d          print modelDict[m]['Parms'],'\n'
+
+def checkForParm(parmDef,pname):
+    """Check a model parm definition if a parm named pname is in it.
+
+    parmDef: list of tuples, each tuple is a list of parms and a time
+             constraint. Call with modelDict[modelname]['Parms'].
+    pname: Name of parm (string).
+    Returns: Boolean True if found, or False
+    """
+    for item in parmDef:
+        t=next((pt for pt in item[0] if pt[0] == pname),None)
+        if t is not None:
+            return True
+    return False
+
+def getParmNames(parmsDef):
+    """Return a list of parm names in a model parm definition
+
+    parmsDef: list of tuples, each tuple is a list of parms and a time
+              constraint. Call with modelDict[modelname]['Parms'].
+    Returns: List of string parameter names
+
+    Here's an example of how to remove unused parms from Fcst; this can
+    be run in localConfig:
+
+    parmsToRemove=[]
+    for p in getParmNames(modelDict['Fcst']['Parms']):
+        pl=p.lower()
+        for t in ['period','swell','wave','surf', 'surge']:
+            if t in pl:
+                parmsToRemove.append(p)
+                break
+    removeParms(modelDict,'Fcst',parmsToRemove)
+    """
+    result=[]
+    for pList,tc in parmsDef:
+        # p is the parmDef tuple where first item is the parm name
+        newParms=[p[0] for p in pList]
+        result+=newParms
+    return sorted(result)
+
+def printServerConfig(moduleObj,localsDict, logFile="/awips2/edex/logs/localConfig.log"):
+    """Dump out ServerConfig final settings. localsDict is a dictionary of
+    local variables in localConfig, normally locals().
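+
+    Illustrative use (assumed localConfig setting): define a logFile in
+    localConfig, e.g. logFile = '/localapps/logs/serverConfig.log', and the
+    baseline calls printServerConfig with it at the end of configuration.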
+ """ + # serverConfig log text + scText="" + try: + with open(logFile,"w") as fp: + # Print out dbs entries, i.e., model database definition + fp.write("Configuration for %s\n" % localsDict['SID']) + dbs=DATABASES + for item in sorted(dbs): + scText += "\ndbs[%s]: %s\n" % (item[0][0], str(item[0])) + scText += _dumpParms(item[1]) + + # Dump out serverConfig settings likely to be modified by localConfig + scvars=["D2DMODELS", "INITMODULES", + "D2DDBVERSIONS", "D2DAccumulativeElements", + "REQUEST_ISC", "SEND_ISC_ON_SAVE", + "SEND_ISC_ON_PUBLISH", "REQUESTED_ISC_PARMS", + "ExtraWEPrecision", "INITSKIPS", + "HazardKeys", + "MAX_USER_BACKGROUND_PROCESSES", + "AdditionalISCRouting", + "ignoreDatabases", + ] + + for item in scvars: + scText += "\n%s:\n" % item + obj=getattr(moduleObj,item,None) + if type(obj) is list: + obj.sort() + scText += pprint.pformat(obj) +'\n' + + # This prints out all variables named parms*, i.e., parmsNAM12 + for k in sorted(localsDict.keys()): + if k == "OFFICIALDBS" or re.match("parms[A-Z]+",k) is not None or \ + k == "extraISCparms": + scText += "\n%s:\n" % k + scText += _dumpParms(localsDict[k]) + scText += printModelDict(localsDict) + fp.write(scText) + except IOError as e: + LogStream.logProblem("printServerConfig open file problem "+logFile+" - log not created\n" +LogStream.exc(e)) + +def printModelDict(localsDict): + """Convert serverConfig model configuration to a dictionary. This writes + the dictionary as text. This does not create a usable modelDict, just one to + use to print out the dictionary as python code.""" + + modelDict={} + parmsDict={} + tcDict={} + dbs=DATABASES + scText="" + for n,v in localsDict.items(): + if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]: + parmsDict[n]=v + elif type(v) is tuple and len(v)==3 and type(v[0]) is int: + tcDict[n]=v + + scText += '\n' + for n in sorted(parmsDict): + scText += 'parmVar: %s = %s\n' % (n,`parmsDict[n]`) + scText += '\n' + for n in sorted(tcDict): + scText += 'TC: %s = %s\n' % (n,`tcDict[n]`) + scText += '\n' + + # Print out dbs entries, i.e., model database definition + for item in sorted(dbs): + plist=[] + parmTmpDict={} + for pt in item[1]: + parmsList=[] + # Try to find named parm setting + for p in pt[0]: + name=next((name for name,v in parmsDict.iteritems() if v == p), None) + if name is not None: + parmsList.append(name) + else: + parmsList.append(p[0]) + theParms='&nlq(['+', '.join(parmsList)+'], ' + # Try to get a named time constraint + name=next((name for name,v in tcDict.iteritems() if v == pt[1]), None) + if name is None: + name = `pt[1]` + if name in parmTmpDict: + parmTmpDict[name]+=parmsList + else: + parmTmpDict[name]=parmsList + # This consolidates parms by time constraint and sorts parm names. + for tc in sorted(parmTmpDict.keys()): + parmTmpDict[tc]=sorted(parmTmpDict[tc]) + theParms='&nlq(['+', '.join(parmTmpDict[tc])+'], ' + plist.append(theParms + tc +')&nrq') + + modelDict[item[0][0]]={'DB':item[0],'Parms':plist} + for si,ml in INITMODULES.items(): + m=ml[0] + entry=si + if len(ml) > 1: + # Multiple d2d models for smartinit + # Try to get model from si name + if si.find('Local_') == 0: + m=si[6:] + entry=(si,ml) + if m in modelDict: + # If a model has multiple SmartInit modules, try to best match which + # Smartinit module to assign to the model. 
+            if 'INITMODULES' not in modelDict[m] or m in si:
+                modelDict[m]['INITMODULES']=entry
+        else:
+            modelDict[m]={'INITMODULES':entry}
+
+    for m,v in D2DDBVERSIONS.items():
+        if m in modelDict:
+            modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m]
+        else:
+            modelDict[m]={'D2DDBVERSIONS':D2DDBVERSIONS[m]}
+
+    for m,v in D2DAccumulativeElements.items():
+        if m in modelDict:
+            modelDict[m]['D2DAccumulativeElements']=v
+        else:
+            modelDict[m]={'D2DAccumulativeElements':v}
+    for m,v in INITSKIPS.items():
+        if m in modelDict:
+            modelDict[m]['INITSKIPS']=v
+        else:
+            modelDict[m]={'INITSKIPS':v}
+    for item in D2DMODELS:
+        if type(item) is str:
+            m=item
+            v=item
+        else:
+            v,m=item
+        if m in modelDict:
+            modelDict[m]['D2DMODELS']=v
+        else:
+            modelDict[m]={'D2DMODELS':v}
+
+    for m in sorted(modelDict):
+        text=pprint.pformat(modelDict[m],width=80,indent=0)
+        text=text.replace("'&nlq",'')
+        text=text.replace("&nrq'",'')
+        text=text.replace('"&nlq','')
+        text=text.replace('&nrq"','')
+        text=text.replace(", 'INITMODULES':",",\n'INITMODULES':")
+        text=text.replace(')]}','),\n ]\n}')
+        text=text.replace('\n','\n ')
+        scText += "modelDict['%s'] = {\n %s\n\n" % (m,text[1:])
+    return scText
+
+def _dumpParms(parms):
+    """Pretty prints parms."""
+    pDict={}
+    result=""
+    for item in parms:
+        if type(item) is not tuple:
+            # Not a parm definition!
+            return
+        pList,tc = item
+        for p in pList:
+            pDict[p[0]]=(p,tc)
+    for k in sorted(pDict.keys()):
+        result += " %s\n" % repr(pDict[k])
+    return result
+
+def addOptionalParms(defaultTC,tcParmDict,parmDict,modelDict):
+    """Adds parms from optionalParmsDict to the Fcst database.
+    This is a convenience function if most parms use the default time constraint.
+    Otherwise, it's just as easy to hard-code what needs to be added for an
+    optionalParmsDict entry.
+
+    defaultTC: Default time constraint to use if a parameter-specific TC is not
+               defined in tcParmDict.
+    tcParmDict: Dictionary with keys of time constraints. Value is a list of
+                parameter names to be added with that time constraint. Empty
+                dictionary ok if everything should use the default. Example:
+                tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]}
+    parmDict: Parameter dictionary with keys of parameter name and value is
+              the parameter definition tuple. Keys must match keys in tcParmDict.
+    modelDict: The serverConfig modelDict dictionary. Must already have Fcst
+               defined. Changed in place.
+    Returns: The parameter definition added to Fcst
+    """
+
+    tcParms={defaultTC:[]}
+    for tc in tcParmDict:
+        tcParms[tc]=[]
+    if len(tcParmDict) == 0:
+        tcParmDict['dummyTC']=['dummyParm']
+    for pname,value in parmDict.iteritems():
+        # Find the time constraint to use for this parm
+        theTC=defaultTC
+        for tc in tcParmDict:
+            if pname in tcParmDict[tc]:
+                theTC=tc
+                break
+        tcParms[theTC].append(value)
+
+    theParms=[]
+    for tc in tcParms:
+        theParms.append((tcParms[tc],tc))
+    modelDict['Fcst']['Parms'] += theParms
+    return theParms
+
+def addPowt(modelDict):
+    """This sets up PoWT parameters in the Fcst database.
+    """
+    defaultTC=TC1
+    # Use value of time constraint and string name of parm in tcParmDict
+    tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]
+               }
+    return addOptionalParms(defaultTC,tcParmDict,
+                            optionalParmsDict['powt'],modelDict)
+
+def addWinterWeatherProbs(modelDict):
+    """This sets up ER Winter Weather Probability parameters in the Fcst database.
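+    (Invoked by the baseline setup only when SID is in groups['winterProbs'].)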
+ """ + defaultTC=TC1 + # Use value of time constraint and string name of parm in tcParmDict + tcParmDict={} + return addOptionalParms(defaultTC,tcParmDict, + optionalParmsDict['winterProbs'],modelDict) + +def addRainfallProbs(modelDict): + """This sets up WPC rainfall probability parameters in the Fcst database. + """ + defaultTC=TC1 + # Use value of time constraint and string name of parm in tcParmDict + tcParmDict={} + return addOptionalParms(defaultTC,tcParmDict, + optionalParmsDict['rainfallProb'],modelDict) + +# Local-time based time constraints. Does not automatically account for +# daylight savings time. The dst flag is 0 for standard time and manually +# set to 1 for daylight time (if desired). The start is specified in +# seconds local time, e.g., 6*HOUR would indicate 6am. +def localTC(start,repeat,duration,dst): + timezone = SITES[GFESUITE_SITEID][3] + import dateutil.tz, datetime + tz = dateutil.tz.gettz(timezone) + local = datetime.datetime.now(tz) + delta = tz.utcoffset(local) - tz.dst(local) + offset = delta.days*86400 + delta.seconds + start = start - offset + if dst == 1: + start = start - 3600 #daylight savings flag + if start >= 3600 * 24: + start = start - 3600 * 24 + elif start < 0: + start = start + 3600 * 24 + return (start, repeat, duration) + +# imports the named module. If the module +# does not exist, it is just ignored. But +# if it exists and has an error, the exception +# is thrown. If the module was imported returns +# true. +def siteImport(modName): + try: + fp, path, des = imp.find_module(modName) + if fp: + fp.close() + except ImportError: + LogStream.logEvent("No " + modName + " file found, using baseline settings."); + return 0 + globals()[modName] = __import__(modName) + return 1 + +def doIt(): + # Import the local site configuration file (if it exists) + import doConfig + import VTECPartners + (models, projections, vis, wx, desDef, allSites, domain, siteId, timeZone,officeTypes) = \ + doConfig.parse(GFESUITE_SITEID, DATABASES, types, visibilities, SITES, + allProjections) + IFPConfigServer.models = models + IFPConfigServer.projectionData = projections + IFPConfigServer.weatherVisibilities = vis + IFPConfigServer.weatherTypes = wx + IFPConfigServer.discreteDefinitions = desDef + IFPConfigServer.allSites = allSites + IFPConfigServer.officeTypes = officeTypes + IFPConfigServer.siteID = siteId + IFPConfigServer.timeZone = timeZone + IFPConfigServer.d2dModels = doConfig.d2dParse(D2DMODELS) + IFPConfigServer.netCDFDirs = doConfig.netcdfParse(NETCDFDIRS) + IFPConfigServer.satData = doConfig.parseSat(SATDATA) + IFPConfigServer.domain = domain + + (serverHost, mhsid, \ + rpcPort, \ + initMethods, accumulativeD2DElements, \ + initSkips, d2dVersions, \ + logFilePurgeAfter, \ + prdDir, baseDir, \ + extraWEPrecision, \ + tableFetchTime, \ + autoConfigureNotifyTextProd, \ + iscRoutingTableAddress, \ + requestedISCsites, requestISC, \ + sendiscOnSave, sendiscOnPublish, \ + requestedISCparms, \ + transmitScript) \ + = doConfig.otherParse(SITES.keys(), \ + GFESUITE_SERVER, GFESUITE_MHSID, \ + GFESUITE_PORT, INITMODULES, + D2DAccumulativeElements, + INITSKIPS, D2DDBVERSIONS, LOG_FILE_PURGE_AFTER, + GFESUITE_PRDDIR, GFESUITE_HOME, + ExtraWEPrecision, VTECPartners.VTEC_REMOTE_TABLE_FETCH_TIME, + AUTO_CONFIGURE_NOTIFYTEXTPROD, ISC_ROUTING_TABLE_ADDRESS, + REQUESTED_ISC_SITES, REQUEST_ISC, SEND_ISC_ON_SAVE, SEND_ISC_ON_PUBLISH, + REQUESTED_ISC_PARMS, TRANSMIT_SCRIPT) + IFPConfigServer.serverHost = serverHost + IFPConfigServer.mhsid = mhsid + IFPConfigServer.rpcPort = 
rpcPort + IFPConfigServer.initMethods = initMethods + IFPConfigServer.accumulativeD2DElements = accumulativeD2DElements + IFPConfigServer.initSkips = initSkips + IFPConfigServer.d2dVersions = d2dVersions + IFPConfigServer.logFilePurgeAfter = logFilePurgeAfter + IFPConfigServer.prdDir = prdDir + IFPConfigServer.baseDir = baseDir + IFPConfigServer.extraWEPrecision = extraWEPrecision + IFPConfigServer.tableFetchTime = tableFetchTime + IFPConfigServer.autoConfigureNotifyTextProd = autoConfigureNotifyTextProd + IFPConfigServer.iscRoutingTableAddress = iscRoutingTableAddress + IFPConfigServer.requestedISCsites = requestedISCsites + IFPConfigServer.requestISC = requestISC + IFPConfigServer.sendiscOnSave = sendiscOnSave + IFPConfigServer.sendiscOnPublish = sendiscOnPublish + IFPConfigServer.requestedISCparms = requestedISCparms + IFPConfigServer.transmitScript = transmitScript + IFPConfigServer.iscRoutingConfig = doConfig.parseAdditionalISCRouting(AdditionalISCRouting) + +def getSimpleConfig(): + return IFPConfigServer + +GFESUITE_SITEID = siteConfig.GFESUITE_SITEID +GFESUITE_MHSID = siteConfig.GFESUITE_MHSID +GFESUITE_SERVER = siteConfig.GFESUITE_SERVER +GFESUITE_HOME = siteConfig.GFESUITE_HOME +GFESUITE_PORT = int(siteConfig.GFESUITE_PORT) +#GFESUITE_DATDIR = siteConfig.GFESUITE_DATDIR +GFESUITE_LOGDIR = siteConfig.GFESUITE_LOGDIR +GFESUITE_PRDDIR = siteConfig.GFESUITE_PRDDIR +#GFESUITE_SHPDIR = siteConfig.GFESUITE_SHPDIR +#GFESUITE_TOPODIR = siteConfig.GFESUITE_TOPODIR +#GFESUITE_VTECDIR = siteConfig.GFESUITE_VTECDIR + +SID = GFESUITE_SITEID + +# modelDict is a master configuration dictionary for all GFE databases +# Create self initializing dictionary via collections.defaultdict +modelDict=defaultdict(dict) + +# ignoreDatabases is used when executing the final configuration to ignore +# certain models. The new paradigm with modelDict is to have one master +# modelDict and ignore datasets for specific regions or groups. Sites can +# add to or remove from ignoreDatabases in their localConfig. +ignoreDatabases=[] + +# Groups are a way of setting up groups of parms for special or optionally used +# methodology. For example, the Probability of Weather Type methodology. +groups={} +groups['ALASKA_SITES'] = ['AFG', 'AJK', 'ALU', 'AER', 'ACR', 'AFC', 'VRH', 'AAWU', 'AVAK'] +groups['GreatLake_SITES'] = ['LOT', 'MKX', 'GRB', 'DLH', 'MQT', 'APX', 'GRR', 'DTX', + 'IWX', 'CLE', 'BUF', 'PBZ', 'ILN', 'IND', 'ILX', 'MPX', 'FGF'] +groups['CONUS_EAST_SITES'] = ['ALY', 'AKQ', 'APX', 'BGM', 'BMX', 'BOX', 'BTV', 'BUF', + 'CAE', 'CAR', 'CHS', 'CLE', 'CTP', 'DTX', 'FFC', 'GRR', + 'GSP', 'GYX', 'ILM', 'ILN', 'IND', 'JAN', 'JAX', 'JKL', + 'LCH', 'LMK', 'LWX', 'MEG', 'MFL', 'MHX', 'MLB', 'MOB', + 'MQT', 'MRX', 'OKX', 'PAH', 'PBZ', 'PHI', 'RAH', 'RLX', + 'RNK', 'TAE', 'TBW', 'ALR', 'RHA', 'TAR', 'TIR'] +groups['RFC_SITES'] = ["ACR", "ALR", "FWR", "KRF", "MSR", "ORN", "PTR", + "RHA", "RSA", "STR", "TAR", "TIR", "TUA"] + +siteRegion={} +# need to account for RFCs? 
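+# (siteRegion maps each NWS region id to its site list; the loop below uses it
+# to set myRegion for this SID, e.g. 'BOU' resolves to 'CR'.)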
+siteRegion['AR'] = groups['ALASKA_SITES'] +siteRegion['CR'] = ['ABR','APX','ARX','BIS','BOU','CYS','DDC','DLH','DMX','DTX', + 'DVN','EAX','FGF','FSD','GID','GJT','GLD','GRB','GRR','ICT', + 'ILX','IND','IWX','JKL','LBF','LMK','LOT','LSX','MKX','MPX', + 'MQT','OAX','PAH','PUB','RIW','SGF','TOP','UNR'] +siteRegion['ER'] = ['AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE', + 'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI', + 'RAH','RLX','RNK'] +siteRegion['PR'] = ['GUM','HFO','PBP','PPG'] +siteRegion['SR'] = ['ABQ','AMA','BMX','BRO','CRP','EPZ','EWX','FFC','FWD','HGX', + 'HUN','JAN','JAX','KEY','LCH','LIX','LUB','LZK','MAF','MEG', + 'MFL','MLB','MOB','MRX','OHX','OUN','SHV','SJT','SJU','TAE', + 'TBW','TSA'] +siteRegion['WR'] = ['BOI','BYZ','EKA','FGZ','GGW','HNX','LKN','LOX','MFR','MSO', + 'MTR','OTX','PDT','PIH','PQR','PSR','REV','SEW','SGX','SLC', + 'STO','TFX','TWC','VEF'] + +groups['OCONUS_SITES'] = groups['ALASKA_SITES'] + siteRegion['PR'] + ['SJU'] + +myRegion='ALL' +for r in siteRegion: + if SID in siteRegion[r]: + myRegion=r + break + +groups['powt']=list(groups['OCONUS_SITES']+ siteRegion['CR'] + siteRegion['ER'] + siteRegion['SR'] + siteRegion['WR']) +groups['marineSites']=[ + # CONUS WFOs + "CAR","GYX","BOX","OKX","PHI","LWX","AKQ","MHX","ILM","CHS", + "BRO","CRP","HGX","LCH","LIX","MOB","TAE","TBW","KEY","MFL", + "MLB","JAX","SJU", + "SEW","PQR","MFR","EKA","MTR","LOX","SGX", + # AR sites + 'AFC', 'AFG', 'AJK', 'AER', 'ALU', 'VRH', 'AVAK', + # OPC Atlantic and Pacific + 'ONA', 'ONP', + # NHC/TAFB Pacific and Atlantic, Storm Surge + 'NH1', 'NH2', 'NHA', + # HFO Marine, GUM + 'HFO', 'HPA', 'GUM', + ] + +groups['winterProbs']= [ + # ER sites + 'AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE', + 'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI', + 'RAH','RLX','RNK', + #CR sites + 'ABR','BIS','BOU','CYS','DDC','DMX','FGF','FSD','GLD','GRB', + 'ICT','IND','IWX','JKL','LMK','LOT','MKX','MPX','MQT','OAX', + 'PAH','PUB','SGF','GJT', + #SR sites + 'FFC','LUB','MRX','OUN','TSA', + #WR sites + 'FGZ','GGW','HNX','LKN','MFR','MSO','OTX','PDT','REV','SEW', + 'SGX','SLC','STO' + ] + +groups['rainfallProbs'] = ["BOX"] + +#--------------------------------------------------------------------------- +# +# Weather Element configuration section. 
+# +#--------------------------------------------------------------------------- + +SCALAR = 'Scalar' +VECTOR = 'Vector' +WEATHER = 'Weather' +DISCRETE = 'Discrete' +YES = 1 +NO = 0 + +#SCALAR, VECTOR +# name/type/units/description/max/min/precision/rateParm/ +#WEATHER +# name/WEATHER/units/description/ +#DISCRETE +# keyDef = [(keySym, keyDesc), (keySym, keyDesc)] +# name/DISCRETE/units/description/overlapCapable/keyDef/ + +# Standard Public Weather Elements +SID = GFESUITE_SITEID + +maxTempVal=140.0 +minTempVal=-100.0 +maxTdVal=140.0 +minTdVal=-100.0 +maxQpfVal=10.0 +maxIceVal=5.0 +Temp = ("T", SCALAR, "F", "Surface Temperature", maxTempVal, minTempVal, 0, NO) +Td = ("Td", SCALAR, "F", "Dewpoint", maxTdVal, minTdVal, 0, NO) +MaxT = ("MaxT", SCALAR, "F", "Maximum Temperature", maxTempVal, minTempVal, 0, NO) +MinT = ("MinT", SCALAR, "F", "Minimum Temperature", maxTempVal, minTempVal, 0, NO) +HeatIndex = ("HeatIndex", SCALAR, "F", "Heat Index", maxTempVal, -80.0, 0, NO) +WindChill = ("WindChill", SCALAR, "F", "Wind Chill", 120.0, -120.0, 0, NO) +QPF = ("QPF", SCALAR, "in", "QPF", maxQpfVal, 0.0, 2, YES) +Wind = ("Wind", VECTOR, "kts", "Surface Wind", 125.0, 0.0, 0, NO) +WindGust = ("WindGust", SCALAR, "kts", "Wind Gust", 125.0, 0.0, 0, NO) +# special for TPC hurricane winds +HiWind = ("Wind", VECTOR, "kts", "Surface Wind", 200.0, 0.0, 0, NO) +Weather = ("Wx", WEATHER, "wx", "Weather") +IceAcc = ("IceAccum", SCALAR, "in", "Ice Accumulation", maxIceVal, 0.0, 2, YES) +StormTotalIce = ('StormTotalIce', SCALAR, 'in', 'Storm Total Ice', maxIceVal, 0.0, 2, YES) +SnowAmt = ("SnowAmt", SCALAR, "in", "Snowfall amount", 20.0, 0.0, 1, YES) +StormTotalSnow = ("StormTotalSnow", SCALAR, "in","Storm Total Snow", 180.0, 0.0, 1, NO) +PoP = ("PoP", SCALAR, "%", "Prob of Precip", 100.0, 0.0, 0, NO) +PoP6 = ("PoP6", SCALAR, "%", "Prob of Precip (6hr)", 100.0, 0.0, 0, NO) +PoP12 = ("PoP12", SCALAR, "%", "Prob of Precip (12hr)", 100.0, 0.0, 0, NO) +TstmPrb3 = ("TstmPrb3", SCALAR, "%", "Prob of Tstorm (3hr)", 100.0, 0.0, 0, NO) +TstmPrb6 = ("TstmPrb6", SCALAR, "%", "Prob of Tstorm (6hr)", 100.0, 0.0, 0, NO) +TstmPrb12 = ("TstmPrb12", SCALAR, "%", "Prob of Tstorm (12hr)", 100.0, 0.0, 0, NO) +Sky = ("Sky", SCALAR, "%", "Sky Condition", 100.0, 0.0, 0, NO) +FzLevel = ("FzLevel", SCALAR, "ft", "Freezing level", 30000.0, 0.0, 0, NO) +SnowLevel = ("SnowLevel", SCALAR, "ft", "Snow Level", 18000.0, 0.0, 0, NO) +RH = ("RH", SCALAR, "%", "Relative Humidity", 100.0, 0.0, 0, NO) + +# DR20541 and 20482 - add collaborate PoP, SnowAmt, QPF and ndfd QPF tools +PoP12hr = ("PoP12hr", SCALAR, "%", "12 hr Chance of Precip", 100.0, 0.0, 0, NO) +QPF6hr = ("QPF6hr", SCALAR, "in", "6 hr Precipitation (in)", maxQpfVal, 0.0, 2, YES) +SnowAmt6hr = ("SnowAmt6hr", SCALAR, "in", "6 hr Snowfall", 30.0, 0.0, 1, YES) + +# Cobb SnowTool included. 
+SnowRatio = ('SnowRatio', SCALAR, 'none', 'Snow Ratio', 40.0, 0.0, 1, NO) +#totalVV = ('totalVV', SCALAR, 'ubar/s', 'Total VV', 400.0, 0.0, 0, YES) +cape = ("cape", SCALAR, "1unit", "CAPE", 8000.0, 0.0, 1, NO) +ApparentT = ("ApparentT", SCALAR, "F", "Apparent Temperature", maxTempVal, -120.0, 0, NO) +LkSfcT = ("LkSfcT", SCALAR, "C", "Lake Surface T", 40.0, -2.0, 1, NO) +SnowMap = ("SnowMap", SCALAR, "in", "Snowfall Map", 20.0, 0.0, 1, NO) +StormTotalQPF = ('StormTotalQPF', SCALAR, 'in', 'Storm Total QPF (in)', 36.0, 0.0, 2, NO) +SeasonTotalSnow = ('SeasonTotalSnow', SCALAR, 'in', 'Season Total Snow (in)', 150.0, 0.0, 2, NO) + +# Fire Weather Weather Elements +LAL = ("LAL", SCALAR, "cat", "Lightning Activity Level", 6.0, 1.0, 0, NO) +CWR = ("CWR", SCALAR, "%", "Chance of Wetting Rain", 100.0, 0.0, 0, NO) +Haines = ("Haines", SCALAR, "cat", "Haines Index", 6.0, 2.0, 0, NO) +MixHgt = ("MixHgt", SCALAR, "ft", "Mixing Height", 20000.0, 0.0, 0, NO) +Wind20ft = ("Wind20ft", VECTOR, "kts", "20ft. Wind", 125.0, 0.0, 0, NO) +FreeWind = ("FreeWind", VECTOR, "kts", "Free Air Wind", 125.0, 0.0, 0, NO) +TransWind = ("TransWind", VECTOR, "kts", "Transport Wind", 125.0, 0.0, 0, NO) +Stability = ("Stability",SCALAR,"cat","Stability", 6.0,1.0,0, NO) +HrsOfSun = ("HrsOfSun",SCALAR,"hrs","Hours of Sun",24.0,0.0,1, NO) +MarineLayer = ("MarineLayer",SCALAR,"ft","Depth of Marine Layer", 20000.0,0.0,0,NO) +InvBurnOffTemp = ("InvBurnOffTemp",SCALAR,"F","Inversion Burn-off Temperature", 120.0,-30.0,0, NO) +VentRate = ("VentRate", SCALAR, "kt*ft", "VentRate", 500000.0, 0.0, 0, NO) +DSI = ("DSI", SCALAR, "index", "DSI", 6.0, 0.0, 0, NO) +MaxRH = ("MaxRH", SCALAR, "%", "Maximum Relative Humidity", 100.0, 0.0, 0, NO) +MinRH = ("MinRH", SCALAR, "%", "Minimum Relative Humidity", 100.0, 0.0, 0, NO) +Wetflag = ("Wetflag", SCALAR, "yn", "1300LT WetFlag", 1.0, 0.0, 0, NO) +Ttrend = ("Ttrend", SCALAR, "F", "24hr Temperature Trend", 50.0, -50.0, 0, NO) +RHtrend = ("RHtrend", SCALAR, "F", "24hr Relative Humidity Trend", 100.0, -100.0, 0, NO) + +# HPC Delta weather elements +DeltaMinT = ('DeltaMinT', SCALAR, 'F', 'DeltaMinT', 130.0, -80.0, 0, NO) +DeltaMaxT = ('DeltaMaxT', SCALAR, 'F', 'DeltaMaxT', 130.0, -80.0, 0, NO) +DeltaWind = ("DeltaWind", VECTOR, "kts", "Surface Delta Wind", 125.0, 0.0, 0, NO) +DeltaSky = ("DeltaSky", SCALAR, "%", "Delta Sky Condition", 100.0, -100.0, 0, NO) +DeltaPoP = ("DeltaPoP", SCALAR, "%", "Delta Prob of Precip", 100.0, -100.0, 0, NO) + +# Special LAPS parms +Radar = ("Radar", SCALAR, "dbz", "Radar Reflectivity", 80.0, -20.0, 0, NO) + +# RTMA parms +QPE = ("QPE", SCALAR, "in", "QPE", maxQpfVal, 0.0, 2, YES) +#if SID in groups['ALASKA_SITES']: - not sure if this needs to be like that +if SID in groups['OCONUS_SITES']: + TUnc = ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 20.0, 0.0, 0, NO) + TdUnc = ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 25.0, 0.0, 0, NO) +else: + TUnc = ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 10.0, 0.0, 0, NO) + TdUnc = ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 15.0, 0.0, 0, NO) +# DR17144 +SkyUnc = ("SkyUnc", SCALAR, "%", "Sky Condition Uncertainty", 100.0, 0.0, 0, NO) +WSpdUnc = ("WSpdUnc", SCALAR, "kts", "WSpd Anl Uncertainty", 12.0, 0.0, 0, NO) +WDirUnc = ("WDirUnc", SCALAR, "deg", "WDir Anl Uncertainty", 10.0, 0.0, 0, NO) +VisUnc = ("VisUnc", SCALAR, "SM", "Vsby Anl Uncertainty", 10.0, 0.0, 2, NO) +# DCS 17288 +PressUnc = ("PressUnc", SCALAR, "Pa", "Press Anl Uncertainty", 110000.0, 0.0, 2, NO) +Pressure = ("Pressure", SCALAR, "Pa", 
"Pressure", 110000.0, 0.0, 2, NO) +WGustUnc = ("WGustUnc", SCALAR, "kts", "WGust Anl Uncertainty", 12.0, 0.0, 0, NO) + +# NamDNG parms +QPF3 = ("QPF3", SCALAR, "in", "3HR QPF", maxQpfVal, 0.0, 2, YES) +QPF6 = ("QPF6", SCALAR, "in", "6HR QPF", maxQpfVal, 0.0, 2, YES) +QPF12 = ("QPF12", SCALAR, "in", "12HR QPF", maxQpfVal, 0.0, 2, YES) +Vis = ("Vis", SCALAR, "SM", "Visibility", 10.0, 0.0, 2, NO) +SnowAmt6 = ("SnowAmt6", SCALAR, "in", "Snowfall amount (6hr)", 20.0, 0.0, 1, YES) + +MaxT3 = ("MaxT3", SCALAR, "F", "3hr Maximum Temperature", maxTempVal, minTempVal, 0, NO) +MinT3 = ("MinT3", SCALAR, "F", "3hr Minimum Temperature", maxTempVal, minTempVal, 0, NO) +MaxRH3 = ("MaxRH3", SCALAR, "%", "3hr Maximum Relative Humidity", 100.0, 0.0, 0, NO) + +# Parms for ,'SAT',Satellite +SatVisE = ("VisibleE", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO) +SatIR11E = ("IR11E", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO) +SatIR13E = ("IR13E", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO) +SatIR39E = ("IR39E", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO) +SatWVE = ("WaterVaporE", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO) +SatFogE = ("FogE", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO) + +SatVisW = ("VisibleW", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO) +SatIR11W = ("IR11W", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO) +SatIR13W = ("IR13W", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO) +SatIR39W = ("IR39W", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO) +SatWVW = ("WaterVaporW", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO) +SatFogW = ("FogW", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO) + +# TPC Wind Probability parms +prob34 = ("prob34", SCALAR, "%", "WS34 CPROB", 100.0, 0.0, 0, NO) +prob50 = ("prob50", SCALAR, "%", "WS50 CPROB", 100.0, 0.0, 0, NO) +prob64 = ("prob64", SCALAR, "%", "WS64 CPROB", 100.0, 0.0, 0, NO) +pws34 = ("pws34", SCALAR, "%", "34WSIPROB", 100.0, 0.0, 0, NO) +pws50 = ("pws50", SCALAR, "%", "50WSIPROB", 100.0, 0.0, 0, NO) +pws64 = ("pws64", SCALAR, "%", "64WSIPROB", 100.0, 0.0, 0, NO) +pwsD34 = ("pwsD34", SCALAR, "%", "Day34WSIPROB", 100.0, 0.0, 0, NO) +pwsN34 = ("pwsN34", SCALAR, "%", "Night34WSIPROB", 100.0, 0.0, 0, NO) +pwsD64 = ("pwsD64", SCALAR, "%", "Day64WSIPROB", 100.0, 0.0, 0, NO) +pwsN64 = ("pwsN64", SCALAR, "%", "Night64WSI PROB", 100.0, 0.0, 0, NO) +pws34int = ("pws34int", SCALAR, "%", "34WSIntPROB", 100.0, 0.0, 0, NO) +pws64int = ("pws64int", SCALAR, "%", "64WSIntPROB", 100.0, 0.0, 0, NO) + +# Surge parms for HLS/TCV +InundationMax = ("InundationMax", SCALAR, "ft", "Max Inundation", 30.0, -100.0, 1, NO) +InundationTiming = ("InundationTiming", SCALAR, "ft", "Incremental Inundation", 30.0, -100.0, 1, NO) +SurgeHtPlusTideMSL = ("SurgeHtPlusTideMSL", SCALAR, "ft", "Surge above MSL", 30.0, -100.0, 1, NO) +SurgeHtPlusTideMLLW = ("SurgeHtPlusTideMLLW", SCALAR, "ft", "Surge above MLLW", 30.0, -100.0, 1, NO) +SurgeHtPlusTideMHHW = ("SurgeHtPlusTideMHHW", SCALAR, "ft", "Surge above MHHW", 30.0, -100.0, 1, NO) +SurgeHtPlusTideNAVD = ("SurgeHtPlusTideNAVD", SCALAR, "ft", "Surge above NAVD88", 30.0, -100.0, 1, NO) + +# parms for storm surge collaboration +SShazardKeys = [("",""), ("SS.A", "STORM SURGE WATCH"), ("SS.W", "STORM SURGE WARNING")] +ProposedSS = ("ProposedSS", DISCRETE, "wwa", "Proposed StormSurge Hazards", YES, SShazardKeys, 7) +tempProposedSS = ("tempProposedSS", DISCRETE, "wwa", "Temp Proposed StormSurge Hazards", + YES, 
SShazardKeys, 4) +InitialSS = ("InitialSS", DISCRETE, "wwa", "Initial StormSurge Hazards", + YES, SShazardKeys, 4) +DiffSS = ("DiffSS", SCALAR, "None", "Difference StormSurge Hazards", 2.0, -1.0, 0, NO) + +# parms for tropical cyclone threat graphics +Threat4Keys = [("None","None to Little"), ("Elevated","Elevated"), ("Mod", "Moderate"), ("High", "High"), ("Extreme","Extreme"),] + +FloodingRainThreat = ("FloodingRainThreat", DISCRETE, "cat", "Flooding Rain Threat", NO, Threat4Keys,2) +StormSurgeThreat = ("StormSurgeThreat", DISCRETE, "cat", "Storm Surge Threat", NO, Threat4Keys,2) +WindThreat = ("WindThreat", DISCRETE, "cat", "Wind Threat", NO, Threat4Keys,2) +TornadoThreat = ("TornadoThreat", DISCRETE, "cat", "Tornado Threat", NO, Threat4Keys,2) +# 09/13/2016 JCM changed precision of QPFtoFFGRatio to 2, max from 8 to 1000 +QPFtoFFGRatio = ("QPFtoFFGRatio", SCALAR, "1", "QPF to FFG Ratio", 1000.0, 0.0, 2, NO) + +# Hazards +HazardKeys = [] +HazardKeys.append(("", "")) #1st one must be None +import VTECTable +kys = VTECTable.VTECTable.keys() +kys.sort() +for k in kys: + HazardKeys.append((k, VTECTable.VTECTable[k]['hdln'])) + +#H-VTEC keys - will someday add these back in +#("hydroER", "Hydro - Excessive Rainfall"), +#("hydroSM", "Hydro - Snow melt"), +#("hydroRS", "Rain and Snow melt"), +#("hydroDM", "Dam or Levee Failure"), +#("hydroGO", "Glacier-Dammed Lake Outburst"), +#("hydroIJ", "Ice Jam"), +#("hydroIC", "Rain and/or Snow melt and/or Ice Jam"), + +Hazards = ("Hazards", DISCRETE, "wwa", "Hazards", YES, HazardKeys, 4) + +# Scalar/Vector Weather Elements that Require Extra Precision (due to their +# use in calculations) Either form may be used. +ExtraWEPrecision = [] + +# Parms for ESTOFS +AstroTide = ("AstroTide", SCALAR, "ft", "Astro Tide", 20.0, -8.0, 1, NO) +StormSurge = ("StormSurge", SCALAR, "ft", "Storm Surge", 30.0, -5.0, 1, NO) + +# Parms for ETSS and ETSSHiRes +SurgeTide = ("SurgeTide", SCALAR, "ft", "Surge Tide", 20.0, -8.0, 1, NO) + +# Parm for Aviation/GFSLAMPGrid +CigHgt=("CigHgt",SCALAR,"ft","Ceiling Height",25000.0,-100.0,0,NO) + +# Parms for NationalBlend +QPF1=("QPF1", SCALAR, "in", "1HR QPF", maxQpfVal, 0.0, 2, YES) +PPI01=('PPI01', SCALAR, '%', '1-H Precip Potential Index', 100.0, 0.0, 0, NO) +PPI06=('PPI06', SCALAR, '%', '6-H Precip Potential Index', 100.0, 0.0, 0, NO) +PositiveEnergyAloft=("PositiveEnergyAloft" , SCALAR, "j/kg", "Positive energy aloft" , 500.0, 0.0, 1, NO) +NegativeEnergyLowLevel=("NegativeEnergyLowLevel" , SCALAR, "j/kg", "Negative energy in the low levels" , 0.0, -500.0, 1, NO) +SnowAmt01 = ("SnowAmt01", SCALAR, "in", "1-h Snow Accumulation", 20.0, 0.0, 1, YES) +IceAccum01 = ("IceAccum01", SCALAR, "inch", "1-h Ice Accumulation", maxIceVal, 0.0, 3, NO) +IceAccum = ("IceAccum", SCALAR, "inch", "6-h Ice Accumulation", 13.0, 0.0, 3, NO) +TstmPrb1 = ("TstmPrb1", SCALAR, "%", "1-h SREF-based Prob. of a Thunderstorm", 100.0, 0.0, 0, NO) +DryTstmPrb = ("DryTstmPrb", SCALAR, "%", "3-h SREF-based Prob. 
of a Dry Thunderstorm", 100.0, 0.0, 0, NO) +WGS50pct =("WGS50pct", SCALAR, "kts", "10-m Wind Gust",125.0 , 0.0, 0, NO) +WS50Prcntl30m =("WS50Prcntl30m", SCALAR, "kts", "30-m Wind Speed", 125.0, 0.0, 0, NO) +WS50Prcntl80m =("WS50Prcntl80m", SCALAR, "kts", "80-m Wind Speed", 125.0, 0.0, 0, NO) +Vis50pct =("Vis50pct", SCALAR, "SM", "Visibility",10.0 , 0.0, 3, NO) +T50pct =("T50pct", SCALAR, "F", "Air Temperature", maxTempVal, minTempVal, 1, NO) +PMSL10pct =("PMSL10pct", SCALAR, "mb", "10th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO) +PMSL50pct =("PMSL50pct", SCALAR, "mb", "50th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO) +PMSL90pct =("PMSL90pct", SCALAR, "mb", "90th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO) +FosBerg = ("FosBerg", SCALAR, "none", "Fosberg Fire Weather Index", 100.0, 0.0, 0, NO) + + + +#--------------------------------------------------------------------------- +# +# Weather configuration section +# +#--------------------------------------------------------------------------- + +# list of possible visibilities +visibilities = ['', '0SM', '1/4SM', '1/2SM', '3/4SM', '1SM', '11/2SM', + '2SM', '21/2SM', '3SM', '4SM', '5SM', '6SM', 'P6SM'] + +# list of possible coverages and probabilities +NOCOV = ('', 'No Coverage') +ISOD = ('Iso', 'Isolated') +SCT = ('Sct', 'Scattered') +NUM = ('Num', 'Numerous') +WIDE = ('Wide', 'Widespread') +OCNL = ('Ocnl', 'Occasional') +SCHC = ('SChc', 'Slight Chance Of') +CHC = ('Chc', 'Chance Of') +LKLY = ('Lkly', 'Likely') +DEFN = ('Def', 'Definite') +PATCHY = ('Patchy', 'Patchy') +AREAS = ('Areas', 'Areas of') +FQT = ('Frq', 'Frequent') +BRIEF = ('Brf', 'Brief') +PERIODS = ('Pds', 'Periods of') +INTM = ('Inter', 'Intermittent') + +# list of possible intensities +INTEN_NONE = ('', 'No intensity') +INTEN_VERYLIGHT = ('--', 'Very Light') +INTEN_LIGHT = ('-', 'Light') +INTEN_MOD = ('m', 'Moderate') +INTEN_HEAVY = ('+', 'Heavy') +INTEN_SEVERE = ('+', 'Severe') +INTEN_DENSE = ('+', 'Dense') + +# list of optional attributes +FQTLTG = ('FL', 'Frequent Lightning') +GUSTS = ('GW', 'Gusty Winds') +HVYRAFL = ('HvyRn', 'Heavy Rainfall') +DMGWND = ('DmgW', 'Damaging Winds') +SMALLH = ('SmA', 'Small Hail') +LARGEH = ('LgA', 'Large Hail') +OUTLYNG = ('OLA','in the outlying areas') +GRASSY = ('OGA','on grassy areas') +OVRPASS = ('OBO','on bridges and overpasses') +OR = ('OR', 'or') +DRY = ('Dry', 'dry') +PRIMARY = ('Primary', 'Highest Ranking') +MENTION = ('Mention', 'Include Unconditionally') +TORNADO = ('TOR', 'Tornadoes') + +# list of each weather types +NOWX = ('', 'No Weather', + [NOCOV], + [INTEN_NONE], + []) +THUNDER = ('T', 'Thunderstorms', + [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, + PERIODS, INTM], + [INTEN_NONE, INTEN_SEVERE], + [PRIMARY, MENTION, FQTLTG, HVYRAFL, GUSTS, DMGWND, DRY, + LARGEH, SMALLH, TORNADO]) +RAIN = ('R', 'Rain', + [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +RAINSHOWERS = ('RW', 'Rain Showers', + [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, + PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +DRIZZLE = ('L', 'Drizzle', + [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, + BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +FZRAIN = ('ZR', 'Freezing Rain', + [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, 
INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +FZDRIZZLE = ('ZL', 'Freezing Drizzle', + [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, + BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +SNOW = ('S', 'Snow', + [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +SNOWSHOWERS = ('SW', 'Snow Showers', + [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, + BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +SLEET = ('IP', 'Sleet', + [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM], + [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION, OR]) +FOG = ('F', 'Fog', + [PATCHY, AREAS, WIDE], + [INTEN_NONE, INTEN_DENSE], + [PRIMARY, MENTION]) +FREEZEFOG = ('ZF', 'Freezing Fog', + [PATCHY, AREAS, WIDE], + [INTEN_NONE, INTEN_DENSE], + [PRIMARY, MENTION]) +ICEFOG = ('IF', 'Ice Fog', + [PATCHY, AREAS, WIDE], + [INTEN_NONE], + [PRIMARY, MENTION]) +ICECRYSTAL = ('IC', 'Ice Crystals', + [PATCHY, AREAS, WIDE], + [INTEN_NONE], + [PRIMARY, MENTION]) +HAZE = ('H', 'Haze', + [DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) +BLWGSNOW = ('BS', 'Blowing Snow', + [PATCHY, AREAS, DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) +BLWGSAND = ('BN', 'Blowing Sand', + [PATCHY, AREAS, DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) +SMOKE = ('K', 'Smoke', + [PATCHY, AREAS, DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) +BLWGDUST = ('BD', 'Blowing Dust', + [PATCHY, AREAS, DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) +FROST = ('FR','Frost', + [PATCHY, AREAS, WIDE], + [INTEN_NONE], + [PRIMARY, MENTION, OUTLYNG]) +FRZSPRAY = ('ZY','Freezing Spray', + [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL], + [INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY], + [PRIMARY, MENTION]) +VOLASH = ('VA','Volcanic Ash', + [NOCOV], + [INTEN_NONE], + [PRIMARY, MENTION]) +WATERSPOUT = ('WP','Waterspouts', + [ISOD, SCHC, CHC, LKLY, DEFN], + [INTEN_NONE], + [PRIMARY, MENTION]) + + +types = [NOWX, THUNDER, WATERSPOUT, RAIN, RAINSHOWERS, + DRIZZLE, FZRAIN, FZDRIZZLE, SNOW, SNOWSHOWERS, + SLEET, FOG, FREEZEFOG, ICEFOG, ICECRYSTAL ,HAZE, BLWGSNOW, + BLWGSAND, SMOKE, BLWGDUST, FROST, FRZSPRAY, VOLASH] + + +# PARMS FROM NwsInitsConfig +#------------------------------------------------------------------------------- +# Discrete Keys +#------------------------------------------------------------------------------- +# +AirKeys = [("","None"), ("Watch","Watch"), ("Advisory","Advisory"),("Warning", "Warning"),] +ThreatKeys=[('', 'None'), ('Very Low', 'Very Low'), ('Low', 'Low'), + ('Moderate', 'Moderate'), ('High', 'High'), ('Extreme','Extreme')] +# +SevereKeys = [('NONE', '0'), ('TSTM', '2'), ('MRGL', '3'), ('SLGT', '4'), ('ENH', '5'), ('MOD', '6'), ('HIGH', '8')] + +AirQuality = ('AirQuality', DISCRETE, 'cat', 'Air Quality', NO, AirKeys) +BasinFFP = ('BasinFFP', DISCRETE, 'none', 'Basin Flash Flood Potential', NO, + [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')]) +CLRIndx = ('CLRIndx', SCALAR, 'none', 'Clearing Index', 1050.0, 0.0, 0, NO) +CQPF1 = ('CQPF1', SCALAR, 'in', '6hr Cont QPF', maxQpfVal, 0.0, 2, NO) +Ceiling = ('Ceiling', SCALAR, 'ft', 'Lowest Cloud Base Height', 25000.0, -30000.0, 0, NO) +CigHgtCat = ('CigHgtCat', SCALAR, 'index', 'Cloud Ceiling Height Category', 6.0, 0.0, 0, NO) +CloudBaseConditional = ('CloudBaseConditional', 
SCALAR, '100ft', 'Conditional Cloud Base Height', 250.0, 0.0, 0, NO) +CloudBasePrimary = ('CloudBasePrimary', SCALAR, '100ft', 'Primary Cloud Base Height', 250.0, 0.0, 0, NO) +CloudBaseSecondary = ('CloudBaseSecondary', SCALAR, '100ft', 'Secondary Cloud Base Height', 250.0, 0.0, 0, NO) +ClimoET = ('ClimoET', SCALAR, 'in', 'ClimoET', 0.75, 0.0, 2, NO) +ClimoETAprA = ('ClimoETAprA', SCALAR, 'in', 'ClimoET AprA', 0.75, 0.0, 2, NO) +ClimoETAprB = ('ClimoETAprB', SCALAR, 'in', 'ClimoET AprB', 0.75, 0.0, 2, NO) +ClimoETAugA = ('ClimoETAugA', SCALAR, 'in', 'ClimoET AugA', 0.75, 0.0, 2, NO) +ClimoETAugB = ('ClimoETAugB', SCALAR, 'in', 'ClimoET AugB', 0.75, 0.0, 2, NO) +ClimoETDecA = ('ClimoETDecA', SCALAR, 'in', 'ClimoET DecA', 0.75, 0.0, 2, NO) +ClimoETDecB = ('ClimoETDecB', SCALAR, 'in', 'ClimoET DecB', 0.75, 0.0, 2, NO) +ClimoETFebA = ('ClimoETFebA', SCALAR, 'in', 'ClimoET FebA', 0.75, 0.0, 2, NO) +ClimoETFebB = ('ClimoETFebB', SCALAR, 'in', 'ClimoET FebB', 0.75, 0.0, 2, NO) +ClimoETJanA = ('ClimoETJanA', SCALAR, 'in', 'ClimoET JanA', 0.75, 0.0, 2, NO) +ClimoETJanB = ('ClimoETJanB', SCALAR, 'in', 'ClimoET JanB', 0.75, 0.0, 2, NO) +ClimoETJulA = ('ClimoETJulA', SCALAR, 'in', 'ClimoET JulA', 0.75, 0.0, 2, NO) +ClimoETJulB = ('ClimoETJulB', SCALAR, 'in', 'ClimoET JulB', 0.75, 0.0, 2, NO) +ClimoETJunA = ('ClimoETJunA', SCALAR, 'in', 'ClimoET JunA', 0.75, 0.0, 2, NO) +ClimoETJunB = ('ClimoETJunB', SCALAR, 'in', 'ClimoET JunB', 0.75, 0.0, 2, NO) +ClimoETMarA = ('ClimoETMarA', SCALAR, 'in', 'ClimoET MarA', 0.75, 0.0, 2, NO) +ClimoETMarB = ('ClimoETMarB', SCALAR, 'in', 'ClimoET MarB', 0.75, 0.0, 2, NO) +ClimoETMayA = ('ClimoETMayA', SCALAR, 'in', 'ClimoET MayA', 0.75, 0.0, 2, NO) +ClimoETMayB = ('ClimoETMayB', SCALAR, 'in', 'ClimoET MayB', 0.75, 0.0, 2, NO) +ClimoETNovA = ('ClimoETNovA', SCALAR, 'in', 'ClimoET NovA', 0.75, 0.0, 2, NO) +ClimoETNovB = ('ClimoETNovB', SCALAR, 'in', 'ClimoET NovB', 0.75, 0.0, 2, NO) +ClimoETOctA = ('ClimoETOctA', SCALAR, 'in', 'ClimoET OctA', 0.75, 0.0, 2, NO) +ClimoETOctB = ('ClimoETOctB', SCALAR, 'in', 'ClimoET OctB', 0.75, 0.0, 2, NO) +ClimoETSepA = ('ClimoETSepA', SCALAR, 'in', 'ClimoET SepA', 0.75, 0.0, 2, NO) +ClimoETSepB = ('ClimoETSepB', SCALAR, 'in', 'ClimoET SepB', 0.75, 0.0, 2, NO) +ClimoPoP = ('ClimoPoP', SCALAR, '%', 'ClimoPoP', 100.0, 0.0, 0, NO) +ClimoPoPAprA = ('ClimoPoPAprA', SCALAR, '%', 'ClimoPoP AprA', 100.0, 0.0, 0, NO) +ClimoPoPAprB = ('ClimoPoPAprB', SCALAR, '%', 'ClimoPoP AprB', 100.0, 0.0, 0, NO) +ClimoPoPAugA = ('ClimoPoPAugA', SCALAR, '%', 'ClimoPoP AugA', 100.0, 0.0, 0, NO) +ClimoPoPAugB = ('ClimoPoPAugB', SCALAR, '%', 'ClimoPoP AugB', 100.0, 0.0, 0, NO) +ClimoPoPDecA = ('ClimoPoPDecA', SCALAR, '%', 'ClimoPoP DecA', 100.0, 0.0, 0, NO) +ClimoPoPDecB = ('ClimoPoPDecB', SCALAR, '%', 'ClimoPoP DecB', 100.0, 0.0, 0, NO) +ClimoPoPFG = ('ClimoPoPFG', SCALAR, '%', 'ClimoPoP First Guess', 100.0, 0.0, 0, NO) +ClimoPoPFebA = ('ClimoPoPFebA', SCALAR, '%', 'ClimoPoP FebA', 100.0, 0.0, 0, NO) +ClimoPoPFebB = ('ClimoPoPFebB', SCALAR, '%', 'ClimoPoP FebB', 100.0, 0.0, 0, NO) +ClimoPoPJanA = ('ClimoPoPJanA', SCALAR, '%', 'ClimoPoP JanA', 100.0, 0.0, 0, NO) +ClimoPoPJanB = ('ClimoPoPJanB', SCALAR, '%', 'ClimoPoP JanB', 100.0, 0.0, 0, NO) +ClimoPoPJulA = ('ClimoPoPJulA', SCALAR, '%', 'ClimoPoP JulA', 100.0, 0.0, 0, NO) +ClimoPoPJulB = ('ClimoPoPJulB', SCALAR, '%', 'ClimoPoP JulB', 100.0, 0.0, 0, NO) +ClimoPoPJunA = ('ClimoPoPJunA', SCALAR, '%', 'ClimoPoP JunA', 100.0, 0.0, 0, NO) +ClimoPoPJunB = ('ClimoPoPJunB', SCALAR, '%', 'ClimoPoP JunB', 100.0, 0.0, 0, NO) 
+ClimoPoPMarA = ('ClimoPoPMarA', SCALAR, '%', 'ClimoPoP MarA', 100.0, 0.0, 0, NO) +ClimoPoPMarB = ('ClimoPoPMarB', SCALAR, '%', 'ClimoPoP MarB', 100.0, 0.0, 0, NO) +ClimoPoPMayA = ('ClimoPoPMayA', SCALAR, '%', 'ClimoPoP MayA', 100.0, 0.0, 0, NO) +ClimoPoPMayB = ('ClimoPoPMayB', SCALAR, '%', 'ClimoPoP MayB', 100.0, 0.0, 0, NO) +ClimoPoPNovA = ('ClimoPoPNovA', SCALAR, '%', 'ClimoPoP NovA', 100.0, 0.0, 0, NO) +ClimoPoPNovB = ('ClimoPoPNovB', SCALAR, '%', 'ClimoPoP NovB', 100.0, 0.0, 0, NO) +ClimoPoPOctA = ('ClimoPoPOctA', SCALAR, '%', 'ClimoPoP OctA', 100.0, 0.0, 0, NO) +ClimoPoPOctB = ('ClimoPoPOctB', SCALAR, '%', 'ClimoPoP OctB', 100.0, 0.0, 0, NO) +ClimoPoPSepA = ('ClimoPoPSepA', SCALAR, '%', 'ClimoPoP SepA', 100.0, 0.0, 0, NO) +ClimoPoPSepB = ('ClimoPoPSepB', SCALAR, '%', 'ClimoPoP SepB', 100.0, 0.0, 0, NO) +CoastalFlood = ('CoastalFlood', DISCRETE, 'cat', 'Coastal Flood', NO, ThreatKeys) +CondPredHgt = ('CondPredHgt', SCALAR, '100ft', 'Conditional Predominant Cloud Height', 250.0, 0.0, 0, NO) +CondPredVsby = ('CondPredVsby', SCALAR, 'mi', 'Conditional Predominant Visibility', 10.0, 0.0, 2, NO) +DenseFogSmoke = ('DenseFogSmoke', DISCRETE, 'cat', 'Dense Fog', NO, ThreatKeys) +DepartNormFRET = ('DepartNormFRET', SCALAR, 'in', 'DepartNormFRET', 0.35, -0.35, 2, NO) +Dryness = ('Dryness', DISCRETE, 'none', 'EGB Fuel Dryness', NO, + [('NoData', 'NoData'), ('Moist', 'Moist'), ('Dry', 'Dry'), ('VeryDry', 'VeryDry')]) +ExcessiveCold = ('ExcessiveCold', DISCRETE, 'cat', 'Extreme Cold', NO, ThreatKeys) +ExcessiveHeat = ('ExcessiveHeat', DISCRETE, 'cat', 'Excessive Heat', NO, ThreatKeys) +FFP = ('FFP', DISCRETE, 'none', 'Flash Flood Potential', NO, + [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')]) +FFPI = ('FFPI', SCALAR, 'index', 'Flash Flood Potential Index', 10.0, 0.0, 2, NO) +FRET = ('FRET', SCALAR, 'in', 'Forecast Reference ET', 0.75, 0.0, 2, NO) +FRET7Day = ('FRET7Day', SCALAR, 'in/week', 'Weekly Forecast Reference ET', 5.0, 0.0, 2, NO) +FireWeather = ('FireWeather', DISCRETE, 'cat', 'Wild Fire', NO, ThreatKeys) +FlashFlood = ('FlashFlood', DISCRETE, 'cat', 'Flash Flood', NO, ThreatKeys) +Flood = ('Flood', DISCRETE, 'cat', 'River Flood', NO, ThreatKeys) +FrostFreeze = ('FrostFreeze', DISCRETE, 'cat', 'Frost/Freeze', NO, ThreatKeys) +FuelMstr = ('FuelMstr', SCALAR, 'none', '10 Hour Fuel Moisture', 40.0, 1.0, 0, NO) +HainesMid = ('HainesMid', SCALAR, 'cat', 'Mid Level Haines Index', 6.0, 2.0, 0, NO) +HeatImpactLevels = ('HeatImpactLevels', SCALAR, 'none', 'HeatImpactLevels', 4.0, 0.0, 0, NO) +HeatImpactLevelsMaxT = ('HeatImpactLevelsMaxT', SCALAR, 'none', 'HeatImpactLevelsMaxT', 4.0, 0.0, 0, NO) +HeatImpactLevelsMinT = ('HeatImpactLevelsMinT', SCALAR, 'none', 'HeatImpactLevelsMinT', 4.0, 0.0, 0, NO) +HeatOrangeMaxT = ('HeatOrangeMaxT', SCALAR, 'F', 'Heat Orange MaxT', maxTempVal, minTempVal, 0, NO) +HeatOrangeMinT = ('HeatOrangeMinT', SCALAR, 'F', 'Heat Orange MinT', maxTempVal, minTempVal, 0, NO) +HeatRedMaxT = ('HeatRedMaxT', SCALAR, 'F', 'Heat Red MaxT', maxTempVal, minTempVal, 0, NO) +HeatRedMinT = ('HeatRedMinT', SCALAR, 'F', 'Heat Red MinT', maxTempVal, minTempVal, 0, NO) +HeatYellowMaxT = ('HeatYellowMaxT', SCALAR, 'F', 'Heat Yellow MaxT', maxTempVal, minTempVal, 0, NO) +HeatYellowMinT = ('HeatYellowMinT', SCALAR, 'F', 'Heat Yellow MinT', maxTempVal, minTempVal, 0, NO) +HighWind = ('HighWind', DISCRETE, 'cat', 'High Wind', NO, ThreatKeys) +IceAccum6hr = ('IceAccum6hr', SCALAR, 'in', '6-hr Ice Accumulation', 2.0, 0.0, 2, NO) +LLWS = 
('LLWS', VECTOR, 'kts', 'Low Level Wind Shear', 125.0, 0.0, 0, NO)
+LLWSHgt = ('LLWSHgt', SCALAR, '100 ft', 'Wind Shear Height', 20.0, 0.0, 0, NO)
+LTG = ('LTG', SCALAR, 'CNT', 'LTG', 100.0, 0.0, 0, NO)
+LTG12 = ('LTG12', SCALAR, 'CNT', 'LTG12', 100.0, 0.0, 0, NO)
+LTG24 = ('LTG24', SCALAR, 'CNT', 'LTG24', 100.0, 0.0, 0, NO)
+Lightning = ('Lightning', DISCRETE, 'cat', 'Lightning', NO, ThreatKeys)
+Max3 = ('Max3', SCALAR, 'F', '3hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
+Max6 = ('Max6', SCALAR, 'F', '6hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
+MaxApT = ('MaxApT', SCALAR, 'F', 'Max Apparent Temperature', maxTempVal, -120.0, 0, NO)
+MaxRHError = ('MaxRHError', SCALAR, '%', 'Maximum Relative Humidity Error', 100.0, -100.0, 0, NO)
+MaxRHFcst = ('MaxRHFcst', SCALAR, '%', 'Forecast Maximum Relative Humidity', 100.0, 0.0, 0, NO)
+MaxRHOb = ('MaxRHOb', SCALAR, '%', 'Observed Maximum Relative Humidity', 100.0, 0.0, 0, NO)
+MaxRHObs = ('MaxRHObs', SCALAR, '%', 'Maximum Observed RH', 100.0, 0.0, 0, NO)
+MaxT10 = ('MaxT10', SCALAR, 'F', '10th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
+MaxT50 = ('MaxT50', SCALAR, 'F', '50th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
+MaxT90 = ('MaxT90', SCALAR, 'F', '90th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
+MaxTAloft = ('MaxTAloft', SCALAR, 'C', 'Max Temp in Warm Nose', 40.0, -20.0, 1, NO)
+MaxTError = ('MaxTError', SCALAR, 'F', 'Maximum Temperature Error', 120.0, -120.0, 0, NO)
+MaxTFcst = ('MaxTFcst', SCALAR, 'F', 'Forecast Maximum Temperature', maxTempVal, minTempVal, 0, NO)
+MaxTOb = ('MaxTOb', SCALAR, 'F', 'Observed Maximum Temperature', maxTempVal, minTempVal, 0, NO)
+MaxTObs = ('MaxTObs', SCALAR, 'F', 'Maximum Temperature Obs', maxTempVal, minTempVal, 0, NO)
+Min3 = ('Min3', SCALAR, 'F', '3hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
+Min6 = ('Min6', SCALAR, 'F', '6hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
+MinApT = ('MinApT', SCALAR, 'F', 'Min Apparent Temperature', maxTempVal, -120.0, 0, NO)
+MinRH3 = ('MinRH3', SCALAR, '%', '3hr Minimum Relative Humidity', 100.0, 0.0, 0, NO)
+MinRHError = ('MinRHError', SCALAR, '%', 'Minimum Relative Humidity Error', 100.0, -100.0, 0, NO)
+MinRHFcst = ('MinRHFcst', SCALAR, '%', 'Forecast Minimum Relative Humidity', 100.0, 0.0, 0, NO)
+MinRHOb = ('MinRHOb', SCALAR, '%', 'Observed Minimum Relative Humidity', 100.0, 0.0, 0, NO)
+MinRHObs = ('MinRHObs', SCALAR, '%', 'Minimum Observed RH', 100.0, 0.0, 0, NO)
+MinT10 = ('MinT10', SCALAR, 'F', '10th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
+MinT50 = ('MinT50', SCALAR, 'F', '50th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
+MinT6 = ('MinT6', SCALAR, 'F', 'Minimum Temperature 6Hr', maxTempVal, minTempVal, 0, NO)
+MinT90 = ('MinT90', SCALAR, 'F', '90th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
+MinTError = ('MinTError', SCALAR, 'F', 'Minimum Temperature Error', 120.0, -120.0, 0, NO)
+MinTFcst = ('MinTFcst', SCALAR, 'F', 'Forecast Minimum Temperature', maxTempVal, minTempVal, 0, NO)
+MinTOb = ('MinTOb', SCALAR, 'F', 'Observed Minimum Temperature', maxTempVal, minTempVal, 0, NO)
+MinTObs = ('MinTObs', SCALAR, 'F', 'Minimum Temperature Obs', maxTempVal, minTempVal, 0, NO)
+MixHgtAve = ('MixHgtAve', SCALAR, 'ft', 'Mixing Hgt Average', 20000.0, 0.0, 0, NO)
+MixHgtMSL = ('MixHgtMSL', SCALAR, 'ft', 'Mixing Height above sea level', 30000.0, 0.0, 0, NO)
+MixT1700 = ('MixT1700', SCALAR, 'F', '1700Foot MixingTemp', 110.0, -10.0, 0, NO)
+P95MaxT = ('P95MaxT', SCALAR, 'F',
'P95MaxT', maxTempVal, minTempVal, 0, NO) +P95MinT = ('P95MinT', SCALAR, 'F', 'P95MinT', maxTempVal, minTempVal, 0, NO) + # EKDMOS +PQPF06001 = ('PQPF06001', SCALAR, '%', '6hr Prob QPF > 0.01', 100.0, 0.0, 0, NO) +PQPF06005 = ('PQPF06005', SCALAR, '%', '6hr Prob QPF > 0.05', 100.0, 0.0, 0, NO) +PQPF06010 = ('PQPF06010', SCALAR, '%', '6hr Prob QPF > 0.10', 100.0, 0.0, 0, NO) +PQPF06015 = ('PQPF06015', SCALAR, '%', '6hr Prob QPF > 0.15', 100.0, 0.0, 0, NO) +PQPF06020 = ('PQPF06020', SCALAR, '%', '6hr Prob QPF > 0.20', 100.0, 0.0, 0, NO) +PQPF06025 = ('PQPF06025', SCALAR, '%', '6hr Prob QPF > 0.25', 100.0, 0.0, 0, NO) +PQPF06030 = ('PQPF06030', SCALAR, '%', '6hr Prob QPF > 0.30', 100.0, 0.0, 0, NO) +PQPF06040 = ('PQPF06040', SCALAR, '%', '6hr Prob QPF > 0.40', 100.0, 0.0, 0, NO) +PQPF06050 = ('PQPF06050', SCALAR, '%', '6hr Prob QPF > 0.50', 100.0, 0.0, 0, NO) +PQPF06075 = ('PQPF06075', SCALAR, '%', '6hr Prob QPF > 0.75', 100.0, 0.0, 0, NO) +PQPF06100 = ('PQPF06100', SCALAR, '%', '6hr Prob QPF > 1.00', 100.0, 0.0, 0, NO) +PQPF06150 = ('PQPF06150', SCALAR, '%', '6hr Prob QPF > 1.50', 100.0, 0.0, 0, NO) +PoP12Fcst = ('PoP12Fcst', SCALAR, '%', 'Forecast Prob. of Precip.', 100.0, 0.0, 0, NO) +PoP3 = ('PoP3', SCALAR, '%', 'PoP3', 100.0, 0.0, 0, NO) +PoPPCECMWF = ('PoPPatternClimoECMWF', SCALAR, '%', 'PatternClimoECMWF', 100.0, 0.0, 0, NO) +PoPPCFIM = ('PoPPatternClimoFIM', SCALAR, '%', 'PatternClimoFIM', 100.0, 0.0, 0, NO) +PoPPCGEM = ('PoPPatternClimoGEM', SCALAR, '%', 'PatternClimoGEM', 100.0, 0.0, 0, NO) +PoPPCGFS = ('PoPPatternClimoGFS', SCALAR, '%', 'PatternClimoGFS', 100.0, 0.0, 0, NO) +PoPPattern1 = ('PoPNortherlyFlow', SCALAR, '%', 'NortherlyFlow', 100.0, 0.0, 0, NO) +PoPPattern10 = ('PoPRockiesRidge', SCALAR, '%', 'RockiesRidge', 100.0, 0.0, 0, NO) +PoPPattern11 = ('PoPSouthernFirehose', SCALAR, '%', 'SouthernFirehose', 100.0, 0.0, 0, NO) +PoPPattern12 = ('PoPNorthernFirehose', SCALAR, '%', 'NorthernFirehose', 100.0, 0.0, 0, NO) +PoPPattern2 = ('PoPGreatBasinLow', SCALAR, '%', 'GreatBasinLow', 100.0, 0.0, 0, NO) +PoPPattern3 = ('PoPBroadCyclonicFlow', SCALAR, '%', 'BroadCyclonicFlow', 100.0, 0.0, 0, NO) +PoPPattern4 = ('PoPCoastalRidge', SCALAR, '%', 'CoastalRidge', 100.0, 0.0, 0, NO) +PoPPattern5 = ('PoPNorthwestFlow', SCALAR, '%', 'NorthwestFlow', 100.0, 0.0, 0, NO) +PoPPattern6 = ('PoPZonalFlow', SCALAR, '%', 'ZonalFlow', 100.0, 0.0, 0, NO) +PoPPattern7 = ('PoPBroadAntiCyclonicFlow', SCALAR, '%', 'BroadAntiCyclonicFlow', 100.0, 0.0, 0, NO) +PoPPattern8 = ('PoPDiffluentOnshoreFlow', SCALAR, '%', 'DiffluentOnshoreFlow', 100.0, 0.0, 0, NO) +PoPPattern9 = ('PoPSouthwestFlow', SCALAR, '%', 'SouthwestFlow', 100.0, 0.0, 0, NO) +PoPWG = ('PoPWG', SCALAR, '%', 'Climo PoP Work Grid', 30.0, -30.0, 0, NO) +PPFFG = ("PPFFG", SCALAR, "%", "Prob of Excessive Rain in %", 100.0, 0.0 ,0, NO) +PrecipDur = ('PrecipDur', SCALAR, 'hrs', 'Precipitation Duration', 12.0, 0.0, 1, YES) +PredHgt = ('PredHgt', SCALAR, '100ft', 'Predominant Cloud Height', 250.0, 0.0, 0, NO) +PredHgtCat = ('PredHgtCat', SCALAR, 'index', 'Predominant Cloud Height Category', 6.0, 0.0, 0, NO) +PredHgtRH = ('PredHgtRH', SCALAR, '100ft', 'Pred Cloud Height from RH', 250.0, 1.0, 0, NO) +PredHgtTempo = ('PredHgtTempo', SCALAR, '100ft', 'Predominant Cloud Height Tempo', 250.0, 0.0, 0, NO) +PredVsby = ('PredVsby', SCALAR, 'mi', 'Predominant Visibility', 10.0, 0.0, 2, NO) +Pres = ('Pres', SCALAR, 'mb', 'Pressure', 1100.0, 900.0, 2, NO) +ProbDmgWind = ('ProbDmgWind', SCALAR, '%', 'Probability of Damaging Wind', 100.0, 0.0, 0, NO) 
+ProbExtrmDmgWind = ('ProbExtrmDmgWind', SCALAR, '%', 'Probability of Extreme Damaging Wind', 100.0, 0.0, 0, NO) +ProbExtrmHail = ('ProbExtrmHail', SCALAR, '%', 'Probability of Extreme Hail', 100.0, 0.0, 0, NO) +ProbExtrmSvr = ('ProbExtrmSvr', SCALAR, '%', 'Probability of Extreme Severe', 100.0, 0.0, 0, NO) +ProbExtrmTor = ('ProbExtrmTor', SCALAR, '%', 'Probability of Extreme Tornado', 100.0, 0.0, 0, NO) +ProbSvrHail = ('ProbSvrHail', SCALAR, '%', 'Probability of Severe Hail', 100.0, 0.0, 0, NO) +ProbTor = ('ProbTor', SCALAR, '%', 'Probability of Tornado', 100.0, 0.0, 0, NO) +ProbTotSvr = ('ProbTotSvr', SCALAR, '%', 'Probability of Severe', 100.0, 0.0, 0, NO) +ProbSnowGTT = ("ProbSnowGTT", SCALAR, "%", "Prob. snow > trace", 100.0, 0.0, 0, NO) +ProbSnowGT1 = ("ProbSnowGT1", SCALAR, "%", "Prob. snow > 1 inch", 100.0, 0.0, 0, NO) +ProbSnowGT2 = ("ProbSnowGT2", SCALAR, "%", "Prob. snow > 2 inches ", 100.0, 0.0, 0, NO) +ProbSnowGT4 = ("ProbSnowGT4", SCALAR, "%", "Prob. snow > 4 inches ", 100.0, 0.0, 0, NO) +ProbSnowGT6 = ("ProbSnowGT6", SCALAR, "%", "Prob. snow > 6 inches ", 100.0, 0.0, 0, NO) +ProbSnowGT8 = ("ProbSnowGT8", SCALAR, "%", "Prob. snow > 8 inches", 100.0, 0.0, 0, NO) +ProbSnowGT12 = ("ProbSnowGT12", SCALAR, "%", "Prob. snow > 12 inches", 100.0, 0.0, 0, NO) +ProbSnowGT18 = ("ProbSnowGT18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO) +ProbSnowRT1 = ("ProbSnowRT1", SCALAR, "%", "Prob. snow T-1 inch", 100.0, 0.0, 0, NO) +ProbSnowR12 = ("ProbSnowR12", SCALAR, "%", "Prob. snow 1-2 inches", 100.0, 0.0, 0, NO) +ProbSnowR24 = ("ProbSnowR24", SCALAR, "%", "Prob. snow 2-4 inches ", 100.0, 0.0, 0, NO) +ProbSnowR48 = ("ProbSnowR48", SCALAR, "%", "Prob. snow 4-8 inches ", 100.0, 0.0, 0, NO) +ProbSnowR812 = ("ProbSnowR812", SCALAR, "%", "Prob. snow 8-12 inches ", 100.0, 0.0, 0, NO) +ProbSnowR1218 = ("ProbSnowR1218", SCALAR, "%", "Prob. snow 12-18 inches", 100.0, 0.0, 0, NO) +ProbSnowR18 = ("ProbSnowR18", SCALAR, "%", "Prob. 
snow > 18 inches", 100.0, 0.0, 0, NO) +QPE06 = ('QPE06', SCALAR, 'in', 'QPE06', maxQpfVal, 0.0, 2, YES) +QPE06Ob = ('QPE06Ob', SCALAR, 'in', 'Observed Precip', 20.0, 0.0, 2, NO) +QPE12 = ('QPE12', SCALAR, 'in', 'QPE12', 15.0, 0.0, 2, YES) +QPE24 = ('QPE24', SCALAR, 'in', 'QPE24', 15.0, 0.0, 2, YES) +QPFDS = ('QPFDS', SCALAR, 'in', 'QPFDS', maxQpfVal, 0.0, 2, YES) +QPFFcst = ('QPFFcst', SCALAR, 'in', 'Forecast Precip.', 10.0, 0.0, 2, NO) +QPFPCECMWF = ('QPFPatternClimoECMWF', SCALAR, 'in', 'PatternClimoECMWF', maxQpfVal, 0.0, 2, NO) +QPFPCFIM = ('QPFPatternClimoFIM', SCALAR, 'in', 'PatternClimoFIM', maxQpfVal, 0.0, 2, NO) +QPFPCGEM = ('QPFPatternClimoGEM', SCALAR, 'in', 'PatternClimoGEM', maxQpfVal, 0.0, 2, NO) +QPFPCGFS = ('QPFPatternClimoGFS', SCALAR, 'in', 'PatternClimoGFS', maxQpfVal, 0.0, 2, NO) +QPFPattern1 = ('QPFNortherlyFlow', SCALAR, 'in', 'NortherlyFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern10 = ('QPFRockiesRidge', SCALAR, 'in', 'RockiesRidge', maxQpfVal, 0.0, 2, NO) +QPFPattern11 = ('QPFSouthernFirehose', SCALAR, 'in', 'SouthernFirehose', maxQpfVal, 0.0, 2, NO) +QPFPattern12 = ('QPFNorthernFirehose', SCALAR, 'in', 'NorthernFirehose', maxQpfVal, 0.0, 2, NO) +QPFPattern2 = ('QPFGreatBasinLow', SCALAR, 'in', 'GreatBasinLow', maxQpfVal, 0.0, 2, NO) +QPFPattern3 = ('QPFBroadCyclonicFlow', SCALAR, 'in', 'BroadCyclonicFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern4 = ('QPFCoastalRidge', SCALAR, 'in', 'CoastalRidge', maxQpfVal, 0.0, 2, NO) +QPFPattern5 = ('QPFNorthwestFlow', SCALAR, 'in', 'NorthwestFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern6 = ('QPFZonalFlow', SCALAR, 'in', 'ZonalFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern7 = ('QPFBroadAntiCyclonicFlow', SCALAR, 'in', 'BroadAntiCyclonicFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern8 = ('QPFDiffluentOnshoreFlow', SCALAR, 'in', 'DiffluentOnshoreFlow', maxQpfVal, 0.0, 2, NO) +QPFPattern9 = ('QPFSouthwestFlow', SCALAR, 'in', 'SouthwestFlow', maxQpfVal, 0.0, 2, NO) +QPFPct = ('QPFPct', SCALAR, '%', 'QPFPct', 300.0, 0.0, 1, YES) +QPFPctMonthlyClimo = ('QPFPctMonthlyClimo', SCALAR, '%', 'QPF Pct Monthly PRISMClimo', 200.0, 0.0, 0, NO) +QPFRaw = ('QPFRaw', SCALAR, 'in', 'QPFRaw', maxQpfVal, 0.0, 2, YES) +QSE06 = ('QSE06', SCALAR, 'in', 'QSE06', 100.0, 0.0, 1, YES) +RipCurrent = ('RipCurrent', DISCRETE, 'cat', 'Rip Current', NO, ThreatKeys) +RipCurrentIndex = ('RipCurrentIndex', SCALAR, 'ft', 'Rip Current Index', 16.0, -1.0, 1, NO) +RipRisk = ("RipRisk", SCALAR, "none", "Rip Current Risk", 3.0, 0.0, 0, NO) +SPC12hrLP1 = ('SPC12hrLP1', SCALAR, '%', 'SPC 12HR Lightning Probability (1)', 100.0, 0.0, 0, NO) +SPC12hrLP10 = ('SPC12hrLP10', SCALAR, '%', 'SPC 12HR Lightning Probability (10)', 100.0, 0.0, 0, NO) +SPC12hrLP100 = ('SPC12hrLP100', SCALAR, '%', 'SPC 12HR Lightning Probability (100)', 100.0, 0.0, 0, NO) +SPC24hrLP1 = ('SPC24hrLP1', SCALAR, '%', 'SPC 24HR Lightning Probability (1)', 100.0, 0.0, 0, NO) +SPC24hrLP10 = ('SPC24hrLP10', SCALAR, '%', 'SPC 24HR Lightning Probability (10)', 100.0, 0.0, 0, NO) +SPC24hrLP100 = ('SPC24hrLP100', SCALAR, '%', 'SPC 24HR Lightning Probability (100)', 100.0, 0.0, 0, NO) +SPC3hrLP1 = ('SPC3hrLP1', SCALAR, '%', 'SPC 3HR Lightning Probability (1)', 100.0, 0.0, 0, NO) +SPC3hrLP10 = ('SPC3hrLP10', SCALAR, '%', 'SPC 3HR Lightning Probability (10)', 100.0, 0.0, 0, NO) +SPC3hrLP100 = ('SPC3hrLP100', SCALAR, '%', 'SPC 3HR Lightning Probability (100)', 100.0, 0.0, 0, NO) +SevereHail = ('SevereHail', DISCRETE, 'cat', 'Severe Hail', NO, ThreatKeys) +SevereTstmWind = ('SevereTstmWind', DISCRETE, 'cat', 'SevereTstmWind', NO, ThreatKeys) 
+SnowAmt10Prcntl = ('SnowAmt10Prcntl', SCALAR, 'in', 'min case', 50.0, 0.0, 1, NO) +SnowAmt50Prcntl = ('SnowAmt50Prcntl', SCALAR, 'in', 'avg case', 50.0, 0.0, 1, NO) +SnowAmt90Prcntl = ('SnowAmt90Prcntl', SCALAR, 'in', 'max case', 50.0, 0.0, 1, NO) +SnowDepth = ('SnowDepth', SCALAR, 'in', 'Snow Depth', 50.0, 0.0, 0, NO) +SnowRatioCLIMO = ('SnowRatioCLIMO', SCALAR, '%', 'Snow Ratio Climatology SON-DJF-MAM', 40.0, 0.0, 1, YES) +SnowRatioGFS = ('SnowRatioGFS', SCALAR, '%', 'Snow Ratio from GFS', 40.0, 0.0, 1, YES) +SnowRatioHPCMEAN = ('SnowRatioHPCMEAN', SCALAR, '%', 'Snow Ratio from HPC MEAN', 40.0, 0.0, 1, YES) +SnowRatioNAM = ('SnowRatioNAM', SCALAR, '%', 'Snow Ratio from NAM40', 40.0, 0.0, 1, YES) +T10 = ('T10', SCALAR, 'F', '10th Percentile for T', maxTempVal, minTempVal, 0, NO) +T50 = ('T50', SCALAR, 'F', '50th Percentile for T', maxTempVal, minTempVal, 0, NO) +T90 = ('T90', SCALAR, 'F', '90th Percentile for T', maxTempVal, minTempVal, 0, NO) +TAloft = ('TAloft', SCALAR, 'F', 'Temperature Aloft', 120.0, -50.0, 1, NO) +Td10 = ('Td10', SCALAR, 'F', '10th Percentile for DpT', maxTdVal, minTdVal, 0, NO) +Td50 = ('Td50', SCALAR, 'F', '50th Percentile for DpT', maxTdVal, minTdVal, 0, NO) +Td90 = ('Td90', SCALAR, 'F', '90th Percentile for DpT', maxTdVal, minTdVal, 0, NO) +TdAft = ('TdAft', SCALAR, 'F', 'Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) +TdAftError = ('TdAftError', SCALAR, 'F', 'Afternoon Dewpoint Error', 120.0, -120.0, 0, NO) +TdAftFcst = ('TdAftFcst', SCALAR, 'F', 'Forecast Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) +TdAftOb = ('TdAftOb', SCALAR, 'F', 'Observed Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO) +TdAftObs = ('TdAftObs', SCALAR, 'F', 'Afternoon Dewpoint Obs', maxTdVal, minTdVal, 0, NO) +TdMrn = ('TdMrn', SCALAR, 'F', 'Morning Dewpoint', maxTdVal, minTdVal, 0, NO) +TdMrnError = ('TdMrnError', SCALAR, 'F', 'Morning Dewpoint Error', 120.0, -120.0, 0, NO) +TdMrnFcst = ('TdMrnFcst', SCALAR, 'F', 'Forecast Morning Dewpoint', maxTdVal, minTdVal, 0, NO) +TdMrnOb = ('TdMrnOb', SCALAR, 'F', 'Observed Morning Dewpoint', maxTdVal, minTdVal, 0, NO) +TdMrnObs = ('TdMrnObs', SCALAR, 'F', 'Morning Dewpoint Obs', maxTdVal, minTdVal, 0, NO) +Tornado = ('Tornado', DISCRETE, 'cat', 'Tornado', NO, ThreatKeys) +TransWindAve = ('TransWindAve', VECTOR, 'mph', 'Transport Wind Average', 125.0, 0.0, 0, NO) +Tw = ('Tw', SCALAR, 'F', 'Surface Wet Bulb Temp', 80.0, -50.0, 0, NO) +VentRateAve = ('VentRateAve', SCALAR, 'mph-ft', 'Vent Rate Average', 500000.0, 0.0, 0, NO) +Visibility = ('Visibility', SCALAR, 'SM', 'Visibility', 10.0, 0.0, 2, NO) +VisibilityConditional = ('VisibilityConditional', SCALAR, 'SM', 'Conditional Visibility', 10.0, 0.0, 2, NO) +Vsby = ('Vsby', SCALAR, 'mi', 'Visibility', 10.0, 0.0, 2, NO) +WG1 = ('WG1', SCALAR, 'none', 'WorkGrid1', 100.0, -100.0, 0, NO) +WinterWx = ('WinterWx', DISCRETE, 'cat', 'Winter Weather', NO, ThreatKeys) + +#** Parameter sets for specific functionality +optionalParmsDict = {} + +# Marine Weather Elements +optionalParmsDict['marine']={ + 'WaveDir' : ("WaveDir", VECTOR, "m/s", "Wave Direction", 5.0, 0.0, 2, NO), + 'WindWaveHeight' : ("WindWaveHgt", SCALAR, "ft", "Wind Wave Height", 100.0, 0.0, 0, NO), + 'WaveHeight' : ("WaveHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO), + 'Swell' : ("Swell", VECTOR, "ft", "Primary Swell", 100.0, 0.0, 0, NO), + 'Swell2' : ("Swell2", VECTOR, "ft", "Secondary Swell", 100.0, 0.0, 0, NO), + 'Period' : ("Period", SCALAR, "sec", "Primary Period", 30.0, 0.0, 0, NO), + 'IceCoverage' : ("IceCoverage", 
SCALAR, "%", "Ice Coverage Amount", 100.0, 0.0, 0, NO), + 'SurfHeight' : ("SurfHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO), + ##########DCS3499 + 'SigWaveHgt' : ("SigWaveHgt", SCALAR, "ft", + "Significant wave height of combined wind waves and swells", + 30.0, 0.0, 0, NO), + 'PeakWaveDir' : ("PeakWaveDir", VECTOR, "degree", "Direction of peak wave", 100.0, 0.0, 0, NO), + 'WindWaveHgt' : ("WindWaveHgt", SCALAR, "ft", "Significant wave height of wind waves", 30.0, 0.0, 0, NO), + 'WindWavePeriod' : ("WindWavePeriod", SCALAR, "sec.", "Wind wave peak period", 20.0, 0.0, 0, NO), + 'WindWaveDir' : ("WindWaveDir", VECTOR, "degree", "Direction of wind waves", 100.0, 0.0, 0, NO), + 'NWPSwind' : ("NWPSwind", VECTOR, "kts", "NWPSwind", 150.0, 0.0, 0, NO), + 'UWaveDir' : ("UWaveDir", SCALAR, "m/s", "U WaveDir Comp", 0.50, -0.50, 3, NO), + 'VWaveDir' : ("VWaveDir", SCALAR, "m/s", "V WaveDir Comp", 0.50, -0.50, 3, NO), + 'SwanSwell' : ("SwanSwell", SCALAR, "ft", "Total Significant Swell Height", 40.0, 0.0, 2, NO), + 'SST' : ("SST", SCALAR, "F", "Sea Sfc Temp", 100.0, 25.0, 0, NO), + 'StormTide' : ('StormTide', SCALAR, 'ft', 'Storm Tide', 30.0, -8.0, 1, NO), + #Fcst Grids - for partitioned wave groups + 'Wave1' : ("Wave1", VECTOR, "ft", "WAVE1", 50.0, 0.0, 0, NO), + 'Wave2' : ("Wave2", VECTOR, "ft", "WAVE2", 50.0, 0.0, 0, NO), + 'Wave3' : ("Wave3", VECTOR, "ft", "WAVE3", 50.0, 0.0, 0, NO), + 'Wave4' : ("Wave4", VECTOR, "ft", "WAVE4", 50.0, 0.0, 0, NO), + 'Wave5' : ("Wave5", VECTOR, "ft", "WAVE5", 50.0, 0.0, 0, NO), + 'Wave6' : ("Wave6", VECTOR, "ft", "WAVE6", 50.0, 0.0, 0, NO), + 'Wave7' : ("Wave7", VECTOR, "ft", "Wave7", 50.0, 0.0, 0, NO), + 'Wave8' : ("Wave8", VECTOR, "ft", "Wave8", 50.0, 0.0, 0, NO), + 'Wave9' : ("Wave9", VECTOR, "ft", "Wave9", 50.0, 0.0, 0, NO), + #Fcst Grids - for partitioned wave groups + 'Period1' : ("Period1", SCALAR, "sec", "Period1", 30.0, 0.0, 0, NO), + 'Period2' : ("Period2", SCALAR, "sec", "Period2", 30.0, 0.0, 0, NO), + 'Period3' : ("Period3", SCALAR, "sec", "Period3", 30.0, 0.0, 0, NO), + 'Period4' : ("Period4", SCALAR, "sec", "Period4", 30.0, 0.0, 0, NO), + 'Period5' : ("Period5", SCALAR, "sec", "Period5", 30.0, 0.0, 0, NO), + 'Period6' : ("Period6", SCALAR, "sec", "Period6", 30.0, 0.0, 0, NO), + 'Period7' : ("Period7", SCALAR, "sec", "Period7", 30.0, 0.0, 0, NO), + 'Period8' : ("Period8", SCALAR, "sec", "Period8", 30.0, 0.0, 0, NO), + 'Period9' : ("Period9", SCALAR, "sec", "Period9", 30.0, 0.0, 0, NO), + 'RipProb' : ("RipProb", SCALAR, "%", "Rip Current Probability", 100.0, 0.0, 0, NO), + 'ErosionProb' : ("ErosionProb", SCALAR, "%", "Dune Erosion Probability", 100.0, 0.0, 0, NO), + 'OverwashProb' : ("OverwashProb", SCALAR, "%", "Dune Overwash Probability", 100.0, 0.0, 0, NO) +} +if SID in groups['GreatLake_SITES']: + # Redefine the WaveHeight field to include a decimal point + optionalParmsDict['marine'].update({'WaveHeight' : + ("WaveHeight", SCALAR, "ft", "Wave Height", 40.0, 0.0, 1, NO)}) + +# Parameter set for Probability of weather type, Optional for sites. 
+optionalParmsDict['powt']={ + 'PoTBD': ('PotBlowingDust', SCALAR, '%', 'Prob of Blowing Dust', 100.0, 0.0, 0, NO), + 'PoTBN': ('PotBlowingSand', SCALAR, '%', 'Prob of Blowing Sand', 100.0, 0.0, 0, NO), + 'PoTBS': ('PotBlowingSnow', SCALAR, '%', 'Prob of Blowing Snow', 100.0, 0.0, 0, NO), + 'PoTF': ('PotFog', SCALAR, '%', 'Prob of Fog', 100.0, 0.0, 0, NO), + 'PoTFR': ('PotFrost', SCALAR, '%', 'Prob of Frost', 100.0, 0.0, 0, NO), + 'PoTFl': ('PotFlurries', SCALAR, '%', 'Prob of Flurries', 100.0, 0.0, 0, NO), + 'PoTH': ('PotHaze', SCALAR, '%', 'Prob of Haze', 100.0, 0.0, 0, NO), + 'PoTIC': ('PotIceCrystals', SCALAR, '%', 'Prob of Ice Crystals', 100.0, 0.0, 0, NO), + 'PoTIF': ('PotIceFog', SCALAR, '%', 'Prob of Ice Fog', 100.0, 0.0, 0, NO), + 'PoTIP': ('PotSleet', SCALAR, '%', 'Prob of Sleet', 100.0, 0.0, 0, NO), + 'PoTK': ('PotSmoke', SCALAR, '%', 'Prob of Smoke', 100.0, 0.0, 0, NO), + 'PoTL': ('PotDrizzle', SCALAR, '%', 'Prob of Drizzle', 100.0, 0.0, 0, NO), + 'PoTR': ('PotRain', SCALAR, '%', 'Prob of Rain', 100.0, 0.0, 0, NO), + 'PoTRW': ('PotRainShowers', SCALAR, '%', 'Prob of Rain Showers', 100.0, 0.0, 0, NO), + 'PoTS': ('PotSnow', SCALAR, '%', 'Prob of Snow', 100.0, 0.0, 0, NO), + 'PoTSW': ('PotSnowShowers', SCALAR, '%', 'Prob of Snow Showers', 100.0, 0.0, 0, NO), + 'PoTSp': ('PotSprinkles', SCALAR, '%', 'Prob of Sprinkles', 100.0, 0.0, 0, NO), + 'PoTSvr': ('PotSevere', SCALAR, '%', 'Prob of Severe Storms', 100.0, 0.0, 0, NO), + 'PoTT': ('PotThunder', SCALAR, '%', 'Prob of Thunder', 100.0, 0.0, 0, NO), + 'PoTVA': ('PotVolcanicAsh', SCALAR, '%', 'Prob of Volcanic Ash', 100.0, 0.0, 0, NO), + 'PoTWP': ('PotWaterspout', SCALAR, '%', 'Prob of Waterspout', 100.0, 0.0, 0, NO), + 'PoTZF': ('PotFreezingFog', SCALAR, '%', 'Prob of Freezing Fog', 100.0, 0.0, 0, NO), + 'PoTZL': ('PotFreezingDrizzle', SCALAR, '%', 'Prob of Freezing Drizzle', 100.0, 0.0, 0, NO), + 'PoTZR': ('PotFreezingRain', SCALAR, '%', 'Prob of Freezing Rain', 100.0, 0.0, 0, NO), + 'PoTZY': ('PotFreezingSpray', SCALAR, '%', 'Prob of Freezing Spray', 100.0, 0.0, 0, NO), + 'PoTHZY': ('PotHeavyFreezingSpray', SCALAR, '%', 'Prob of Heavy Freezing Spray', 100.0, 0.0, 0, NO), + 'RoadTemp' : ("RoadTemp", SCALAR, "F", "Road Temperature", 120.0, -50.0, 0, NO), + 'MaxTwAloft' : ("MaxTwAloft", SCALAR, 'C', 'Max Wet-Bulb Temp in Warm Nose', 40.0, -20.0, 1, NO), + 'ProbIcePresent': ("ProbIcePresent", SCALAR, "%", "Prob of Ice Present", 100.0, 0.0, 0, NO), + 'ProbRefreezeSleet': ("ProbRefreezeSleet", SCALAR, "%", "Prob of Refreeze into Sleet", 100.0, 0.0, 0, NO), + 'SleetAmt': ("SleetAmt", SCALAR, "in", "Sleet Accumulation", 5.0, 0.0, 1, YES), + 'IceFlatAcc': ('IceFlatAccum', SCALAR, 'in', 'Flat Ice Accumulation', maxIceVal, 0.0, 2, YES), + 'IceLineAcc': ('IceLineAccum', SCALAR, 'in', 'Line Ice Accumulation', maxIceVal, 0.0, 2, YES), +} + +# Parameter set for Winter Weather probabilities, Optional for sites. 
+#****** Winter 2017 changes +optionalParmsDict['winterProbs']={ + # Storm Total Snow related + 'StormTotalSnowWPC' : ("StormTotalSnowWPC", SCALAR, "in","WPC Storm Total Snow", 50.0, 0.0, 1, NO), + + # Snow Percentiles + 'SnowAmt5Prcntl' : ("SnowAmt5Prcntl", SCALAR, "in","5 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt10Prcntl' : ("SnowAmt10Prcntl", SCALAR, "in","10 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt25Prcntl' : ("SnowAmt25Prcntl", SCALAR, "in","25 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt50Prcntl' : ("SnowAmt50Prcntl", SCALAR, "in","50 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt75Prcntl' : ("SnowAmt75Prcntl", SCALAR, "in","75 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt90Prcntl' : ("SnowAmt90Prcntl", SCALAR, "in","90 percentile", 100.0, -40.0, 1, NO), + 'SnowAmt95Prcntl' : ("SnowAmt95Prcntl", SCALAR, "in","95 percentile", 100.0, -40.0, 1, NO), + + # Snow Exceedance Probabilities (Add others as needed) + 'ProbSnowGET' : ("ProbSnowGET", SCALAR, "%", "Prob. snow >= trace", 100.0, 0.0, 0, NO), + 'ProbSnowGE1' : ("ProbSnowGE1", SCALAR, "%", "Prob. snow >= 1 inch", 100.0, 0.0, 0, NO), + 'ProbSnowGE2' : ("ProbSnowGE2", SCALAR, "%", "Prob. snow >= 2 inches", 100.0, 0.0, 0, NO), + 'ProbSnowGE4' : ("ProbSnowGE4", SCALAR, "%", "Prob. snow >= 4 inches", 100.0, 0.0, 0, NO), + 'ProbSnowGE6' : ("ProbSnowGE6", SCALAR, "%", "Prob. snow >= 6 inches", 100.0, 0.0, 0, NO), + 'ProbSnowGE8' : ("ProbSnowGE8", SCALAR, "%", "Prob. snow >= 8 inches", 100.0, 0.0, 0, NO), + 'ProbSnowGE12' : ("ProbSnowGE12", SCALAR, "%", "Prob. snow >= 12 inches", 100.0, 0.0, 0, NO), + 'ProbSnowGE18' : ("ProbSnowGE18", SCALAR, "%", "Prob. snow >= 18 inches", 100.0, 0.0, 0, NO), + + # Freezing Rain Percentiles + 'IceAccum5Prcntl' : ("IceAccum5Prcntl", SCALAR, "in","5 percentile", 5.0, -4.0, 2, NO), + 'IceAccum10Prcntl' : ("IceAccum10Prcntl", SCALAR, "in","10 percentile", 5.0, -4.0, 2, NO), + 'IceAccum25Prcntl' : ("IceAccum25Prcntl", SCALAR, "in","25 percentile", 5.0, -4.0, 2, NO), + 'IceAccum50Prcntl' : ("IceAccum50Prcntl", SCALAR, "in","50 percentile", 5.0, -4.0, 2, NO), + 'IceAccum75Prcntl' : ("IceAccum75Prcntl", SCALAR, "in","75 percentile", 5.0, -4.0, 2, NO), + 'IceAccum90Prcntl' : ("IceAccum90Prcntl", SCALAR, "in","90 percentile", 5.0, -4.0, 2, NO), + 'IceAccum95Prcntl' : ("IceAccum95Prcntl", SCALAR, "in","95 percentile", 5.0, -4.0, 2, NO), + + # Freezing rain accretion probabilities + 'ProbIceGE001' : ("ProbIceGE001", SCALAR, "%", "Prob. ice >= 0.01", 100.0, 0.0, 0, NO), + 'ProbIceGE010' : ("ProbIceGE010", SCALAR, "%", "Prob. ice >= 0.10", 100.0, 0.0, 0, NO), + 'ProbIceGE025' : ("ProbIceGE025", SCALAR, "%", "Prob. ice >= 0.25", 100.0, 0.0, 0, NO), + 'ProbIceGE050' : ("ProbIceGE050", SCALAR, "%", "Prob. 
ice >= 0.50", 100.0, 0.0, 0, NO), + +# Persist WPC snow prob grids + 'SnowAmt5PrcntlWPC' : ("SnowAmt5PrcntlWPC", SCALAR, "in","WPC 5th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt10PrcntlWPC' : ("SnowAmt10PrcntlWPC", SCALAR, "in","WPC 10th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt25PrcntlWPC' : ("SnowAmt25PrcntlWPC", SCALAR, "in","WPC 25th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt50PrcntlWPC' : ("SnowAmt50PrcntlWPC", SCALAR, "in","WPC 50th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt75PrcntlWPC' : ("SnowAmt75PrcntlWPC", SCALAR, "in","WPC 75th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt90PrcntlWPC' : ("SnowAmt90PrcntlWPC", SCALAR, "in","WPC 90th percentile snow amount", 100.0, -40.0, 1, NO), + 'SnowAmt95PrcntlWPC' : ("SnowAmt95PrcntlWPC", SCALAR, "in","WPC 95th percentile snow amount", 100.0, -40.0, 1, NO), + 'ProbSnowGETWPC' : ("ProbSnowGETWPC", SCALAR, "%", "WPC Prob. snow >= trace", 100.0, 0.0, 0, NO), + 'ProbSnowGE1WPC' : ("ProbSnowGE1WPC", SCALAR, "%", "WPC Prob. snow >= 1 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE2WPC' : ("ProbSnowGE2WPC", SCALAR, "%", "WPC Prob. snow >= 2 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE4WPC' : ("ProbSnowGE4WPC", SCALAR, "%", "WPC Prob. snow >= 4 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE6WPC' : ("ProbSnowGE6WPC", SCALAR, "%", "WPC Prob. snow >= 6 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE8WPC' : ("ProbSnowGE8WPC", SCALAR, "%", "WPC Prob. snow >= 8 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE12WPC' : ("ProbSnowGE12WPC", SCALAR, "%", "WPC Prob. snow >= 12 in", 100.0, 0.0, 0, NO), + 'ProbSnowGE18WPC' : ("ProbSnowGE18WPC", SCALAR, "%", "WPC Prob. snow >= 18 in", 100.0, 0.0, 0, NO), +} + +# Add rainfall probability definitions +optionalParmsDict['rainfallProb']={ + # Rain Percentiles + 'QPF5Prcntl' : ("QPF5Prcntl", SCALAR, "in","5 percentile", 36.0, -24.0, 2, NO), + 'QPF10Prcntl' : ("QPF10Prcntl", SCALAR, "in","10 percentile", 36.0, -24.0, 2, NO), + 'QPF25Prcntl' : ("QPF25Prcntl", SCALAR, "in","25 percentile", 36.0, -24.0, 2, NO), + 'QPF50Prcntl' : ("QPF50Prcntl", SCALAR, "in","50 percentile", 36.0, -24.0, 2, NO), + 'QPF75Prcntl' : ("QPF75Prcntl", SCALAR, "in","75 percentile", 36.0, -24.0, 2, NO), + 'QPF90Prcntl' : ("QPF90Prcntl", SCALAR, "in","90 percentile", 36.0, -24.0, 2, NO), + 'QPF95Prcntl' : ("QPF95Prcntl", SCALAR, "in","95 percentile", 36.0, -24.0, 2, NO), + + # Rain Exceedance Probabilities (Add others as needed) + 'ProbRainGE001' : ("ProbRainGE001", SCALAR, "%", "Prob. Rain >= 0.01 in", 100.0, 0.0, 0, NO), + 'ProbRainGE010' : ("ProbRainGE010", SCALAR, "%", "Prob. Rain >= 0.10 in", 100.0, 0.0, 0, NO), + 'ProbRainGE025' : ("ProbRainGE025", SCALAR, "%", "Prob. Rain >= 0.25 in", 100.0, 0.0, 0, NO), + 'ProbRainGE050' : ("ProbRainGE050", SCALAR, "%", "Prob. Rain >= 0.50 in", 100.0, 0.0, 0, NO), + 'ProbRainGE075' : ("ProbRainGE075", SCALAR, "%", "Prob. Rain >= 0.75 in", 100.0, 0.0, 0, NO), + 'ProbRainGE100' : ("ProbRainGE100", SCALAR, "%", "Prob. Rain >= 1.00 in", 100.0, 0.0, 0, NO), + 'ProbRainGE150' : ("ProbRainGE150", SCALAR, "%", "Prob. Rain >= 1.50 in", 100.0, 0.0, 0, NO), + 'ProbRainGE200' : ("ProbRainGE200", SCALAR, "%", "Prob. Rain >= 2.00 in", 100.0, 0.0, 0, NO), + 'ProbRainGE250' : ("ProbRainGE250", SCALAR, "%", "Prob. Rain >= 2.50 in", 100.0, 0.0, 0, NO), + 'ProbRainGE300' : ("ProbRainGE300", SCALAR, "%", "Prob. Rain >= 3.00 in", 100.0, 0.0, 0, NO), +} + + +# Make all optional parms available as variables. 
+for optionalParmKey in optionalParmsDict: + for pname,parm in optionalParmsDict[optionalParmKey].iteritems(): + setattr(sys.modules[__name__],pname,parm) + +#----------------------------------- +# DO NOT CHANGE THE FOLLOWING SECTION +#------------------------------------ +if not BASELINE and siteImport('localWxConfig'): + types = localWxConfig.types + + +#--------------------------------------------------------------------------- +# +# Projection Configuration section. +# +#--------------------------------------------------------------------------- +from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData +ProjectionType = ProjectionData.ProjectionType +NONE = ProjectionType.NONE +LAMBERT_CONFORMAL = ProjectionType.LAMBERT_CONFORMAL +MERCATOR = ProjectionType.MERCATOR +POLAR_STEREOGRAPHIC = ProjectionType.POLAR_STEREOGRAPHIC +LATLON = ProjectionType.LATLON + +# projectionID / projectionType / latLonLL / latLonUR / +# latLonOrigin / stdParallelOne / stdParallelTwo / gridPointLL / gridPointUR +# latIntersect / lonCenter / lonOrigin + +Grid201 = ('Grid201',POLAR_STEREOGRAPHIC, + (-150.00, -20.826), (-20.90846, 30.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 65), 0.0, 0.0, -105.0) + +Grid202 = ('Grid202', POLAR_STEREOGRAPHIC, + (-141.028, 7.838), (-18.576, 35.617), + (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 43), 0.0, 0.0, -105.0) + +Grid203 = ('Grid203', POLAR_STEREOGRAPHIC, + (-185.837, 19.132), (-53.660, 57.634), + (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -150.0) + +Grid204 = ('Grid204', MERCATOR, + (-250.0, -25.0), (-109.129, 60.644), + (0.0, 0.0), 0.0, 0.0, (1, 1), (93, 68), 0.0, -179.564, 0.0) + +Grid205 = ('Grid205', POLAR_STEREOGRAPHIC, + (-84.904, 0.616), (-15.000, 45.620), + (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -60.0) + +Grid206 = ('Grid206', LAMBERT_CONFORMAL, + (-117.991, 22.289), (-73.182, 51.072), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (51, 41), 0.0, 0.0, 0.0) + +Grid207 = ('Grid207', POLAR_STEREOGRAPHIC, + (-175.641, 42.085), (-93.689, 63.976), + (0.0, 0.0), 0.0, 0.0, (1, 1), (49, 35), 0.0, 0.0, -150.0) + +Grid208 = ('Grid208', MERCATOR, + (-166.219, 10.656), (-147.844, 27.917), + (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -157.082, 0.0) + +Grid209 = ('Grid209', LAMBERT_CONFORMAL, + (-117.991, 22.289), (-73.182, 51.072), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (101, 81), 0.0, 0.0, 0.0) + +Grid210 = ('Grid210', MERCATOR, + (-77.000, 9.000), (-58.625, 26.422), + (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -67.812, 0.0) + +Grid211 = ('Grid211', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (93, 65), 0.0, 0.0, 0.0) + +Grid212 = ('Grid212', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (185, 129), 0.0, 0.0, 0.0) + +Grid213 = ('Grid213', POLAR_STEREOGRAPHIC, + (-141.028, 7.838), (-18.577, 35.617), + (0.0, 0.0), 0.0, 0.0, (1, 1), (129, 85), 0.0, 0.0, -105.0) + +Grid214 = ('Grid214', POLAR_STEREOGRAPHIC, + (-175.641, 42.085), (-93.689, 63.975), + (0.0, 0.0), 0.0, 0.0, (1, 1), (97, 69), 0.0, 0.0, -150.0) + +# (new alaska grid) +Grid214AK = ('Grid214AK', POLAR_STEREOGRAPHIC, + (-178.571, 40.5301), (-93.689, 63.975), + (0.0, 0.0), 0.0, 0.0, (1,1), (104, 70), 0.0, 0.0, -150.0) + +Grid215 = ('Grid215', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (369, 257), 0.0, 0.0, 0.0) + +Grid216 = ('Grid216', POLAR_STEREOGRAPHIC, + (-173.000, 30.000), (-62.850, 70.111), + (0.0, 0.0), 0.0, 0.0, (1, 1), (139, 107), 0.0, 0.0, 
-135.0) + +Grid217 = ('Grid217', POLAR_STEREOGRAPHIC, + (-173.000, 30.000), (-62.850, 70.111), + (0.0, 0.0), 0.0, 0.0, (1, 1), (277, 213), 0.0, 0.0, -135.0) + +Grid218 = ('Grid218', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (614, 428), 0.0, 0.0, 0.0) + +Grid219 = ('Grid219', POLAR_STEREOGRAPHIC, + (-119.559, 25.008), (60.339, 24.028), + (0.0, 0.0), 0.0, 0.0, (1, 1), (385, 465), 0.0, 0.0, -80.0) + +Grid221 = ('Grid221', LAMBERT_CONFORMAL, + (-145.500, 1.000), (-2.566, 46.352), + (-107.0, 50.0), 50.0, 50.0, (1, 1), (349, 277), 0.0, 0.0, 0.0) + +Grid222 = ('Grid222', LAMBERT_CONFORMAL, + (-145.500, 1.000), (-2.566, 46.352), + (-107.0, 50.0), 50.0, 50.0, (1, 1), (59, 47), 0.0, 0.0, 0.0) + +Grid225 = ('Grid225', MERCATOR, + (-250.0, -25.0), (-109.129, 60.644), + (0.0, 0.0), 0.0, 0.0, (1, 1), (185, 135), 0.0, -179.564, 0.0) + +Grid226 = ('Grid226', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (737, 513), 0.0, 0.0, 0.0) + +Grid227 = ('Grid227', LAMBERT_CONFORMAL, + (-133.459, 12.190), (-49.385, 57.290), + (-95.0, 25.0), 25.0, 25.0, (1, 1), (1473, 1025), 0.0, 0.0, 0.0) + +Grid228 = ('Grid228', LATLON, + (0.0, 90.0), (359.0, -90.0), (0.0, 0.0), 0.0, 0.0, + (1, 1), (144, 73), 0.0, 0.0, 0.0) + +Grid229 = ('Grid229', LATLON, + (0.0, 90.0), (359.0, -90.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 181), 0.0, 0.0, 0.0) + +Grid230 = ('Grid230', LATLON, + (0.0, 90.0), (359.5, -90.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 361), 0.0, 0.0, 0.0) + +Grid231 = ('Grid231', LATLON, + (0.0, 0.0), (359.5, 90.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 181), 0.0, 0.0, 0.0) + +Grid232 = ('Grid232', LATLON, + (0.0, 0.0), (359.0, 90.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 91), 0.0, 0.0, 0.0) + +Grid233 = ('Grid233', LATLON, + (0.0, -78.0), (358.750, 78.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (288, 157), 0.0, 0.0, 0.0) + +Grid234 = ('Grid234', LATLON, + (-98.000, 15.0), (-65.000, -45.0), + (0.0, 0.0), 0.0, 0.0, (1, 1), (133, 121), 0.0, 0.0, 0.0) + +Grid235 = ('Grid235', LATLON, + (0.250, 89.750), (359.750, -89.750), + (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 360), 0.0, 0.0, 0.0) + +HRAP = ('HRAP', POLAR_STEREOGRAPHIC, + (-119.036, 23.097), (-75.945396, 53.480095), + (0.0, 0.0), 0.0, 0.0, (1, 1), (801, 881), 0.0, 0.0, -105.0) + +NDFD_Oceanic_10K = ('NDFD_Oceanic_10km', MERCATOR, + (-230.094, -30.4192), (10.71, 80.01), + (0.0, 0.0), 0.0, 0.0, (1, 1), (2517, 1793), 0.0, -109.962, 0.0) + +# Add a new domain for NHC purposes +GridForNHA = ('GridForNHA', LAMBERT_CONFORMAL, + (-103.929, 20.164), (-50.8894, 42.9545), + (-95.0, 35.0), 35.0, 35.0, (1, 1), (1833,1241), 0.0, 0.0, 0.0) + +# list of all projections +allProjections = [Grid201, Grid202, Grid203, Grid204, Grid205, Grid206, + Grid207, Grid208, Grid209, Grid210, Grid211, Grid212, Grid213, Grid214, + Grid214AK, Grid215, Grid216, Grid217, Grid218, Grid219, Grid221, Grid222, + Grid225, Grid226, Grid227, Grid228, Grid229, Grid230, Grid231, Grid232, + Grid233, Grid234, Grid235, HRAP, NDFD_Oceanic_10K, GridForNHA] + +#--------------------------------------------------------------------------- +# +# Grid Domain configuration section +# +#--------------------------------------------------------------------------- +# +# xdim/ydim: Defines the dimensions of the grids. (GFE grid size) +# +# origin: Defines the lower-left corner of the grid (point 0,0) in +# world coordinates. +# +# extent: Defines the "size" of the grid in world coordinates. The upper +# right corner is the origin+extent. 
+# +# TimeZone: Defines the timezone used by this site in standard TZ format. +# Refer to /usr/share/zoneinfo/zone.tab for the correct settings. +# +# Projection: Defines the projection identifier to be used for this domain. + +# Note that all parameters for an existing database must use the same +# projection, though not necessarily the same grid size and location. + +# These values are set up for AWIPS. There is a script at the end +# of this section that adjusts the resolution for the RPP sites. + +# [xdim, ydim] / (origin) /( extent) / TimeZone / Projection / OfficeType + +SITES = { +#WFOs + # Experimental combined AFC site + 'AFC' : ([1057, 449], (1.0, 19.00), (66.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"), + 'ABQ' : ([145, 145], (36.00, 22.00), (9.0, 9.0), 'MST7MDT', Grid211,"wfo"), + 'ABR' : ([145, 145], (45.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211,"wfo"), + 'AER' : ([369, 337], (44.00, 23.00), (23.0, 21.0), 'America/Anchorage', Grid214AK, "wfo"), + 'AFG' : ([641, 497], (27.0, 38.0), (40.0, 31.0), 'America/Anchorage', Grid214AK, "wfo"), + 'AJK' : ([337, 241], (62.0, 23.0), (21.0, 15.0), 'America/Juneau', Grid214AK, "wfo"), + 'AKQ' : ([145, 145], (68.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'ALU' : ([865, 449], (1.0, 19.0), (54.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"), + 'ALY' : ([145, 145], (70.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'AMA' : ([145, 145], (41.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'APX' : ([145, 145], (58.00, 34.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'ARX' : ([145, 145], (52.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'BGM' : ([145, 145], (68.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'BIS' : ([145, 145], (43.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'BMX' : ([145, 145], (58.00, 19.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'BOI' : ([177, 177], (25.00, 34.00), (11.0, 11.0), 'MST7MDT', Grid211, "wfo"), + 'BOU' : ([145, 145], (38.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'BOX' : ([187, 154], (75.375,34.59375), (5.8125,4.78125), "EST5EDT", Grid211, "wfo"), + 'BRO' : ([145, 145], (44.00, 10.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'BTV' : ([193, 157], (72.00, 37.15), (6.0, 4.875), 'EST5EDT', Grid211, "wfo"), + 'BUF' : ([145, 145], (66.00, 32.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'BYZ' : ([145, 145], (36.00, 37.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'CAE' : ([145, 145], (65.00, 20.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'CAR' : ([145, 145], (75.00, 39.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'CHS' : ([145, 145], (65.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'CLE' : ([145, 145], (62.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'CRP' : ([145, 145], (45.00, 11.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'CTP' : ([145, 145], (67.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'CYS' : ([145, 145], (37.00, 31.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'DDC' : ([145, 145], (43.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'DLH' : ([145, 145], (50.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'DMX' : ([145, 145], (49.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'DTX' : ([161, 161], (57.00, 34.00), (10.0, 10.0), 'EST5EDT', Grid211, "wfo"), + 'DVN' : ([145, 145], (52.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'EAX' : ([145, 145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'EKA' : ([145, 145], (20.00, 31.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 
'EPZ' : ([145, 145], (36.00, 16.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'EWX' : ([145, 145], (44.00, 12.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'FFC' : ([145, 145], (61.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'FGF' : ([145, 145], (45.00, 39.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'FGZ' : ([145, 145], (29.00, 23.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), + 'FSD' : ([177, 177], (43.00, 32.00), (11.0, 11.0), 'CST6CDT', Grid211, "wfo"), + 'FWD' : ([145, 145], (45.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'GGW' : ([145, 145], (36.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'GID' : ([145, 145], (44.00, 28.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'GJT' : ([145, 145], (34.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'GLD' : ([145, 145], (41.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'GRB' : ([145, 145], (54.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'GRR' : ([145, 145], (58.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'GSP' : ([145, 145], (63.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'GUM' : ([193, 193], (23.0, 26.0), (3.0, 3.0), 'Pacific/Guam', Grid204, "wfo"), + 'GYX' : ([193,209], (76.00, 37.375), (6.0, 6.5), 'EST5EDT', Grid211, "wfo"), + 'HFO' : ([321, 225], (58.78125,29.875),(5.0,3.5), 'Pacific/Honolulu', Grid204, 'wfo'), + 'HGX' : ([145, 145], (48.00, 13.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'HNX' : ([145, 145], (22.00, 24.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'HUN' : ([161, 161], (60.0, 22.0), (5.0, 5.0), 'CST6CDT', Grid211, "wfo"), + 'ICT' : ([145, 145], (45.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'ILM' : ([145, 145], (67.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'ILN' : ([145, 145], (60.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'ILX' : ([145, 145], (55.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'IND' : ([145, 145], (58.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'IWX' : ([145, 145], (58.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'JAN' : ([145, 145], (54.00, 18.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'JAX' : ([145, 145], (64.00, 14.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'JKL' : ([145, 145], (61.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'KEY' : ([145, 145], (66.00, 8.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'LBF' : ([145, 145], (43.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LCH' : ([145, 145], (52.00, 15.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LIX' : ([145, 145], (54.00, 14.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LKN' : ([145, 145], (25.00, 30.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'LMK' : ([145, 145], (59.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'LOT' : ([145, 145], (55.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LOX' : ([145, 145], (21.00, 23.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'LSX' : ([145, 145], (52.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LUB' : ([145, 145], (39.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'LWX' : ([145, 145], (67.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'LZK' : ([145, 145], (51.00, 20.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'MAF' : ([205,247], (40.375, 16.8125), (6.375, 7.6875), 'CST6CDT', Grid211, "wfo"), + 'MEG' : ([145, 145], (54.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'MFL' : ([145, 145], (66.00, 9.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'MFR' : ([145, 145], (20.00, 34.00), (9.0, 9.0), 
'PST8PDT', Grid211, "wfo"), + 'MHX' : ([145, 145], (68.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'MKX' : ([145, 145], (55.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'MLB' : ([145, 145], (66.00, 12.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'MOB' : ([145, 145], (57.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'MPX' : ([145, 145], (50.00, 34.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'MQT' : ([145, 145], (56.00, 36.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'MRX' : ([145, 145], (61.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'MSO' : ([145, 145], (29.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'MTR' : ([145, 145], (20.00, 26.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'OAX' : ([145, 145], (45.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'OHX' : ([145, 145], (58.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'OKX' : ([145, 145], (71.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'OTX' : ([145, 145], (25.00, 40.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'OUN' : ([145, 145], (44.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'PAH' : ([145, 145], (56.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'PBZ' : ([145, 145], (65.00, 29.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'PDT' : ([145, 145], (23.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'PHI' : ([145, 145], (70.00, 28.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'PIH' : ([145, 145], (30.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'PQR' : ([145, 145], (19.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'PSR' : ([145, 145], (28.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), + 'PUB' : ([145, 145], (38.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'RAH' : ([145, 145], (66.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'REV' : ([145, 145], (23.00, 29.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'RIW' : ([145, 145], (35.00, 33.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'RLX' : ([145, 145], (63.00, 26.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'RNK' : ([161, 161], (67.0, 26.00), (5.0, 5.0), 'EST5EDT', Grid211, 'wfo'), + 'SEW' : ([145, 145], (21.00, 42.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'SGF' : ([145, 145], (51.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'SGX' : ([145, 145], (24.00, 21.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'SHV' : ([145, 145], (50.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'SJT' : ([145, 145], (43.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'SJU' : ([32, 28], (10.0, 10.0), (8.0, 7.0), 'America/Puerto_Rico',Grid210, "wfo"), + 'SLC' : ([161, 161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "wfo"), + 'STO' : ([145, 145], (20.00, 28.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), + 'TAE' : ([145, 145], (60.00, 15.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'TBW' : ([145, 145], (64.00, 11.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"), + 'TFX' : ([145, 145], (32.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'TOP' : ([145, 145], (47.00, 26.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'TSA' : ([145, 145], (48.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"), + 'TWC' : ([145, 145], (29.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"), + 'UNR' : ([145, 145], (40.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"), + 'VEF' : ([145, 145], (26.00, 25.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"), +#RFCs + 'ACR' : ([565, 415], (26.0, 19.0), (60.0, 44.0), 'America/Anchorage', Grid214AK, "rfc"), + 'ALR' : ([299, 
278], (59.0, 11.0), (17.0, 19.0), 'CST6CDT', Grid211, "rfc"), + 'FWR' : ([362, 334], (36.0, 11.0), (20.0, 20.0), 'CST6CDT', Grid211, "rfc"), + 'KRF' : ([408, 356], (33.0, 27.0), (26.0, 22.0), 'CST6CDT', Grid211, "rfc"), + 'MSR' : ([381, 304], (43.0, 28.0), (24.0, 20.0), 'CST6CDT', Grid211, "rfc"), + 'ORN' : ([303, 216], (51.0, 16.0), (18.0, 14.0), 'CST6CDT', Grid211, "rfc"), + 'PTR' : ([218, 308], (21.0, 35.0), (17.0, 19.0), 'PST8PDT', Grid211, "rfc"), + 'RHA' : ([132, 140], (69.0, 28.0), (7.0, 10.0), 'EST5EDT', Grid211, "rfc"), + 'RSA' : ([140, 296], (21.0, 23.0), (12.0, 17.0), 'PST8PDT', Grid211, "rfc"), + 'STR' : ([171, 307], (29.0, 20.0), (13.0, 18.0), 'MST7MDT', Grid211, "rfc"), + 'TAR' : ([226, 164], (69.0, 34.0), (13.0, 13.0), 'EST5EDT', Grid211, "rfc"), + 'TIR' : ([220, 171], (59.0, 25.0), (13.0, 12.0), 'EST5EDT', Grid211, "rfc"), + 'TUA' : ([281, 168], (39.0, 22.0), (18.0, 10.0), 'CST6CDT', Grid211, "rfc"), + +#Special Sites - Added Hawaiian High Seas domain + 'US' : ([267, 159], (18.0, 9.5), (67.0, 40.0), 'EDT5EDT', Grid211, "other"), + 'FSL' : ([161, 145], (38.50, 27.00), (10.0, 9.0), 'MST7MDT', Grid211, "other"), + 'NH1' : ([838, 577], (887.0, 121.0), (837.0, 576.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"), + 'NH2' : ([1188, 363], (1328.0, 365.0), (1187.0, 362.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"), + 'ONA' : ([244, 383], (68.9375, 19.5625), (15.1875, 23.875), 'EST5EDT', Grid211, "wfo"), + 'ONP' : ([396, 415], (8.1875, 21.5625), (24.6875, 25.875), 'PST8PDT', Grid211, "wfo"), + 'HPA' : ([899, 671], (284.0, 30.0), (898.0, 670.0), 'Pacific/Honolulu', NDFD_Oceanic_10K, "wfo"), + 'WNJ' : ([301, 346], (1000.0, 475.0), (300.0, 345.0), 'CST6CDT', NDFD_Oceanic_10K, "wfo"), + +#Aviation Domains for AAWU + 'AAWU' : ([705, 457], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'), + 'AVAK' : ([465, 417], (8.0, 12.0), (29.0, 26.0), 'America/Anchorage', Grid203, 'nc'), + +#Regional Offices + 'VUY' : ([337,449], (62.00, 19.00), (21.0, 28.0), 'EST5EDT', Grid211, "ro"), + 'BCQ' : ([145,145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "ro"), + 'EHU' : ([657,321], (36.00, 9.50), (41.0, 20.0), 'CST6CDT', Grid211, "ro"), + 'VHW' : ([161,161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "ro"), + 'PBP' : ([321,225], (7.00, 11.00), (10.0, 7.0), 'Pacific/Honolulu', Grid208, "ro"), + 'VRH' : ([1409, 913], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'), + +#National Centers + 'HAK' : ( [825,553], ( 1.0, 1.0), (103.0, 69.0), 'EST5EDT', Grid214AK, "nc"), + 'HUS' : ([1073,689], (19.0, 8.0), ( 67.0, 43.0), 'EST5EDT', Grid211, "nc"), + 'NHA' : ([1873,1361], (35.5, 3.5), (58.5, 42.5), 'EST5EDT', Grid211, "nc"), + +} + +# Get list of valid office types, for validation. +VALID_OFFICE_TYPES = [] +# List of all values of all sites. 
+for siteValues in SITES.values(): + # Office type is the 5th element of each site's values + officeType = siteValues[5] + if officeType not in VALID_OFFICE_TYPES: + # A new office type + VALID_OFFICE_TYPES.append(officeType) + +#--------------------------------------------------------------------------- +# +# Time Constraint configuration section +# +#--------------------------------------------------------------------------- +HOUR = 3600 +DAY = 24 * HOUR + +# Start: is the number of seconds since 0000z for the first grid of the day +# Repeat: is the number of seconds from start until the next grid starts +# Duration: is the length of the grid in number of seconds + +# Examples of constraints: +# Hourly temperatures +# HrTemp = (0, HOUR, HOUR) +# QPF that is 6 hours long, aligned on 0000z, exists for every 6 hours +# Q = (0, HOUR*6, HOUR*6) +# + +# fixed time constraints: start / repeat / duration +TC_1M = (0, 60, 60) # 1 minute +TC1 = (0, HOUR, HOUR) +TC3 = (0, 3 * HOUR, HOUR) +TC6 = (0, 6 * HOUR, HOUR) +TC12 = (0, 12 * HOUR, HOUR) +TC3NG = (0, 3 * HOUR, 3 * HOUR) +TC6NG = (0, 6 * HOUR, 6 * HOUR) +TC12NG = (0, 12 * HOUR, 12 * HOUR) +TC24NG = (0, 24 * HOUR, 24 * HOUR) +TC061212 = (6 * HOUR, 12 * HOUR, 12 * HOUR) +Persistent = (0, 0, 0) # special time constraint + + +# The following time constraints are based on local standard time. +# Change the last parameter from 0 to 1 to force daylight savings time +# always. +# PWS TCs changed in OB9.3 for new 6 hour data from NHC +MaxTTC = localTC(7*HOUR, 24*HOUR, 13*HOUR, 0) +MinTTC = localTC(19*HOUR, 24*HOUR, 14*HOUR, 0) +MaxRHTC = localTC(15*HOUR, 24*HOUR, 18*HOUR, 0) +MinRHTC = localTC(3*HOUR, 24*HOUR, 18*HOUR, 0) +LT3NG = localTC(0*HOUR, 3*HOUR, 3*HOUR, 0) +LT6NG = localTC(0*HOUR, 6*HOUR, 6*HOUR, 0) +LT12NG = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0) +LTMOS = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0) #special MOS local time +MaxTTCMOS = localTC(6*HOUR, 24*HOUR, 12*HOUR, 0) #special MOS maxT +MinTTCMOS = localTC(18*HOUR, 24*HOUR, 12*HOUR, 0) #special MOS minT +LT24 = localTC(0*HOUR, 24*HOUR, 24*HOUR, 0) +FireWx1300TC = localTC(13*HOUR, 24*HOUR, 1*HOUR, 0) #special FireWx 1pm snap +#DR3511 DeltaMaxTTC = localTC(7*HOUR, 24*HOUR, 16*HOUR, 0) # just for HPCdeltaMaxT +PWSDTC = localTC(11*HOUR, 24*HOUR, 12*HOUR, 0) +PWSNTC = localTC(23*HOUR, 24*HOUR, 12*HOUR, 0) +# Alaska OCONUS +if SID in siteRegion['AR']: + MaxTTC = localTC(5*HOUR, 24*HOUR, 15*HOUR, 0) + MinTTC = localTC(17*HOUR, 24*HOUR, 18*HOUR, 0) + +# From NwsInitsConfig +LT24APT = localTC(7*HOUR, 24*HOUR, 24*HOUR, 0) +FireWxAvgTC = localTC( 12*HOUR, 24*HOUR, 6*HOUR, 0) +LT4HH = localTC(11*HOUR, 24*HOUR, 4*HOUR, 0) +SPC24 = (12*HOUR, 24*HOUR, 24*HOUR) +# For WR +TC0624NG=(6*HOUR,24*HOUR,24*HOUR) +TC12NG6=(6*HOUR,12*HOUR,12*HOUR) +# HIL Time Constraint +HILTC=(6*HOUR,24*HOUR,24*HOUR) + +#--------------------------------------------------------------------------- +# +# Database/(Model) Attribute Configuration +# +#--------------------------------------------------------------------------- +# +# name: The model name of the database +# +# format: Either 'GRID' or 'DFM' +# +# type: Optional type of the database +# +# single: YES or NO. YES if this database always exists and is not +# based on model-times. NO if this database is created/destroyed and +# is based on model-runs. When created, the names of these databases have +# time stamps. +# +# official: YES or NO. YES if this is an official database from which +# products can be generated. NO if this is a conventional database. 
+# +# numVer: Number of versions of this database to retain. +# +# purgeAge: Number of hours in the past before grids will be automatically +# purged from the database. If 0, then purging is disabled. +# + +YES = 1 +NO = 0 +GRID = 'GRID' +# name / format / type / single / official / numVer / purgeAge + +Fcst = ('Fcst', GRID, '', YES, NO, 1, 24) +Practice = ('Fcst', GRID, 'Prac', YES, NO, 1, 24) +TestFcst = ('Fcst', GRID, 'Test', YES, NO, 1, 24) +Restore = ('Restore', GRID, '', YES, NO, 1, 24) +Test = ('Test', GRID, 'test', NO, NO, 1, 0) +Official = ('Official', GRID, '', YES, YES, 1, 24) +ISC = ('ISC', GRID, '', YES, NO, 1, 12) + + +#--------------------------------------------------------------------------- +# +# Search path for netCDF data files. +# NOTE: This feature was implemented only for backward compatibility with existing A1 datasets. +# New datasets should be generated in a form that can be ingested by A2. +# It should only be used for static datasets. +# New files will not be recognized without a server restart. +# +#--------------------------------------------------------------------------- +# Alaska OCONUS +if SID in groups['ALASKA_SITES']: + NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISMAK'), + ('/awips2/edex/data/gfe/climo/PRISMAK800'), + ] + +# Hawaii OCONUS +elif SID == "HFO": + NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), + ('/awips2/edex/data/gfe/topo/StdTerrain/Hawaii', 'StdTerrain'), + ] + +# San Juan OCONUS +elif SID == "SJU": + NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), + ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), + ('/awips2/edex/data/gfe/topo/StdTerrain/PuertoRico', 'StdTerrain') + ] + +# Guam OCONUS +elif SID == "GUM": + NETCDFDIRS = [] + +#CONUS sites +elif SID in groups['CONUS_EAST_SITES']: + NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'), + ('/awips2/edex/data/gfe/climo/NCDC'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), + ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), + ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'), + ] + +else: #######DCS3501 WEST_CONUS + NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'), + ('/awips2/edex/data/gfe/climo/NCDC'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'), + ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'), + ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'), + ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'), + ] + +#--------------------------------------------------------------------------- +# +# Where to find (and what to call) satellite data. +# +#--------------------------------------------------------------------------- +# + +# This table contains product ID and weather element names for satellite data +# +# A product ID consists of the sector ID and physical element of the +# satellite product.
+# +# Examples: +# +# "East CONUS/Imager Visible" +# "East CONUS/Imager 11 micron IR" +# "East CONUS/Imager 13 micron (IR)" +# "East CONUS/Imager 3.9 micron IR" +# + +# Alaska OCONUS +if SID in groups['ALASKA_SITES']: + SATDATA = [] + +# Hawaii OCONUS +elif SID == "HFO": + SATDATA = [] + +# San Juan OCONUS +elif SID == "SJU": + SATDATA = [("East CONUS/Imager Visible", "visibleEast"), + ("East CONUS/Imager 11 micron IR", "ir11East"), + ("East CONUS/Imager 13 micron (IR)", "ir13East"), + ("East CONUS/Imager 3.9 micron IR", "ir39East"), + ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")] + +# Guam OCONUS +elif SID == "GUM": + SATDATA = [] + +#CONUS sites +else: + SATDATA = [("West CONUS/Imager Visible", "visibleWest"), + ("West CONUS/Imager 11 micron IR", "ir11West"), + ("West CONUS/Imager 13 micron (IR)", "ir13West"), + ("West CONUS/Imager 3.9 micron IR", "ir39West"), + ("West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"), + ("East CONUS/Imager Visible", "visibleEast"), + ("East CONUS/Imager 11 micron IR", "ir11East"), + ("East CONUS/Imager 13 micron (IR)", "ir13East"), + ("East CONUS/Imager 3.9 micron IR", "ir39East"), + ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")] + +#--------------------------------------------------------------------------- +# +# Intersite Coordination Configurations +# +#--------------------------------------------------------------------------- +# base URLs for the ISC Routing Table +ISC_ROUTING_TABLE_ADDRESS = { + "ANCF" : "http://svcbu-ancf.er.awips.noaa.gov:8080/irt", + "BNCF" : "http://svcbu-bncf.er.awips.noaa.gov:8080/irt" + } + + +# list of sites from which you want ISC data (If None, ifpServer will +# automatically calculate the list.) Should always include your own site. +REQUESTED_ISC_SITES = None + +# Overall ISC request flag. Must be set to 1 in order to request and receive +# ISC data. Must be 1 to register with the IRT. +REQUEST_ISC = 0 + +# Sending control flag. Set to 1 to send isc when data is saved. +SEND_ISC_ON_SAVE = 0 + +# Sending control flag. Set to 1 to send isc when data is published. +SEND_ISC_ON_PUBLISH = 0 + +# List of weather elements to request for ISC. If set to None, it defaults +# to the list of all weather elements in the Fcst database. +REQUESTED_ISC_PARMS = None + +# Transmission script for sending data. This is the script that iscExtract +# and other routines (e.g., vtec table sharing) will call to perform the +# actual transmission of data. +TRANSMIT_SCRIPT = GFESUITE_HOME + '/bin/gfe_msg_send -s %SUBJECT -a %ADDRESSES -i %WMOID -c 11 -p 0 -e %ATTACHMENTS' + + +# Extra ISC parms (weather elements). These are a list of the baseline +# weather elements to be added as extra parms to the ISC database. This +# is necessary when receiving ISC grids from a site that is a different +# office type than your own. You never need to add weather elements +# to the ISC database for your own office type. The format of this +# entry is a list of tuples. The tuple is a list of weather element +# objects (such as Temp and not "T"), and an office type, such as "rfc".
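To make the renaming concrete: elements received from a foreign office type are stored under the base element name with that office type appended (the suffixing loop near the end of this file does exactly this). A minimal plain-Python sketch, assuming the (name, type, units, description, max, min, precision, rateParm) tuple layout used for weather element definitions elsewhere in this file; the QPF values and the "rfc" office type below are illustrative stand-ins, not the baseline definitions:

    # Sketch only -- mirrors the EXTRA_ISC_PARMS suffixing loop later in
    # this file; the element tuple is a stand-in, not the baseline QPF.
    QPF_example = ("QPF", "SCALAR", "in", "QPF", 5.0, 0.0, 2, 0)
    officeType = "rfc"
    wecopy = list(QPF_example)
    wecopy[0] = wecopy[0] + officeType   # rename the element: "QPF" -> "QPFrfc"
    wecopy = tuple(wecopy)
    # ISCPARMS would then receive ([wecopy], TC1)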
+EXTRA_ISC_PARMS = [([QPF,FloodingRainThreat], 'rfc'), ([QPF,FloodingRainThreat], 'wfo'), ([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'nc'),([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'wfo')] + +#--------------------------------------------------------------------------- +# +# Misc. Configurations +# +#--------------------------------------------------------------------------- +# defines the number of days to keep log files +LOG_FILE_PURGE_AFTER = 28 + +# auto configure NotifyTextProd -- set after OB6 +AUTO_CONFIGURE_NOTIFYTEXTPROD = 1 #0=off,1=on + + +#----------------------------------- +# DO NOT CHANGE THE FOLLOWING SECTION +#------------------------------------ +# import the local config file + +myOfficeType = SITES[GFESUITE_SITEID][5] + +AdditionalISCRouting = [ + # Configure by adding entries to this list in the form of: + # ([WeatherElements], ModelName, EditAreaPrefix) + # Example: + # ([Hazards, LAL, CWR], "ISCFire", "FireWxAOR_"), +] + +#--------------------------------------------------------------------------- +# Parm groups. Combine parms with time constraints +# list of ([parms], timeConstraints) +#--------------------------------------------------------------------------- + +#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +# There is nothing special about these variables. They are just used as a +# convenience to set up multiple models in modelDict with the same parameter +# set. However, model parms are no longer as generic as they once were and +# it's just as easy to set the parms explicitly in modelDict. + +STD6_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC6), + ([Haines, MixHgt, FreeWind, TransWind, VentRate], TC6), + ([DSI, Stability, Ttrend, RHtrend], TC6), + ([SnowAmt, PoP, CWR], TC6NG), ([QPF, Weather, IceAcc, LAL], TC6NG), + ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), + ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), ([MinT], MinTTC), + ([Wetflag], FireWx1300TC)] + +# hourly +STD1_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC1), + ([Haines, MixHgt, FreeWind, TransWind], TC1), + ([DSI, Stability, VentRate, Ttrend, RHtrend], TC1), + ([SnowAmt, PoP, CWR], TC1), ([QPF, Weather, IceAcc, LAL], TC1), + ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), + ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), ([MinT], MinTTC), + ([Wetflag], FireWx1300TC)] + +# 3 hourly +STD3_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC3), + ([Haines, MixHgt, FreeWind, TransWind], TC3), + ([DSI, Stability, VentRate, Ttrend, RHtrend], TC3), + ([SnowAmt, PoP, CWR], TC3NG), ([QPF, IceAcc, Weather, LAL], TC3NG), + ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24), + ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), ([MinT], MinTTC), + ([Wetflag], FireWx1300TC)] + +# Fcst and official database parameter groupings +OFFICIALDBS = [([Temp, Td, Wind, Weather, Sky, FzLevel, SnowLevel], TC1), + ([HeatIndex, WindChill, RH, SnowAmt, CWR, QPF], TC1), + ([PoP, Ttrend, RHtrend, Wind20ft, WindGust], TC1), + ([MinT], MinTTC), ([MaxT], MaxTTC), + ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), + ([VentRate, LAL, Haines, MixHgt, FreeWind, TransWind], TC1), + ([DSI, Stability, MarineLayer], TC1), + ([HrsOfSun, InvBurnOffTemp], LT24), + ([IceAcc, IceCoverage, Hazards], TC1), + ([Wetflag], FireWx1300TC), +
([StormTotalSnow], TC1), + # Tropical parms + ([prob34, prob50, prob64,pws34,pws50,pws64,], TC1), + ([InundationMax,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], TC1), + ([ProposedSS,DiffSS,tempProposedSS,InitialSS], TC1), + ([WindThreat,StormSurgeThreat,FloodingRainThreat,TornadoThreat], TC1), + ([pwsD34,pwsD64], PWSDTC), + ([pwsN34,pwsN64], PWSNTC), + ([pws34int,pws64int,InundationTiming,QPFtoFFGRatio], TC6NG), + # DR20541 and 20482 + ([PoP12hr], TC12NG), + ([QPF6hr, SnowAmt6hr], TC6NG), + ([cape], LT6NG), + ([ApparentT, HeatIndex, WindChill, LkSfcT, SnowMap, SnowRatio, StormTotalQPF], TC1), + ] + +## JCM Change wave and period (and swanswell) to TC1 for all marine sites +if SID in groups['marineSites'] or SID in groups['GreatLake_SITES']: + OFFICIALDBS.append(([WaveHeight, PeakWaveDir, WindWaveHeight, SurfHeight, Swell, Swell2, Period, Period2], TC1)) + OFFICIALDBS.append(([SwanSwell, Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, + Period1, Period3, Period4, Period5, Period6, Period7, Period8, Period9], TC1)) + OFFICIALDBS.append(([NWPSwind, UWaveDir, VWaveDir, WaveDir, RipProb, ErosionProb, OverwashProb],TC1)) + +# NWPS +nwpsCG1_MODEL = [([SwanSwell, Period, WaveHeight, PeakWaveDir, WindWaveHeight, Wind, RipProb, ErosionProb, OverwashProb], TC1)] +nwpsTrkngCG0_MODEL = [([Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, Period1, Period2, Period3, Period4, Period5, Period6,Period7, Period8, Period9], TC1)] + +# OPC TAF parameters (for NW, SW, and E) +OPCTAFBPARMS = [([WindWaveHeight, WaveHeight], TC1)] + +# SAT database parameter groupings +SATPARMS = [([SatVisE, SatIR11E, SatIR13E, SatIR39E, SatWVE, SatFogE], TC_1M), + ([SatVisW, SatIR11W, SatIR13W, SatIR39W, SatWVW, SatFogW], TC_1M)] + +# RTMA database parameter groupings +# DCS17288/DR17144 +if SID in groups['OCONUS_SITES']: + RTMAPARMS = [([Temp,Td,RH,Wind,Vis,Pressure,WindGust],TC1), + ([MinT],MinTTC), ([MaxT],MaxTTC), + ([MinRH],MinRHTC), ([MaxRH],MaxRHTC), + ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc],TC1)] +else: + RTMAPARMS = [([Temp,Td,RH,Wind,QPE,Sky,Vis,Pressure,WindGust],TC1), + ([MinT],MinTTC), ([MaxT],MaxTTC), + ([MinRH],MinRHTC), ([MaxRH],MaxRHTC), + ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc,SkyUnc],TC1)] + +#--------------------------------------------------------------------------- +# Databases for a site. +# list of (Database, [parms]) +# Official, Practice, TestFcst, Test are all set after Fcst is defined. 
+#--------------------------------------------------------------------------- + +# Intersite coordination database parameter groupings, based on +# OFFICIALDBS, but time constraint is always TC1 +ISCPARMS = [] +if type(officeType) != str: + raise TypeError, "Office type not a str: " + `officeType` +else: + if officeType not in VALID_OFFICE_TYPES: + raise ValueError, "Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]" + + +# +# new parameters for NewTerrain +# +NewTopo = ("NewTopo", SCALAR, "ft", "New Topo", 50000.0, -32000.0, 1, NO) +PrevTopo = ("PrevTopo", SCALAR, "ft", "Previous Topo", 50000.0, -32000.0, 1, NO) +StdTopo = ("StdTopo", SCALAR, "ft", "Standard Topo", 50000.0, -32000.0, 1, NO) +GTOPO = ("GTOPO", SCALAR, "ft", "GTOPO30", 50000.0, -32000.0, 1, NO) +Topo = ("Topo", SCALAR, "ft", "Topography", 50000.0, -32000.0, 1, NO) + +# Add Topo to ISC parms for NewTerrain +if type(REQUESTED_ISC_PARMS) is list and not "NewTopo" in REQUESTED_ISC_PARMS: + REQUESTED_ISC_PARMS.append("NewTopo") +ISCPARMS.append(([NewTopo], Persistent)) + + +#--------------------------------------------------------------------------- +# +# General server configuration section +# +#--------------------------------------------------------------------------- + +#---------------------------------------------------------------------------- +# Server settings DO NOT CHANGE THESE DEFINITIONS +#---------------------------------------------------------------------------- +from com.raytheon.edex.plugin.gfe.config import SimpleServerConfig +IFPConfigServer = SimpleServerConfig() +#IFPConfigServer.allowedNodes = [] +IFPConfigServer.allowTopoBelowZero = 1 + +#------------------------------------------------------------------------------ +# serverConfig model configuration is now done in the modelDict dictionary. +# variables D2DMODELS, D2DDBVERSIONS,D2DAccumulativeElements,INITMODULES, +# INITSKIPS, DATABASES are no longer explicitly set and are not valid +# to be referenced in localConfig.py. + +# WARNING: There can only be one version of a model in modelDict. Fcst, +# practice and test databases have to be handled separately because there +# are databases with the same name but different types. This is ok +# because these databases are defined after any localConfig customizations +# of the normal Fcst database. + +# modelDict contains the following keys. Only define what is needed, i.e., +# it is not required to have every key defined +# "DB": Definition of the database, i.e., the first value in a dbs entry: +# ("wrfems", GRID, "", NO, NO, 3, 0). This must be a tuple. The name +# in the DB entry must be the same as the model name used as the key +# into the modelDict variable. +# +# "Parms" : Definition of the weather element parameters in the database, +# i.e., the second part of the dbs entry. This is a list of tuples. +# +# "D2DMODELS" : D2D metadata database name for the source model. +# +# "INITMODULES': Name of the SmartInit module. It is usually just the +# name as a string. If the init requires multiple models, use a tuple +# of ('smartInit name',[list of model names]) +# 'INITMODULES': ('Local_WPCGuide', ["HPCGuide","HPCERP","HPCWWD"]), +# +# "D2DAccumulativeElements" : List of parm names that are accumulative +# +# "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather +# Element Browser. Defaults to 2 if not supplied. +# +# "INITSKIPS" : Used to skip specific model cycles. 
+# +# Example for a model: +# +# modelDict["CMCreg"]={ +# "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0), +# "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF, +# PoP, SnowAmt, SnowRatio], TC3), +# ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG), +# ([QPF12, PoP12],TC12NG), +# ([MinRH], MinRHTC), ([MaxRH], MaxRHTC), +# ([MaxT], MaxTTC), ([MinT], MinTTC), +# ], +# "D2DMODELS": "Canadian-Reg", +# "INITMODULES": "Local_CMCreg", +# "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"], +# "D2DDBVERSIONS": 3, +# } +# + +# Official, Practice, TestFcst, Test, Restore are all derivations of Fcst and +# are setup after localConfig is processed. +modelDict['Fcst'] = {'DB': Fcst, 'Parms': OFFICIALDBS} + +# Model Databases +waveParms=[Period, Period2, SurfHeight, Swell, Swell2, WaveHeight, + Wind, WindWaveHeight, ] + +modelDict['BaseTerrain'] = { + 'DB': ('BaseTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0), + 'Parms': [([StdTopo, GTOPO, PrevTopo], Persistent), + ], + } + +modelDict['CRMTopo'] = { + 'D2DDBVERSIONS': 1} + +modelDict['ECMWFHiRes'] = { + 'D2DMODELS': 'ECMWF-HiRes',} + +modelDict['ENPwave'] = { + 'D2DMODELS': 'ENPWAVE253', + 'DB': ('ENPwave', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC6), + ], + } + +modelDict['ESTOFS'] = { + 'D2DMODELS': 'estofsEP', + 'DB': ('ESTOFS', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'ESTOFS', + 'Parms': [([AstroTide, StormSurge], TC1), + ], + } + +modelDict['ETSS'] = { + 'D2DMODELS': 'ETSS', + 'DB': ('ETSS', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'ETSS', + 'Parms': [([StormSurge, SurgeTide], TC1), + ], + } + +modelDict['ETSSHiRes'] = { + 'D2DMODELS': 'ETSS-HiRes', + 'DB': ('ETSSHiRes', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'ETSSHiRes', + 'Parms': [([AstroTide, SurgeTide], TC1), + ], + } + +for s in ['ALR', 'FWR', 'KRF', 'MSR', 'ORN', 'PTR', 'RHA', 'RSA', 'STR', 'TAR', + 'TIR', 'TUA',]: + modelDict['FFG'+s] = {'D2DMODELS': 'FFG-'+s} + +modelDict['GFS20'] = { + 'D2DMODELS': 'GFS20', + 'D2DAccumulativeElements': ['tp3hr','tp6hr', 'tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'], + 'DB': ('GFS20', 'GRID', '', NO, NO, 2, 0), + 'Parms': [([Wetflag], FireWx1300TC), + ([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), + ([MinRH], MinRHTC), + ([MinT], MinTTC), + ([HrsOfSun, InvBurnOffTemp, MarineLayer], LT24), + ([DSI, FreeWind, FzLevel, Haines, MixHgt, RH, RHtrend, Sky, + SnowLevel, Stability, Td, Temp, TransWind, Ttrend, VentRate, + Wind, Wind20ft], TC6), + ([CWR, IceAcc, LAL, PoP, QPF, SnowAmt, Weather], TC6NG), + ], + } + +modelDict['GFS80'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'AVN211', + 'DB': ('GFS80', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'GFS80', + 'Parms': STD6_MODEL, + } + +modelDict['GFSLAMPGrid'] = { + 'D2DMODELS': 'GFSLAMPGrid', + 'DB': ('GFSLAMPGrid', 'GRID', '', NO, NO, 3, 0), + 'INITMODULES': 'GFSLAMPGrid', + 'Parms': [([CigHgt, Sky, Td, Temp, Vis, Wind], TC1), + ], + } + +modelDict['GWW'] = { + 'DB': ('GWW', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC6), + ], + } + +modelDict['WaveWatch'] = { + 'D2DMODELS': 'WaveWatch',} + +modelDict['GlobalWave'] = { + 'D2DMODELS': 'GlobalWave', + 'DB': ('GlobalWave', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC3), + ], + } + +modelDict['HIRESWarw'] = { + 'D2DAccumulativeElements': ['tp'], + 'D2DMODELS': 'HiResW-ARW-West', + 'DB': ('HIRESWarw', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'HIRESWarw', + 'Parms': STD3_MODEL, + } + +modelDict['HIRESWnmm'] = { + 'D2DAccumulativeElements': ['tp'], + 'D2DMODELS': 'HiResW-NMM-West', + 'DB': ('HIRESWnmm', 
'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'HIRESWnmm', + 'Parms': STD3_MODEL, + } + +modelDict['HPCERP'] = { + 'D2DAccumulativeElements': ['tpHPCndfd'], + 'D2DDBVERSIONS': 24, + 'D2DMODELS': 'HPCqpfNDFD',} + +modelDict['HPCGRID'] = { + 'DB': ('HPCGRID', 'GRID', '', NO, NO, 2, 0), + 'Parms': [([PoP, SnowAmt], LTMOS), + ([MaxT], MaxTTCMOS), + ([MinT], MinTTCMOS), + ([Sky, Td, Temp, Weather, Wind], TC1), + ([QPF], TC6NG), + ], + } + +modelDict['HPCGuide'] = { + 'D2DAccumulativeElements': ['pop'], + 'D2DMODELS': 'HPCGuide', + 'DB': ('HPCGuide', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'HPCGuide', + 'Parms': [([MaxT], MaxTTC), + ([MinT], MinTTC), + ([PoP], TC12NG), + ([Sky, Td, Wind], TC6), + ], + } + +modelDict['HPCQPF'] = { + 'D2DAccumulativeElements': ['tpHPC'], + 'D2DMODELS': 'HPCqpf', + 'DB': ('HPCQPF', 'GRID', '', NO, NO, 4, 0), + 'INITMODULES': 'HPCQPF', + 'Parms': [([QPF], TC6NG), + ], + } + +modelDict['HRRR'] = { + 'D2DAccumulativeElements': ['tp', 'crain', 'csnow', 'cfrzr', 'cicep'], + 'D2DMODELS': 'HRRR', + 'DB': ('HRRR', 'GRID', '', NO, NO, 3, 0), + 'INITMODULES': 'HRRR', + 'Parms': [([QPF, RH, Sky, Td, Temp, Wind, WindGust], TC1), + ], + } + +modelDict['HWRF'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'HWRF',} + +modelDict['LAPS'] = { + 'D2DAccumulativeElements': ['pc'], + 'D2DDBVERSIONS': 6, + 'D2DMODELS': 'LAPS', + 'DB': ('LAPS', 'GRID', '', YES, NO, 1, 30), + 'INITMODULES': 'LAPS', + 'Parms': [([QPF, Radar, Sky, SnowAmt, Td, Temp, Weather, Wind], TC1), + ], + } + +modelDict['MOSGuide'] = { + 'D2DAccumulativeElements': ['pop12hr', 'pop6hr', 'thp12hr', 'thp3hr', + 'thp6hr', 'tcc', 'tp6hr', 'tp12hr', 'wgs'], + 'D2DMODELS': 'MOSGuide', + 'DB': ('MOSGuide', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'MOSGuide', + 'Parms': [([MaxT], MaxTTC), + ([MinT], MinTTC), + ([RH, Td, Temp, Wind], TC1), + ([PoP, PoP12, QPF, QPF12, TstmPrb12], TC12NG), + ([TstmPrb3], TC3NG), + ([PoP6, QPF6, Sky, TstmPrb6, WindGust], TC6NG), + ], + } + +modelDict['MSAS'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DDBVERSIONS': 6, + 'D2DMODELS': 'MSAS', + 'DB': ('MSAS', 'GRID', '', YES, NO, 1, 36), + 'INITMODULES': 'MSAS', + 'Parms': [([Td, Temp, Wind], TC1), + ], + } + +modelDict['NAHwave4'] = { + 'D2DMODELS': 'NAHwave4',} + +modelDict['NAM12'] = { + 'D2DAccumulativeElements': ['tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'], + 'D2DMODELS': 'NAM12', + 'DB': ('NAM12', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'NAM12', + 'Parms': STD3_MODEL, + } + +modelDict['NAM20'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'NAM20',} + +modelDict['NAM40'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'NAM40', + 'DB': ('NAM40', 'GRID', '', NO, NO, 2, 0), + 'Parms': STD3_MODEL, + } + +modelDict['NAM80'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'ETA', + 'DB': ('NAM80', 'GRID', '', NO, NO, 2, 0), + 'Parms': STD6_MODEL, + } + +modelDict['NED'] = { + 'D2DDBVERSIONS': 1} + +modelDict['NamDNG'] = { + 'D2DMODELS': 'namdng25', + 'DB': ('NamDNG', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'NamDNG', + 'Parms': [([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), + ([MinRH], MinRHTC), + ([MinT], MinTTC), + ([PoP12, QPF12], TC12NG), + ([MixHgt, RH, Sky, SnowLevel, Td, Temp, TransWind, Vis, + Wind, WindGust], TC3), + ([MaxRH3, MaxT3, MinT3, PoP, QPF3, SnowAmt], TC3NG), + ([PoP6, QPF6, SnowAmt6], TC6NG), + ], + } + +modelDict['NationalBlend'] = { + 'D2DAccumulativeElements': ["pop12hr", "pop", "pop6hr", "tp", "ppi1hr", "ppi6hr", + "tp1hr", "tp6hr", "thp3hr", 
"thp6hr", + "totsn1hr", "totsn6hr", "ficeac1hr", "ficeac6hr", + "TstmPrb12", "Haines", "FosBerg"], + 'D2DMODELS': 'NationalBlend', + 'DB': ('NationalBlend', 'GRID', '', NO, NO, 7, 0), + 'INITMODULES': 'NationalBlend', + 'Parms': [([Temp, Td, RH, Sky, Wind, WindGust, ApparentT], TC1), + ([QPF1,PPI01,CloudBasePrimary,Ceiling,Visibility],TC1), + ([PoTIP, PoTR, PoTRW, PoTS, PoTSW, PoTZR,],TC1), + ([SnowLevel,MaxTwAloft,ProbIcePresent, ProbRefreezeSleet,SnowRatio],TC1), + ([PositiveEnergyAloft, NegativeEnergyLowLevel],TC1), + ([MixHgt, TransWind, LLWS, VentRate, LLWSHgt, Radar, + SigWaveHgt, Weather, SnowAmt01, IceAccum01, TstmPrb1],TC1), + ([TstmPrb3, DryTstmPrb],TC3NG), + ([TstmPrb6, QPF, PoP6, PPI06, SnowAmt, IceAccum, + QPF10Prcntl, QPF50Prcntl, QPF90Prcntl, Haines, FosBerg],TC6NG), + ([MaxT], MaxTTC), ([MinT], MinTTC), + ([MaxRH], MaxRHTC), ([MinRH], MinRHTC),([PoP, TstmPrb12],TC12NG), + ], + } + +modelDict['NationalBlendOC'] = { + 'D2DMODELS': 'NationalBlendOC', + 'DB': ('NationalBlend', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'NationalBlendOC', + 'Parms': [([WGS50pct, WS50Prcntl30m, WS50Prcntl80m, Vis50pct, T50pct, + PMSL10pct, PMSL50pct, PMSL90pct], TC1), + ], + } + +modelDict['NewTerrain'] = { + 'DB': ('NewTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0), + 'Parms': [([NewTopo], Persistent), + ], + } + +modelDict['PWPF'] = { + 'D2DMODELS': 'PWPF',} + +modelDict['RFCQPF'] = { + 'D2DMODELS': 'RFCqpf', + 'DB': ('RFCQPF', 'GRID', '', NO, NO, 4, 0), + 'Parms': [([QPF], TC6NG), + ], + } + +modelDict['RTMA'] = { + 'D2DAccumulativeElements': ['tp'], + 'D2DMODELS': 'RTMA25', + 'DB': ('RTMA', 'GRID', '', YES, NO, 1, 36), + 'INITMODULES': 'RTMA', + 'Parms': RTMAPARMS, + } + +modelDict['RAP13'] = { + 'D2DAccumulativeElements': ['tp', 'cp'], + 'D2DMODELS': 'RAP13', + 'DB': ('RAP13', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'RAP13', + 'INITSKIPS': [1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23], + 'Parms': STD1_MODEL, + } + +modelDict['SAT'] = { + 'DB': ('SAT', 'GRID', '', YES, NO, 1, 12), + 'Parms': [([SatFogE, SatFogW, SatIR11E, SatIR11W, SatIR13E, SatIR13W, + SatIR39E, SatIR39W, SatVisE, SatVisW, SatWVE, SatWVW], + TC_1M), + ], + } + +modelDict['SPC'] = { + 'D2DDBVERSIONS': 8, 'D2DMODELS': 'SPCGuide',} + +modelDict['SREF'] = { + 'D2DMODELS': 'SREF212', + 'DB': ('SREF', 'GRID', '', NO, NO, 3, 0), + 'INITMODULES': 'SREF', + 'Parms': [([Td, Temp, Wind], TC1), + ], + } + +modelDict['Satellite'] = { + 'D2DDBVERSIONS': 6,} +# Turn on satellite smartInit only if SATDATA has some entries. 
+if SATDATA: + modelDict['Satellite']['INITMODULES'] = 'SAT' + +modelDict['TPCProb'] = { + 'D2DDBVERSIONS': 30, + 'D2DMODELS': 'TPCWindProb', + 'DB': ('TPCProb', 'GRID', '', NO, NO, 30, 0), + 'Parms': [([pwsD34, pwsD64], PWSDTC), + ([pwsN34, pwsN64], PWSNTC), + ([prob34, prob50, prob64, pws34, pws50, pws64], TC1), + ], + } + +modelDict['TPCProbPrelim'] = { + 'D2DDBVERSIONS': 30, + 'D2DMODELS': 'TPCWindProb_Prelim', + 'DB': ('TPCProbPrelim', 'GRID', '', NO, NO, 30, 0), + 'Parms': [([pwsD34, pwsD64], PWSDTC), + ([pwsN34, pwsN64], PWSNTC), + ([prob34, prob50, prob64, pws34, pws50, pws64], TC1), + ], + } + +modelDict['TPCStormSurge'] = { + 'D2DDBVERSIONS': 1} + +modelDict['TPCSurgeProb'] = { + 'D2DMODELS': 'TPCSurgeProb', + 'D2DAccumulativeElements': [ + 'Surge10Pct', + 'Surge20Pct', + 'Surge30Pct', + 'Surge40Pct', + 'Surge50Pct', + 'Surge90Pct', + 'PSurge25Ft', + 'PSurge24Ft', + 'PSurge23Ft', + 'PSurge22Ft', + 'PSurge21Ft', + 'PSurge20Ft', + 'PSurge19Ft', + 'PSurge18Ft', + 'PSurge17Ft', + 'PSurge16Ft', + 'PSurge15Ft', + 'PSurge14Ft', + 'PSurge13Ft', + 'PSurge12Ft', + 'PSurge11Ft', + 'PSurge10Ft', + 'PSurge9Ft', + 'PSurge8Ft', + 'PSurge7Ft', + 'PSurge6Ft', + 'PSurge5Ft', + 'PSurge4Ft', + 'PSurge3Ft', + 'PSurge2Ft', + 'PSurge1Ft', + 'PSurge0Ft', + 'Surge10Pctincr', + 'Surge20Pctincr', + 'Surge30Pctincr', + 'Surge40Pctincr', + 'Surge50Pctincr', + 'Surge90Pctincr', + 'PSurge20Ftincr', + 'PSurge19Ftincr', + 'PSurge18Ftincr', + 'PSurge17Ftincr', + 'PSurge16Ftincr', + 'PSurge15Ftincr', + 'PSurge14Ftincr', + 'PSurge13Ftincr', + 'PSurge12Ftincr', + 'PSurge11Ftincr', + 'PSurge10Ftincr', + 'PSurge9Ftincr', + 'PSurge8Ftincr', + 'PSurge7Ftincr', + 'PSurge6Ftincr', + 'PSurge5Ftincr', + 'PSurge4Ftincr', + 'PSurge3Ftincr', + 'PSurge2Ftincr', + 'PSurge1Ftincr', + 'PSurge0Ftincr', + ], + } + +modelDict['PETSS'] = { + 'D2DMODELS': 'P-ETSS', + 'D2DAccumulativeElements': [ + 'Surge10Pct', + 'Surge20Pct', + 'Surge30Pct', + 'Surge40Pct', + 'Surge50Pct', + 'Surge90Pct', + 'Surge10Pctincr', + 'Surge20Pctincr', + 'Surge30Pctincr', + 'Surge40Pctincr', + 'Surge50Pctincr', + 'Surge90Pctincr', + 'PSurge0Ftincr', + 'PSurge1Ftincr', + 'PSurge2Ftincr', + 'PSurge3Ftincr', + 'PSurge4Ftincr', + 'PSurge5Ftincr', + 'PSurge6Ftincr', + 'PSurge7Ftincr', + 'PSurge8Ftincr', + 'PSurge9Ftincr', + 'PSurge10Ftincr', + 'PSurge13Ftincr', + 'PSurge16Ftincr', + 'PSurge0Ft', + 'PSurge1Ft', + 'PSurge2Ft', + 'PSurge3Ft', + 'PSurge4Ft', + 'PSurge5Ft', + 'PSurge6Ft', + 'PSurge7Ft', + 'PSurge8Ft', + 'PSurge9Ft', + 'PSurge10Ft', + 'PSurge13Ft', + 'PSurge16Ft', + 'PSurgeMaxincr', + 'PSurgeMeanincr', + 'PSurgeMinincr', + 'PSurgeMax', + 'PSurgeMean', + 'PSurgeMin', + ], + } + +modelDict['TPCtcm'] = { + 'DB': ('TPCtcm', 'GRID', '', NO, NO, 2, 0), + 'Parms': [([HiWind], TC3), + ], + } + +modelDict['URMA25'] = { + 'D2DAccumulativeElements': ['tp'], + 'D2DMODELS': 'URMA25', + 'DB': ('URMA25', 'GRID', '', YES, NO, 1, 36), + 'INITMODULES': 'URMA25', + 'Parms': [([MaxRH], MaxRHTC), + ([MaxT], MaxTTC), + ([MinRH], MinRHTC), + ([MinT], MinTTC), + ([PressUnc, Pressure, QPE, RH, Sky, SkyUnc, TUnc, Td, TdUnc, + Temp, Vis, VisUnc, WDirUnc, WGustUnc, WSpdUnc, Wind, + WindGust], TC1), + ], + } + +modelDict['WCwave10'] = { + 'D2DMODELS': 'WCwave10', + 'DB': ('WCwave10', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC3), + ], + } + +modelDict['WCwave4'] = { + 'D2DMODELS': 'WCwave4', + 'DB': ('WCwave4', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC3), + ], + } + +modelDict['WNAWAVE'] = { + 'DB': ('WNAWAVE', 'GRID', '', NO, NO, 2, 0), + 'Parms': 
[(waveParms, TC6), + ], + } + +modelDict['WNAWAVE238'] = { + 'D2DMODELS': 'WNAWAVE238',} + +modelDict['WNAwave10'] = { + 'D2DMODELS': 'WNAwave10', + 'DB': ('WNAwave10', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC3), + ], + } + +modelDict['WNAwave4'] = { + 'D2DMODELS': 'WNAwave4', + 'DB': ('WNAwave4', 'GRID', '', NO, NO, 2, 0), + 'Parms': [(waveParms, TC3), + ], + } + +# This list will be used to set up a default ignoreDatabases list. This is shorter than +# listing all models to ignore. +includeOnly=[] +if SID in groups['ALASKA_SITES']: + modelDict['AKwave4'] = { + 'D2DMODELS': 'AKwave4', + 'D2DDBVERSIONS': 2, + 'DB': ('AKwave4', 'GRID', '', NO, NO, 2, 0), + 'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, + WindWaveHgt, WindWavePeriod], TC3), + ], + } + + modelDict['AKwave10'] = { + 'D2DMODELS': 'AKwave10', + 'D2DDBVERSIONS': 2, + 'DB': ('AKwave10', 'GRID', '', NO, NO, 2, 0), + 'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, + WindWaveHgt, WindWavePeriod], TC3), + ], + } + + updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsAK') + updateModelDict(modelDict,'ETSS','D2DMODELS', 'ETSS-AK') + updateModelDict(modelDict,'GFS20','D2DMODELS', 'AK-GFS22') + updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-AK') + updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-AK') + updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-AK') + updateModelDict(modelDict,'NAM12','D2DMODELS', 'AK-NAM11') + updateModelDict(modelDict,'NamDNG','D2DMODELS', 'AK-NamDNG3') + updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendAK') + updateModelDict(modelDict,'RTMA','D2DMODELS', 'AK-RTMA3') + updateModelDict(modelDict,'SREF','D2DMODELS', 'SREF216') + updateModelDict(modelDict,'URMA','D2DMODELS', 'AK-URMA') + updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska') + updateModelDict(modelDict,'RTOFS-Arctic','D2DMODELS', 'RTOFS-Arctic') + updateModelDict(modelDict,'RTOFS-Bering','D2DMODELS', 'RTOFS-Bering') + updateModelDict(modelDict,'RTOFS-GulfAlaska','D2DMODELS', 'RTOFS-GulfAlaska') + updateModelDict(modelDict,'PETSS','D2DMODELS', 'P-ETSS-AK') + # Model databases for Alaska + includeOnly = ['AKwave4', 'AKwave10', 'BaseTerrain', 'CRMTopo', 'ECMWFHiRes', 'ESTOFS', + 'ETSS', 'GFS20', 'GWW', 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NAM12', + 'NamDNG', 'NationalBlend', 'NED', 'NewTerrain', 'RTMA', 'RTOFS-Alaska', + 'RTOFS-Arctic', 'RTOFS-Bering', 'RTOFS-GulfAlaska', 'SAT', 'SREF', 'URMA', + 'nwpsCG1AER', 'nwpsCG1AFG', 'nwpsCG1AJK', 'nwpsCG1ALU', 'nwpsTrkngCG0AER', + 'nwpsTrkngCG0AFG', 'nwpsTrkngCG0AJK', 'nwpsTrkngCG0ALU', 'PETSS', + ] + +# Hawaii OCONUS +elif SID == "HFO": + modelDict['GFS75'] = { + 'D2DMODELS': 'AVN225', + 'D2DAccumulativeElements': ['tp', 'cp'], + 'DB': ('GFS75', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'GFS75', + 'Parms': STD6_MODEL, + } + + updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch') + updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') + updateModelDict(modelDict,'RTMA','D2DMODELS', 'HI-RTMA') + updateModelDict(modelDict,'NamDNG','D2DMODELS', 'HI-NamDNG5') + updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-HI') + updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-HI') + updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide') + updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') + updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') +
updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes') + updateModelDict(modelDict,'RTOFS-Honolulu','D2DMODELS', 'RTOFS-Honolulu') + updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsHI') + updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-HI') + updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendHI') + # Model databases for HFO + includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS75', 'WaveWatch', 'GlobalWave', + 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NamDNG', 'NationalBlend', + 'RTMA', 'RTOFS-Honolulu', 'SPC', 'TPCProb', 'TPCProbPrelim', 'nwpsCG1GUM', + 'nwpsCG1HFO', 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO', + ] + +# Guam OCONUS +elif SID == "GUM": + modelDict['GFS75'] = { + 'D2DMODELS': 'AVN225', + 'D2DAccumulativeElements': ['tp', 'cp'], + 'DB': ('GFS75', 'GRID', '', NO, NO, 2, 0), + 'INITMODULES': 'GFS75', + 'Parms': STD6_MODEL, + } + + updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') + updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') + updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') + updateModelDict(modelDict,'RTOFS-Guam','D2DMODELS', 'RTOFS-Guam') + updateModelDict(modelDict,'RTMA','D2DMODELS', 'Guam-RTMA') + # Model databases for GUM + includeOnly = ['GFS75', 'GlobalWave', 'RTMA', 'RTOFS-Guam', 'TPCProb', + 'TPCProbPrelim', 'nwpsCG1GUM', 'nwpsCG1HFO', + 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO', + ] + +# San Juan OCONUS +elif SID == "SJU": + updateModelDict(modelDict,'GFS80','D2DMODELS', 'AVN211') + updateModelDict(modelDict,'NAM80','D2DMODELS', 'ETA') + updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch') + updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave') + updateModelDict(modelDict,'WNAwave10','D2DMODELS', 'WNAwave10') + updateModelDict(modelDict,'WNAwave4','D2DMODELS', 'WNAwave4') + updateModelDict(modelDict,'RTMA','D2DMODELS', 'PR-RTMA') + updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-SJU') + updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-SJU') + updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide') + updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb') + updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim') + updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes') + updateModelDict(modelDict,'RTOFS-Atlantic','D2DMODELS', 'RTOFS-Atlantic') + updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsPR') + updateModelDict(modelDict,'NAHwave4','D2DMODELS', 'NAHwave4') + updateModelDict(modelDict,'GFS20','D2DMODELS', 'PR-GFS') + updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendPR') + # Model databases for SJU + includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS20', 'GFS80', 'WaveWatch', + 'GlobalWave', 'HIRESWarw', 'HIRESWnmm', 'NAHwave4', 'NAM80', + 'NationalBlend', 'RTMA', 'RTOFS-Atlantic', 'SPC', 'TPCProb', + 'TPCProbPrelim', 'WNAwave10', 'WNAwave4', + 'nwpsCG1JAX', 'nwpsCG1KEY', 'nwpsCG1MFL', 'nwpsCG1MLB', 'nwpsCG1SJU', + 'nwpsTrkngCG0JAX', 'nwpsTrkngCG0KEY', 'nwpsTrkngCG0MFL', + 'nwpsTrkngCG0MLB', 'nwpsTrkngCG0SJU', + ] + +# East CONUS changes from default modelDict +elif SID in groups['CONUS_EAST_SITES']: + updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsUS') + updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-East') + updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-East') + +if SID in groups['GreatLake_SITES']: + modelDict['GLERL'] = { + 'D2DMODELS': 'GLERL', + 'DB': ('GLERL', 'GRID', '', 0, 0, 2, 0), + 'Parms': [([Period, Swell, 
WaveHeight], TC1), + ] + } + + modelDict['GLWN'] = {'D2DMODELS': 'GLWN'} + +# NWPS configuration. +if SID in ['AFC', 'AER', 'AFG', 'AJK', 'ALU', 'AVAK']: + nwpsSites = ['AER', 'AFG', 'AJK', 'ALU',] +elif SID in ['GUM', 'HFO',]: + nwpsSites = ['GUM', 'HFO',] +elif SID == "SJU": + nwpsSites = ['SJU', 'MFL', 'KEY', 'MLB', 'JAX'] +elif SID in ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS', + 'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL', + 'MLB', 'JAX',]: + nwpsSites = ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS', + 'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL', + 'MLB', 'JAX', 'SJU',] +elif SID in ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]: + nwpsSites = ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',] +else: + nwpsSites = [] + +for s in nwpsSites: + name='nwpsCG1%s' % s + modelDict[name] = { + 'DB': (name, 'GRID', '', NO, NO, 2, 0), + 'D2DMODELS': name, + 'INITMODULES': name, + 'Parms': nwpsCG1_MODEL, + } + name='nwpsTrkngCG0%s' % s + modelDict[name] = { + 'DB': (name, 'GRID', '', NO, NO, 2, 0), + 'D2DMODELS': name, + 'INITMODULES': name, + 'Parms': nwpsTrkngCG0_MODEL, + } +# This list will be used to set up a default ignoreDatabases list. This is shorter than +# listing all models to ignore. Usually only set up for sites that aren't CONUS WFOs +# includeOnly is not designed to be changed by localConfig. +if includeOnly: + for m in sorted(modelDict.keys()): + if m not in includeOnly and 'D2DMODELS' in modelDict[m]: + ignoreDatabases.append(m) + +# END modelDict initial set up +#------------------------------------------------------------------------------ +# Add in optional parms to Fcst parm def +if SID in groups['powt']: + addPowt(modelDict) + +if SID in groups['winterProbs']: + addWinterWeatherProbs(modelDict) + +if SID in groups['rainfallProbs']: + addRainfallProbs(modelDict) + +D2DMODELS=[] +D2DDBVERSIONS={} +D2DAccumulativeElements={} +INITMODULES={} +INITSKIPS={} + +localParms = [] +localISCParms = [] +localISCExtraParms = [] +localLogFile = '' + +if not BASELINE and siteImport('localConfig'): + localParms = getattr(localConfig, 'parms', []) + localISCParms = getattr(localConfig, 'parmsISC', []) + localISCExtraParms = getattr(localConfig, 'extraISCparms', []) + localLogFile = getattr(localConfig, 'logFile', '') + modelDict['Fcst']['Parms'] += localParms + #ensure office type is set properly in localConfig SITES[] + if len(SITES[GFESUITE_SITEID]) == 5: + a = list(SITES[GFESUITE_SITEID]) + a.append(myOfficeType) + SITES[GFESUITE_SITEID] = tuple(a) + else: + myOfficeType = SITES[GFESUITE_SITEID][5] #probably from localConfig + +# Instantiate settings from modelDict +db=dbConfig(modelDict) +db.addConfiguredModels(ignoreDatabases) +DATABASES = db.dbs +D2DMODELS = db.D2DMODELS +D2DDBVERSIONS = db.D2DDBVERSIONS +D2DAccumulativeElements = db.D2DAccumulativeElements +INITMODULES = db.INITMODULES +INITSKIPS = db.INITSKIPS +OFFICIALDBS=list(modelDict['Fcst']['Parms']) + +# Create Practice and test databases from Fcst +DATABASES.append((Official, modelDict['Fcst']['Parms'])), +DATABASES.append((Practice, modelDict['Fcst']['Parms'])), +DATABASES.append((TestFcst, modelDict['Fcst']['Parms'])), +DATABASES.append((Test, modelDict['Fcst']['Parms'])), + +for entry in AdditionalISCRouting: + (parmList, dbName, editAreaPrefix) = entry + parmList = list(parmList) + addedIscDbDefinition = (dbName, ) + ISC[1:] + addedIscParms = [(parmList, TC1)] + DATABASES.append((addedIscDbDefinition, addedIscParms)) + 
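For reference, a short runnable sketch of what the AdditionalISCRouting loop just above does with one entry. It uses the ISCFire/FireWxAOR_ values from the commented example earlier in this file; the element tuples and the ISC/TC1 values are simplified stand-ins so the snippet is self-contained, not the baseline definitions:

    # Stand-ins for the real weather element tuples and attribute tuples.
    Hazards, LAL, CWR = ("Hazards",), ("LAL",), ("CWR",)
    ISC = ('ISC', 'GRID', '', 1, 0, 1, 12)   # name/format/type/single/official/numVer/purgeAge
    TC1 = (0, 3600, 3600)                    # hourly time constraint
    DATABASES = []
    AdditionalISCRouting = [([Hazards, LAL, CWR], "ISCFire", "FireWxAOR_")]
    for parmList, dbName, editAreaPrefix in AdditionalISCRouting:
        # Same attributes as the ISC database, but registered under the routed name.
        addedIscDbDefinition = (dbName,) + ISC[1:]
        DATABASES.append((addedIscDbDefinition, [(list(parmList), TC1)]))
    print(DATABASES[0][0])   # ('ISCFire', 'GRID', '', 1, 0, 1, 12)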
+# Intersite coordination database parameter groupings, based on +# OFFICIALDBS, but time constraint is always TC1 +for wes, tc in (OFFICIALDBS + localISCParms): + ISCPARMS.append((wes, TC1)) + +# We also add in any extraISCparms as needed, but only for office +# types other than our own. +for wes, officeType in (EXTRA_ISC_PARMS + localISCExtraParms): + if myOfficeType == officeType: + continue + if type(officeType) != str: + raise TypeError, "Office type not a str: " + `officeType` + else: + if officeType not in VALID_OFFICE_TYPES: + raise ValueError, "Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]" + for we in wes: + wecopy = list(we) + wecopy[0] = wecopy[0] + officeType #rename the weather element + wecopy = tuple(wecopy) + ISCPARMS.append(([wecopy], TC1)) + +# Restore database parameter groupings (based on OFFICIALDBS, but TC1) +RESTOREPARMS = [] +for wes, tc in modelDict['Fcst']['Parms']: + RESTOREPARMS.append((wes, TC1)) + +# Now add the ISC and Restore databases to the DATABASES groupings +DATABASES.append((Restore, RESTOREPARMS)) +DATABASES.append((ISC, ISCPARMS)) + + +#D logfp=open('/localapps/logs/serverConfig2.log','w') +#D logfp.write('DATABASE names:\n') +#D for m in sorted(DATABASES): +#D logfp.write('%s\n' % m[0][0]) +#D logfp.write('\n\nDATABASES\n') +#D pprint.pprint(sorted(DATABASES),logfp,width=130) +#D logfp.write('\n\nINITMODULES\n') +#D pprint.pprint(INITMODULES,logfp,width=130) +#D logfp.write('\n\nD2DMODELS\n') +#D pprint.pprint(D2DMODELS,logfp,width=130) +#D logfp.close() + +doIt() + +#D logfp=open('/localapps/logs/SC_MD2.py','w') +#D modelDict=createModelDict(locals(),DATABASES,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements, +#D INITMODULES,INITSKIPS,logfp) +#D logfp.close() +if localLogFile: + printServerConfig(sys.modules[__name__],vars(localConfig),localLogFile) +#D scfp.close() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/grid/dataset/alias/gfeParamInfo.xml b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/grid/dataset/alias/gfeParamInfo.xml index af13d61395..90c3adee3b 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/grid/dataset/alias/gfeParamInfo.xml +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/grid/dataset/alias/gfeParamInfo.xml @@ -1,24 +1,4 @@ - - + + 528528 + grid over the contiguous United States - 8X Resolution + (10 km) (Used by the Radar mosaics) (Lambert Conformal) + 39.144 + -123.724 + LowerLeft + 528 + 528 + 2.711 + 2.865 + km + 6367470.0 + 6367470.0 + 60 + -105.0 + diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/NCEP/gribModels_NCEP-7_NationalBlend.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/NCEP/gribModels_NCEP-7_NationalBlend.xml index 94eeee7853..795ac13cf6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/NCEP/gribModels_NCEP-7_NationalBlend.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/NCEP/gribModels_NCEP-7_NationalBlend.xml @@ -55,22 +55,6 @@ - - - NationalBlend -

7 - 14 - - NBMv3 - - - 96 - - - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/gribModels_RFC-9.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/gribModels_RFC-9.xml index 71fc426b5a..5ffefa2910 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/gribModels_RFC-9.xml +++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/gribModels_RFC-9.xml @@ -1003,7 +1003,7 @@ RFCqpf 9
    159 - 226 + 528528 180 diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ARI-Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ARI-Clip.xml deleted file mode 100644 index 5db1043fa9..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ARI-Clip.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - ARI - ARI - false - 1000 - 1000 - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresEastClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresEastClip.xml deleted file mode 100644 index f977abe2ec..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresEastClip.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - HiResW-ARW-East HiResW-NMM-East - 255001 - 350 - 350 - - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresWestClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresWestClip.xml deleted file mode 100644 index 5df54dbfcb..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/HiresWestClip.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - HiResW-ARW-West HiResW-NMM-West - 255002 - 350 - 350 - - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS1kmClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS1kmClip.xml deleted file mode 100755 index 2037827a43..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS1kmClip.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - MRMS_1000 - 10000 - 800 - 600 - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS500mClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS500mClip.xml deleted file mode 100755 index a21c7f246d..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/MRMS500mClip.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - MRMS_0500 - 10001 - 1600 - 1200 - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBM3Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBM3Clip.xml deleted file mode 100644 index 274d09be58..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBM3Clip.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - NationalBlend - NBMv3 - 700 - 700 - - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBMClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBMClip.xml deleted file mode 100644 index e0ab8f5456..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NBMClip.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - NationalBlend HPCqpfNDFD - NBM - 700 - 700 - - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NamDngClip_AK.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NamDngClip_AK.xml deleted file mode 100755 index bde869a473..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/NamDngClip_AK.xml +++ /dev/null @@ -1,36 +0,0 @@ - 
    [The deleted XML files below lost their element markup in extraction; the recoverable values (model names, reference grid, nx x ny, dataset titles/ids, dt) are summarized in brackets under each hunk header.]
    [tail of the preceding deleted subgrid definition: AK-NamDNG3, reference grid 1023, 1649 x 1105]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/RTGSSTHRClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/RTGSSTHRClip.xml
deleted file mode 100644
index 916d9f914f..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/RTGSSTHRClip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [RTGSSTHR, reference grid 173, 250 x 250]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/eta12Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/eta12Clip.xml
deleted file mode 100644
index 780138b2a4..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/eta12Clip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [NAM12, reference grid 218, 175 x 175]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid1023Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid1023Clip.xml
deleted file mode 100644
index ffbfe38131..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid1023Clip.xml
+++ /dev/null
@@ -1,41 +0,0 @@
    [reference grid 1023, 500 x 500]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid184Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid184Clip.xml
deleted file mode 100644
index ce59dc1ec0..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid184Clip.xml
+++ /dev/null
@@ -1,41 +0,0 @@
    [reference grid 184, 500 x 500]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid197Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid197Clip.xml
deleted file mode 100644
index 3a913980e3..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid197Clip.xml
+++ /dev/null
@@ -1,41 +0,0 @@
    [reference grid 197, 500 x 500]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid198Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid198Clip.xml
deleted file mode 100644
index a6e34ac173..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid198Clip.xml
+++ /dev/null
@@ -1,41 +0,0 @@
    [reference grid 198, 300 x 300]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid374Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid374Clip.xml
deleted file mode 100644
index 0fa191b404..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/grid374Clip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [reference grid 374, 1300 x 1200, trim false]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/mEtaClip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/mEtaClip.xml
deleted file mode 100644
index 6211164ae3..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/mEtaClip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [NAM40 NAM20 NAM40, reference grid 215, 175 x 173]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/nam5.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/nam5.xml
deleted file mode 100644
index e994396290..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/nam5.xml
+++ /dev/null
@@ -1,9 +0,0 @@
    [NAM5, reference grid 227, 175 x 175, center 40.0 / -105.0]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/namdng25Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/namdng25Clip.xml
deleted file mode 100755
index deca9881e8..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/namdng25Clip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [namdng25, reference grid 184, 700 x 700]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/qpf218Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/qpf218Clip.xml
deleted file mode 100644
index 7ab2dcf681..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/qpf218Clip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [HPCqpf, reference grid 226, 175 x 175]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ruc13Clip.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ruc13Clip.xml
deleted file mode 100644
index b5e45c0b5d..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/subgrids/ruc13Clip.xml
+++ /dev/null
@@ -1,36 +0,0 @@
    [RAP13, reference grid 130, 175 x 175]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_NationalBlend.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_NationalBlend.xml
deleted file mode 100644
index f9463894b3..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_NationalBlend.xml
+++ /dev/null
@@ -1,54 +0,0 @@
    [title / datasetId / dt: NationalBlend, NationalBlendAK, NationalBlendHI, NationalBlendPR, NationalBlendOC; each mapped to itself with dt 1]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_URMA.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_URMA.xml
deleted file mode 100644
index 8b1fa705ad..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/NCEP/gribDatasets_NCEP-7_URMA.xml
+++ /dev/null
@@ -1,34 +0,0 @@
    [URMA25 / URMA25 / dt 1]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_ECMWF-98.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_ECMWF-98.xml
deleted file mode 100644
index c5e6d76f98..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_ECMWF-98.xml
+++ /dev/null
@@ -1,104 +0,0 @@
    [ECMWF-HiRes / ECMWF-HiRes / dt 12; ECMWF-LowRes for ECMF-NorthernHemisphere and ECMF1 through ECMF12 / dt 24; ECMWF-MODEL0 / ECMF-MODEL0 / dt 24]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FNMO-58.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FNMO-58.xml
deleted file mode 100644
index dec05b9229..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FNMO-58.xml
+++ /dev/null
@@ -1,44 +0,0 @@
    [NOGAPS / NOGAPS / dt 1; AK-NOGAPS / AK-NOGAPS / dt 1; NAVGEM / nogaps / dt 6]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FSL-59.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FSL-59.xml
deleted file mode 100644
index 5ca3159c22..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_FSL-59.xml
+++ /dev/null
@@ -1,34 +0,0 @@
    [LAPS / LAPS / dt 1]
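For reference, these catalogs serialized the DatasetInfo JAXB class that this patch also removes (see the DatasetInfo.java hunk further down). A minimal sketch of one deleted entry as it plausibly appeared on disk; the element names come from that class's @XmlElement fields, while the root element name is assumed from JAXB defaults for DatasetInfoSet:

    <datasetInfoSet>
        <info>
            <title>ECMWF-HiRes</title>
            <datasetId>ECMWF-HiRes</datasetId>
            <dt>12</dt>
        </info>
    </datasetInfoSet>

Here dt is the dataset's intrinsic temporal resolution; the consumer code removed later in this patch treats magnitudes up to 24 as hours and converts them to seconds.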
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml
deleted file mode 100644
index 87b03f6cf3..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NCEP-7.xml
+++ /dev/null
@@ -1,1005 +0,0 @@
    [1005-line dataset catalog; recoverable entries, grouped as title / datasetId / dt:]
    GFSLAMP-Stn / GFSLAMP / 1; NAM80 / ETA / 6; GFS180 / AVN / 6
    gfsLR / MRF, MRF21-MRF26, mrfNH / 12; GFS190 / AVN203 / 12; gfs190 / MRF203 / 12; GFS150 / MRF204, MRF205 / 12
    GFS360 / GFS201 / 6; GFS1degGbl / GFS229 / 3; GFS20-CONUS / GFS20 / 3; GFS20-AK / AK-GFS22 / 3; GFS20-PAC / GFS-PAC-20KM / 3; PR-GFS / GFS-PRICO-20KM / 3 and GFS254 / 6
    GFS90 / GFS95 / 6; GFS80 / AVN211 / 6; GFS75 / AVN225 / 6; GFS40 / GFS40 / 6; AK-GFS / GFS160 / 6; SJU-GFS / GFS161 / 6; Guam-GFSDNG / GFS199 / 3
    GFSGbl / AVN-NorthernHemisphere, AVN37-AVN44 / 6; GFSensemble / ENSEMBLE, ENSEMBLE37-ENSEMBLE44 / 6
    NAM40 / NAM40 / 3; NAM20 / NAM20 (alias NAM40) / 3; NAMWX / NAM40 / 3; WAFS / WAFS / 6; NAM95 / AK-NAM95 / 6; AK-NAM40 / AK-NAM45 / 6; AK-NAM20 / AK-NAM22 / 6; NAM12 / NAM12 / 3; AK-NAM12 / AK-NAM11 / 3; PR-NAM12 / PR-NAM / 3
    "NAM over the contiguous US 16X Resolution(5Km) Lambert Conformal" / NAM5 / 3
    GWW / WaveWatch / 6; GWW21-GWW24 / 6; AKWAVE / AKWAVE239 / 6; WNAwave / WNAWAVE238 / 6; ENPwave / ENPWAVE253 / 6
    SeaIce / SeaIce, SeaIce220, SeaIce173, SeaIce235 / 1; NICICE, AK-NICICE / 1
    HurWave238, HurWave253 / 6; TPC-HurWind / HurWind226 / 6; GuamHurWind / HurWind175 / 6; HawHurWind / HurWind250 / 6
    RAP40 / RAP40 / 1; RAP13 / RAP13 / 1; RAP16 / RAP200 / 1; DGEX / DGEX, AK-DGEX / 6
    RTMA / RTMA, RTMA25 / 1; AK-RTMA, AK-RTMA3, HI-RTMA, PR-RTMA / 1; Guam-RTMA / 3
    NamDNG5, namdng25, AK-NamDNG5, AK-NamDNG3, HI-NamDNG5, PR-NamDNG5 / 3
    RaobOA / 12; MetarOA / 1; GlobalWave, AKwave10, AKwave4, EPwave10, WCwave10, WCwave4, WNAwave10, WNAwave4 / 3; GRLKwave / 1
    Aviation / Aviation / 1 (four identical entries); NCWF / 1; SPCGuide / 1
    HiResW-ARW-East, -West, -AK, -SJU, -HI, -GU / 3; HiResW-NMM-East, -West, -AK, -SJU, -HI, -GU / 3
    MSAS / 1; GFSBufr / GfsBufr / 1; GFSSouthernHemisphere / 1; GoesBufr / 1; Metar / 1; Ldad / 1; DMD / -10; MDCRS / 1; NAM Bufr / EtaBufr / 1; PoesBufr / 1; Profiler / 1; Raob / 12; VWP / -6
    SREF / SREF40 / 3; AK-SREF / SREF216 / 3; PR-SREF / SREF243 / 3
    RTG-SST-Analysis / RTGSST / 1; RTG-SST-HR-Analysis / RTGSSTHR / 1; QPE / 1; SeaSfcAnalysis21-24, 37-44, 61-64 / 1
    GFSGuide / 6; RFCqpf / 6; HPCqpf / 6; HPCGuide / 6; AK-HPCGuide / HPCGuide-AK / 6; HPC / HPCqpfNDFD / 6; HPC-MODEL0 / 6
    OPCWave-W-ATL / OPCWave180 / 24; OPCWave-NE-PAC / OPCWave181 / 24; OPCWave-TE-PAC / OPCWave182 / 24
    CPCoutlook-Short, CPCoutlook-Medium / 24; CPCoutlook-Short-AK, CPCoutlook-Medium-AK / 3; CPCoutlook-Long / CPCoutlook211 / 720
    TPCWindProb, TPCWindProb_Prelim / 6; GriddedMOS / MOSGuide / 6; AK-GriddedMOS / MOSGuide-AK / 3; HI-GriddedMOS / MOSGuide-HI / 3; GFSLAMPGrid / 1
    TPCSurgeProb, TPCSurgeProb197, TPCSurgeProb-AK / 6; P-ETSS, P-ETSS-LoRes, P-ETSS-AK / 1; PROB3HR / 3
    HRRR / 1; HREF / HREF-US / 1; HREF-AK / 1; HREF-PRICO / HREF-SJU / 1; HREF-PAC / HREF-HI / 1
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NOAA-161.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NOAA-161.xml
deleted file mode 100644
index 821435cd07..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NOAA-161.xml
+++ /dev/null
@@ -1,34 +0,0 @@
    [GLERL / GLERL / dt 12]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NWSTG-8.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NWSTG-8.xml
deleted file mode 100644
index b5d4d417b9..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_NWSTG-8.xml
+++ /dev/null
@@ -1,34 +0,0 @@
    [RCM / RCM / dt 1]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml
deleted file mode 100644
index 65f58ab832..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RFC-9.xml
+++ /dev/null
@@ -1,689 +0,0 @@
    [689-line dataset catalog; recoverable entries as title / datasetId / dt:]
    MPE / MPE-Local, MPE-Mosaic / 1; HPE / HPE / 300; BiasHPE / BHPE / 300
    QPE-SJU, QPE-Manual-SJU, QPE-Auto-SJU, MPE-Local-SJU, MPE-Mosaic-SJU, QPE-XNAV-SJU, QPE-RFC-SJU / 1
    then one block per RFC site in {TUA, ACR, STR, RSA, ORN, RHA, KRF, MSR, TAR, PTR, TIR, ALR, FWR}: QPF-<site> / RFCqpf / 6; FFG-<site> / 1; QPE-<site>, QPE-Manual-<site>, QPE-Auto-<site>, MPE-Local-<site>, MPE-Mosaic-<site>, QPE-XNAV-<site>, QPE-RFC-<site> / 1
    NOHRSC-SNOW / NOHRSC-SNOW / 1 and / 24 (two entries); SPE / AUTOSPE / 1; SPE / MANSPE / 1
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RSMC-54.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RSMC-54.xml
deleted file mode 100644
index a4408b3e53..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_RSMC-54.xml
+++ /dev/null
@@ -1,39 +0,0 @@
    [CanadianModel / CanadianModel / dt 1 (two identical entries)]
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_UKMET-74.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_UKMET-74.xml
deleted file mode 100644
index 627915dc86..0000000000
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/datasetInfo/gribDatasets_UKMET-74.xml
+++ /dev/null
@@ -1,79 +0,0 @@
    [UKMET-MODEL1 / UKMET-MODEL1 / dt 6; UKMET / UKMET-NorthernHemisphere and UKMET37 through UKMET44 / dt 6]
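Beyond the catalogs themselves, the hunks that follow strip every consumer of this lookup (StaticGridRequestableData, GridTreeHandler, VbSourceList). The dt-resolution logic being excised is, in compact form (dtInSeconds is a hypothetical helper name, not code from this repository):

    // Mirrors the deleted lookups in StaticGridRequestableData and
    // GridTreeHandler; DatasetInfoLookup and DatasetInfo are the classes
    // removed later in this patch.
    private static int dtInSeconds(String datasetId) {
        DatasetInfo info = DatasetInfoLookup.getInstance().getInfo(datasetId);
        if (info == null || info.getDt() == null) {
            return 0;
        }
        int dt = info.getDt();
        // a dt magnitude of 24 or less is in hours; convert to seconds
        return Math.abs(dt) <= 24 ? dt * 3600 : dt;
    }

With the catalogs gone, both call sites now fall back to a constant (0 in StaticGridRequestableData; GridTreeHandler simply drops the argument).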
diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt
index 2631b88395..d83e9fc9e4 100644
--- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt
+++ b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grid/master_grib2_lookup.txt
@@ -55,8 +55,8 @@ ThP_T170L42A-NCEP-MDL_1073x689_10800-0 ThP3hr
 // GFSLAMPGrid 2.5km
 ThP_LAMP-NCEP-MDL_2145x1377_7200-0 PROLGHT2hr
 // 2.5km MOSGuide
-TP0.254mm_T170L42A-NCEP-MDL_2145x1377_21600-0 POP6hr
-TP0.254mm_T170L42A-NCEP-MDL_2145x1377_43200-0 POP12hr
+TP0.254mm_T170L42A-NCEP-MDL_2345x1597_21600-0 POP6hr
+TP0.254mm_T170L42A-NCEP-MDL_2345x1597_43200-0 POP12hr
 // MOSGuide Alaska
 TP0.254mm_T170L42A-NCEP-MDL_1649x1105_21600-0 POP6hr
 TP0.254mm_T170L42A-NCEP-MDL_1649x1105_43200-0 POP12hr
@@ -221,7 +221,7 @@ SnD_NMM-NCEP_21600-0 snowd6hr
 // Catchall that always maps probability of precip over 0.245mm(1/100 in) to POP.
 TP0.254mm POP
-// Throw 1-hr and 2-hr precip on the floor for RAP13
+// Throw 1-hr and 2-hr precip on the floor for RUC13
 CP_RUC2-NCEP_165x165_7200-0 PWS64
 CP_RUC2-NCEP_165x165_10800-0 PWS64
 LgSP_RUC2-NCEP_165x165_7200-0 PWS64
@@ -654,9 +654,9 @@ CIn_NBM-NCEP-MDL NBE
 PRBCLDICE-9.999e+18%_NBM-NCEP-MDL PrbCldIce
 PRBCLDICE0.0%_NBM-NCEP-MDL PrbCldIce
 GeH_NBM-NCEP-MDL LLWSHgt
-DRYTPROB0.0%_NBM-NCEP-MDL DRYTPPROB
 // NBM publishes this, it is marked SREF because NBM doesn't change it.
 ThP0.0%_SREF-NCEP-MDL_2345x1597_10800-0 ThP3hr
+DRYTPROB0.0%_SREF-NCEP-SPC DRYTPPROB3hr
 //END NationalBLend
 // HREF
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid.derivparam/src/com/raytheon/uf/common/dataplugin/grid/derivparam/data/StaticGridRequestableData.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grid.derivparam/src/com/raytheon/uf/common/dataplugin/grid/derivparam/data/StaticGridRequestableData.java
index af47704e1b..10b05f73d6 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid.derivparam/src/com/raytheon/uf/common/dataplugin/grid/derivparam/data/StaticGridRequestableData.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid.derivparam/src/com/raytheon/uf/common/dataplugin/grid/derivparam/data/StaticGridRequestableData.java
@@ -21,8 +21,6 @@ package com.raytheon.uf.common.dataplugin.grid.derivparam.data;
 
 import javax.measure.unit.SI;
 
-import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo;
-import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup;
 import com.raytheon.uf.common.dataplugin.grid.util.StaticGridData;
 import com.raytheon.uf.common.dataplugin.grid.util.StaticGridDataType;
 import com.raytheon.uf.common.dataplugin.level.LevelFactory;
@@ -77,17 +75,6 @@ public class StaticGridRequestableData extends AbstractRequestableData {
 
         if (StaticGridDataType._dt.equals(dataType)) {
             int dTinSeconds = 0;
-            DatasetInfo info = DatasetInfoLookup.getInstance().getInfo(source);
-
-            if (info != null) {
-                dTinSeconds = info.getDt();
-
-                // dT <= 24 is in hours, need to convert to seconds
-                if (Math.abs(dTinSeconds) <= 24) {
-                    dTinSeconds *= 3600;
-                }
-            }
-
             return new Float(dTinSeconds);
         } else {
             if (this.space instanceof GridCoverage) {
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/META-INF/MANIFEST.MF
index c39c72b702..b51228279d 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/META-INF/MANIFEST.MF
@@ -9,7 +9,6 @@ Bundle-ActivationPolicy: lazy
 Export-Package: com.raytheon.uf.common.dataplugin.grid,
  com.raytheon.uf.common.dataplugin.grid.dataaccess,
  com.raytheon.uf.common.dataplugin.grid.dataquery,
- com.raytheon.uf.common.dataplugin.grid.dataset,
  com.raytheon.uf.common.dataplugin.grid.datastorage,
  com.raytheon.uf.common.dataplugin.grid.mapping,
  com.raytheon.uf.common.dataplugin.grid.request,
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfo.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfo.java
deleted file mode 100644
index 669c916f84..0000000000
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfo.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name: Raytheon Company
- * Contractor Address: 6825 Pine Street, Suite 340
- *                     Mail Stop B8
- *                     Omaha, NE 68106
- *                     402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.common.dataplugin.grid.dataset;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-
-/**
- *
- * Contains static information about a grid dataset.
- *
- * <pre>
    - * 
    - * SOFTWARE HISTORY
    - * 
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * Feb 27, 2012            bsteffen     Initial creation
    - * 
- * </pre>
    - * - * @author bsteffen - * @version 1.0 - */ -@XmlAccessorType(XmlAccessType.NONE) -public class DatasetInfo { - - /** The title of the model */ - @XmlElement - private String title; - - /** The model name */ - @XmlElement - private String datasetId; - - @XmlElement - private String alias; - - /** - * The intrinsic temporal resolution of the data. - */ - @XmlElement - private Integer dt; - - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - public String getDatasetId() { - return datasetId; - } - - public void setDatasetId(String datasetId) { - this.datasetId = datasetId; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public Integer getDt() { - return dt; - } - - public void setDt(Integer dt) { - this.dt = dt; - } - -} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoLookup.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoLookup.java deleted file mode 100644 index 28067af773..0000000000 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoLookup.java +++ /dev/null @@ -1,124 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.common.dataplugin.grid.dataset; - -import java.io.IOException; -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; - -import javax.xml.bind.JAXBException; - -import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.LocalizationContext; -import com.raytheon.uf.common.localization.LocalizationFile; -import com.raytheon.uf.common.localization.PathManagerFactory; -import com.raytheon.uf.common.localization.exception.LocalizationException; -import com.raytheon.uf.common.serialization.SerializationException; -import com.raytheon.uf.common.serialization.SingleTypeJAXBManager; -import com.raytheon.uf.common.status.IUFStatusHandler; -import com.raytheon.uf.common.status.UFStatus; - -/** - * - * Provides logic to read datasetInfo files from localization and provide lookup - * by datasetId. - * - *
- * <pre>
    - * SOFTWARE HISTORY
    - *
    - * Date          Ticket#  Engineer    Description
    - * ------------- -------- ----------- --------------------------
    - * Feb 27, 2012           bsteffen     Initial creation
    - * Dec 16, 2013  2574     bsteffen     Update deprecated method call.
    - * Jul 21, 2014  3373     bclement     changed to use single type JAXB manager
    - * Feb 15, 2016  5244     nabowle      Replace deprecated LocalizationFile methods.
    - *
- * </pre>
    - * - * @author bsteffen - * @version 1.0 - */ -public class DatasetInfoLookup { - private static final transient IUFStatusHandler statusHandler = UFStatus - .getHandler(DatasetInfoLookup.class); - - private static DatasetInfoLookup instance; - - public static DatasetInfoLookup getInstance() { - if (instance == null) { - instance = new DatasetInfoLookup(); - } - return instance; - } - - private Map infoMap = new HashMap(); - - private DatasetInfoLookup() { - init(); - } - - private void init() { - SingleTypeJAXBManager manager = null; - try { - manager = new SingleTypeJAXBManager(true, - DatasetInfoSet.class); - } catch (JAXBException e) { - statusHandler - .error("Error loading context for DatasetInfo, no datasetInfo will be loaded.", - e); - } - IPathManager pathMgr = PathManagerFactory.getPathManager(); - LocalizationContext commonStaticBase = pathMgr.getContext( - LocalizationContext.LocalizationType.COMMON_STATIC, - LocalizationContext.LocalizationLevel.BASE); - - LocalizationContext commonStaticSite = pathMgr.getContext( - LocalizationContext.LocalizationType.COMMON_STATIC, - LocalizationContext.LocalizationLevel.SITE); - - LocalizationFile[] files = pathMgr.listFiles(new LocalizationContext[] { - commonStaticSite, commonStaticBase }, "grid" - + IPathManager.SEPARATOR + "datasetInfo", - new String[] { ".xml" }, true, true); - for (LocalizationFile file : files) { - if (file == null || !file.exists()) { - return; - } - try (InputStream is = file.openInputStream()) { - DatasetInfoSet set = manager.unmarshalFromInputStream(is); - for (DatasetInfo info : set.getInfos()) { - infoMap.put(info.getDatasetId(), info); - } - } catch (SerializationException | IOException - | LocalizationException e) { - statusHandler.error( - "Error reading dataset info: " + file.getPath() - + " has been ignored.", e); - } - } - - } - - public DatasetInfo getInfo(String datasetId) { - return infoMap.get(datasetId); - } -} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoSet.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoSet.java deleted file mode 100644 index f7559b7bb3..0000000000 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/dataset/DatasetInfoSet.java +++ /dev/null @@ -1,62 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.common.dataplugin.grid.dataset; - -import java.util.ArrayList; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlElements; -import javax.xml.bind.annotation.XmlRootElement; - -/** - * - * JAXB compatible root element for a list of datasetInfo objects - * - *
- * <pre>
    - * SOFTWARE HISTORY
    - * 
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * Feb 27, 2012            bsteffen     Initial creation
    - * 
- * </pre>
    - * - * @author bsteffen - * @version 1.0 - */ -@XmlRootElement -@XmlAccessorType(XmlAccessType.NONE) -public class DatasetInfoSet { - - @XmlElements({ @XmlElement(name = "info", type = DatasetInfo.class) }) - private ArrayList infos; - - public ArrayList getInfos() { - return infos; - } - - public void setInfos(ArrayList infos) { - this.infos = infos; - } - -} diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/dataaccess/WarningGeometryFactory.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/dataaccess/WarningGeometryFactory.java index 7a4a0dad37..e68565cbd4 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/dataaccess/WarningGeometryFactory.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/dataaccess/WarningGeometryFactory.java @@ -322,6 +322,6 @@ public class WarningGeometryFactory extends AbstractGeometryDatabaseFactory { @Override protected String assembleGetIdentifierValues(IDataRequest request, String identifierKey) { - return assembleGetColumnValues(request.getDatatype(), identifierKey); + return assembleGetColumnValues(request, request.getDatatype(), identifierKey); } } diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarningLookups.java b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarningLookups.java new file mode 100644 index 0000000000..794617e3d6 --- /dev/null +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/src/com/raytheon/uf/common/dataplugin/warning/util/WarningLookups.java @@ -0,0 +1,139 @@ +package com.raytheon.uf.common.dataplugin.warning.util; + +import java.io.File; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.Unmarshaller; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlRootElement; + +import com.raytheon.uf.common.localization.IPathManager; +import com.raytheon.uf.common.localization.LocalizationContext; +import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType; +import com.raytheon.uf.common.localization.PathManagerFactory; +import com.raytheon.uf.common.status.IUFStatusHandler; +import com.raytheon.uf.common.status.UFStatus; + +/** + * Class for looking up phensig info for warnings/advisories. + * + * + *
+ * <pre>
    + * SOFTWARE HISTORY
    + * 
    + * Date          Ticket#  Engineer  Description
    + * ------------- -------- --------- --------------------------------------------
    + * Dec 19, 2018           mjames    Initial creation
    + * 
+ * </pre>
+ *
+ * @author mjames
+ */
+public class WarningLookups {
+    private static final IUFStatusHandler theHandler = UFStatus.getHandler(WarningLookups.class);
+
+    private static final String LOOKUP_DIR = "warngen";
+
+    public static class PhensigValue {
+        public PhensigValue() {}
+
+        public PhensigValue(String name, String color) {
+            this.name = name;
+            this.color = color;
+        }
+
+        @XmlAttribute
+        public String name;
+
+        @XmlAttribute
+        public String color;
+    }
+
+    @XmlAccessorType(XmlAccessType.NONE)
+    public static abstract class AbstractLookup<K, V> {
+        public abstract Map<K, V> getMap();
+    }
+
+    @XmlRootElement
+    @XmlAccessorType(XmlAccessType.FIELD)
+    public static class PhensigColors extends
+            AbstractLookup<String, PhensigValue> {
+        public HashMap<String, PhensigValue> map = new HashMap<>();
+
+        @Override
+        public Map<String, PhensigValue> getMap() {
+            return map;
+        }
+    }
+
+    private PhensigColors phensigColorLookup;
+
+    public WarningLookups() {
+        try {
+            init();
+        } catch (Exception e) {
+            theHandler.error("Failed to initialize lookups", e);
+        }
+    }
+
+    private void init() throws Exception {
+        JAXBContext context = JAXBContext.newInstance(PhensigColors.class);
+        Unmarshaller u = context.createUnmarshaller();
+        phensigColorLookup = load(new PhensigColors(), "phensigColors.xml", u);
+    }
+
+    private static <T extends AbstractLookup<K, V>, K, V> T load(
+            T combinedLookup, String fileName, Unmarshaller u) {
+
+        IPathManager pm = PathManagerFactory.getPathManager();
+
+        List<LocalizationContext> contexts = Arrays.asList(pm
+                .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC));
+        Collections.reverse(contexts);
+
+        for (LocalizationContext ctx : contexts) {
+            File f = pm.getFile(ctx, LOOKUP_DIR + IPathManager.SEPARATOR + fileName);
+            if (f != null && f.isFile()) {
+                try {
+                    @SuppressWarnings("unchecked")
+                    T lookup = (T) u.unmarshal(f);
+                    if (!combinedLookup.getClass().isAssignableFrom(
+                            lookup.getClass())) {
+                        throw new Exception(String.format(
+                                "file contains %s; expected %s",
+                                lookup.getClass(), combinedLookup.getClass()));
+                    }
+                    combinedLookup.getMap().putAll(lookup.getMap());
+                } catch (Exception e) {
+                    theHandler.error(
+                            String.format("%s: %s", f, e.getMessage()), e);
+                }
+            }
+        }
+        return combinedLookup;
+    }
+
+    public PhensigValue getPhensig(String phensigCode) {
+        return phensigColorLookup.map.get(phensigCode);
+    }
+
+    private static WarningLookups instance;
+
+    public static synchronized WarningLookups getInstance() {
+        if (instance == null)
+            instance = new WarningLookups();
+        return instance;
+    }
+
+    public static synchronized void reload() {
+        instance = new WarningLookups();
+    }
+}
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/phensigColors.xml b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/phensigColors.xml
new file mode 100644
index 0000000000..3134eba84b
--- /dev/null
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/utility/common_static/base/warngen/phensigColors.xml
@@ -0,0 +1,109 @@
    [109-line phensig color table; the entry markup (display-name and color attributes) was lost in extraction; recoverable phensig keys:]
    AF.W AF.Y AQ.Y AS.O AS.Y BH.S BW.Y BZ.A BZ.W CF.A CF.W CF.Y CF.S DS.W DU.Y
    EC.A EC.W EH.A EH.W EW.W FA.A FA.W FA.Y FF.A FF.S FF.W FG.Y FL.A FL.W HY.S
    FL.S FL.Y FR.Y FW.A FW.W FZ.A FZ.W GL.A GL.W HF.A HF.W HT.Y HU.A HU.S HU.W
    HW.A HW.W HZ.A HZ.W IS.W LE.A LE.W LE.Y LO.Y LS.A LS.S LS.W LS.Y LW.Y MA.A
    MA.S MA.W MF.Y MH.W MH.Y MS.Y RB.Y RP.S SC.Y SE.A SE.W SI.Y SM.Y SR.A SR.W
    SS.A SS.W SU.W SU.Y SV.A SV.S SV.W SW.Y TO.A TO.W TR.A TR.W TS.A TS.W TS.Y
    TY.A TY.W UP.A UP.W UP.Y WC.A WC.W WC.Y WI.Y WS.A WS.W WW.Y ZF.Y ZR.Y
diff --git a/edexOsgi/com.raytheon.uf.common.grib/src/com/raytheon/uf/common/grib/tables/GribTableLookup.java b/edexOsgi/com.raytheon.uf.common.grib/src/com/raytheon/uf/common/grib/tables/GribTableLookup.java
index cb777a8b8b..629c3fb767 100644
--- a/edexOsgi/com.raytheon.uf.common.grib/src/com/raytheon/uf/common/grib/tables/GribTableLookup.java
+++ b/edexOsgi/com.raytheon.uf.common.grib/src/com/raytheon/uf/common/grib/tables/GribTableLookup.java
@@ -195,7 +195,7 @@ public class GribTableLookup {
 
     /**
      * Initializes predefined tables. The predefined tables are stored as flat
-     * files in the utility directy to be access by the localization service
+     * files in the utility directory to be accessed by the localization service
     *

    * The localization service reads in all files and populates the cached * tables accordingly @@ -209,13 +209,7 @@ public class GribTableLookup { LocalizationLevel.BASE), "/grib/tables") .getPath(); - String sitePath = pm - .getFile(pm.getContext(LocalizationType.COMMON_STATIC, - LocalizationLevel.SITE), "/grib/tables") - .getPath(); - initTablesFromPath(commonPath); - initTablesFromPath(sitePath); } @@ -427,14 +421,14 @@ public class GribTableLookup { private final int centerid; - private final int subcenteris; + private final int subcenterid; private final int hashcode; - public TableMapKey(int centerid, int subcenteris) { + public TableMapKey(int centerid, int subcenterid) { this.centerid = centerid; - this.subcenteris = subcenteris; - this.hashcode = 31 * (31 + centerid) + subcenteris; + this.subcenterid = subcenterid; + this.hashcode = 31 * (31 + centerid) + subcenterid; } @Override @@ -457,7 +451,7 @@ public class GribTableLookup { if (centerid != other.centerid) { return false; } - if (subcenteris != other.subcenteris) { + if (subcenterid != other.subcenterid) { return false; } return true; diff --git a/edexOsgi/com.raytheon.uf.common.menus.vb/src/com/raytheon/uf/common/menus/vb/VbSourceList.java b/edexOsgi/com.raytheon.uf.common.menus.vb/src/com/raytheon/uf/common/menus/vb/VbSourceList.java index 94910d85e7..ddd2e3da48 100644 --- a/edexOsgi/com.raytheon.uf.common.menus.vb/src/com/raytheon/uf/common/menus/vb/VbSourceList.java +++ b/edexOsgi/com.raytheon.uf.common.menus.vb/src/com/raytheon/uf/common/menus/vb/VbSourceList.java @@ -40,8 +40,6 @@ import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.localization.ILocalizationFile; import com.raytheon.uf.common.localization.ILocalizationPathObserver; import com.raytheon.uf.common.localization.IPathManager; @@ -404,8 +402,6 @@ public class VbSourceList { } } - DatasetInfoLookup lookup = DatasetInfoLookup.getInstance(); - DatasetInfo info; // Set containing sources to not be added to lists Set removes = new HashSet<>(); Iterator itr = allSources.iterator(); @@ -415,9 +411,7 @@ public class VbSourceList { VbSource source = itr.next(); // Set display names for sources if (source.getName() == null) { - info = lookup.getInfo(source.getKey()); - source.setName( - info != null ? info.getTitle() : source.getKey()); + source.setName(source.getKey()); } if (source.getRemove()) { // Add sources with remove tags to removal set and remove them. diff --git a/edexOsgi/com.raytheon.uf.edex.dataplugins.feature/feature.xml b/edexOsgi/com.raytheon.uf.edex.dataplugins.feature/feature.xml index f32923b2c8..aa54b8b06a 100644 --- a/edexOsgi/com.raytheon.uf.edex.dataplugins.feature/feature.xml +++ b/edexOsgi/com.raytheon.uf.edex.dataplugins.feature/feature.xml @@ -17,18 +17,6 @@ [Enter License Description here.] 
    [feature.xml hunk: the plugin include elements lost their markup in extraction; the change removes a block of dataplugin entries and adds a smaller replacement block, ids unrecoverable]
diff --git a/edexOsgi/com.raytheon.uf.edex.ndm.dataplugin/src/com/raytheon/uf/edex/ndm/dataplugin/subscriber/ModelBufrSubscriber.java b/edexOsgi/com.raytheon.uf.edex.ndm.dataplugin/src/com/raytheon/uf/edex/ndm/dataplugin/subscriber/ModelBufrSubscriber.java
deleted file mode 100644
index bdf1cff98a..0000000000
--- a/edexOsgi/com.raytheon.uf.edex.ndm.dataplugin/src/com/raytheon/uf/edex/ndm/dataplugin/subscriber/ModelBufrSubscriber.java
+++ /dev/null
@@ -1,223 +0,0 @@
-/**
- * This software was developed and / or modified by Raytheon Company,
- * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
- *
- * U.S. EXPORT CONTROLLED TECHNICAL DATA
- * This software product contains export-restricted data whose
- * export/transfer/disclosure is restricted by U.S. law. Dissemination
- * to non-U.S. persons whether in the United States or abroad requires
- * an export license or other authorization.
- *
- * Contractor Name: Raytheon Company
- * Contractor Address: 6825 Pine Street, Suite 340
- *                     Mail Stop B8
- *                     Omaha, NE 68106
- *                     402.291.0100
- *
- * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
- * further licensing information.
- **/
-package com.raytheon.uf.edex.ndm.dataplugin.subscriber;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-
-import com.raytheon.uf.common.localization.ILocalizationFile;
-import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.LocalizationContext;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
-import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
-import com.raytheon.uf.common.localization.PathManagerFactory;
-import com.raytheon.uf.common.localization.exception.LocalizationException;
-import com.raytheon.uf.common.pointdata.vadriver.VA_Driver;
-import com.raytheon.uf.common.status.IUFStatusHandler;
-import com.raytheon.uf.common.status.UFStatus;
-import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.ndm.dataplugin.ingest.INationalDatasetSubscriber;
-import com.raytheon.uf.edex.plugin.modelsounding.decoder.ModelSoundingDataAdapter;
-
-/**
- * Subscriber to update the local model sounding sites whenever the national spi
- * file changes.
- *

- * <pre>
    - * SOFTWARE HISTORY
    - * 
    - * Date          Ticket#  Engineer    Description
    - * ------------- -------- ----------- --------------------------
    - * Jan 29, 2011           bfarmer     Initial creation
    - * Dec 02, 2013  2537     bsteffen    Ensure streams are closed.
    - * Mar 06, 2014  2876     mpduff      New NDM plugin.
    - * Mar 02, 2016  5434     bkowal      Relocated to ndm dataplugin.
    - * Jul 11, 2016  5744     mapeters    Save to common_static (not edex_static)
    - * 
- * </pre>
    - * - * @author bfarmer - */ - -public class ModelBufrSubscriber implements INationalDatasetSubscriber { - - private static final String MODEL_STATION_LIST = ModelSoundingDataAdapter.MODEL_STATION_LIST; - - private static final String MODEL_STATION_INFO = "modelBufrStationInfo.txt"; - - private static final String MODEL_GOODNESS = "modelBufr.goodness"; - - private static final IUFStatusHandler statusHandler = UFStatus - .getHandler(ModelBufrSubscriber.class); - - @Override - public void notify(String fileName, File file) { - - statusHandler.handle(Priority.EVENTA, - "modelBufr:Processing input file [" + fileName + "]"); - - if ("modelBufr.spi".equals(fileName)) { - IPathManager pathMgr = PathManagerFactory.getPathManager(); - LocalizationContext lc = pathMgr.getContext( - LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); - ILocalizationFile outLocFile = pathMgr.getLocalizationFile(lc, - ModelSoundingDataAdapter.SPI_FILE); - saveFile(file, outLocFile); - ModelSoundingDataAdapter.updateSPIData(); - } else if (MODEL_STATION_LIST.equals(fileName) - || MODEL_STATION_INFO.equals(fileName)) { - // Both are saved as MODEL_STATION_LIST in localization - processModelStationTxtFile(file); - } - } - - private void processModelStationTxtFile(File file) { - IPathManager pathMgr = PathManagerFactory.getPathManager(); - LocalizationContext lc = pathMgr.getContext( - LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); - ILocalizationFile outLocFile = pathMgr.getLocalizationFile(lc, - MODEL_STATION_LIST); - File goodnessFile = pathMgr.getFile(lc, MODEL_GOODNESS); - saveFile(file, outLocFile); - generateSPI(file, goodnessFile); - - File spiFile = pathMgr.getFile(lc, ModelSoundingDataAdapter.SPI_FILE); - if (!spiFile.exists()) { - try { - spiFile.createNewFile(); - } catch (IOException e) { - statusHandler.handle( - Priority.SIGNIFICANT, - "modelBufr:Could not create spiFile file: " - + spiFile.getName(), e); - } - } - - VA_Driver driver = new VA_Driver(); - driver.setWeight(0.5f); - driver.vaStationsFile(goodnessFile, null, spiFile); - // updateStationList will reload spi files also - ModelSoundingDataAdapter.update(); - } - - /** - * - * @param file - * @param goodnessFile - */ - private void generateSPI(File file, File goodnessFile) { - String line; - String[] splitLine; - try { - try (BufferedReader fis = new BufferedReader(new FileReader(file)); - BufferedWriter fos = new BufferedWriter(new FileWriter( - goodnessFile))) { - while ((line = fis.readLine()) != null) { - if (line.length() > 0) { - // check for commented lines - if ('#' != line.charAt(0)) { - try { - splitLine = line.split("\\|"); - Integer elevation; - Double latitude; - Double longitude; - String cause = "elevation"; - try { - elevation = Integer.parseInt(splitLine[4] - .trim()); - cause = "latitude"; - latitude = Double.parseDouble(splitLine[2] - .trim()); - cause = "longitude"; - longitude = Double.parseDouble(splitLine[3] - .trim()); - } catch (NumberFormatException nfe) { - String err = String - .format("modelBufr:Invalid %s in data line [%s]", - cause, line); - statusHandler.handle(Priority.PROBLEM, err); - continue; - } - String stationName = splitLine[1].trim(); - fos.write("0 "); - fos.write(stationName); - fos.write(String.format(" %8.4f %9.4f %5d %9d", - latitude, longitude, elevation, 0)); - fos.newLine(); - } catch (Exception e) { - String err = String.format( - "modelBufr:Error in data line [%s]", - line); - statusHandler.handle(Priority.PROBLEM, err, e); - continue; - } - } - } - } - } - } catch (IOException e) 
{ - statusHandler.handle(Priority.SIGNIFICANT, - "modelBufr:Could not read File ", e); - } - } - - /** - * Save the contents of the given File to the given ILocalizationFile - * - * @param file - * @param outFile - */ - private void saveFile(File file, ILocalizationFile outFile) { - if ((file != null) && file.exists()) { - try (BufferedReader fis = new BufferedReader(new FileReader(file)); - BufferedWriter fos = new BufferedWriter( - new OutputStreamWriter(outFile.openOutputStream()))) { - String line = null; - try { - while ((line = fis.readLine()) != null) { - fos.write(line); - fos.newLine(); - } - } catch (IOException e) { - statusHandler.handle(Priority.PROBLEM, - "Could not read file: " + file.getName(), e); - - } - } catch (FileNotFoundException e) { - statusHandler.handle(Priority.PROBLEM, "Failed to find file: " - + file.getName(), e); - } catch (LocalizationException e) { - statusHandler.handle( - Priority.PROBLEM, - "Failed to open output stream for file: " - + outFile.getPath(), e); - } catch (IOException e) { - // Error occurred closing fis/fos, ignore - } - } - } -} diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/handler/GridTreeHandler.java b/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/handler/GridTreeHandler.java index fcc71c1be9..1197d3c702 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/handler/GridTreeHandler.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/handler/GridTreeHandler.java @@ -26,8 +26,6 @@ import java.util.List; import com.raytheon.uf.common.inventory.tree.DataTree; import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants; import com.raytheon.uf.common.dataplugin.grid.GridInfoRecord; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfo; -import com.raytheon.uf.common.dataplugin.grid.dataset.DatasetInfoLookup; import com.raytheon.uf.common.dataplugin.grid.request.GetGridTreeRequest; import com.raytheon.uf.common.serialization.comm.IRequestHandler; import com.raytheon.uf.edex.database.dao.CoreDao; @@ -80,7 +78,7 @@ public class GridTreeHandler implements IRequestHandler { ArrayList gridFields = new ArrayList( Arrays.asList((Object[]) gridField)); String model = gridFields.get(0).toString(); - gridTree.addBranch(model, getDt(model), gridFields.get(1) + gridTree.addBranch(model, gridFields.get(1) .toString(), gridFields.get(2).toString(), gridFields.get(3).toString(), gridFields.get(4) .toString()); @@ -90,18 +88,4 @@ public class GridTreeHandler implements IRequestHandler { return gridTree; } - private int getDt(String modelName) { - DatasetInfo info = DatasetInfoLookup.getInstance().getInfo(modelName); - if (info != null && info.getDt() != null) { - int dTinSeconds = info.getDt(); - - // dT <= 24 is in hours, need to convert to seconds - if (Math.abs(dTinSeconds) <= 24) { - dTinSeconds *= 3600; - } - return dTinSeconds; - } - return -1; - } - } diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml index 6a42abe7b4..ab0e9ac9ff 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/utility/common_static/base/purge/gridPurgeRules.xml @@ -1,24 +1,4 @@ - - LAPS 30 - - - MSAS - 24 - - - - mesoEta21[25] + NAM(?:12|20|40) 2 00-00:15:00 - - - 
NAM40 - 1 - 00-00:15:00 - - - - WaveWatch - 1 - 00-00:15:00 - - - - RFCqpf + + (RFCqpf|HPCqpf) 5 - - - RAP40 - 8 - - - - AVN225 - 3 - - - - WNAWAVE238 - 1 - 00-00:15:00 - - - - - mesoEta21[67] - 1 - 00-00:15:00 - - - - - ETA2(?:18|42) - 1 - 00-00:15:00 - - - - HPCqpf - 5 - - - - ENPWAVE253 - 1 - 00-00:15:00 - - - - HPCGuide - 2 - 00-00:15:00 - - GFSGuide 8 00-00:15:00 - - - - - - GFS(?:212|160|254|161) - 1 - 00-00:15:00 - - MPE-Local 72 - TPCWindProb.* 8 00-00:15:00 - - - MOSGuide - 2 - 00-00:15:00 - - QPE-(?:ACR|STR|RSA|PTR|TUA|ORN|RHA|KRF|MSR|TAR|TIR|ALR|FWR) 72 - - - OPCWave.* 8 00-00:15:00 - - - - - - - - - - - - .*[Ww]ave.* - 1 - 00-00:15:00 - - RTMA 24 @@ -204,7 +78,6 @@ 24 00-00:15:00 - AK-RTMA3 24 @@ -225,131 +98,40 @@ 8 00-00:15:00 - SREF40 2 00-00:15:00 - - - GFSLAMPGrid - 24 - 00-00:15:00 - - ECMWF-HiRes 2 00-00:15:00 - HPCqpfNDFD 42 00-00:15:00 - - - NamDNG5 - 2 - 00-00:15:00 - - - - TPCSurgeProb - 1 - 00-00:15:00 - - - - P-ETSS - 1 - 00-00:15:00 - - HPE 00-12:00:00 00-00:15:00 - BHPE 00-12:00:00 00-00:15:00 - - - - MOSGuide-(?:AK|HI) - 2 - 00-00:15:00 - - - - HiRes-ARW-East - 2 - 00-00:15:00 - - - - - - HiRes-ARW-(?:West|AK|SJU) - 1 - 00-00:15:00 - - - - HiRes-ARW-HI - 2 - 00-00:15:00 - - - - HiRes-NMM-East - 2 - 00-00:15:00 - - - - - - HiRes-NMM-(?:West|AK|SJU) - 1 - 00-00:15:00 - - - - HiRes-NMM-HI - 2 - 00-00:15:00 - - SPCGuide 5 00-00:15:00 - - - HRRR - 4 - - - - GFS199 - 4 - 00-01:00:00 - - MRMS_(?:05|10|50)00 00-08:00:00 - - NARR 40-00:00:00 diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml index 472c84005e..191a6501cc 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml +++ b/edexOsgi/com.raytheon.uf.edex.plugin.manualIngest/res/spring/manualIngest-spring.xml @@ -11,7 +11,7 @@ xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler"> + uri="file:${data.archive.root}/ingest?delete=true&delay=5000&maxMessagesPerPoll=1000&exclusiveReadLockStrategy=#fileChangedStrategy&recursive=true" /> diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingDecoder.java b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingDecoder.java index 11424449a1..bc98073e98 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingDecoder.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingDecoder.java @@ -85,8 +85,6 @@ import com.raytheon.uf.edex.plugin.modelsounding.decoder.ModelSoundingDataAdapte */ public class ModelSoundingDecoder implements IDescriptorFactorySelector { - public static final String SPI_FILE = "basemaps/modelBufr.spi"; - // Name of the plugin controlling this decoder. 
public static final String PLUGIN_NAME = "modelsounding"; diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingPersistenceManager.java b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingPersistenceManager.java index 772383f7d0..4e9c7d44a3 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingPersistenceManager.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/ModelSoundingPersistenceManager.java @@ -140,10 +140,6 @@ public class ModelSoundingPersistenceManager implements IContextStateProcessor { EDEXUtil.getMessageProducer().sendSync( "modelSoundingPersistIndexAlert", pdos); timer.stop(); - logger.info("Stored container: " - + container.getKey() + ", size: " - + container.size() + ", in " - + timer.getElapsedTime() + "ms"); } catch (EdexException e) { logger.error("Failed to persist " + pdos.length + " PluginDataObject(s) for key: " @@ -275,4 +271,4 @@ public class ModelSoundingPersistenceManager implements IContextStateProcessor { @Override public void postStop() { } -} \ No newline at end of file +} diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/decoder/ModelSoundingDataAdapter.java b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/decoder/ModelSoundingDataAdapter.java index 348eab8a48..b546591f3b 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/decoder/ModelSoundingDataAdapter.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.modelsounding/src/com/raytheon/uf/edex/plugin/modelsounding/decoder/ModelSoundingDataAdapter.java @@ -68,6 +68,7 @@ import com.raytheon.uf.edex.plugin.modelsounding.common.SoundingModels; * Sep 16, 2014 3628 mapeters Replaced static imports. * Jul 12, 2016 5744 mapeters SoundingStations constructor no longer takes * path parameter + * Sep 05, 2018 mjames@ucar Remove modelBufr filter. * * @@ -81,42 +82,10 @@ public class ModelSoundingDataAdapter { private static final Object LOCK = new Object(); - public static final String SPI_FILE = "basemaps" + IPathManager.SEPARATOR - + "modelBufr.spi"; - public static final String MODEL_STATION_LIST = "modelBufrStationList.txt"; private static SoundingStations stationsList = new SoundingStations(); - private static SPIContainer SPI_DATA = populateSPIData(); - - public static void updateSPIData() { - SPIContainer spi = populateSPIData(); - synchronized (LOCK) { - if ((spi != null) && (spi.isLoaded())) { - SPI_DATA = spi; - } - } - } - - public static void updateStationList() { - SoundingStations ss = new SoundingStations(); - synchronized (LOCK) { - stationsList = ss; - } - } - - public static void update() { - SoundingStations ss = new SoundingStations(); - SPIContainer spi = populateSPIData(); - synchronized (LOCK) { - stationsList = ss; - if ((spi != null) && (spi.isLoaded())) { - SPI_DATA = spi; - } - } - } - /** * Get the temporal and model information. * @@ -249,41 +218,35 @@ public class ModelSoundingDataAdapter { String stationId = stationsList.mapId(String.format("%010d", wmoStaNum)); // Now determine if the station Id is in this localization list. 
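// Hedged summary of the change below, editorial: the old code kept obsData
// only when SPI_DATA.getEntryById(stationId) found the station in
// modelBufr.spi and set obsData = null otherwise, discarding the sounding;
// the replacement populates station id, lat/lon and station height for
// every decoded report, with no site-localized station filtering.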
- SPIEntry s = SPI_DATA.getEntryById(stationId); - if (s != null) { - if (stationId != null) { - location.setStationId(stationId); - obsData.setSiteId(String.format("%06d", wmoStaNum)); - } - if (model.equals(SoundingModels.MODEL_ETA)) { - index++; - } - Double lat = null; - dp = dataList.get(index++); - int d = dp.getReferencingDescriptor().getDescriptor(); - if (d == BUFRDescriptor.createDescriptor(0, 5, 2)) { - lat = (Double) dp.getValue(); - } - Double lon = null; - dp = dataList.get(index++); - d = dp.getReferencingDescriptor().getDescriptor(); - if (d == BUFRDescriptor.createDescriptor(0, 6, 2)) { - lon = (Double) dp.getValue(); - } - location.assignLocation(lat.floatValue(), lon.floatValue()); - dp = dataList.get(index); - d = dp.getReferencingDescriptor().getDescriptor(); - if (d == BUFRDescriptor.createDescriptor(0, 10, 194)) { - stationHeight = (dp.getValue() != null) ? ((Double) dp - .getValue()).intValue() : null; - location.setElevation(stationHeight); - } - obsData.setLocation(location); - - obsData.setPointDataView(view); - } else { - obsData = null; + if (stationId != null) { + location.setStationId(stationId); + obsData.setSiteId(String.format("%06d", wmoStaNum)); } + if (model.equals(SoundingModels.MODEL_ETA)) { + index++; + } + Double lat = null; + dp = dataList.get(index++); + int d = dp.getReferencingDescriptor().getDescriptor(); + if (d == BUFRDescriptor.createDescriptor(0, 5, 2)) { + lat = (Double) dp.getValue(); + } + Double lon = null; + dp = dataList.get(index++); + d = dp.getReferencingDescriptor().getDescriptor(); + if (d == BUFRDescriptor.createDescriptor(0, 6, 2)) { + lon = (Double) dp.getValue(); + } + location.assignLocation(lat.floatValue(), lon.floatValue()); + dp = dataList.get(index); + d = dp.getReferencingDescriptor().getDescriptor(); + if (d == BUFRDescriptor.createDescriptor(0, 10, 194)) { + stationHeight = (dp.getValue() != null) ? 
((Double) dp + .getValue()).intValue() : null; + location.setElevation(stationHeight); + } + obsData.setLocation(location); + obsData.setPointDataView(view); } return obsData; @@ -518,29 +481,4 @@ public class ModelSoundingDataAdapter { return retValue; } - private static SPIContainer populateSPIData() { - SPIContainer container = null; - - PathManager pathMgr = (PathManager) PathManagerFactory.getPathManager(); - - LocalizationContext ctx = pathMgr.getContext( - LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); - String site = ctx.getContextName(); - - logger.info("Loading " + SPI_FILE + " for site [" + site + "]"); - - File srcFile = pathMgr.getFile(ctx, SPI_FILE); - - container = new SPIContainer(srcFile); - if (container.isLoaded()) { - logger.info("Loading " + SPI_FILE + " for site [" + site - + "] Successful"); - } else { - logger.error("Loading " + SPI_FILE + " for site [" + site - + "] failed"); - } - - return container; - } - } diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/src/com/raytheon/uf/edex/plugin/satellite/mcidas/McidasSatelliteDecoder.java b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/src/com/raytheon/uf/edex/plugin/satellite/mcidas/McidasSatelliteDecoder.java index 3904e28fcd..178a286673 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/src/com/raytheon/uf/edex/plugin/satellite/mcidas/McidasSatelliteDecoder.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/src/com/raytheon/uf/edex/plugin/satellite/mcidas/McidasSatelliteDecoder.java @@ -574,9 +574,6 @@ public class McidasSatelliteDecoder { private String getAreaName(int ssn, int areaNumber) { String value = McidasSatelliteLookups.getInstance().getAreaName(ssn); - theHandler.info("Looking up ssn=" + ssn + " for areaNumber=" + areaNumber); - theHandler.info("return value=" + value); - return value != null ? value : String.format("AREA%04d", areaNumber); } diff --git a/edexOsgi/com.raytheon.uf.tools.cli/impl/capture b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture new file mode 100755 index 0000000000..2d06129e3b --- /dev/null +++ b/edexOsgi/com.raytheon.uf.tools.cli/impl/capture @@ -0,0 +1,776 @@ +#!/bin/bash +# base path to save capture data to, will create subdirectory for each workstation +basePath="/data/fxa/cave" + +edexGrepString="edex.run.mode=" + +xorgLogPath="/var/log" + +# the remote servers to grab top on. Use to get general state of servers +if [ ! -z "${DX_SERVERS}" ]; then + REMOTE_SERVERS_TO_CHECK="${DX_SERVERS}" +else + REMOTE_SERVERS_TO_CHECK="dx1f dx2f dx3 dx4" +fi +if [ ! -z "${PX_SERVERS}" ]; then + REMOTE_SERVERS_TO_CHECK="${REMOTE_SERVERS_TO_CHECK} ${PX_SERVERS}" +else + REMOTE_SERVERS_TO_CHECK="${REMOTE_SERVERS_TO_CHECK} px1 px2" +fi + +# the database host to grab current running queries for +DATABASE_HOST="dx1f" + +# Flags to control what data capture grabs, to enable flag must be YES, anything else will be considered off. +RUN_JSTACK="Y" +JSTACK_ITERATIONS="15" +RUN_JMAP="Y" +RUN_QPID_STAT="Y" +MOVE_ALL_HS_ERR_PID="Y" +# For remote top you must have ssh keys setup to allow automatic login, otherwise password prompt will get sent to log file and script will never exit +GRAB_REMOTE_TOP="Y" +GRAB_REMOTE_VMSTAT="Y" +GRAB_CAVE_AND_ALERTVIZ_LOGS="Y" +GRAB_SCREENSHOT="Y" +GRAB_CURRENT_QUERIES="Y" +GRAB_XORG_LOG="Y" +EDEX_MODE="N" +FORCE="N" +TGZ_OUTPUT="Y" +RUN_VERSIONS="Y" +ACCCUM="N" + +cavePid="" +edexProcCount=0 + +# print usage message +usage() { + echo "Script for capturing information about cave/edex and general server health." 
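# Illustrative invocations, editorial and not from the original script; the
# flag meanings are documented by the echo lines below:
#   ./capture                          # full capture of all local cave PIDs
#   ./capture -quick                   # skip jmap, only 5 jstack iterations
#   ./capture -p 12345                 # capture a single cave PID
#   ./capture -e request -e ingest     # EDEX mode for two JVMs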
+ echo + echo "Following options allowed" + echo -e "-quick" + echo " Turns off jmap and reduces jstack iterations to 5" + echo + echo -e "-c \"{host names}\"\tdefault [$REMOTE_SERVERS_TO_CHECK]" + echo " The servers to grab top information from, make sure list is quoted and space delimited" + echo + echo -e "-d {y/n}\t\tdefault [$RUN_JMAP]" + echo " Run jmap to grab the heap dump information" + echo + echo -e "-e {request/ingest/ingestGrib/ingestDat}" + echo " Run edex mode and grab information about the jvm passed. May be used multiple times to grab data about multiple jvms" + echo + echo -e "-f {y/n}\t\tdefault [$FORCE]" + echo " Force a jstack/jmap by default" + echo + echo -e "-l {y/n}\t\tdefault [$GRAB_CAVE_AND_ALERTVIZ_LOGS]" + echo " Captures the cave and alertviz logs. If run for a specific pid the only cave log captured will be for that pid" + echo + echo -e "-m {y/n}\t\tdefault [$MOVE_ALL_HS_ERR_PID]" + echo " Captures all hs_err_pid's found" + echo + echo -e "-p {PID}\t\tdefault none" + echo " Run capture for a specific PID, crash information will not be captured. Defaults to none and runs against all pids found." + echo + echo -e "-q {y/n}\t\tdefault [$RUN_QPID_STAT]" + echo " Run qpid-stat" + echo + echo -e "-Q {y/n}\t\tdefault [$GRAB_CURRENT_QUERIES]" + echo " Grab current running database queries" + echo + echo -e "-r \"Reason for capture\"" + echo " The reason for capture, so popup will not be shown" + echo + echo -e "-s {y/n}\t\tdefault [$RUN_JSTACK]" + echo " Run jstack to grab the thread stack information" + echo + echo -e "-screen {y/n}\t\tdefault [$GRAB_SCREENSHOT]" + echo " Screen print the current workstation (local user must be running capture)" + echo + echo -e "-t {y/n}\t\tdefault [$GRAB_REMOTE_TOP]" + echo " Captures top information from servers, auto login must be enabled" + echo + echo -e "-v {y/n}\t\tdefault [$GRAB_REMOTE_VMSTAT]" + echo " Captures vmstat information from servers, auto login must be enabled" + echo + echo -e "-V {y/n}\t\tdefault [$RUN_VERSIONS]" + echo " Grab version information" + echo + echo -e "-z {y/n}\t\tdefault [$TGZ_OUTPUT]" + echo " Tar and gzip the captured data" + echo + echo -e "-h" + echo " Display this usage statement" + exit 0 +} + +# ensure directory is created and has write permissions +checkDir() { + dir="$1" + if [ ! -d "$dir" ]; then + mkdir -p $dir + if [ ! -d "$dir" ]; then + message="Unable to create capture data directory\n$dir" + zenity --error --no-wrap --title="Capture Failed" --text="$message" > /dev/null 2>&1 & + + echo -e "Capture failed: $message" + exit 1 + fi + fi + + if [ ! 
-w "$dir" ]; then + message="Do not have write permissions to capture data directory\n$dir" + zenity --error --no-wrap --title="Capture Failed" --text="$message" > /dev/null 2>&1 & + + echo -e "Capture failed: $message" + exit 1 + fi +} + +checkYes() { + local __resultvar="$1" + if [ $2 == "YES" -o $2 == "Y" -o $2 == "yes" -o $2 == "y" ]; then + eval $__resultvar="y" + else + eval $__resultvar="n" + fi +} + +#check if at least 1 GB of free mem exists to run jmap +checkJmapMem() { + CAP_MEM=1000 + let freeMem=`free -m | grep Mem | awk '{print $7}'` + + if [ $freeMem -gt $CAP_MEM ]; then + echo "${t1}: ${freeMem}M free, running jmap" >> $processFile + return 1 + else + echo "${t1}: ${freeMem}M free, skipping jmap" >> $processFile + return 0 + fi +} + + +# runs import to grab screen shot of users desktop +grabScreenShot() { + if [ "$GRAB_SCREENSHOT" == "y" ]; then + echo "Capturing screen shot of desktop" + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing screen shot of desktop" >> $processFile + possibleScreens=`w -hs $user | awk '{print $3}' | sort -u` + count=0 + for pScreen in $possibleScreens; + do + if [[ $pScreen =~ :[0-9]+\.[0-9]+ ]]; then + import -window root -display $pScreen ${dataPath}/screenShot_${count}.png > ${dataPath}/screenShot_${count}.log 2>&1 & + let "count+=1" + fi + done + fi +} + +# runs import to grab screen shot of users desktop +grabXorgLog() { + if [ "$GRAB_XORG_LOG" == "y" ]; then + echo "Capturing Xorg logs" + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing Xorg logs" >> $processFile + cp ${xorgLogPath}/Xorg*.log* ${dataPath} >> $processFile 2>&1 & + fi +} + +# runs ssh command to grab top on a remote server, requires auto login to be setup +grabRemoteTop() { + if [ "$GRAB_REMOTE_TOP" == "y" ]; then + echo "Capturing top on remote servers" + for server in ${REMOTE_SERVERS_TO_CHECK}; + do + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing top for $server" >> $processFile + out_file="${dataPath}/top_$server.log" + ssh $server "sh -c 'export COLUMNS=160; top -b -c -n1' " >> $out_file 2>&1 & + done + fi +} + +# runs ssh command to grab vmstat on a remote server, requires auto login to be setup +grabRemoteVmstat() { + if [ "$GRAB_REMOTE_VMSTAT" == "y" ]; then + echo "Capturing vmstat on remote servers" + for server in ${REMOTE_SERVERS_TO_CHECK}; + do + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing vmstat for $server" >> $processFile + out_file="${dataPath}/vmstat_$server.log" + ssh $server "sh -c 'vmstat -w 1 5' " >> $out_file 2>&1 & + done + fi +} + +grabCurrentDatabaseQueries() { + if [ "$GRAB_CURRENT_QUERIES" == "y" ]; then + echo "Capturing current database queries" + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: Capturing current database queries" >> $processFile + out_file="${dataPath}/database_queries.log" + echo "dx1f:5432:metadata:awips:awips" > ~/.pgpass; chmod 600 ~/.pgpass + psql -d metadata -U awips -h ${DATABASE_HOST} -c "select datname, pid, client_addr, query, now()-xact_start as runningTime from pg_stat_activity where state != 'idle' order by runningTime desc;" >> $out_file 2>&1 & + fi +} + +checkForProcsAsOtherUsers() { + if [ ! 
-z "$procs" ]; then + numMyProcs=`echo "$myProcs" | wc -l` + numProcs=`echo "$procs" | wc -l` + + if [ "$numMyProcs" -ne "$numProcs" ]; then + notMyProcs=`echo "$procs" | grep -v $user` + + # preserve IFS and set it to line feed only + PREV_IFS=$IFS + IFS=$'\n' + usersFound=0 + + for proc in $notMyProcs + do + procUser=`echo $proc | awk '{print $1}'` + + count=0 + found=0 + + while [ "$count" -lt "$usersFound" ]; do + if [ "${procUsers[$count]}" == "$procUser" ]; then + found=1 + fi + let "count+=1" + done + + if [ "$found" -eq "0" ]; then + procUsers[$usersFound]="$procUser" + let "usersFound+=1" + fi + done + + # restore IFS + IFS=$PREV_IFS + + message="Processes found running as other users, please run capture as:\n" + count=0 + while [ "$count" -lt "$usersFound" ]; do + message="${message}\n${procUsers[$count]}" + let "count+=1" + done + + zenity --info --no-wrap --title="!!! Capture Must Be Rerun !!!" --text="$message" > /dev/null 2>&1 & + echo -e "Capture Must Be Rerun:\n$message\n" + fi + fi +} + +# gets the reason for running capture +reasonForCapture() { + if [ -z "$reason" ]; then + reason=`zenity --list --title "Reason for Running Capture " --width 300 --height 260 --text "Select reason for running capture\n" --radiolist --column "Cause" --column "Reason" --editable TRUE "Received Out of Memory Error" FALSE "Cave slow down" FALSE "Cave unresponsive/froze" FALSE "Cave crashed" FALSE "Other"` + rerun=0 + + if [ -z "$reason" ]; then + rerun=1 + elif [ "$reason" == "Other" ]; then + rerun=1 + fi + + if [ "$rerun" -eq "1" ]; then + reason=`zenity --text-info --title "Please Enter Reason for Running Capture" --editable --width 400 --height 250` + fi + fi + + echo $reason >> ${dataPath}/capture_reason.log +} + +# for a specified pid run jstack a specified number of times in a row +runJstack() { + local pid="$1" + local numIterations="$2" + local options="-l" + + if [ "$FORCE" == "y" ]; then + options="${options} -F" + fi + + local cmd="/awips2/java/bin/jstack" + local count=1 + local prePath="${dataPath}/pid_${pid}_" + local log="" + while [ "$count" -le "$numIterations" ]; do + t1=`date "+%Y%m%d %H:%M:%S"` + log="${prePath}jstack_${count}.log" + + echo "${t1}: Running command: ${cmd} ${options} ${pid} >> ${log} 2>&1" >> $processFile + echo "Running for $t1" >> $log + ${cmd} ${options} ${pid} >> ${log} 2>&1 + + if [[ "$?" != "0" && $FORCE != "y" ]]; then + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: jstack for $pid failed to connect, rerunning with -F" >> $processFile + ${cmd} ${options} -F ${pid} >> ${log} 2>&1 + fi + let "count+=1" + done +} + +# Launchs a background process for each PID to pull jstacks +launchJstacks() { + # grab all jstacks + if [ "${RUN_JSTACK}" == "y" ]; then + if [ ! 
-z ${cavePid} ]; then + echo "Capturing thread stack for pid $cavePid" + else + echo "Capturing all process thread stacks" + fi + + local count=0 + while [ "$count" -lt "$numProcs" ]; do + runJstack ${pids[$count]} ${JSTACK_ITERATIONS} & + let "count+=1" + done + fi +} + +# runs jmap in background, if it fails will run again with -F +runJmap() { + local pid=$1 + local prePath="${dataPath}/pid_${pid}_" + local options="" + + if [ "$FORCE" == "y" ]; then + options="${options} -F" + fi + + local t1=`date "+%Y%m%d %H:%M:%S"` + local log="${prePath}dump.log" + local dumpPath="${prePath}dump" + local heapPath="${prePath}heap" + + if [ "$ACCUM" == "y" ]; then + # accum needs to change hprof by date + local t2=`date "+%Y%m%d_%H%M%S"` + dumpPath="${dumpPath}_${t2}.hprof" + heapPath="${heapPath}_${t2}.txt" + else + dumpPath="${dumpPath}.hprof" + heapPath="${heapPath}.txt" + fi + + echo "${t1}: Running command: /awips2/java/bin/jmap -heap $pid" >> $processFile + /awips2/java/bin/jmap -heap $pid >> ${heapPath} 2>&1 + + # Java 1.7 has a bug that causes jmap to crash processes using the G1 garbage collector + # more info at http://stackoverflow.com/questions/20571004/garbage-collector-first-and-jmap-eof-bug + # workaround is to add the 'live' option to jmap to limit the dump to only live objects + local cmd="/awips2/java/bin/jmap -dump:live,format=b,file=${dumpPath}" + echo "${t1}: Running command: $cmd $options $pid >> $log 2>&1" >> $processFile + $cmd $options $pid >> $log 2>&1 + + if [[ "$?" != "0" && $FORCE != "y" ]]; then + t1=`date "+%Y%m%d %H:%M:%S"` + echo "${t1}: jmap for $pid failed to connect, rerunning with -F" >> $processFile + $cmd $options -F $pid >> $log 2>&1 + fi +} + +# Launchs a background process for each PID to pull jmap +launchJmaps() { + # grab all jmaps + if [ "$RUN_JMAP" == "y" ]; then + if [ ! -z ${cavePid} ]; then + echo "Capturing process heap dump for pid $cavePid" + else + echo "Capturing all Heap Dumps" + fi + + local count=0 + while [ "$count" -lt "$numProcs" ]; do + runJmap ${pids[$count]} & + let "count+=1" + done + fi +} + +# runs qpid-stat +runQpidStat() { + local qpidHost=cp1f + local prePath="${dataPath}/" + local t1=`date "+%Y%m%d %H:%M:%S"` + local cmd="/awips2/python/bin/qpid-stat -q -Smsg -L500 ${qpidHost}" + local log="${prepath}qpid-stat-queues.log" + echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile + if [ "$ACCUM" == "y" ]; then + echo >> $log + echo >> $log + echo "Running for $t1" >> $log + fi + $cmd >> $log 2>&1 & + + log="${prepath}qpid-stat-sessions.log" + cmd="/awips2/python/bin/qpid-stat -s -Smsg -L500 ${qpidHost}" + echo "${t1}: Running command: $cmd >> $log 2>&1 &" >> $processFile + if [ "$ACCUM" == "y" ]; then + echo >> $log + echo >> $log + echo "Running for $t1" >> $log + fi + $cmd >> $log 2>&1 & +} + +# runs versions.sh to grab version info +runVersions() { + local t1=`date "+%Y%m%d %H:%M:%S"` + local cmd="/awips2/cave/versions.sh" + echo "${t1}: Running command: $cmd >> ${dataPath}/versions.log 2>&1" >> $processFile + $cmd >> ${dataPath}/versions.log 2>&1 +} + +# take in pid, output process name without args +getCommandName() { + ps --no-header c -p $1 -o cmd +} + +# take in pid, output parent process id +getParentPid() { + ps --no-header -p $1 -o ppid +} + +# take in pid, output associated cave executable pid if found, otherwise output given pid +determineCaveProcess() { + local RVAL=$1 + # check if supplied PID is for the cave executable + local CMD_NAME=$(getCommandName $1) + if [[ ! 
$CMD_NAME =~ cave ]] + then + # worker pid probably provided, check parent + RVAL=$(getParentPid $1) + if [[ ! $(getCommandName $RVAL) =~ cave ]] + then + # parent wasn't cave either... continue on using PID provided + echo "${t1}: Unable to find cave process for pid $1, proceeding with provided PID" >> $processFile + RVAL=$1 + fi + fi + echo $RVAL +} + +# parse command line +while [ ! -z "$1" ]; do + arg=$1 + shift 1 + + case $arg in + -a) ACCUM="$1"; shift 1;; + -d) RUN_JMAP="$1"; shift 1;; + -e) EDEX_MODE="Y"; edexProcs[$edexProcCount]="$1"; shift 1; let "edexProcCount+=1";; + -f) FORCE="$1"; shift 1;; + -l) GRAB_CAVE_AND_ALERTVIZ_LOGS="$1"; shift 1;; + -m) MOVE_ALL_HS_ERR_PID="$1"; shift 1;; + -p) cavePid="$1"; shift 1;; + -q) RUN_QPID_STAT="$1"; shift 1;; + -Q) GRAB_CURRENT_QUERIES="$1"; shift 1;; + -r) REMOTE_SERVERS_TO_CHECK="$1"; shift 1;; + -s) RUN_JSTACK="$1"; shift 1;; + -screen) GRAB_SCREENSHOT="$1"; shift 1;; + -t) GRAB_REMOTE_TOP="$1"; shift 1;; + -v) GRAB_REMOTE_VMSTAT="$1"; shift 1;; + -V) RUN_VERSIONS="$1"; shift 1;; + -z) TGZ_OUTPUT="$1"; shift 1;; + -quick) RUN_JMAP="N"; JSTACK_ITERATIONS=5;; + -h|*) usage;; + esac +done + +# validate inputs +checkYes RUN_JSTACK $RUN_JSTACK +checkYes RUN_JMAP $RUN_JMAP +checkYes RUN_QPID_STAT $RUN_QPID_STAT +checkYes FORCE $FORCE +checkYes MOVE_ALL_HS_ERR_PID $MOVE_ALL_HS_ERR_PID +checkYes GRAB_REMOTE_TOP $GRAB_REMOTE_TOP +checkYes GRAB_REMOTE_VMSTAT $GRAB_REMOTE_VMSTAT +checkYes GRAB_CURRENT_QUERIES $GRAB_CURRENT_QUERIES +checkYes GRAB_CAVE_AND_ALERTVIZ_LOGS $GRAB_CAVE_AND_ALERTVIZ_LOGS +checkYes EDEX_MODE $EDEX_MODE +checkYes TGZ_OUTPUT $TGZ_OUTPUT +checkYes ACCCUM $ACCCUM +checkYes RUN_VERSIONS $RUN_VERSIONS +checkYes GRAB_SCREENSHOT $GRAB_SCREENSHOT +checkYes GRAB_XORG_LOG $GRAB_XORG_LOG + +# if PID mode don't grab other hs_err_pids +if [ ! 
-z $cavePid ]; then + MOVE_ALL_HS_ERR_PID="n" +fi + +# if accum don't tgz +if [ "$ACCUM" == "y" ]; then + TGZ_OUTPUT="n" + RUN_VERSIONS="n" +fi + +if [ "$EDEX_MODE" == "y" ]; then + reason="n" + GRAB_CAVE_AND_ALERTVIZ_LOGS="n" + MOVE_ALL_HS_ERR_PID="n" + GRAB_REMOTE_TOP="n" + GRAB_REMOTE_VMSTAT="n" + GRAB_XORG_LOG="n" +fi + +umask 0002 + +checkDir $basePath + +user=`whoami` + +hostName=`hostname -s` +fullHostName=`hostname` +# remove the -testBed items +strippedHostName=${hostName%-} +hostPath="${basePath}/${hostName}" + +checkDir $hostPath + +curTime=`date +%Y%m%d_%H%M%S` +curDir=`pwd` + +if [ "${ACCUM}" == "y" ]; then + curDay=`date +%Y%m%d` + dataPath="${hostPath}/captureData_${curDay}" +else + dataPath="${hostPath}/captureData_${curTime}" +fi + +checkDir $dataPath + +cd $dataPath +processFile=${dataPath}/capture_info.log +export COLUMNS=160 +top -b -c -n1 >> "${dataPath}/top_$hostName.log" +vmstat -w 1 5 >> "${dataPath}/vmstat_$hostName.log" + +if [ "$ACCUM" == "y" ]; then + echo "" >> "${dataPath}/top_$hostName.log" + echo "" >> "${dataPath}/top_$hostName.log" + echo "" >> "${dataPath}/vmstat_$hostName.log" +fi + + +if [ "$EDEX_MODE" == "y" ]; then + grepString="$edexGrepString(" + count=0 + + while [ "$count" -lt "$edexProcCount" ]; do + if [ "$count" -ne "0" ]; then + grepString="${grepString}|" + fi + + grepString="${grepString}${edexProcs[$count]}" + let "count+=1" + done + + grepString="${grepString}) " + procs=`ps -ef | grep -E "$grepString" | grep -v "grep"` +else + #list of cave process ids to get ps output for + caveProcNums="" + for parent in $(pgrep '^cave$') + do + # the cave process starts a new JVM as a child process + # find all children of the cave process + children=$(pgrep -P $parent) + if [[ -z $children ]] + then + # no children, assume that this is a main cave process + caveProcNums="$caveProcNums $parent" + else + # otherwise, only get ps output for children + caveProcNums="$caveProcNums $children" + fi + done + if [ ! "${caveProcNums}" = "" ]; then + procs=$(ps --no-header -fp $caveProcNums) + fi +fi + +if [ ! -z "$cavePid" ]; then + # limit cave procs to the requested PID + echo "Running in PID mode, only requesting for pid $cavePid" >> $processFile + procs=`echo "$procs" | grep "$cavePid"` +fi + +myProcs=`echo "$procs" | grep "$user"` + +echo "${procs}" >> $processFile +echo "" >> $processFile +echo "" >> $processFile + +checkForProcsAsOtherUsers + +if [ ! -z "${myProcs}" ]; then + t1=`date "+%Y%m%d %H:%M:%S"` + echo "Processes found for user $user, capturing data to $dataPath" + echo "${t1}: Processes found for user $user, capturing data to $dataPath" >> $processFile + echo "" >> $processFile + numProcs=`echo "$myProcs" | wc -l` + + # preserve IFS and set it to line feed only + PREV_IFS=$IFS + IFS=$'\n' + count=0 + + # grab the pids for future use + for proc in $myProcs + do + pids[$count]=`echo "$proc" | awk '{print $2}'` + let "count+=1" + done + IFS=$PREV_IFS + + launchJstacks + + checkJmapMem + let rc=$? 
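# Hedged editorial note: checkJmapMem, defined above, reports through its
# exit status with an inverted convention -- it returns 1 when more than
# CAP_MEM (1000 MB) is free and 0 otherwise, so heap dumps are launched
# only when rc is 1. A minimal equivalent sketch:
#   if checkJmapMem; then echo "skip jmap"; else launchJmaps; fi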
+ if [ $rc -eq 1 ]; then + launchJmaps + fi + +else + t1=`date "+%Y%m%d %H:%M:%S"` + echo "*** NO processes found for user $user, capturing limited data to $dataPath" + echo "${t1}: NO processes found for $user" >> $processFile + echo "" >> $processFile +fi + +# grab Xorg logs +grabXorgLog + +# grab top for servers +grabRemoteTop + +# grab vm stat for servers +grabRemoteVmstat + +# grab current database queries +grabCurrentDatabaseQueries + +# grab screen shot, spawns background process for each screen +grabScreenShot + +# grab qpid stat +runQpidStat + +# ls users home directory to check nas performance +/usr/bin/time -p ls -la ~ > ${dataPath}/nas_check_ls_home.txt 2>&1 & + +# get reason for running capture +if [ "$reason" != "n" ]; then + reasonForCapture & +fi + +# move all hs_err_pid from user's home directory to capture directory +if [ "${MOVE_ALL_HS_ERR_PID}" == "y" ]; then + numErrFiles=`ls ${HOME}/hs_err_pid* 2> /dev/null | wc -l` + t1=`date "+%Y%m%d %H:%M:%S"` + + if [ "${numErrFiles}" == "0" ]; then + echo "*** NO hs_err_pid files to capture" + echo "${t1}: No hs_err_pid files to capture" >> $processFile + else + echo "Capturing ${numErrFiles} hs_err_pids" + echo "${t1}: Capturing ${numErrFiles} hs_err_pids" >> $processFile + mv ${HOME}/hs_err_pid* ${dataPath} + fi + + echo "" >> $processFile +fi + +# Grab the cave console logs for the last 24 hours as well as the current alertviz database, if pid mode only grab cave for that pid +if [ "${GRAB_CAVE_AND_ALERTVIZ_LOGS}" == "y" ]; then + dir="${HOME}/caveData/logs/${hostName}" + if [ ! -d $dir ]; then + dir="${HOME}/caveData/logs/${strippedHostName}" + if [ ! -d $dir ]; then + dir="${HOME}/caveData/logs/${fullHostName}" + fi + fi + + t1=`date "+%Y%m%d %H:%M:%S"` + if [ -d $dir ]; then + echo "Capturing alertviz logs" + echo "${t1}: Capturing alertviz logs" >> $processFile + mkdir ${dataPath}/alertVizDatabase + cp -r $dir ${dataPath}/alertVizDatabase + else + echo "*** NO alertviz logs to capture" + echo "${t1}: *** Can't find alertviz logs to capture" >> $processFile + echo "" >> $processFile + fi + + dir="${HOME}/caveData/logs/consoleLogs/${hostName}" + if [ ! -d $dir ]; then + dir="${HOME}/caveData/logs/consoleLogs/${strippedHostName}" + if [ ! -d $dir ]; then + dir="${HOME}/caveData/logs/consoleLogs/${fullHostName}" + fi + fi + + t1=`date "+%Y%m%d %H:%M:%S"` + # grab any logs written to in last 2 hours, or pid mode only that log + if [ -d $dir ]; then + echo "Capturing cave logs" + echo "${t1}: Capturing cave logs" >> $processFile + mkdir -p ${dataPath}/consoleLogs + if [ ! -z ${cavePid} ]; then + # logs have cave executable pid or worker pid in the name (-o means OR) + find $dir -type f -name "*$(determineCaveProcess ${cavePid})*" -o -name "*${cavePid}*" -exec cp {} ${dataPath}/consoleLogs \; + else + find $dir -type f -mmin -120 -exec cp {} ${dataPath}/consoleLogs \; + fi + else + echo "*** NO cave logs to capture" + echo "${t1}: *** Can't find cave logs to capture" >> $processFile + echo "" >> $processFile + fi +fi + +# grab the version information +if [ "$RUN_VERSIONS" == "y" ]; then + runVersions +fi + +# wait for any backgrounded processes by this script to finish +wait + +message="" + +# tar/gz the output +if [ "${TGZ_OUTPUT}" == "y" ]; then + echo "Tar/zipping captured data" + if [ ! -z ${cavePid} ]; then + tgzFile="${hostPath}/captureData_${curTime}_pid_${cavePid}.tgz" + else + tgzFile="${hostPath}/captureData_${curTime}.tgz" + fi + + cd .. 
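# Hedged editorial note on the tar step below: it runs from the parent
# directory so the archive holds a relative captureData_<timestamp> tree;
# --remove-files deletes inputs as they are archived, and the following
# rm -rf is a belt-and-suspenders cleanup of anything left behind.
# Illustrative result, given basePath and curTime as set above:
#   /data/fxa/cave/<host>/captureData_20180905_121530_pid_12345.tgz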
+ tar -czf $tgzFile --remove-files captureData_${curTime} + rm -rf ${dataPath} + message="Data captured to $tgzFile" +else + message="Data captured to ${dataPath}" +fi + +zenity --info --no-wrap --title="Capture Done" --text="$message" > /dev/null 2>&1 & +echo +echo $message +rm ~/.pgpass + +cd $curDir + diff --git a/edexOsgi/deploy.edex.awips2/esb/conf/modes/modes.xml b/edexOsgi/deploy.edex.awips2/esb/conf/modes/modes.xml index 00bf7517f3..8f57364c3b 100644 --- a/edexOsgi/deploy.edex.awips2/esb/conf/modes/modes.xml +++ b/edexOsgi/deploy.edex.awips2/esb/conf/modes/modes.xml @@ -62,7 +62,7 @@ ingest pluginExclude goesrExclude - .*(airmet|atcf|aww|convsigmet|gfe|grid|hydro|intlsigmet|modis|ncpafm|ncuair|profiler|netcdf-grid).* + .*(lma|airmet|atcf|aww|convsigmet|gfe|grid|hydro|intlsigmet|modis|ncpafm|ncuair|profiler|netcdf-grid).* .*(nonconvsigmet|satellite|sgwh|ssha|stats|stormtrack|textlightning_ep|useradmin|wcp).* diff --git a/environment.yml b/environment.yml index 9bd0a1f0b6..786b9354f8 100755 --- a/environment.yml +++ b/environment.yml @@ -6,5 +6,6 @@ dependencies: - python=2.7 - pip - pip: + - mkdocs==0.17.5 - mkdocs-unidata==1.5.7 - fontawesome_markdown diff --git a/javaUtilities/build.wes2bridge.utility/.project b/javaUtilities/build.wes2bridge.utility/.project deleted file mode 100644 index 15491e3278..0000000000 --- a/javaUtilities/build.wes2bridge.utility/.project +++ /dev/null @@ -1,11 +0,0 @@ - - - build.wes2bridge.utility - - - - - - - - diff --git a/javaUtilities/build.wes2bridge.utility/ant/setup.xml b/javaUtilities/build.wes2bridge.utility/ant/setup.xml deleted file mode 100644 index 17cf416d2e..0000000000 --- a/javaUtilities/build.wes2bridge.utility/ant/setup.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/javaUtilities/build.wes2bridge.utility/build.wes2bridge.utility.ecl b/javaUtilities/build.wes2bridge.utility/build.wes2bridge.utility.ecl deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/javaUtilities/build.wes2bridge.utility/wes2bridge/build.properties b/javaUtilities/build.wes2bridge.utility/wes2bridge/build.properties deleted file mode 100644 index 94e3d35e3a..0000000000 --- a/javaUtilities/build.wes2bridge.utility/wes2bridge/build.properties +++ /dev/null @@ -1,28 +0,0 @@ -# Wes2Bridge Utility pde build properties. 
- -product=/com.raytheon.wes2bridge.utility.product/wes2bridge.utility.product - -topLevelElementType=feature -buildType=I -buildId=Wes2Bridge -buildLabel=${buildType}.${buildId} -timestamp=007 -archivePrefix=wes2bridge -archivesFormat=linux, gtk, x86 - folder -collectingFolder=${archivePrefix} - -runPackager=false -skipBase=true -skipFetch=true -skipMaps=true -baseos=linux -basews=gtk -basearch=x86 - -logExtension=.log -javacDebugInfo=false -javacFailOnError=true -javacVerbose=true -javacSource=1.8 -javacTarget=1.8 -compilerArg=-g:lines,source diff --git a/javaUtilities/build.wes2bridge.utility/wes2bridge/customTargets.xml b/javaUtilities/build.wes2bridge.utility/wes2bridge/customTargets.xml deleted file mode 100644 index 7b7b4a0191..0000000000 --- a/javaUtilities/build.wes2bridge.utility/wes2bridge/customTargets.xml +++ /dev/null @@ -1,195 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/javaUtilities/com.raytheon.wes2bridge.common/.classpath b/javaUtilities/com.raytheon.wes2bridge.common/.classpath deleted file mode 100644 index eca7bdba8f..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/javaUtilities/com.raytheon.wes2bridge.common/.project b/javaUtilities/com.raytheon.wes2bridge.common/.project deleted file mode 100644 index 3e15a62986..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/.project +++ /dev/null @@ -1,28 +0,0 @@ - - - com.raytheon.wes2bridge.common - - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.pde.ManifestBuilder - - - - - org.eclipse.pde.SchemaBuilder - - - - - - org.eclipse.pde.PluginNature - org.eclipse.jdt.core.javanature - - diff --git a/javaUtilities/com.raytheon.wes2bridge.common/.settings/org.eclipse.jdt.core.prefs b/javaUtilities/com.raytheon.wes2bridge.common/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index 5df5331175..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,8 +0,0 @@ -#Thu Jan 12 13:45:52 CST 2012 -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 -org.eclipse.jdt.core.compiler.compliance=1.6 -org.eclipse.jdt.core.compiler.problem.assertIdentifier=error -org.eclipse.jdt.core.compiler.problem.enumIdentifier=error -org.eclipse.jdt.core.compiler.source=1.6 diff --git a/javaUtilities/com.raytheon.wes2bridge.common/META-INF/MANIFEST.MF b/javaUtilities/com.raytheon.wes2bridge.common/META-INF/MANIFEST.MF deleted file mode 100644 index 5803672776..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/META-INF/MANIFEST.MF +++ /dev/null @@ -1,9 +0,0 @@ -Manifest-Version: 1.0 -Bundle-ManifestVersion: 2 -Bundle-Name: Wes2Bridge Utility Common -Bundle-SymbolicName: com.raytheon.wes2bridge.common -Bundle-Version: 1.0.0.qualifier -Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.8 -Export-Package: com.raytheon.wes2bridge.common.configuration, - com.raytheon.wes2bridge.configuration.jaxb diff --git a/javaUtilities/com.raytheon.wes2bridge.common/build.properties 
b/javaUtilities/com.raytheon.wes2bridge.common/build.properties deleted file mode 100644 index 34d2e4d2da..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/build.properties +++ /dev/null @@ -1,4 +0,0 @@ -source.. = src/ -output.. = bin/ -bin.includes = META-INF/,\ - . diff --git a/javaUtilities/com.raytheon.wes2bridge.common/build.xml b/javaUtilities/com.raytheon.wes2bridge.common/build.xml deleted file mode 100644 index b96cbeafd9..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/build.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.common/com.raytheon.wes2bridge.common.ecl b/javaUtilities/com.raytheon.wes2bridge.common/com.raytheon.wes2bridge.common.ecl deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/common/configuration/Wes2BridgeCase.java b/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/common/configuration/Wes2BridgeCase.java deleted file mode 100644 index e0c13e475f..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/common/configuration/Wes2BridgeCase.java +++ /dev/null @@ -1,198 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.wes2bridge.common.configuration; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -/** - * Represents a Wes2Bridge test case that will be used to configure a new edex - * environment. - * - *
    - *
    - * SOFTWARE HISTORY
    - *
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * Aug 12, 2014 3521       bkowal      Initial creation
    - * Apr 20, 2015 4392       dlovely     Removed un-used JMX port configuration
    - *
    - *
    - * - * @author bkowal - * @version 1.0 - */ - -@XmlRootElement(name = "Wes2BridgeCase") -@XmlAccessorType(XmlAccessType.NONE) -public class Wes2BridgeCase { - @XmlElement - private String name; - - @XmlElement - private String dataArchiveRoot; - - @XmlElement - private int databasePort; - - @XmlElement - private int edexHttpPort; - - @XmlElement - private int qpidHttpPort; - - @XmlElement - private int jmsPort; - - @XmlElement - private int httpdPypiesPort; - - @XmlElement - private int pypiesLoggingPort; - - /** - * - */ - public Wes2BridgeCase() { - } - - /** - * @return the name - */ - public String getName() { - return name; - } - - /** - * @param name - * the name to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * @return the dataArchiveRoot - */ - public String getDataArchiveRoot() { - return dataArchiveRoot; - } - - /** - * @param dataArchiveRoot - * the dataArchiveRoot to set - */ - public void setDataArchiveRoot(String dataArchiveRoot) { - this.dataArchiveRoot = dataArchiveRoot; - } - - /** - * @return the databasePort - */ - public int getDatabasePort() { - return databasePort; - } - - /** - * @param databasePort - * the databasePort to set - */ - public void setDatabasePort(int databasePort) { - this.databasePort = databasePort; - } - - /** - * @return the edexHttpPort - */ - public int getEdexHttpPort() { - return edexHttpPort; - } - - /** - * @param edexHttpPort - * the edexHttpPort to set - */ - public void setEdexHttpPort(int edexHttpPort) { - this.edexHttpPort = edexHttpPort; - } - - /** - * @return the qpidHttpPort - */ - public int getQpidHttpPort() { - return qpidHttpPort; - } - - /** - * @param qpidHttpPort - * the qpidHttpPort to set - */ - public void setQpidHttpPort(int qpidHttpPort) { - this.qpidHttpPort = qpidHttpPort; - } - - /** - * @return the jmsPort - */ - public int getJmsPort() { - return jmsPort; - } - - /** - * @param jmsPort - * the jmsPort to set - */ - public void setJmsPort(int jmsPort) { - this.jmsPort = jmsPort; - } - - /** - * @return the httpdPypiesPort - */ - public int getHttpdPypiesPort() { - return httpdPypiesPort; - } - - /** - * @param httpdPypiesPort - * the httpdPypiesPort to set - */ - public void setHttpdPypiesPort(int httpdPypiesPort) { - this.httpdPypiesPort = httpdPypiesPort; - } - - /** - * @return the pypiesLoggingPort - */ - public int getPypiesLoggingPort() { - return pypiesLoggingPort; - } - - /** - * @param pypiesLoggingPort - * the pypiesLoggingPort to set - */ - public void setPypiesLoggingPort(int pypiesLoggingPort) { - this.pypiesLoggingPort = pypiesLoggingPort; - } -} \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/configuration/jaxb/Wes2BridgeJaxbManager.java b/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/configuration/jaxb/Wes2BridgeJaxbManager.java deleted file mode 100644 index a441a77cb7..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.common/src/com/raytheon/wes2bridge/configuration/jaxb/Wes2BridgeJaxbManager.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. 
- * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.wes2bridge.configuration.jaxb; - -import java.io.File; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; - -import com.raytheon.wes2bridge.common.configuration.Wes2BridgeCase; - -/** - * Uses jaxb to convert Wes2Bridge configuration in XML format to a Java POJO. - * - *
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * Aug 12, 2014 3521       bkowal      Initial creation
    - *
    - *
    - *
diff --git a/javaUtilities/com.raytheon.wes2bridge.configuration/build.xml b/javaUtilities/com.raytheon.wes2bridge.configuration/build.xml deleted file mode 100644 index 4d3d3da34e..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.configuration/build.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.configuration/com.raytheon.wes2bridge.configuration.ecl b/javaUtilities/com.raytheon.wes2bridge.configuration/com.raytheon.wes2bridge.configuration.ecl deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/javaUtilities/com.raytheon.wes2bridge.configuration/src/com/raytheon/wes2bridge/configuration/ConfigurationUtility.java b/javaUtilities/com.raytheon.wes2bridge.configuration/src/com/raytheon/wes2bridge/configuration/ConfigurationUtility.java deleted file mode 100644 index c67435d054..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.configuration/src/com/raytheon/wes2bridge/configuration/ConfigurationUtility.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * - */ -package com.raytheon.wes2bridge.configuration; - -import java.io.File; - -import javax.xml.bind.JAXBException; - -import com.raytheon.wes2bridge.common.configuration.Wes2BridgeCase; -import com.raytheon.wes2bridge.configuration.jaxb.Wes2BridgeJaxbManager; - -/** - * A command line utility that can be used to retrieve individual values from configuration. - * - *
    - *
    - * SOFTWARE HISTORY
    - *
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * ?            ?          bkowal      Initial Creation.
    - * Aug 14, 2014 3521       bkowal      Updated to use Wes2BridgeCase.
    - *
    - *
    - * - * @author bkowal - * @version 1.0 - */ -public class ConfigurationUtility { - private static final String FIELD_NAME = "-name"; - - private static final String FIELD_ARCHIVE = "-archiveRoot"; - - private static final String FIELD_DBPORT = "-databasePort"; - - private static final String FIELD_HTTPPORT = "-httpPort"; - - private static final String FIELD_JMSPORT = "-jmsPort"; - - /** - * @param args - */ - public static void main(String[] args) { - if (args.length != 2) { - System.out - .println("Error: both a configuration file and a field must be specified."); - System.exit(-1); - } - - Wes2BridgeCase wes2BridgeCase = null; - try { - wes2BridgeCase = Wes2BridgeJaxbManager.toWes2BridgeCase(new File( - args[0])); - } catch (JAXBException e) { - e.printStackTrace(); - System.out - .println("FAILED TO READ THE SPECIFIED CONFIGURATION FILE: " - + args[0] + "!"); - System.exit(-1); - } - - final String field = args[1]; - if (field.equals(FIELD_NAME)) { - System.out.print(wes2BridgeCase.getName()); - } else if (field.equals(FIELD_ARCHIVE)) { - System.out.print(wes2BridgeCase.getDataArchiveRoot()); - } else if (field.equals(FIELD_DBPORT)) { - System.out.print(wes2BridgeCase.getDatabasePort()); - } else if (field.equals(FIELD_HTTPPORT)) { - System.out.print(wes2BridgeCase.getEdexHttpPort()); - } else if (field.equals(FIELD_JMSPORT)) { - System.out.print(wes2BridgeCase.getJmsPort()); - } - System.exit(0); - } -} \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.configuration/src/log4j.properties b/javaUtilities/com.raytheon.wes2bridge.configuration/src/log4j.properties deleted file mode 100644 index 6e516c4cb7..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.configuration/src/log4j.properties +++ /dev/null @@ -1,6 +0,0 @@ -log4j.rootLogger=ERROR, CA - -log4j.appender.CA=org.apache.log4j.ConsoleAppender -log4j.appender.CA.layout=org.apache.log4j.PatternLayout - -log4j.appender.CA.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath b/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath deleted file mode 100644 index eca7bdba8f..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/.project b/javaUtilities/com.raytheon.wes2bridge.datalink/.project deleted file mode 100644 index 2de6deee13..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/.project +++ /dev/null @@ -1,28 +0,0 @@ - - - com.raytheon.wes2bridge.datalink - - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.pde.ManifestBuilder - - - - - org.eclipse.pde.SchemaBuilder - - - - - - org.eclipse.pde.PluginNature - org.eclipse.jdt.core.javanature - - diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/.settings/org.eclipse.jdt.core.prefs b/javaUtilities/com.raytheon.wes2bridge.datalink/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index d6811303d3..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,8 +0,0 @@ -#Thu Jan 12 13:58:47 CST 2012 -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 -org.eclipse.jdt.core.compiler.compliance=1.6 -org.eclipse.jdt.core.compiler.problem.assertIdentifier=error -org.eclipse.jdt.core.compiler.problem.enumIdentifier=error 
-org.eclipse.jdt.core.compiler.source=1.6 diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/META-INF/MANIFEST.MF b/javaUtilities/com.raytheon.wes2bridge.datalink/META-INF/MANIFEST.MF deleted file mode 100644 index 4e84d64eac..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/META-INF/MANIFEST.MF +++ /dev/null @@ -1,7 +0,0 @@ -Manifest-Version: 1.0 -Bundle-ManifestVersion: 2 -Bundle-Name: Wes2Bridge Datalink Utility -Bundle-SymbolicName: com.raytheon.wes2bridge.datalink -Bundle-Version: 1.0.0.qualifier -Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/build.properties b/javaUtilities/com.raytheon.wes2bridge.datalink/build.properties deleted file mode 100644 index 34d2e4d2da..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/build.properties +++ /dev/null @@ -1,4 +0,0 @@ -source.. = src/ -output.. = bin/ -bin.includes = META-INF/,\ - . diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/build.xml b/javaUtilities/com.raytheon.wes2bridge.datalink/build.xml deleted file mode 100644 index 77247af8ce..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/build.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/com.raytheon.wes2bridge.datalink.ecl b/javaUtilities/com.raytheon.wes2bridge.datalink/com.raytheon.wes2bridge.datalink.ecl deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/src/com/raytheon/wes2bridge/datalink/DataLinkUtility.java b/javaUtilities/com.raytheon.wes2bridge.datalink/src/com/raytheon/wes2bridge/datalink/DataLinkUtility.java deleted file mode 100644 index 5878159a29..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/src/com/raytheon/wes2bridge/datalink/DataLinkUtility.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * - */ -package com.raytheon.wes2bridge.datalink; - -/** - * @author bkowal - * - * This is an extremely simple utility. This program - * expects on argument: a postgresql database directory. - * This utility will then extract the "/awips2/database/data" and - * return only the name of the database directory. 
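 * (Editorial example, not in the original file: given the argument
 * "/awips2/database/data/metadata", DataLinkUtility prints "metadata",
 * because main() strips the AWIPSII_DATA prefix via String.replace.)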
- */ -public class DataLinkUtility -{ - private static final String AWIPSII_DATA = "/awips2/database/data/"; - - /** - * @param args - */ - public static void main(String[] args) - { - if (args.length != 1) - { - System.out.print("Error: The name of the data directory must be specified."); - System.exit(-1); - } - - String dataDirectory = args[0]; - System.out.print(dataDirectory.replace(AWIPSII_DATA, "")); - - System.exit(0); - } -} diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/.classpath b/javaUtilities/com.raytheon.wes2bridge.manager/.classpath deleted file mode 100644 index eca7bdba8f..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/.project b/javaUtilities/com.raytheon.wes2bridge.manager/.project deleted file mode 100644 index 92f4d8a99a..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/.project +++ /dev/null @@ -1,28 +0,0 @@ - - - com.raytheon.wes2bridge.manager - - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.pde.ManifestBuilder - - - - - org.eclipse.pde.SchemaBuilder - - - - - - org.eclipse.pde.PluginNature - org.eclipse.jdt.core.javanature - - diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/META-INF/MANIFEST.MF b/javaUtilities/com.raytheon.wes2bridge.manager/META-INF/MANIFEST.MF deleted file mode 100644 index 2bfda38b3c..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/META-INF/MANIFEST.MF +++ /dev/null @@ -1,10 +0,0 @@ -Manifest-Version: 1.0 -Bundle-ManifestVersion: 2 -Bundle-Name: Wes2Bridge Manager -Bundle-SymbolicName: com.raytheon.wes2bridge.manager -Bundle-Version: 1.16.0.qualifier -Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.8 -Require-Bundle: com.raytheon.wes2bridge.common;bundle-version="1.0.0", - org.apache.commons.lang3, - com.fasterxml.jackson;bundle-version="2.6.5" diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/build.properties b/javaUtilities/com.raytheon.wes2bridge.manager/build.properties deleted file mode 100644 index 34d2e4d2da..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/build.properties +++ /dev/null @@ -1,4 +0,0 @@ -source.. = src/ -output.. = bin/ -bin.includes = META-INF/,\ - . diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/build.xml b/javaUtilities/com.raytheon.wes2bridge.manager/build.xml deleted file mode 100644 index 7d520aa57c..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/build.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/com.raytheon.wes2bridge.manager.ecl b/javaUtilities/com.raytheon.wes2bridge.manager/com.raytheon.wes2bridge.manager.ecl deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java b/javaUtilities/com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java deleted file mode 100644 index efa7ca3fa2..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java +++ /dev/null @@ -1,685 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. 
EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.wes2bridge.manager; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.BufferedReader; -import java.io.FileWriter; -import java.io.BufferedWriter; -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; -import java.util.regex.Matcher; - -import javax.xml.bind.JAXBException; - -import org.apache.commons.lang3.StringUtils; - -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.raytheon.wes2bridge.common.configuration.Wes2BridgeCase; -import com.raytheon.wes2bridge.configuration.jaxb.Wes2BridgeJaxbManager; - -/** - * This java-based utility is used to update a wes2bridge environment. This - * utility is invoked by the wes2bridge management script (bash) after the - * management script spawns a new edex, database, qpid, pypies. Only "base" - * files are updated based on the configuration file. The new wes2bridge - * environment will still need to be localized. - * - *
    - * 
    - * SOFTWARE HISTORY
    - * 
    - * Date         Ticket#    Engineer    Description
    - * ------------ ---------- ----------- --------------------------
    - * Jan 18, 2012 1490       bkowal      Pypies is now added to each
    - *                                     edex-environment instance
    - * Apr 18, 2013 1899       bkowal      Updates qpid 0.18 configuration now.
    - * July 2, 2013 2133       bkowal      Updates for yajsw-wrapped qpid
    - * Dec 11, 2013 2182       bkowal      Update the postgresql port in
    - *                                     postgresql.conf instead of the
    - *                                     postgresql startup scripts
    - * Aug 14, 2014 3521       bkowal      Updated to use Wes2BridgeCase. Eliminated
    - *                                     configuration that is no longer used and
    - *                                     updated EDEX re-configuration.
    - * Apr 15, 2015 4392       dlovely     Updates the new qpid json configuration now
    - * Apr 20, 2015 4392       dlovely     Removed un-used JMX port configuration
    - * Nov 12, 2015 5121       bkowal      Write Java, Python, and PSQL locations to
    - *                                     setup.env to override the default locations.
    - * Mar 07, 2016 5067       bkowal      Update to use fasterxml jackson.
    - * Jul 06, 2016 5734       bkowal      Update edex_camel pid lookup match text.
    - * 
    - * - * @author bkowal - * @version 1.0 - */ -public class Wes2BridgeManager { - private static final String AWIPSII = "/awips2"; - - private static final String AWIPSII_WES2BRIDGE_SCRIPTS = AWIPSII - + "/edex-environment/scripts"; - - private static final String WES2BRIDGE_DIRECTORY = "/usr/local/edex-environment"; - - private static final int GROUP_INDEX_ONE = 1; - - private static final int GROUP_INDEX_TWO = 2; - - private static final int EXIT_FAILURE = -1; - - private static final int EXIT_SUCCESS = 0; - - private static final String DEFAULT_HDF5_DIRECTORY = "/edex/data/hdf5"; - - private Wes2BridgeCase wes2BridgeCase; - - private String wes2BridgeScripts = null; - - private static final TypeReference> MAP_TYPE_REFERENCE = new TypeReference>() { - }; - - public static final String QPID_NAME = "name"; - - public static final String QPID_AMQP = "AMQP"; - - public static final String QPID_HTTP = "HTTP"; - - public static final String QPID_PORT = "port"; - - public static final String QPID_PORTS = "ports"; - - public Wes2BridgeManager() { - } - - /** - * @param args - */ - public static void main(String[] args) { - if (args.length != 1) { - System.out - .println("ERROR: The configuration file has not been specified."); - System.exit(EXIT_FAILURE); - } - - Wes2BridgeManager manager = new Wes2BridgeManager(); - try { - manager.init(args[0]); - } catch (Exception e1) { - e1.printStackTrace(); - System.exit(EXIT_FAILURE); - } - - try { - manager.reconfigureEdex(); - } catch (Exception e1) { - e1.printStackTrace(); - System.exit(EXIT_FAILURE); - } - - try { - manager.reconfigurePostgreSQL(); - } catch (Exception e1) { - e1.printStackTrace(); - System.exit(EXIT_FAILURE); - } - - try { - manager.reconfigureQPID(); - } catch (Exception e1) { - e1.printStackTrace(); - System.exit(EXIT_FAILURE); - } - - try { - manager.reconfigurePypies(); - } catch (Exception e1) { - e1.printStackTrace(); - System.exit(EXIT_FAILURE); - } - - System.exit(EXIT_SUCCESS); - } - - public void init(String arg1) throws JAXBException { - this.wes2BridgeCase = Wes2BridgeJaxbManager.toWes2BridgeCase(new File( - arg1)); - - this.wes2BridgeScripts = WES2BRIDGE_DIRECTORY + "/" - + this.wes2BridgeCase.getName() + "/" + "edex-environment"; - } - - /* - * Updates setup.env and wrapper.conf. 
- */ - public void reconfigureEdex() throws FileNotFoundException, IOException { - final String srcEdexDirectory = AWIPSII + "/" + "edex"; - final String edexDirectory = WES2BRIDGE_DIRECTORY + "/" - + this.wes2BridgeCase.getName() + "/" + "edex"; - - this.updateEdexSetup(srcEdexDirectory, edexDirectory); - this.updateEdexWrapper(srcEdexDirectory, edexDirectory); - this.updateEdexCamel(edexDirectory); - } - - private void updateEdexSetup(String srcEdexDirectory, String edexDirectory) - throws FileNotFoundException, IOException, IllegalStateException { - String srcsetup_env = srcEdexDirectory + "/bin/setup.env"; - String setup_env = edexDirectory + "/bin/setup.env"; - - BufferedReader br = null; - BufferedWriter bw = null; - - try { - br = this.getBufferedReader(srcsetup_env); - bw = this.getBufferedWriter(setup_env); - - final String line1 = "export DATA_ARCHIVE_ROOT="; - final String line2 = "export DB_PORT="; - final String line3 = "export BROKER_PORT="; - final String line4 = "export HTTP_PORT="; - final String line5 = "export BROKER_HTTP="; - final String line6 = "export SHARE_DIR="; - final String pypiesServerPattern = "(export PYPIES_SERVER=http://.+:)[1-9][0-9]+"; - final Pattern pattern7 = Pattern.compile(pypiesServerPattern); - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - Matcher matcher = pattern7.matcher(line); - - if (line.startsWith(line1)) { - line = line1 + this.wes2BridgeCase.getDataArchiveRoot(); - } else if (line.startsWith(line2)) { - line = line2 + this.wes2BridgeCase.getDatabasePort(); - } else if (line.startsWith(line3)) { - line = line3 + this.wes2BridgeCase.getJmsPort(); - } else if (line.startsWith(line4)) { - line = line4 + this.wes2BridgeCase.getEdexHttpPort(); - } else if (line.startsWith(line5)) { - line = line5 + this.wes2BridgeCase.getQpidHttpPort(); - } else if (line.startsWith(line6)) { - line = line6 + edexDirectory + "/data/share"; - } else if (matcher.matches()) { - line = matcher.group(GROUP_INDEX_ONE) - + this.wes2BridgeCase.getHttpdPypiesPort(); - } - - bw.write(line + "\n"); - } - - /* - * Need to overwrite the Java, Python, and PSQL locations using - * setup.env ever since edex_camel was updated to use a login shell - * to run the EDEX start.sh script. - */ - bw.write("export JAVA_INSTALL=/awips2/java\n"); - bw.write("export PYTHON_INSTALL=/awips2/python\n"); - bw.write("export PSQL_INSTALL=/awips2/psql\n"); - } finally { - br.close(); - bw.close(); - } - } - - /* Disable JMX. */ - private void updateEdexWrapper(String srcEdexDirectory, String edexDirectory) - throws FileNotFoundException, IOException { - String srcwrapper_conf = srcEdexDirectory + "/conf/wrapper.conf"; - String wrapper_conf = edexDirectory + "/conf/wrapper.conf"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcwrapper_conf); - bw = this.getBufferedWriter(wrapper_conf); - - /* - * Add a new wes2bridge.instance JVM argument so that it will be - * possible to determine which edex instance belongs to which test - * case. 
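Every update* method in this class is the same read-substitute-write pass over a template configuration file. A minimal Python sketch of that pattern, not part of the original tool: the case values are shown as a plain dict whose keys mirror the Wes2BridgeCase getters used above, and the file paths are left to the caller.

    import re

    def rewrite_setup_env(src, dest, case):
        # Mirror of the Java loop: copy the template line by line, swapping
        # in per-case values for known keys. Simple keys are prefix matches;
        # PYPIES_SERVER keeps its host and swaps only the port.
        pypies = re.compile(r'(export PYPIES_SERVER=http://.+:)[1-9][0-9]+')
        prefixes = {
            'export DB_PORT=': case['databasePort'],
            'export BROKER_PORT=': case['jmsPort'],
            'export HTTP_PORT=': case['edexHttpPort'],
        }
        with open(src) as fin, open(dest, 'w') as fout:
            for line in fin:
                line = line.rstrip('\n')
                for prefix, value in prefixes.items():
                    if line.startswith(prefix):
                        line = prefix + str(value)
                        break
                else:
                    m = pypies.match(line)
                    if m:
                        line = m.group(1) + str(case['httpdPypiesPort'])
                fout.write(line + '\n')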
- */ - - int javaAdditionalMax = 0; - - final String line1 = "wrapper.jvm.parameter.order.2=-Daw.site.identifier"; - - final String javaAdditionalPatternRegex = "wrapper\\.java\\.additional\\.([0-9]+)=.+"; - final Pattern javaAdditionalPattern = Pattern - .compile(javaAdditionalPatternRegex); - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - Matcher matcher = javaAdditionalPattern.matcher(line); - if (matcher.matches()) { - /* Guaranteed to be numeric based on the regex */ - int javaAdditional = Integer.parseInt(matcher.group(1)); - if (javaAdditional > javaAdditionalMax) { - javaAdditionalMax = javaAdditional; - } - } - - if (line.equals(line1)) { - bw.write(line + "\n"); - /* - * Ensure that the wes2bridge test name will be third in the - * list of jvm arguments. - */ - line = "wrapper.jvm.parameter.order.3=-Dwes2bridge.instance"; - } - - bw.write(line + "\n"); - } - - /* - * add the additional JVM argument. - */ - ++javaAdditionalMax; - String jvmArg = "wrapper.java.additional." + javaAdditionalMax - + "=-Dwes2bridge.instance=" + this.wes2BridgeCase.getName(); - bw.write(jvmArg); - } finally { - br.close(); - bw.close(); - } - } - - private void updateEdexCamel(String edexDirectory) - throws FileNotFoundException, IOException { - final String srcedex_camel = AWIPSII_WES2BRIDGE_SCRIPTS + "/" - + "edex_camel"; - final String edex_camel = this.wes2BridgeScripts + "/edex_camel"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcedex_camel); - bw = this.getBufferedWriter(edex_camel); - - final String line1 = "export EDEX_INSTALL="; - final String line2 = "export DATA_ARCHIVE_ROOT="; - final String line3 = "CAMELPROCESS=`ps -ef | grep \"aw.site.identifier\"|grep -c \"edex.run.mode=${1} \" `"; - final String line4 = "_camel_pid=`pgrep -f -u $EDEXUSER \"java -Dedex.run.mode=${1} \"`"; - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - if (line.trim().startsWith(line1)) { - line = line1 + edexDirectory; - } else if (line.trim().startsWith(line2)) { - line = line2 + this.wes2BridgeCase.getDataArchiveRoot(); - } else if (line.trim().startsWith(line3)) { - line = "CAMELPROCESS=`ps -ef | " - + "grep \"wes2bridge.instance=" - + this.wes2BridgeCase.getName() + "\" | " - + "grep -c \"edex.run.mode=${1} \" `"; - } else if (line.trim().startsWith(line4)) { - line = "_camel_pid=`pgrep -f \"java.*-Dedex.run.mode=${1} -Daw.site.identifier=.+ -Dwes2bridge.instance=" - + this.wes2BridgeCase.getName() + " \"`"; - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - public void reconfigurePostgreSQL() throws FileNotFoundException, - IOException { - final String postgresqlRootDirectory = WES2BRIDGE_DIRECTORY - + File.separator + this.wes2BridgeCase.getName(); - final String srcDataDirectory = AWIPSII + File.separator + "data"; - - this.updateEdexPostgres(postgresqlRootDirectory); - this.updatePostgresqlConf(srcDataDirectory); - } - - private void updateEdexPostgres(String postgresqlRootDirectory) - throws FileNotFoundException, IOException { - final String srcedex_postgres = AWIPSII_WES2BRIDGE_SCRIPTS + "/" - + "edex_postgres"; - final String edex_postgres = this.wes2BridgeScripts + "/edex_postgres"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcedex_postgres); - bw = this.getBufferedWriter(edex_postgres); - - final String line1 = "POSTGRESQL_INSTALL_ROOT="; - - String line = StringUtils.EMPTY; - while ((line = 
br.readLine()) != null) { - if (line.startsWith(line1)) { - line = line1 + postgresqlRootDirectory; - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - private void updatePostgresqlConf(String srcDataDirectory) - throws FileNotFoundException, IOException { - final String postgresqlConf = "postgresql.conf"; - final String srcPostgresqlConf = srcDataDirectory + File.separator - + postgresqlConf; - final String destPostgresqlConf = WES2BRIDGE_DIRECTORY + File.separator - + this.wes2BridgeCase.getName() + File.separator + "data" - + File.separator + postgresqlConf; - - final String regex1 = "^(port = )([0-9]+)(.+)"; - final Pattern pattern1 = Pattern.compile(regex1); - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcPostgresqlConf); - bw = this.getBufferedWriter(destPostgresqlConf); - - String line = StringUtils.EMPTY; - // only used once - clearing it will not be necessary - StringBuilder stringBuilder = new StringBuilder(); - while ((line = br.readLine()) != null) { - Matcher matcher = pattern1.matcher(line); - if (matcher.matches()) { - stringBuilder.append(matcher.group(1)); - stringBuilder.append(this.wes2BridgeCase.getDatabasePort()); - stringBuilder.append(matcher.group(3)); - - line = stringBuilder.toString(); - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - public void reconfigureQPID() throws FileNotFoundException, IOException { - final String srcQpidDirectory = AWIPSII + "/" + "qpid"; - final String qpidDirectory = WES2BRIDGE_DIRECTORY + "/" - + this.wes2BridgeCase.getName() + "/" + "qpid"; - - this.updateQpidConfigJSON(srcQpidDirectory, qpidDirectory); - this.updateQPIDD(qpidDirectory); - } - - /* Updates qpid config.json */ - private void updateQpidConfigJSON(String srcQpidDirectory, - String qpidDirectory) throws FileNotFoundException, IOException { - String srcconfig_json = srcQpidDirectory + "/config.json"; - String config_json = qpidDirectory + "/config.json"; - - try (BufferedWriter bw = this.getBufferedWriter(config_json);) { - - List lines = Files.readAllLines(Paths.get(srcconfig_json), - Charset.defaultCharset()); - StringBuilder stringBuilder = new StringBuilder(); - for (String line : lines) { - stringBuilder.append(line); - } - - ObjectMapper mapper = new ObjectMapper(); - Map attributesMap = mapper.readValue( - stringBuilder.toString(), MAP_TYPE_REFERENCE); - - @SuppressWarnings("unchecked") - ArrayList ports = (ArrayList) attributesMap - .get(QPID_PORTS); - - for (int x = 0; x < ports.size(); x++) { - @SuppressWarnings("unchecked") - Map port = (Map) ports.get(x); - String name = (String) port.get(QPID_NAME); - if (QPID_AMQP.equals(name)) { - port.put(QPID_PORT, this.wes2BridgeCase.getJmsPort()); - } else if (QPID_HTTP.equals(name)) { - port.put(QPID_PORT, this.wes2BridgeCase.getQpidHttpPort()); - } - } - - /* - * Write the updated configuration file to its destination. 
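The JSON rewrite is easy to picture outside of Jackson. A rough Python equivalent of the same transformation, assuming a config.json shaped the way this method expects (a top-level "ports" array whose entries carry "name" and "port" keys):

    import json

    def set_qpid_ports(src, dest, jms_port, http_port):
        # Same transformation as updateQpidConfigJSON: parse config.json,
        # walk the top-level "ports" array, and patch the entries named
        # "AMQP" and "HTTP" with the per-case port numbers.
        with open(src) as f:
            config = json.load(f)
        for port in config['ports']:
            if port.get('name') == 'AMQP':
                port['port'] = jms_port
            elif port.get('name') == 'HTTP':
                port['port'] = http_port
        with open(dest, 'w') as f:
            json.dump(config, f, indent=4)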
- */ - mapper.writerWithDefaultPrettyPrinter().writeValue(bw, - attributesMap); - } - } - - private void updateQPIDD(String qpidDirectory) - throws FileNotFoundException, IOException { - final String srcqpidd = AWIPSII_WES2BRIDGE_SCRIPTS + "/" + "qpidd"; - final String qpidd = this.wes2BridgeScripts + "/qpidd"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcqpidd); - bw = this.getBufferedWriter(qpidd); - - final String line1 = "QPID_HOME="; - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - if (line.startsWith(line1)) { - line = line1 + qpidDirectory; - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - /* - * This method will: 1) update pypies.cfg 2) update httpd.conf - */ - public void reconfigurePypies() throws FileNotFoundException, IOException { - final String srcPypiesDirectory = AWIPSII + File.separator + "pypies"; - final String pypiesDirectory = WES2BRIDGE_DIRECTORY + File.separator - + this.wes2BridgeCase.getName() + File.separator + "pypies"; - - final String srcHttpdPypiesDirectory = AWIPSII + File.separator - + "httpd_pypies"; - final String httpdPypiesDirectory = WES2BRIDGE_DIRECTORY - + File.separator + this.wes2BridgeCase.getName() - + File.separator + "httpd_pypies"; - - this.updatePypiesCfg(srcPypiesDirectory, pypiesDirectory); - this.updateHttpdConf(srcHttpdPypiesDirectory, httpdPypiesDirectory); - this.updateHttpdPypies(httpdPypiesDirectory, pypiesDirectory); - } - - private void updatePypiesCfg(String srcPypiesDirectory, - String pypiesDirectory) throws FileNotFoundException, IOException, - IllegalArgumentException { - final String pypiesCfgPathSuffix = File.separator + "conf" - + File.separator + "pypies.cfg"; - final String srcpypiescfg = srcPypiesDirectory + pypiesCfgPathSuffix; - final String pypiescfg = pypiesDirectory + pypiesCfgPathSuffix; - - // use the default location for the hdf5 root - final String hdf5DirectoryLocation = WES2BRIDGE_DIRECTORY - + File.separator + this.wes2BridgeCase.getName() - + DEFAULT_HDF5_DIRECTORY; - final String logFileDirectoryLocation = pypiesDirectory - + File.separator + "logs"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcpypiescfg); - bw = this.getBufferedWriter(pypiescfg); - - final String hdf5DirPattern = "(hdf5dir=).+"; - final String logFileDirPattern = "(logFileDir=).+"; - final String loggingPortPattern = "(logging_port=)[1-9][0-9]+"; - final Pattern pattern1 = Pattern.compile(hdf5DirPattern); - final Pattern pattern2 = Pattern.compile(logFileDirPattern); - final Pattern pattern3 = Pattern.compile(loggingPortPattern); - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - Matcher matcher1 = pattern1.matcher(line); - Matcher matcher2 = pattern2.matcher(line); - Matcher matcher3 = pattern3.matcher(line); - - if (matcher1.matches()) { - line = matcher1.group(GROUP_INDEX_ONE); - line += hdf5DirectoryLocation; - } else if (matcher2.matches()) { - line = matcher2.group(GROUP_INDEX_ONE); - line += logFileDirectoryLocation; - } else if (matcher3.matches()) { - line = matcher3.group(GROUP_INDEX_ONE); - line += this.wes2BridgeCase.getPypiesLoggingPort(); - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - private void updateHttpdConf(String srcHttpdPypiesDirectory, - String httpdPypiesDirectory) throws FileNotFoundException, - IOException { - final String httpdConfPathSuffix = File.separator + "etc" 
- + File.separator + "httpd" + File.separator + "conf" - + File.separator + "httpd.conf"; - final String srcHttpdConf = srcHttpdPypiesDirectory - + httpdConfPathSuffix; - final String httpdConf = httpdPypiesDirectory + httpdConfPathSuffix; - final String serverRoot = httpdPypiesDirectory + File.separator + "etc" - + File.separator + "httpd"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srcHttpdConf); - bw = this.getBufferedWriter(httpdConf); - - final String listenPattern = "(Listen )[1-9][0-9]+"; - final String serverRootPattern = "(ServerRoot \").+(\")"; - final Pattern pattern1 = Pattern.compile(listenPattern); - final Pattern pattern2 = Pattern.compile(serverRootPattern); - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - Matcher matcher1 = pattern1.matcher(line); - Matcher matcher2 = pattern2.matcher(line); - if (matcher1.matches()) { - line = matcher1.group(GROUP_INDEX_ONE); - line += this.wes2BridgeCase.getHttpdPypiesPort(); - } else if (matcher2.matches()) { - line = matcher2.group(GROUP_INDEX_ONE); - line += serverRoot; - line += matcher2.group(GROUP_INDEX_TWO); - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - private void updateHttpdPypies(String httpdPypiesDirectory, - String pypiesDirectory) throws IOException, FileNotFoundException { - final String srchttpd_pypies = AWIPSII_WES2BRIDGE_SCRIPTS + "/" - + "httpd-pypies"; - final String httpd_pypies = this.wes2BridgeScripts + "/httpd-pypies"; - - BufferedReader br = null; - BufferedWriter bw = null; - try { - br = this.getBufferedReader(srchttpd_pypies); - bw = this.getBufferedWriter(httpd_pypies); - - final String httpdPypiesInstallPattern = "(HTTPD_PYPIES_INSTALL=).+"; - final String loggingCommandPattern = "( *nohup su awips -c \"\\$loggingCmd > /tmp/pypiesLoggingService)(.log 2>&1\" > /dev/null &)"; - final String pypiesConfigurationPattern = "(export PYPIES_CFG=).+"; - final Pattern pattern1 = Pattern.compile(httpdPypiesInstallPattern); - final Pattern pattern2 = Pattern.compile(loggingCommandPattern); - final Pattern pattern3 = Pattern - .compile(pypiesConfigurationPattern); - - String line = StringUtils.EMPTY; - while ((line = br.readLine()) != null) { - Matcher matcher1 = pattern1.matcher(line); - Matcher matcher2 = pattern2.matcher(line); - Matcher matcher3 = pattern3.matcher(line); - - if (matcher1.matches()) { - line = matcher1.group(GROUP_INDEX_ONE); - line += httpdPypiesDirectory; - } else if (matcher2.matches()) { - line = matcher2.group(GROUP_INDEX_ONE); - line += this.wes2BridgeCase.getName(); - line += matcher2.group(GROUP_INDEX_TWO); - } else if (matcher3.matches()) { - line = matcher3.group(GROUP_INDEX_ONE) + pypiesDirectory - + File.separator + "conf" + File.separator - + "pypies.cfg"; - } - - bw.write(line + "\n"); - } - } finally { - br.close(); - bw.close(); - } - } - - /* - * The following functions and usage of the following functions would no - * longer be necessary with Apache Commons IOUtils. 
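The httpd.conf and httpd-pypies edits above all rely on the same capture-group splice: keep group(1), insert the per-case value, keep group(2). A Python rendering of that idea using re.sub with a replacement callable; the port and path in the usage line are illustrative only.

    import re

    # the trailing $ approximates the full-line match the Java code
    # performs with Matcher.matches()
    LISTEN = re.compile(r'(Listen )[1-9][0-9]+$')
    SERVER_ROOT = re.compile(r'(ServerRoot ").+(")$')

    def patch_httpd_line(line, port, server_root):
        # re-assemble the matched line from its capture groups, splicing
        # the new value in between
        line = LISTEN.sub(lambda m: m.group(1) + str(port), line)
        return SERVER_ROOT.sub(lambda m: m.group(1) + server_root + m.group(2), line)

    print(patch_httpd_line('Listen 8080', 9585, '/usr/local/edex-environment/case1/httpd_pypies/etc/httpd'))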
- */ - private BufferedReader getBufferedReader(String file) - throws FileNotFoundException { - return new BufferedReader(new FileReader(this.getFile(file))); - } - - private BufferedWriter getBufferedWriter(String file) throws IOException { - return new BufferedWriter(new FileWriter(this.getFile(file))); - } - - private File getFile(String file) { - return new File(file); - } -} \ No newline at end of file diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/src/log4j.properties b/javaUtilities/com.raytheon.wes2bridge.manager/src/log4j.properties deleted file mode 100644 index 6e516c4cb7..0000000000 --- a/javaUtilities/com.raytheon.wes2bridge.manager/src/log4j.properties +++ /dev/null @@ -1,6 +0,0 @@ -log4j.rootLogger=ERROR, CA - -log4j.appender.CA=org.apache.log4j.ConsoleAppender -log4j.appender.CA.layout=org.apache.log4j.PatternLayout - -log4j.appender.CA.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n \ No newline at end of file diff --git a/localization/localization.OAX/utility/common_static/site/OAX/basemaps/modelBufr.spi b/localization/localization.OAX/utility/common_static/site/OAX/basemaps/modelBufr.spi deleted file mode 100644 index 6439caa982..0000000000 --- a/localization/localization.OAX/utility/common_static/site/OAX/basemaps/modelBufr.spi +++ /dev/null @@ -1,35 +0,0 @@ - 0 CHE 43.5000 -95.6000 9999 0.000 - 0 CRL 42.0000 -95.0000 9999 0.000 - 0 RDD 40.6900 -94.4700 9999 0.000 - 0 WSC 44.1000 -93.5000 9999 0.000 - 0 HNR 41.5900 -95.3400 375 0.000 - 0 CNN 38.3700 -93.7900 9999 0.000 - 0 EAX 38.8100 -94.2600 333 0.000 - 0 HSI 40.3200 -98.4400 626 0.000 - 0 KMCI 39.3200 -94.7200 312 0.000 - 0 KMKC 39.1200 -94.6000 231 0.000 - 0 KSTJ 39.7700 -94.9200 249 0.000 - 0 KMHK 39.1300 -96.6700 322 0.000 - 0 KTOP 39.0700 -95.6200 270 0.000 - 0 KFOE 38.9500 -95.6700 329 0.000 - 0 KRSL 38.8700 -98.8200 568 0.000 - 0 KSLN 38.8000 -97.6500 388 0.000 - 0 KDSM 41.5300 -93.6500 294 0.000 - 0 KMCW 43.1500 -93.3300 370 0.000 - 0 SLB 42.6000 -95.2300 454 0.000 - 0 KOMA 41.3000 -95.9000 299 0.000 - 0 KLNK 40.8400 -96.7500 362 0.000 - 0 KGRI 40.9700 -98.3200 566 0.000 - 0 FNB 40.0800 -95.6000 300 0.000 - 0 KOFK 41.9800 -97.4300 473 0.000 - 0 KSUX 42.4000 -96.3800 336 0.000 - 0 OAX 41.3200 -96.3700 350 0.000 - 0 KFSD 43.5800 -96.7300 435 0.000 - 0 KYKN 42.9200 -97.3800 398 0.000 - 0 KMHE 43.7700 -98.0300 397 0.000 - 0 KFRM 43.6500 -94.4200 354 0.000 - 0 P#8 40.1000 -97.3400 433 0.000 - 0 P#9 42.2100 -97.7900 524 0.000 - 0 P#A 41.9000 -93.7000 315 0.000 - 0 P#G 38.3100 -97.3000 447 0.000 - 0 P#I 39.5800 -94.1900 297 0.000 diff --git a/mkdocs.yml b/mkdocs.yml index a53b1130a5..3b94a3215e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -34,7 +34,6 @@ pages: - D2D Perspective: cave/d2d-perspective.md - Maps, Views, Projections: cave/maps-views-projections.md - Bundles and Procedures: cave/bundles-and-procedures.md - - Import/Export: cave/import-export.md - Localization Perspective: cave/localization-perspective.md - NCP Perspective: cave/ncp-perspective.md - NSHARP: cave/nsharp.md @@ -46,9 +45,12 @@ pages: - Radar Tools: cave/d2d-radar-tools.md - Editing Menus: cave/d2d-edit-menus.md - Change Localization: cave/cave-localization.md + - Import/Export: cave/import-export.md - EDEX User Manual: - - Distributed EDEX: edex/distributed-computing.md - EDEX Start and Stop: install/start-edex.md + - EDEX Settings: edex/settings.md + - Distributed EDEX: edex/distributed-computing.md + - Docker EDEX: edex/edex-ingest-docker-container.md - LDM Feeds: edex/ldm.md - Data Distribution Files: 
edex/data-distribution-files.md - Ingest a New Grid: edex/new-grid.md @@ -56,6 +58,12 @@ pages: - Monitor Users: edex/edex-users.md - Data Plugins: edex/data-plugins.md - Archive Case Studies: edex/case-studies.md +#- Hazard Services: +# - CAVE Display: cave/hazard-services-display.md +# - Hazard Creation: cave/hazard-services-create.md +# - Hazard Alerts: cave/hazard-services-alert.md +# - Hazard Settings: cave/hazard-services-settings.md +# - Examples: cave/hazard-services-example.md - Supported Data Types: - Gridded Model Display: cave/d2d-grids.md - Surface Obs: cave/d2d-pointdata-surface-obs.md @@ -66,7 +74,6 @@ pages: - Map Overlays: cave/d2d-map-resources.md - Python API: - Overview: python/python-awips-data-access.md - - Maps Database: python/maps-database.md - AWIPS Grids and Cartopy: python/awips-grids-and-cartopy.md - Satellite Imagery: python/satellite-imagery.md - NEXRAD Level 3 Radar: python/nexrad-level-3-radar.md @@ -76,9 +83,9 @@ pages: - Surface Obs Plot with MetPy: python/surface-obs-plot-metpy.md - Development: - AWIPS Development Environment (ADE): dev/awips-development-environment.md - - Building NSHARP for macOS: dev/build-nsharp-macos.md - Appendix: - AWIPS Grid Parameters: appendix/appendix-grid-parameters.md + - Maps Database: python/maps-database.md - Acronyms and Abbreviations: appendix/appendix-acronyms.md - COTS and FOSS: appendix/appendix-cots.md - WSR-88D Product Table: appendix/appendix-wsr88d.md diff --git a/pythonPackages/dynamicserialize/DynamicSerializationManager.py b/pythonPackages/dynamicserialize/DynamicSerializationManager.py deleted file mode 100644 index 98b21eb04d..0000000000 --- a/pythonPackages/dynamicserialize/DynamicSerializationManager.py +++ /dev/null @@ -1,69 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# A port of the Java DynamicSerializeManager. Should be used to read/write -# DynamicSerialize binary data. -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/09/10 njensen Initial Creation. 
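In use, the manager is a two-call round trip. A sketch against the class defined below, using the adapter-backed Date dstype as the payload:

    from dynamicserialize import DynamicSerializationManager
    from dynamicserialize.dstypes.java.util import Date

    dsm = DynamicSerializationManager.DynamicSerializationManager()

    d = Date()
    d.setTime(1262304000000)             # epoch millis; DateAdapter writes this as one I64

    payload = dsm.serializeObject(d)     # thrift-framed bytes
    round_trip = dsm.deserializeBytes(payload)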
-# -# -# - -from thrift.transport import TTransport -import SelfDescribingBinaryProtocol, ThriftSerializationContext - -class DynamicSerializationManager: - - def __init__(self): - self.transport = None - - def _deserialize(self, ctx): - return ctx.deserializeMessage() - - def deserializeBytes(self, bytes): - ctx = self._buildSerializationContext(bytes) - ctx.readMessageStart() - obj = self._deserialize(ctx) - ctx.readMessageEnd() - return obj - - def _buildSerializationContext(self, bytes=None): - self.transport = TTransport.TMemoryBuffer(bytes) - protocol = SelfDescribingBinaryProtocol.SelfDescribingBinaryProtocol(self.transport) - return ThriftSerializationContext.ThriftSerializationContext(self, protocol) - - def serializeObject(self, obj): - ctx = self._buildSerializationContext() - ctx.writeMessageStart("dynamicSerialize") - self._serialize(ctx, obj) - ctx.writeMessageEnd() - return self.transport.getvalue() - - def _serialize(self, ctx, obj): - ctx.serializeMessage(obj) \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/SelfDescribingBinaryProtocol.py b/pythonPackages/dynamicserialize/SelfDescribingBinaryProtocol.py deleted file mode 100644 index 50062a5b30..0000000000 --- a/pythonPackages/dynamicserialize/SelfDescribingBinaryProtocol.py +++ /dev/null @@ -1,142 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -from thrift.protocol.TProtocol import * -from thrift.protocol.TBinaryProtocol import * -from struct import pack, unpack - - -# -# Partially compatible AWIPS-II Thrift Binary Protocol -# -# Missing functionality: -#
-#  - Custom Serializers
-#  - Inheritance
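The one genuinely non-standard piece of this protocol is its float handling: readFloat/writeFloat below move a 32-bit float as the raw bit pattern of an I32, reinterpreting rather than converting. In isolation the trick is:

    import struct

    # pack the float's bytes, then unpack them as an int; the read side
    # does the inverse
    bits = struct.unpack('>i', struct.pack('>f', 1.5))[0]    # 1069547520
    value = struct.unpack('>f', struct.pack('>i', bits))[0]  # 1.5 again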
    -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/11/09 chammack Initial Creation. -# 06/09/10 njensen Added float, list methods -# Apr 24, 2015 4425 nabowle Add F64List support. -# -# -# - -import struct, numpy - -FLOAT = 64 - -intList = numpy.dtype(numpy.int32).newbyteorder('>') -floatList = numpy.dtype(numpy.float32).newbyteorder('>') -longList = numpy.dtype(numpy.int64).newbyteorder('>') -shortList = numpy.dtype(numpy.int16).newbyteorder('>') -byteList = numpy.dtype(numpy.int8).newbyteorder('>') -doubleList = numpy.dtype(numpy.float64).newbyteorder('>') - -class SelfDescribingBinaryProtocol(TBinaryProtocol): - - def readFieldBegin(self): - type = self.readByte() - if type == TType.STOP: - return (None, type, 0) - name = self.readString() - id = self.readI16() - return (name, type, id) - - def readStructBegin(self): - return self.readString() - - def writeStructBegin(self, name): - self.writeString(name) - - def writeFieldBegin(self, name, type, id): - self.writeByte(type) - self.writeString(name) - self.writeI16(id) - - def readFloat(self): - d = self.readI32() - dAsBytes = struct.pack('i', d) - f = struct.unpack('f', dAsBytes) - return f[0] - - def writeFloat(self, f): - dAsBytes = struct.pack('f', f) - i = struct.unpack('i', dAsBytes) - self.writeI32(i[0]) - - def readI32List(self, sz): - buff = self.trans.readAll(4*sz) - val = numpy.frombuffer(buff, dtype=intList, count=sz) - return val - - def readF32List(self, sz): - buff = self.trans.readAll(4*sz) - val = numpy.frombuffer(buff, dtype=floatList, count=sz) - return val - - def readF64List(self, sz): - buff = self.trans.readAll(8*sz) - val = numpy.frombuffer(buff, dtype=doubleList, count=sz) - return val - - def readI64List(self, sz): - buff = self.trans.readAll(8*sz) - val = numpy.frombuffer(buff, dtype=longList, count=sz) - return val - - def readI16List(self, sz): - buff = self.trans.readAll(2*sz) - val = numpy.frombuffer(buff, dtype=shortList, count=sz) - return val - - def readI8List(self, sz): - buff = self.trans.readAll(sz) - val = numpy.frombuffer(buff, dtype=byteList, count=sz) - return val - - def writeI32List(self, buff): - b = numpy.asarray(buff, intList) - self.trans.write(numpy.getbuffer(b)) - - def writeF32List(self, buff): - b = numpy.asarray(buff, floatList) - self.trans.write(numpy.getbuffer(b)) - - def writeF64List(self, buff): - b = numpy.asarray(buff, doubleList) - self.trans.write(numpy.getbuffer(b)) - - def writeI64List(self, buff): - b = numpy.asarray(buff, longList) - self.trans.write(numpy.getbuffer(b)) - - def writeI16List(self, buff): - b = numpy.asarray(buff, shortList) - self.trans.write(numpy.getbuffer(b)) - - def writeI8List(self, buff): - b = numpy.asarray(buff, byteList) - self.trans.write(numpy.getbuffer(b)) diff --git a/pythonPackages/dynamicserialize/ThriftSerializationContext.py b/pythonPackages/dynamicserialize/ThriftSerializationContext.py deleted file mode 100644 index e82e2118d3..0000000000 --- a/pythonPackages/dynamicserialize/ThriftSerializationContext.py +++ /dev/null @@ -1,424 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# A port of the Java ThriftSerializationContext, used for reading/writing -# DynamicSerialize objects to/from thrift. -# -# For serialization, it has no knowledge of the expected types in other -# languages, it is instead all based on inspecting the types of the objects -# passed to it. Therefore, ensure the types of python objects and primitives -# match what they should be in the destination language. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/09/10 njensen Initial Creation. -# 06/12/13 #2099 dgilling Implement readObject() and -# writeObject(). -# Apr 24, 2015 4425 nabowle Add Double support -# Oct 17, 2016 5919 njensen Optimized for speed -# -# - -from thrift.Thrift import TType -import inspect -import sys -import types -import time -import dynamicserialize -from dynamicserialize import dstypes, adapters -import SelfDescribingBinaryProtocol -import numpy - -DS_LEN = len('dynamicserialize.dstypes.') - -dsObjTypes = {} - - -def buildObjMap(module): - if '__all__' in module.__dict__: - for i in module.__all__: - name = module.__name__ + '.' + i - __import__(name) - buildObjMap(sys.modules[name]) - else: - clzName = module.__name__[module.__name__.rfind('.') + 1:] - clz = module.__dict__[clzName] - tname = module.__name__ - tname = tname[DS_LEN:] - dsObjTypes[tname] = clz - -buildObjMap(dstypes) - -pythonToThriftMap = { - types.StringType: TType.STRING, - types.IntType: TType.I32, - types.LongType: TType.I64, - types.ListType: TType.LIST, - types.DictionaryType: TType.MAP, - type(set([])): TType.SET, - types.FloatType: SelfDescribingBinaryProtocol.FLOAT, - # types.FloatType: TType.DOUBLE, - types.BooleanType: TType.BOOL, - types.InstanceType: TType.STRUCT, - types.NoneType: TType.VOID, - numpy.float32: SelfDescribingBinaryProtocol.FLOAT, - numpy.int32: TType.I32, - numpy.ndarray: TType.LIST, - numpy.object_: TType.STRING, # making an assumption here - numpy.string_: TType.STRING, - numpy.float64: TType.DOUBLE, - numpy.int16: TType.I16, - numpy.int8: TType.BYTE, - numpy.int64: TType.I64 -} - -primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64, - SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE) - - -class ThriftSerializationContext(object): - - def __init__(self, serializationManager, selfDescribingBinaryProtocol): - self.serializationManager = serializationManager - self.protocol = selfDescribingBinaryProtocol - self.typeDeserializationMethod = { - TType.STRING: self.protocol.readString, - TType.I16: self.protocol.readI16, - TType.I32: self.protocol.readI32, - TType.LIST: self._deserializeArray, - TType.MAP: self._deserializeMap, - TType.SET: self._deserializeSet, - SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat, - TType.BYTE: self.protocol.readByte, - TType.I64: self.protocol.readI64, - TType.DOUBLE: self.protocol.readDouble, - TType.BOOL: self.protocol.readBool, - TType.STRUCT: self.deserializeMessage, - TType.VOID: lambda: None - } - self.typeSerializationMethod = { - TType.STRING: self.protocol.writeString, - TType.I16: self.protocol.writeI16, - TType.I32: self.protocol.writeI32, - TType.LIST: 
self._serializeArray, - TType.MAP: self._serializeMap, - TType.SET: self._serializeSet, - SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat, - TType.BYTE: self.protocol.writeByte, - TType.I64: self.protocol.writeI64, - TType.DOUBLE: self.protocol.writeDouble, - TType.BOOL: self.protocol.writeBool, - TType.STRUCT: self.serializeMessage, - TType.VOID: lambda x: None - } - self.listDeserializationMethod = { - TType.BYTE: self.protocol.readI8List, - TType.I16: self.protocol.readI16List, - TType.I32: self.protocol.readI32List, - TType.I64: self.protocol.readI64List, - SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List, - TType.DOUBLE: self.protocol.readF64List - } - self.listSerializationMethod = { - TType.BYTE: self.protocol.writeI8List, - TType.I16: self.protocol.writeI16List, - TType.I32: self.protocol.writeI32List, - TType.I64: self.protocol.writeI64List, - SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List, - TType.DOUBLE: self.protocol.writeF64List - } - - def readMessageStart(self): - msg = self.protocol.readMessageBegin() - return msg[0] - - def readMessageEnd(self): - self.protocol.readMessageEnd() - - def deserializeMessage(self): - name = self.protocol.readStructBegin() - if name.isdigit(): - obj = self._deserializeType(int(name)) - return obj - name = name.replace('_', '.') - if name in adapters.classAdapterRegistry: - return adapters.classAdapterRegistry[name].deserialize(self) - elif '$' in name: - # it's an inner class, we're going to hope it's an enum, treat it - # special - fieldName, fieldType, fieldId = self.protocol.readFieldBegin() - if fieldName != '__enumValue__': - raise dynamicserialize.SerializationException( - "Expected to find enum payload. Found: " + fieldName) - obj = self.protocol.readString() - self.protocol.readFieldEnd() - return obj - else: - clz = dsObjTypes[name] - obj = clz() - - while self._deserializeField(name, obj): - pass - - self.protocol.readStructEnd() - return obj - - def _deserializeType(self, b): - try: - return self.typeDeserializationMethod[b]() - except KeyError: - raise dynamicserialize.SerializationException( - "Unsupported type value " + str(b)) - - def _deserializeField(self, structname, obj): - fieldName, fieldType, fieldId = self.protocol.readFieldBegin() - if fieldType == TType.STOP: - return False - elif fieldType != TType.VOID: - result = self._deserializeType(fieldType) - lookingFor = "set" + fieldName[0].upper() + fieldName[1:] - - try: - setMethod = getattr(obj, lookingFor) - setMethod(result) - except: - raise dynamicserialize.SerializationException( - "Couldn't find setter method " + lookingFor) - - self.protocol.readFieldEnd() - return True - - def _deserializeArray(self): - listType, size = self.protocol.readListBegin() - result = [] - if size: - if listType not in primitiveSupport: - m = self.typeDeserializationMethod[listType] - result = [m() for n in xrange(size)] - else: - result = self.listDeserializationMethod[listType](size) - self.protocol.readListEnd() - return result - - def _deserializeMap(self): - keyType, valueType, size = self.protocol.readMapBegin() - result = {} - for n in xrange(size): - # can't go off the type, due to java generics limitations dynamic serialize is - # serializing keys and values as void - key = self.typeDeserializationMethod[TType.STRUCT]() - value = self.typeDeserializationMethod[TType.STRUCT]() - result[key] = value - self.protocol.readMapEnd() - return result - - def _deserializeSet(self): - setType, setSize = self.protocol.readSetBegin() - result = 
set([]) - for n in xrange(setSize): - result.add(self.typeDeserializationMethod[TType.STRUCT]()) - self.protocol.readSetEnd() - return result - - def _lookupType(self, obj): - pyt = type(obj) - if pyt in pythonToThriftMap: - return pythonToThriftMap[pyt] - elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'): - return pythonToThriftMap[types.InstanceType] - else: - raise dynamicserialize.SerializationException( - "Don't know how to serialize object of type: " + str(pyt)) - - def serializeMessage(self, obj): - tt = self._lookupType(obj) - - if tt == TType.STRUCT: - fqn = obj.__module__[DS_LEN:] - if fqn in adapters.classAdapterRegistry: - # get proper class name when writing class name to serialization stream - # in case we have a special inner-class case - m = sys.modules[adapters.classAdapterRegistry[fqn].__name__] - if isinstance(m.ClassAdapter, list): - fqn = m.ClassAdapter[0] - self.protocol.writeStructBegin(fqn) - adapters.classAdapterRegistry[fqn].serialize(self, obj) - return - else: - self.protocol.writeStructBegin(fqn) - methods = inspect.getmembers(obj, inspect.ismethod) - fid = 1 - for m in methods: - methodName = m[0] - if methodName.startswith('get'): - fieldname = methodName[3].lower() + methodName[4:] - val = m[1]() - ft = self._lookupType(val) - if ft == TType.STRUCT: - fc = val.__module__[DS_LEN:] - self._serializeField(fieldname, ft, fid, val) - else: - self._serializeField(fieldname, ft, fid, val) - fid += 1 - self.protocol.writeFieldStop() - - self.protocol.writeStructEnd() - else: - # basic types - self.protocol.writeStructBegin(str(tt)) - self._serializeType(obj, tt) - self.protocol.writeStructEnd() - - def _serializeField(self, fieldName, fieldType, fieldId, fieldValue): - self.protocol.writeFieldBegin(fieldName, fieldType, fieldId) - self._serializeType(fieldValue, fieldType) - self.protocol.writeFieldEnd() - - def _serializeType(self, fieldValue, fieldType): - if fieldType in self.typeSerializationMethod: - return self.typeSerializationMethod[fieldType](fieldValue) - else: - raise dynamicserialize.SerializationException( - "Unsupported type value " + str(fieldType)) - - def _serializeArray(self, obj): - size = len(obj) - if size: - if type(obj) is numpy.ndarray: - t = pythonToThriftMap[obj.dtype.type] - size = obj.size - else: - t = self._lookupType(obj[0]) - else: - t = TType.STRUCT - self.protocol.writeListBegin(t, size) - if t == TType.STRING: - if type(obj) is numpy.ndarray: - if len(obj.shape) == 1: - for x in obj: - s = str(x).strip() - self.typeSerializationMethod[t](s) - else: - for x in obj: - for y in x: - s = str(y).strip() - self.typeSerializationMethod[t](s) - else: - for x in obj: - s = str(x) - self.typeSerializationMethod[t](s) - elif t not in primitiveSupport: - for x in obj: - self.typeSerializationMethod[t](x) - else: - self.listSerializationMethod[t](obj) - self.protocol.writeListEnd() - - def _serializeMap(self, obj): - size = len(obj) - self.protocol.writeMapBegin(TType.VOID, TType.VOID, size) - for k in obj.keys(): - self.typeSerializationMethod[TType.STRUCT](k) - self.typeSerializationMethod[TType.STRUCT](obj[k]) - self.protocol.writeMapEnd() - - def _serializeSet(self, obj): - size = len(obj) - self.protocol.writeSetBegin(TType.VOID, size) - for x in obj: - self.typeSerializationMethod[TType.STRUCT](x) - self.protocol.writeSetEnd() - - def writeMessageStart(self, name): - self.protocol.writeMessageBegin(name, TType.VOID, 0) - - def writeMessageEnd(self): - self.protocol.writeMessageEnd() - - def readBool(self): - return 
self.protocol.readBool() - - def writeBool(self, b): - self.protocol.writeBool(b) - - def readByte(self): - return self.protocol.readByte() - - def writeByte(self, b): - self.protocol.writeByte(b) - - def readDouble(self): - return self.protocol.readDouble() - - def writeDouble(self, d): - self.protocol.writeDouble(d) - - def readFloat(self): - return self.protocol.readFloat() - - def writeFloat(self, f): - self.protocol.writeFloat(f) - - def readI16(self): - return self.protocol.readI16() - - def writeI16(self, i): - self.protocol.writeI16(i) - - def readI32(self): - return self.protocol.readI32() - - def writeI32(self, i): - self.protocol.writeI32(i) - - def readI64(self): - return self.protocol.readI64() - - def writeI64(self, i): - self.protocol.writeI64(i) - - def readString(self): - return self.protocol.readString() - - def writeString(self, s): - self.protocol.writeString(s) - - def readBinary(self): - numBytes = self.protocol.readI32() - return self.protocol.readI8List(numBytes) - - def readFloatArray(self): - size = self.protocol.readI32() - return self.protocol.readF32List(size) - - def writeFloatArray(self, floats): - self.protocol.writeI32(len(floats)) - self.protocol.writeF32List(floats) - - def readObject(self): - return self.deserializeMessage() - - def writeObject(self, obj): - self.serializeMessage(obj) diff --git a/pythonPackages/dynamicserialize/__init__.py b/pythonPackages/dynamicserialize/__init__.py deleted file mode 100644 index 1877eb9d0c..0000000000 --- a/pythonPackages/dynamicserialize/__init__.py +++ /dev/null @@ -1,58 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# TODO -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/20/10 njensen Initial Creation. -# -# -# - -__all__ = [ - ] - -import dstypes, adapters -import DynamicSerializationManager - -class SerializationException(Exception): - - def __init__(self, message=None): - self.message = message - - def __str__(self): - if self.message: - return self.message - else: - return "" - -def serialize(obj): - dsm = DynamicSerializationManager.DynamicSerializationManager() - return dsm.serializeObject(obj) - -def deserialize(bytes): - dsm = DynamicSerializationManager.DynamicSerializationManager() - return dsm.deserializeBytes(bytes) \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/adapters/ByteBufferAdapter.py b/pythonPackages/dynamicserialize/adapters/ByteBufferAdapter.py deleted file mode 100644 index 6d714f7ad4..0000000000 --- a/pythonPackages/dynamicserialize/adapters/ByteBufferAdapter.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.nio.ByteBuffer -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/03/11 dgilling Initial Creation. -# -# -# - -ClassAdapter = ['java.nio.ByteBuffer', 'java.nio.HeapByteBuffer'] - - -def serialize(context, set): - raise NotImplementedError("Serialization of ByteBuffers is not supported.") - -def deserialize(context): - byteBuf = context.readBinary() - return byteBuf - - - diff --git a/pythonPackages/dynamicserialize/adapters/CalendarAdapter.py b/pythonPackages/dynamicserialize/adapters/CalendarAdapter.py deleted file mode 100644 index 7a21f09380..0000000000 --- a/pythonPackages/dynamicserialize/adapters/CalendarAdapter.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.util.Calendar -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/29/10 wldougher Initial Creation. -# -# -# - -from dynamicserialize.dstypes.java.util import Calendar - -ClassAdapter = 'java.util.Calendar' - -def serialize(context, calendar): - calTiM = calendar.getTimeInMillis() - context.writeI64(calTiM) - -def deserialize(context): - result = Calendar() - result.setTimeInMillis(context.readI64()) - return result diff --git a/pythonPackages/dynamicserialize/adapters/CoordAdapter.py b/pythonPackages/dynamicserialize/adapters/CoordAdapter.py deleted file mode 100644 index ebfb93a3ab..0000000000 --- a/pythonPackages/dynamicserialize/adapters/CoordAdapter.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
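CalendarAdapter above reduces an entire Calendar to a single I64 of epoch milliseconds, and deserialization just reverses it. A round trip therefore looks like:

    from dynamicserialize.dstypes.java.util import Calendar

    cal = Calendar()
    cal.setTimeInMillis(1262304000000)   # 2010-01-01T00:00:00Z
    # serialize():   context.writeI64(cal.getTimeInMillis())
    # deserialize(): Calendar().setTimeInMillis(context.readI64())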
-## - - -# -# Adapter for com.vividsolutions.jts.geom.Coordinate -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/20/11 dgilling Initial Creation. -# -# -# - -from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Coordinate - -ClassAdapter = 'com.vividsolutions.jts.geom.Coordinate' - -def serialize(context, coordinate): - context.writeDouble(coordinate.getX()) - context.writeDouble(coordinate.getY()) - -def deserialize(context): - x = context.readDouble() - y = context.readDouble() - coord = Coordinate() - coord.setX(x) - coord.setY(y) - return coord - diff --git a/pythonPackages/dynamicserialize/adapters/DateAdapter.py b/pythonPackages/dynamicserialize/adapters/DateAdapter.py deleted file mode 100644 index 82c691176e..0000000000 --- a/pythonPackages/dynamicserialize/adapters/DateAdapter.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.util.Date -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 12/06/10 dgilling Initial Creation. -# -# -# - -from dynamicserialize.dstypes.java.util import Date - -ClassAdapter = 'java.util.Date' - -def serialize(context, date): - context.writeI64(date.getTime()) - -def deserialize(context): - result = Date() - result.setTime(context.readI64()) - return result \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/adapters/EnumSetAdapter.py b/pythonPackages/dynamicserialize/adapters/EnumSetAdapter.py deleted file mode 100644 index 158bfe92d3..0000000000 --- a/pythonPackages/dynamicserialize/adapters/EnumSetAdapter.py +++ /dev/null @@ -1,57 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.util.EnumSet -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/28/11 dgilling Initial Creation. -# 12/02/13 2537 bsteffen Serialize empty enum sets. 
-# -# -# - - - -from dynamicserialize.dstypes.java.util import EnumSet - -ClassAdapter = ['java.util.EnumSet', 'java.util.RegularEnumSet'] - - -def serialize(context, set): - setSize = len(set) - context.writeI32(setSize) - context.writeString(set.getEnumClass()) - for val in set: - context.writeString(val) - - -def deserialize(context): - setSize = context.readI32() - enumClassName = context.readString() - valList = [] - for i in xrange(setSize): - valList.append(context.readString()) - return EnumSet(enumClassName, valList) diff --git a/pythonPackages/dynamicserialize/adapters/FloatBufferAdapter.py b/pythonPackages/dynamicserialize/adapters/FloatBufferAdapter.py deleted file mode 100644 index 20c9690740..0000000000 --- a/pythonPackages/dynamicserialize/adapters/FloatBufferAdapter.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.nio.FloatBuffer -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/01/11 dgilling Initial Creation. -# -# -# - -ClassAdapter = ['java.nio.FloatBuffer', 'java.nio.HeapFloatBuffer'] - - -def serialize(context, set): - raise NotImplementedError("Serialization of FloatBuffers is not supported.") - -def deserialize(context): - floatBuf = context.readFloatArray() - return floatBuf - - - diff --git a/pythonPackages/dynamicserialize/adapters/GeometryTypeAdapter.py b/pythonPackages/dynamicserialize/adapters/GeometryTypeAdapter.py deleted file mode 100644 index 387842fe63..0000000000 --- a/pythonPackages/dynamicserialize/adapters/GeometryTypeAdapter.py +++ /dev/null @@ -1,56 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for com.vividsolutions.jts.geom.Polygon -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/20/11 dgilling Initial Creation. -# -# -# - -# TODO: Implement serialization/make deserialization useful. -# Deserialization was simply implemented to allow GridLocation objects to be -# passed through thrift, but the resulting Geometry object will not be transformed into -# useful data; the base byte array is passed to a worthless Geometry class. 
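In other words, a caller currently receives only the raw payload. A sketch of what that leaves you with; getBinaryData() is assumed to be the getter paired with the setBinaryData() call below, and WKB is an unverified guess at the payload encoding:

    def geometry_payload(geom):
        # geom: the dstypes Geometry produced by deserialize() below;
        # getBinaryData() is assumed to mirror setBinaryData()
        raw = geom.getBinaryData()
        # If the Java side wrote JTS WKB (an assumption, not confirmed by
        # this file), a third-party reader could decode it, e.g.:
        #   from shapely import wkb
        #   shape = wkb.loads(raw.tobytes())   # raw arrives as a numpy int8 array
        return raw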
- -from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Geometry - -# NOTE: At the moment, EDEX serializes Polygon, MultiPolygons, Points, and -# Geometrys with the tag of the base class Geometry. Java's serialization -# adapter is smarter and can determine the exact object by reading the binary -# data. This adapter doesn't need this _yet_, so it has not been implemented. -ClassAdapter = 'com.vividsolutions.jts.geom.Geometry' - -def serialize(context, coordinate): - raise dynamicserialize.SerializationException('Not implemented yet') - -def deserialize(context): - data = context.readBinary() - geom = Geometry() - geom.setBinaryData(data) - return geom - diff --git a/pythonPackages/dynamicserialize/adapters/GregorianCalendarAdapter.py b/pythonPackages/dynamicserialize/adapters/GregorianCalendarAdapter.py deleted file mode 100644 index 39d5cbc4db..0000000000 --- a/pythonPackages/dynamicserialize/adapters/GregorianCalendarAdapter.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.util.Calendar -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/29/10 wldougher Initial Creation. -# -# -# - -from dynamicserialize.dstypes.java.util import GregorianCalendar - -ClassAdapter = 'java.util.GregorianCalendar' - -def serialize(context, calendar): - calTiM = calendar.getTimeInMillis() - context.writeI64(calTiM) - -def deserialize(context): - result = GregorianCalendar() - result.setTimeInMillis(context.readI64()) - return result diff --git a/pythonPackages/dynamicserialize/adapters/JTSEnvelopeAdapter.py b/pythonPackages/dynamicserialize/adapters/JTSEnvelopeAdapter.py deleted file mode 100644 index 6446e49ae3..0000000000 --- a/pythonPackages/dynamicserialize/adapters/JTSEnvelopeAdapter.py +++ /dev/null @@ -1,51 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for com.vividsolutions.jts.geom.Envelope -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 05/29/13 2023 dgilling Initial Creation. 
-# -# - -from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope - -ClassAdapter = 'com.vividsolutions.jts.geom.Envelope' - -def serialize(context, envelope): - context.writeDouble(envelope.getMinX()) - context.writeDouble(envelope.getMaxX()) - context.writeDouble(envelope.getMinY()) - context.writeDouble(envelope.getMaxY()) - -def deserialize(context): - env = Envelope() - env.setMinX(context.readDouble()) - env.setMaxX(context.readDouble()) - env.setMinY(context.readDouble()) - env.setMaxY(context.readDouble()) - return env - diff --git a/pythonPackages/dynamicserialize/adapters/PointAdapter.py b/pythonPackages/dynamicserialize/adapters/PointAdapter.py deleted file mode 100644 index d571dc16ff..0000000000 --- a/pythonPackages/dynamicserialize/adapters/PointAdapter.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.awt.Point -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/31/10 njensen Initial Creation. -# -# -# - -from dynamicserialize.dstypes.java.awt import Point - -ClassAdapter = 'java.awt.Point' - -def serialize(context, point): - context.writeI32(point.getX()) - context.writeI32(point.getY()) - -def deserialize(context): - x = context.readI32() - y = context.readI32() - point = Point() - point.setX(x) - point.setY(y) - return point - diff --git a/pythonPackages/dynamicserialize/adapters/StackTraceElementAdapter.py b/pythonPackages/dynamicserialize/adapters/StackTraceElementAdapter.py deleted file mode 100644 index 48de027b10..0000000000 --- a/pythonPackages/dynamicserialize/adapters/StackTraceElementAdapter.py +++ /dev/null @@ -1,52 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.lang.StackTraceElement[] -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/21/10 njensen Initial Creation. 
-# -# -# - -import dynamicserialize -from dynamicserialize.dstypes.java.lang import StackTraceElement - -ClassAdapter = 'java.lang.StackTraceElement' - - -def serialize(context, obj): - raise dynamicserialize.SerializationException('Not implemented yet') - -def deserialize(context): - result = StackTraceElement() - result.setDeclaringClass(context.readString()) - result.setMethodName(context.readString()) - result.setFileName(context.readString()) - result.setLineNumber(context.readI32()) - return result - - diff --git a/pythonPackages/dynamicserialize/adapters/TimestampAdapter.py b/pythonPackages/dynamicserialize/adapters/TimestampAdapter.py deleted file mode 100644 index e6ce2bfa90..0000000000 --- a/pythonPackages/dynamicserialize/adapters/TimestampAdapter.py +++ /dev/null @@ -1,44 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Adapter for java.sql.Timestamp -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/30/11 dgilling Initial Creation. -# -# -# - -from dynamicserialize.dstypes.java.sql import Timestamp - -ClassAdapter = 'java.sql.Timestamp' - -def serialize(context, timestamp): - context.writeI64(timestamp.getTime()) - -def deserialize(context): - result = Timestamp(context.readI64()) - return result \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/adapters/__init__.py b/pythonPackages/dynamicserialize/adapters/__init__.py deleted file mode 100644 index fa6bdc954b..0000000000 --- a/pythonPackages/dynamicserialize/adapters/__init__.py +++ /dev/null @@ -1,111 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for Dynamic Serialize adapters. -# -# Plugins can contribute to dynamicserialize.adapters by either including their -# classes directly in pythonPackages/dynamicserialize/adapters/ within their -# plugin. The plugin's adapter will automatically be added to __all__ at runtime -# and registered. -# Plugins should not include a custom __init__.py in -# pythonPackages/dynamicserialize/adapters/ because it will overwrite this file. 
-# If custom package initialization is needed, a subpackage should be created -# with an __init__.py that includes the following: -# -# __all__ = ['CustomAdapter1', 'CustomAdapter2'] -# from dynamicserialize.adapters import registerAdapters -# registerAdapters(__name__, __all__) -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/31/10 njensen Initial Creation. -# 03/20/13 #1774 randerso Added TimeConstraintsAdapter -# 04/22/13 #1949 rjpeter Added LockTableAdapter -# 02/06/14 #2672 bsteffen Added JTSEnvelopeAdapter -# 06/22/2015 #4573 randerso Added JobProgressAdapter -# 09/21/2015 #4486 rjpeter Added FormattedDateAdapter -# 06/23/2016 #5696 rjpeter Added CommutativeTimestampAdapter -# 10/17/2016 #5919 njensen Added GeomDataRespAdapter -# 01/09/2017 #5997 nabowle Allow contribution from plugins. -# - -__all__ = [ - 'PointAdapter', - 'StackTraceElementAdapter', - 'CalendarAdapter', - 'GregorianCalendarAdapter', - 'DateAdapter', - 'GeometryTypeAdapter', - 'CoordAdapter', - 'TimestampAdapter', - 'EnumSetAdapter', - 'FloatBufferAdapter', - 'ByteBufferAdapter', - 'JTSEnvelopeAdapter' -] - -classAdapterRegistry = {} - - -def getAdapterRegistry(): - import pkgutil - - discoveredPackages = [] - # allow other plugins to contribute to adapters by dropping their adapter or - # package into the dynamicserialize.adapters package - for _, modname, ispkg in pkgutil.iter_modules(__path__): - if ispkg: - discoveredPackages.append(modname) - else: - if modname not in __all__: - __all__.append(modname) - - registerAdapters(__name__, __all__) - - for pkg in discoveredPackages: - __import__(__name__ + '.' + pkg) - - -def registerAdapters(package, modules): - import sys - if not package.endswith('.'): - package += '.' - for x in modules: - exec 'import ' + package + x - m = sys.modules[package + x] - d = m.__dict__ - if d.has_key('ClassAdapter'): - if isinstance(m.ClassAdapter, list): - for clz in m.ClassAdapter: - classAdapterRegistry[clz] = m - else: - clzName = m.ClassAdapter - classAdapterRegistry[clzName] = m - else: - raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' + - 'and cannot be registered.') - - -getAdapterRegistry() diff --git a/pythonPackages/dynamicserialize/dstypes/__init__.py b/pythonPackages/dynamicserialize/dstypes/__init__.py deleted file mode 100644 index 8a203a734a..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
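The deleted registry above walks the adapters package with pkgutil, imports each module via the Python 2 exec statement, and indexes it under every Java class name in its ClassAdapter field (note the Python 2-only has_key call). For comparison, a Python 3-flavored sketch of the same registration step using importlib instead of exec; this is illustrative, not the shipped code:

    # Python 3 sketch of registerAdapters(): same behavior, no 'exec'.
    import importlib

    classAdapterRegistry = {}

    def registerAdapters(package, modules):
        for name in modules:
            # import the adapter module by dotted name
            mod = importlib.import_module(package + '.' + name)
            adapter = getattr(mod, 'ClassAdapter', None)
            if adapter is None:
                raise LookupError('Adapter class ' + name +
                                  ' has no ClassAdapter field '
                                  'and cannot be registered.')
            # ClassAdapter may be a single class name or a list of them
            classes = adapter if isinstance(adapter, list) else [adapter]
            for clz in classes:
                classAdapterRegistry[clz] = mod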
-## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'com', - 'gov', - 'java' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/__init__.py deleted file mode 100644 index 755bf9a60c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'raytheon', - 'vividsolutions' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/__init__.py deleted file mode 100644 index 92f3a4eaec..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'uf' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/__init__.py deleted file mode 100644 index b2da4684da..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'common' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py deleted file mode 100644 index 966f28b25c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'dataplugin' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py deleted file mode 100644 index c18ecc8342..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'events' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py deleted file mode 100644 index e1ccbfb222..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'hazards' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py deleted file mode 100644 index d9702fd5d7..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'requests' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py deleted file mode 100644 index a227fa1c7c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 08, 2014 reblum Generated - -class RegionLookupRequest(object): - - def __init__(self): - self.region = None - self.site = None - - def getRegion(self): - return self.region - - def setRegion(self, region): - self.region = region - - def getSite(self): - return self.site - - def setSite(self, site): - self.site = site - diff --git a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py deleted file mode 100644 index 8253e3399b..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'RegionLookupRequest' - ] - -from RegionLookupRequest import RegionLookupRequest - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/__init__.py deleted file mode 100644 index 28b918b9e1..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'jts' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py deleted file mode 100644 index 8c68cc46cf..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'geom' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py deleted file mode 100644 index 55a64f148a..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py +++ /dev/null @@ -1,46 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated against equivalent DynamicSerialize Java class - -class Coordinate(object): - - def __init__(self, x=None, y=None): - self.x = x - self.y = y - - def getX(self): - return self.x - - def getY(self): - return self.y - - def setX(self, x): - self.x = x - - def setY(self, y): - self.y = y - - def __str__(self): - return str((self.x, self.y)) - - def __repr__(self): - return self.__str__() - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py deleted file mode 100644 index 2ad017ecfd..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py +++ /dev/null @@ -1,68 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# This class is a dummy implementation of the -# com.vividsolutions.jts.geom.Envelope class. It was simply created to allow -# serialization/deserialization of IDataRequest objects from the Data Access -# Framework. This should be re-implemented if useful work needs to be -# performed against serialized Envelope objects. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 05/29/13 2023 dgilling Initial Creation. -# -# - -class Envelope(object): - - def __init__(self, env=None): - self.maxx = -1.0 - self.maxy = -1.0 - self.minx = 0.0 - self.miny = 0.0 - if env is not None: - (self.minx, self.miny, self.maxx, self.maxy) = env.bounds - - def getMaxX(self): - return self.maxx - - def getMaxY(self): - return self.maxy - - def getMinX(self): - return self.minx - - def getMinY(self): - return self.miny - - def setMaxX(self, value): - self.maxx = value - - def setMaxY(self, value): - self.maxy = value - - def setMinX(self, value): - self.minx = value - - def setMinY(self, value): - self.miny = value - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py deleted file mode 100644 index c316419618..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py +++ /dev/null @@ -1,37 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. 
Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# This class is a dummy implementation of the -# com.vividsolutions.jts.geom.Geometry class. It was simply created to allow -# serialization/deserialization of GridLocation objects. This should be -# reimplemented if useful work needs to be performed against serialized -# Geometry objects. - -class Geometry(object): - - def __init__(self): - self.binaryData = None - - def getBinaryData(self): - return self.binaryData - - def setBinaryData(self, data): - self.binaryData = data - diff --git a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py b/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py deleted file mode 100644 index f4e07340a8..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
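The dummy Envelope class being deleted above is mostly getters and setters, but its constructor will unpack any object exposing a shapely-style bounds tuple of (minx, miny, maxx, maxy). A sketch of that path; shapely is only one example of such an object and is an assumption here, not a dependency of the dstypes package:

    # Sketch: building the dummy Envelope from a shapely-style .bounds tuple.
    from shapely.geometry import box
    from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope

    env = Envelope(box(-105.0, 35.0, -95.0, 45.0))
    print("%.1f %.1f %.1f %.1f" % (env.getMinX(), env.getMinY(),
                                   env.getMaxX(), env.getMaxY()))
    # -105.0 35.0 -95.0 45.0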
-## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'Coordinate', - 'Envelope', - 'Geometry' - ] - -from Coordinate import Coordinate -from Envelope import Envelope -from Geometry import Geometry - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/__init__.py deleted file mode 100644 index c19855a931..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'noaa' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/__init__.py deleted file mode 100644 index 47f9c4d705..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'nws' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/__init__.py deleted file mode 100644 index 925c05f9e1..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'ncep' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py deleted file mode 100644 index 3a99cc7b37..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'common' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py deleted file mode 100644 index c28322f4b2..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'dataplugin' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py deleted file mode 100644 index adba4316ac..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'atcf', - 'gempak', - 'gpd', - 'pgen' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py deleted file mode 100644 index a944c4e2eb..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'request' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py deleted file mode 100644 index 9fdc815b64..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py +++ /dev/null @@ -1,14 +0,0 @@ - -# File 
auto-generated against equivalent DynamicSerialize Java class - -class RetrieveAtcfDeckRequest(object): - - def __init__(self): - self.deckID = None - - def getDeckID(self): - return self.deckID - - def setDeckID(self, deckID): - self.deckID = deckID - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py deleted file mode 100644 index 1d83f34c48..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'RetrieveAtcfDeckRequest' - ] - -from RetrieveAtcfDeckRequest import RetrieveAtcfDeckRequest - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py deleted file mode 100644 index a944c4e2eb..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'request' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py deleted file mode 100644 index 6393c6087e..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py +++ /dev/null @@ -1,69 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetGridDataRequest(object): - - def __init__(self): - self.vcoord = None - self.level2 = None - self.modelId = None - self.parm = None - self.level1 = None - self.reftime = None - self.pluginName = None - self.fcstsec = None - - def getVcoord(self): - return self.vcoord - - def setVcoord(self, vcoord): - self.vcoord = vcoord - - def getLevel2(self): - return self.level2 - - def setLevel2(self, level2): - self.level2 = level2 - - def getModelId(self): - return self.modelId - - def setModelId(self, modelId): - self.modelId = modelId - - def getParm(self): - return self.parm - - def setParm(self, parm): - self.parm = parm - - def getLevel1(self): - return self.level1 - - def setLevel1(self, level1): - self.level1 = level1 - - def getReftime(self): - return self.reftime - - def setReftime(self, reftime): - self.reftime = reftime - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - - def getFcstsec(self): - return self.fcstsec - - def setFcstsec(self, fcstsec): - self.fcstsec = fcstsec - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py deleted file mode 100644 index 2c2c02c483..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py +++ /dev/null @@ -1,41 +0,0 @@ - -# File auto-generated against equivalent 
DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetGridInfoRequest(object): - - def __init__(self): - self.modelId = None - self.reftime = None - self.pluginName = None - self.fcstsec = None - - def getModelId(self): - return self.modelId - - def setModelId(self, modelId): - self.modelId = modelId - - def getReftime(self): - return self.reftime - - def setReftime(self, reftime): - self.reftime = reftime - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - - def getFcstsec(self): - return self.fcstsec - - def setFcstsec(self, fcstsec): - self.fcstsec = fcstsec - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py deleted file mode 100644 index dd2afca8c4..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py +++ /dev/null @@ -1,27 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetGridNavRequest(object): - - def __init__(self): - self.modelId = None - self.pluginName = None - - def getModelId(self): - return self.modelId - - def setModelId(self, modelId): - self.modelId = modelId - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py deleted file mode 100644 index 2f589613bd..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py +++ /dev/null @@ -1,20 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetStationsRequest(object): - - def __init__(self): - self.pluginName = None - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py deleted file mode 100644 index f766896986..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py +++ /dev/null @@ -1,27 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetTimesRequest(object): - - def __init__(self): - self.pluginName = None - self.timeField = None - - def getPluginName(self): - return self.pluginName - - def 
setPluginName(self, pluginName): - self.pluginName = pluginName - - def getTimeField(self): - return self.timeField - - def setTimeField(self, timeField): - self.timeField = timeField - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py deleted file mode 100644 index 6864f0c249..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py +++ /dev/null @@ -1,20 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class GetTimesResponse(object): - - def __init__(self): - self.times = None - - def getTimes(self): - return self.times - - def setTimes(self, times): - self.times = times - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py deleted file mode 100644 index a530b7612b..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py +++ /dev/null @@ -1,64 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -import numpy - -class Station(object): - - def __init__(self): - self.elevation = None - self.state = None - self.stationId = None - self.longitude = None - self.latitude = None - self.wmoIndex = None - self.country = None - - def getElevation(self): - return self.elevation - - def setElevation(self, elevation): - self.elevation = elevation - - def getState(self): - return self.state - - def setState(self, state): - self.state = state - - def getStationId(self): - return self.stationId - - def setStationId(self, stationId): - self.stationId = stationId - - def getLongitude(self): - return self.longitude - - def setLongitude(self, longitude): - self.longitude = numpy.float64(longitude) - - def getLatitude(self): - return self.latitude - - def setLatitude(self, latitude): - self.latitude = numpy.float64(latitude) - - def getWmoIndex(self): - return self.wmoIndex - - def setWmoIndex(self, wmoIndex): - self.wmoIndex = wmoIndex - - def getCountry(self): - return self.country - - def setCountry(self, country): - self.country = country - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py deleted file mode 100644 index cb9a467d42..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py +++ /dev/null @@ -1,48 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class StationDataRequest(object): - - def __init__(self): - self.refTime = None - self.pluginName = None - self.parmList = None - 
self.stationId = None - self.partNumber = None - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - - def getParmList(self): - return self.parmList - - def setParmList(self, parmList): - self.parmList = parmList - - def getStationId(self): - return self.stationId - - def setStationId(self, stationId): - self.stationId = stationId - - def getPartNumber(self): - return self.partNumber - - def setPartNumber(self, partNumber): - self.partNumber = partNumber - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py deleted file mode 100644 index 7424671233..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py +++ /dev/null @@ -1,48 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class SurfaceDataRequest(object): - - def __init__(self): - self.refTime = None - self.pluginName = None - self.parmList = None - self.stationId = None - self.partNumber = None - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - - def getParmList(self): - return self.parmList - - def setParmList(self, parmList): - self.parmList = parmList - - def getStationId(self): - return self.stationId - - def setStationId(self, stationId): - self.stationId = stationId - - def getPartNumber(self): - return self.partNumber - - def setPartNumber(self, partNumber): - self.partNumber = partNumber - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py deleted file mode 100644 index 3a57baf5df..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py +++ /dev/null @@ -1,48 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Sep 16, 2016 pmoyer Generated - -class UpperAirDataRequest(object): - - def __init__(self): - self.refTime = None - self.pluginName = None - self.parmList = None - self.stationId = None - self.partNumber = None - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getPluginName(self): - return self.pluginName - - def setPluginName(self, pluginName): - self.pluginName = pluginName - - def getParmList(self): - return self.parmList - - def setParmList(self, parmList): - self.parmList = parmList - - def getStationId(self): - return self.stationId - - def setStationId(self, stationId): - self.stationId = stationId - - def getPartNumber(self): - return self.partNumber - - def setPartNumber(self, partNumber): - self.partNumber = partNumber 
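These GEMPAK request classes are plain payload objects: a typical exchange builds one, fills its fields with the setters, and sends it to EDEX over thrift. A sketch of that workflow using GetTimesRequest, assuming the ufpy-style ThriftClient from the AWIPS Python packages; the host name and field values are illustrative:

    # Sketch: querying available times for a plugin with GetTimesRequest.
    from ufpy import ThriftClient
    from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetTimesRequest

    client = ThriftClient.ThriftClient("edex-server.example.com")  # hypothetical host
    request = GetTimesRequest()
    request.setPluginName("grid")       # illustrative plugin
    request.setTimeField("refTime")     # illustrative time field
    response = client.sendRequest(request)   # a GetTimesResponse
    print(response.getTimes())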
- diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py deleted file mode 100644 index 9ef0c44022..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'GetGridDataRequest', - 'GetGridInfoRequest', - 'GetGridNavRequest', - 'GetStationsRequest', - 'GetTimesRequest', - 'GetTimesResponse', - 'Station', - 'StationDataRequest', - 'SurfaceDataRequest', - 'UpperAirDataRequest' - ] - -from GetGridDataRequest import GetGridDataRequest -from GetGridInfoRequest import GetGridInfoRequest -from GetGridNavRequest import GetGridNavRequest -from GetStationsRequest import GetStationsRequest -from GetTimesRequest import GetTimesRequest -from GetTimesResponse import GetTimesResponse -from Station import Station -from StationDataRequest import StationDataRequest -from SurfaceDataRequest import SurfaceDataRequest -from UpperAirDataRequest import UpperAirDataRequest - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py deleted file mode 100644 index 2e70919ce8..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'query' - ] diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py deleted file mode 100644 index 56daf7a79d..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py +++ /dev/null @@ -1,84 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class GenericPointDataReqMsg(object): - - def __init__(self): - self.reqType = None - self.refTime = None - self.productName = None - self.stnId = None - self.slat = None - self.slon = None - self.productVersion = None - self.querySpecifiedProductVersion = False - self.queryKey = None - self.gpdDataString = None - self.maxNumLevel = 1 - - def getReqType(self): - return self.reqType - - def setReqType(self, reqType): - self.reqType = reqType - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getProductName(self): - return self.productName - - def setProductName(self, productName): - self.productName = productName - - def getStnId(self): - return self.stnId - - def setStnId(self, stnId): - self.stnId = stnId - - def getSlat(self): - return self.slat - - def setSlat(self, slat): - self.slat = slat - - def getSlon(self): - return self.slon - - def setSlon(self, slon): - self.slon = slon - - def getMaxNumLevel(self): - return self.maxNumLevel - - def setMaxNumLevel(self, maxNumLevel): - self.maxNumLevel = maxNumLevel - - def getProductVersion(self): - return self.productVersion - - def setProductVersion(self, productVersion): - self.productVersion = productVersion - - def getQuerySpecifiedProductVersion(self): - return self.querySpecifiedProductVersion - - def setQuerySpecifiedProductVersion(self, 
querySpecifiedProductVersion): - self.querySpecifiedProductVersion = querySpecifiedProductVersion - - def getQueryKey(self): - return self.queryKey - - def setQueryKey(self, queryKey): - self.queryKey = queryKey - - def getGpdDataString(self): - return self.gpdDataString - - def setGpdDataString(self, gpdDataString): - self.gpdDataString = gpdDataString - \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py deleted file mode 100644 index bfec6fc160..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'GenericPointDataReqMsg' - ] - -from GenericPointDataReqMsg import GenericPointDataReqMsg diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py deleted file mode 100644 index 8ce7a8370c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py +++ /dev/null @@ -1,77 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class ActivityInfo(object): - - def __init__(self): - self.activityName = None - self.activityType = None - self.activitySubtype = None - self.activityLabel = None - self.site = None - self.desk = None - self.forecaster = None - self.refTime = None - self.mode = None - self.status = None - - def getActivityName(self): - return self.activityName - - def setActivityName(self, activityName): - self.activityName = activityName - - def getActivityType(self): - return self.activityType - - def setActivityType(self, activityType): - self.activityType = activityType - - def getActivitySubtype(self): - return self.activitySubtype - - def setActivitySubtype(self, activitySubtype): - self.activitySubtype = activitySubtype - - def getActivityLabel(self): - return self.activityLabel - - def setActivityLabel(self, activityLabel): - self.activityLabel = activityLabel - - def getSite(self): - return self.site - - def setSite(self, site): - self.site = site - - def getDesk(self): - return self.desk - - def setDesk(self, desk): - self.desk = desk - - def getForecaster(self): - return self.forecaster - - def setForecaster(self, forecaster): - self.forecaster = forecaster - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getMode(self): - return self.mode - - def setMode(self, mode): - self.mode = mode - - def getStatus(self): - return self.status - - def setStatus(self, status): - self.status = status - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py deleted file mode 100644 index 888e2706d7..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py +++ /dev/null @@ -1,28 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class DerivedProduct(object): - - def __init__(self): - self.name = None - self.productType = None - self.product = None - - def getName(self): - return self.name - - def setName(self, name): 
- self.name = name - - def getProductType(self): - return self.productType - - def setProductType(self, productType): - self.productType = productType - - def getProduct(self): - return self.product - - def setProduct(self, product): - self.product = product - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py deleted file mode 100644 index 2d7dc5c6e4..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py +++ /dev/null @@ -1,42 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class ResponseMessageValidate(object): - - def __init__(self): - self.result = None - self.message = None - self.fileType = None - self.dataURI = None - self.validTime = None - - def getResult(self): - return self.result - - def setResult(self, result): - self.result = result - - def getMessage(self): - return self.message - - def setMessage(self, message): - self.message = message - - def getFileType(self): - return self.fileType - - def setFileType(self, fileType): - self.fileType = fileType - - def getDataURI(self): - return self.dataURI - - def setDataURI(self, dataURI): - self.dataURI = dataURI - - def getValidTime(self): - return self.validTime - - def setValidTime(self, validTime): - self.validTime = validTime - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py deleted file mode 100644 index bf01e4562c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'ActivityInfo', - 'DerivedProduct', - 'ResponseMessageValidate', - 'request', - 'response' - ] - -from ActivityInfo import ActivityInfo -from DerivedProduct import DerivedProduct -from ResponseMessageValidate import ResponseMessageValidate - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py deleted file mode 100644 index 0c34127e8e..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py +++ /dev/null @@ -1,13 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# May 05, 2016 root Generated - -class RetrieveActivityMapRequest(object): - - def __init__(self): - return diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py deleted file mode 100644 index e3a11242a7..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py +++ /dev/null @@ -1,14 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class RetrieveAllProductsRequest(object): - - def 
__init__(self): - self.dataURI = None - - def getDataURI(self): - return self.dataURI - - def setDataURI(self, dataURI): - self.dataURI = dataURI - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py deleted file mode 100644 index 3f36740065..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py +++ /dev/null @@ -1,21 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class StoreActivityRequest(object): - - def __init__(self): - self.activityInfo = None - self.activityXML = None - - def getActivityInfo(self): - return self.activityInfo - - def setActivityInfo(self, activityInfo): - self.activityInfo = activityInfo - - def getActivityXML(self): - return self.activityXML - - def setActivityXML(self, activityXML): - self.activityXML = activityXML - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py deleted file mode 100644 index 92d64a9b04..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py +++ /dev/null @@ -1,21 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class - -class StoreDerivedProductRequest(object): - - def __init__(self): - self.dataURI = None - self.productList = None - - def getDataURI(self): - return self.dataURI - - def setDataURI(self, dataURI): - self.dataURI = dataURI - - def getProductList(self): - return self.productList - - def setProductList(self, productList): - self.productList = productList - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py deleted file mode 100644 index cecb082c9a..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'RetrieveAllProductsRequest', - 'StoreActivityRequest', - 'StoreDerivedProductRequest', - 'RetrieveActivityMapRequest' - ] - -from RetrieveAllProductsRequest import RetrieveAllProductsRequest -from StoreActivityRequest import StoreActivityRequest -from StoreDerivedProductRequest import StoreDerivedProductRequest -from RetrieveActivityMapRequest import RetrieveActivityMapRequest - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py deleted file mode 100644 index 97b9a3c7dd..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py +++ /dev/null @@ -1,55 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# May 06, 2016 root Generated - -class ActivityMapData(object): - - def __init__(self): - self.refTime = None - 
self.activityLabel = None - self.activitySubtype = None - self.dataURI = None - self.activityType = None - self.activityName = None - - def getRefTime(self): - return self.refTime - - def setRefTime(self, refTime): - self.refTime = refTime - - def getActivityLabel(self): - return self.activityLabel - - def setActivityLabel(self, activityLabel): - self.activityLabel = activityLabel - - def getActivitySubtype(self): - return self.activitySubtype - - def setActivitySubtype(self, activitySubtype): - self.activitySubtype = activitySubtype - - def getDataURI(self): - return self.dataURI - - def setDataURI(self, dataURI): - self.dataURI = dataURI - - def getActivityType(self): - return self.activityType - - def setActivityType(self, activityType): - self.activityType = activityType - - def getActivityName(self): - return self.activityName - - def setActivityName(self, activityName): - self.activityName = activityName - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py deleted file mode 100644 index f9111ee70a..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py +++ /dev/null @@ -1,20 +0,0 @@ - -# File auto-generated against equivalent DynamicSerialize Java class -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# May 06, 2016 root Generated - -class RetrieveActivityMapResponse(object): - - def __init__(self): - self.data = None - - def getData(self): - return self.data - - def setData(self, data): - self.data = data - diff --git a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py b/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py deleted file mode 100644 index 99b5b4fad6..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'ActivityMapData', - 'RetrieveActivityMapResponse' - ] - -from ActivityMapData import ActivityMapData -from RetrieveActivityMapResponse import RetrieveActivityMapResponse - diff --git a/pythonPackages/dynamicserialize/dstypes/java/__init__.py b/pythonPackages/dynamicserialize/dstypes/java/__init__.py deleted file mode 100644 index 06f1a98b6b..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
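The pgen classes removed above all follow the same auto-generated bean pattern: every serialized field is a plain attribute paired with getX()/setX() accessors, and each package __init__.py lists its classes in __all__ so dynamicserialize can discover and re-export them. A minimal usage sketch, assuming the ActivityInfo and StoreActivityRequest classes as they existed before this deletion; the field values are illustrative, and actually submitting the request to EDEX is separate client plumbing not shown here:

    from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen import ActivityInfo
    from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.pgen.request import StoreActivityRequest

    # Populate the metadata bean field by field through its setters.
    info = ActivityInfo()
    info.setActivityName("Outlook")
    info.setSite("OAX")
    info.setForecaster("forecaster1")

    # Wrap the metadata and the serialized PGEN product XML in a request bean.
    req = StoreActivityRequest()
    req.setActivityInfo(info)
    req.setActivityXML("<Products>...</Products>")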
-## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'awt', - 'sql', - 'lang', - 'util' - ] - - diff --git a/pythonPackages/dynamicserialize/dstypes/java/awt/Point.py b/pythonPackages/dynamicserialize/dstypes/java/awt/Point.py deleted file mode 100644 index 7e037b0803..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/awt/Point.py +++ /dev/null @@ -1,56 +0,0 @@ -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. - -# -# Custom python class representing a java.awt.Point. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/31/10 njensen Initial Creation. -# -# -# - - -class Point(object): - - def __init__(self): - self.x = None - self.y = None - - def __str__(self): - return str((self.x, self.y)) - - def __repr__(self): - return self.__str__() - - def getX(self): - return self.x - - def getY(self): - return self.y - - def setX(self, x): - self.x = x - - def setY(self, y): - self.y = y - diff --git a/pythonPackages/dynamicserialize/dstypes/java/awt/__init__.py b/pythonPackages/dynamicserialize/dstypes/java/awt/__init__.py deleted file mode 100644 index 647ffcc65c..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/awt/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Package definition for java.awt -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/31/10 njensen Initial Creation. -# -# -# - - -__all__ = [ - 'Point', - ] - -from Point import Point diff --git a/pythonPackages/dynamicserialize/dstypes/java/lang/StackTraceElement.py b/pythonPackages/dynamicserialize/dstypes/java/lang/StackTraceElement.py deleted file mode 100644 index 65bf11a9d8..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/lang/StackTraceElement.py +++ /dev/null @@ -1,73 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated against equivalent DynamicSerialize Java class - -class StackTraceElement(object): - - def __init__(self): - self.declaringClass = None - self.methodName = None - self.fileName = None - self.lineNumber = 0 - - def getDeclaringClass(self): - return self.declaringClass - - def setDeclaringClass(self, clz): - self.declaringClass = clz - - def getMethodName(self): - return self.methodName - - def setMethodName(self, methodName): - self.methodName = methodName - - def getFileName(self): - return self.fileName - - def setFileName(self, filename): - self.fileName = filename - - def getLineNumber(self): - return self.lineNumber - - def setLineNumber(self, lineNumber): - self.lineNumber = int(lineNumber) - - def isNativeMethod(self): - return (self.lineNumber == -2) - - def __str__(self): - return self.__repr__() - - def __repr__(self): - msg = self.declaringClass + "." + self.methodName - if self.isNativeMethod(): - msg += "(Native Method)" - elif self.fileName is not None and self.lineNumber >= 0: - msg += "(" + self.fileName + ":" + str(self.lineNumber) + ")" - elif self.fileName is not None: - msg += "(" + self.fileName + ")" - else: - msg += "(Unknown Source)" - return msg - - diff --git a/pythonPackages/dynamicserialize/dstypes/java/lang/__init__.py b/pythonPackages/dynamicserialize/dstypes/java/lang/__init__.py deleted file mode 100644 index a2568362bc..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/lang/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'StackTraceElement' - ] - -from StackTraceElement import StackTraceElement - diff --git a/pythonPackages/dynamicserialize/dstypes/java/sql/Timestamp.py b/pythonPackages/dynamicserialize/dstypes/java/sql/Timestamp.py deleted file mode 100644 index 93058fdec4..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/sql/Timestamp.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
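The StackTraceElement class removed above mirrors Java's frame formatting rules: a lineNumber of -2 marks a native method, a file name plus a non-negative line number renders as Class.method(File:line), a file name alone renders without the line, and anything else falls back to (Unknown Source). A standalone restatement of that logic:

    # Standalone sketch of the Java-style frame formatting implemented above.
    def format_frame(declaring_class, method_name, file_name=None, line_number=0):
        msg = declaring_class + "." + method_name
        if line_number == -2:                      # Java's native-method marker
            return msg + "(Native Method)"
        if file_name is not None and line_number >= 0:
            return msg + "(" + file_name + ":" + str(line_number) + ")"
        if file_name is not None:
            return msg + "(" + file_name + ")"
        return msg + "(Unknown Source)"

    print(format_frame("java.lang.Thread", "sleep", line_number=-2))
    # java.lang.Thread.sleep(Native Method)
    print(format_frame("MyClass", "run", "MyClass.java", 42))
    # MyClass.run(MyClass.java:42)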
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated against equivalent DynamicSerialize Java class -# and then modified post-generation to add additional features to better -# match Java implementation. Unlike real timestamp, does not support nanos precision. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# ??/??/?? xxxxxxxx Initial Creation. -# 06/24/15 4480 dgilling implement based on Date class. -# Jun 23, 2016 5696 rjpeter Make String version match java. -# - -from dynamicserialize.dstypes.java.util import Date -from time import gmtime, strftime - -class Timestamp(Date): - - def __init__(self, time=None): - super(Timestamp, self).__init__(time) - - def __repr__(self): - return strftime("%Y-%m-%d %H:%M:%S.", gmtime(self.time/1000.0)) + '{:03d}'.format(self.time%1000) \ No newline at end of file diff --git a/pythonPackages/dynamicserialize/dstypes/java/sql/__init__.py b/pythonPackages/dynamicserialize/dstypes/java/sql/__init__.py deleted file mode 100644 index 6a39458053..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/sql/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'Timestamp' - ] - -from Timestamp import Timestamp diff --git a/pythonPackages/dynamicserialize/dstypes/java/util/Calendar.py b/pythonPackages/dynamicserialize/dstypes/java/util/Calendar.py deleted file mode 100644 index 89b13b5690..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/util/Calendar.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# Custom python class representing a java.util.GregorianCalendar. -# -# This is a stripped-down version of the class that only supports -# minimal methods for serialization. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/29/10 wldougher Initial Creation. 
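The Timestamp.__repr__ removed above treats self.time as epoch milliseconds: gmtime(self.time/1000.0) supplies the whole seconds to strftime, and self.time % 1000 is appended as a zero-padded millisecond field. A worked standalone example of the same arithmetic:

    from time import gmtime, strftime

    # 2014-07-16 00:00:00.250 UTC expressed as epoch milliseconds
    millis = 1405468800250
    text = strftime("%Y-%m-%d %H:%M:%S.", gmtime(millis / 1000.0)) \
        + '{:03d}'.format(millis % 1000)
    print(text)  # 2014-07-16 00:00:00.250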
-# -# -## -class Calendar(object): - """ -""" - def __init__(self): - self.time = None - - # Methods from the real class that we typically use - @staticmethod - def getInstance(): - return GregorianCalendar() - - def getTimeInMillis(self): - return self.time - - def setTimeInMillis(self, timeInMillis): - self.time = timeInMillis diff --git a/pythonPackages/dynamicserialize/dstypes/java/util/Date.py b/pythonPackages/dynamicserialize/dstypes/java/util/Date.py deleted file mode 100644 index 97879dde70..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/util/Date.py +++ /dev/null @@ -1,58 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 04/28/2015 4027 randerso Added optional construction parameter to set the time -# 06/26/2015 4480 dgilling Implement __eq__ and __hash__. -# -## - -from time import gmtime, strftime - - -class Date(object): - - def __init__(self, timeInMillis=None): - self.time = timeInMillis - - def getTime(self): - return self.time - - def setTime(self, timeInMillis): - self.time = timeInMillis - - def __str__(self): - return self.__repr__() - - def __repr__(self): - return strftime("%b %d %y %H:%M:%S GMT", gmtime(self.time/1000.0)) - - def __eq__(self, other): - return self.time == other.time - - def __ne__(self, other): - return not self.__eq__(other) - - def __hash__(self): - return hash(self.time) diff --git a/pythonPackages/dynamicserialize/dstypes/java/util/EnumSet.py b/pythonPackages/dynamicserialize/dstypes/java/util/EnumSet.py deleted file mode 100644 index e6af6716e9..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/util/EnumSet.py +++ /dev/null @@ -1,68 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# NOTE: Please do not ever use this class unless you really must. It is not -# designed to be directly accessed from client code. Hide its use from end- -# users as best as you can. -## - -## -# IMPLEMENTATION DETAILS: -# This class is an attempt to simulate Java's EnumSet class. 
When creating -# a new instance of this class, you must specify the name of the Java enum -# contained within as this is needed for serialization. Do not append the -# "dynamicserialize.dstypes" portion of the Python package to the supplied -# class name as Java won't know what class that is when deserializing. -# -# Since Python has no concept of enums, this class cannot provide the value- -# checking that Java class does. Be very sure that you add only valid enum -# values to your EnumSet. -## - -import collections - - -class EnumSet(collections.MutableSet): - - def __init__(self, enumClassName, iterable=[]): - self.__enumClassName = enumClassName - self.__set = set(iterable) - - def __repr__(self): - return "EnumSet({0})".format(list(self.__set)) - - def __len__(self): - return len(self.__set) - - def __contains__(self, key): - return key in self.__set - - def __iter__(self): - return iter(self.__set) - - def add(self, value): - self.__set.add(value) - - def discard(self, value): - self.__set.discard(value) - - def getEnumClass(self): - return self.__enumClassName diff --git a/pythonPackages/dynamicserialize/dstypes/java/util/GregorianCalendar.py b/pythonPackages/dynamicserialize/dstypes/java/util/GregorianCalendar.py deleted file mode 100644 index 10c43235c4..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/util/GregorianCalendar.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# Custom python class representing a java.util.GregorianCalendar. -# -# This is a stripped-down version of the class that only supports -# minimal methods for serialization. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/29/10 wldougher Initial Creation. -# -# -## -class GregorianCalendar(object): - """ -""" - def __init__(self): - self.time = None - - # Methods from the real class that we typically use - @staticmethod - def getInstance(): - return GregorianCalendar() - - def getTimeInMillis(self): - return self.time - - def setTimeInMillis(self, timeInMillis): - self.time = timeInMillis diff --git a/pythonPackages/dynamicserialize/dstypes/java/util/__init__.py b/pythonPackages/dynamicserialize/dstypes/java/util/__init__.py deleted file mode 100644 index 4483d930d0..0000000000 --- a/pythonPackages/dynamicserialize/dstypes/java/util/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
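Two portability notes on the classes removed above. EnumSet carries only the Java enum's class name plus an unvalidated set of strings, so, as its own comments warn, nothing stops a caller from adding a value that is not a real enum constant; it also subclasses collections.MutableSet, which later moved to collections.abc and was removed from collections in Python 3.10. Separately, Calendar.getInstance() above returns GregorianCalendar() without importing that name, so calling it would have raised a NameError. A usage sketch for EnumSet, assuming the class as it existed before this deletion; the enum class name is hypothetical:

    from dynamicserialize.dstypes.java.util import EnumSet

    # The class name must be the Java-side enum, without the
    # dynamicserialize.dstypes package prefix.
    hazards = EnumSet("gov.noaa.nws.SomeJavaEnum", ["TORNADO"])  # hypothetical enum
    hazards.add("HAIL")               # no value-checking happens here
    print("HAIL" in hazards)          # True: plain set semantics
    print(hazards.getEnumClass())     # the Java class name used when serializing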
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# File auto-generated by PythonFileGenerator - -__all__ = [ - 'Calendar', - 'Date', - 'EnumSet', - 'GregorianCalendar' - ] - -from Calendar import Calendar -from Date import Date -from EnumSet import EnumSet -from GregorianCalendar import GregorianCalendar - diff --git a/pythonPackages/msaslaps/acars/a2acars.csh b/pythonPackages/msaslaps/acars/a2acars.csh deleted file mode 100755 index 4c146dbfd6..0000000000 --- a/pythonPackages/msaslaps/acars/a2acars.csh +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# A script wrapper around a UEngine call that is meant to get all available -# acars data in the A-II database over a specified range of times. -# The data is output to stdout as ASCII. Each line is one time/platform -# combination. The individual data items are comma delimited. -# This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2acars.csh {p} yyyy-mm-dd hh:mm yyyy-mm-dd hh:mm -# -# The literal p flag is optional. The p flag means preserve -# the final version of the python submitted to the UEngine instead of -# cleaning it up. The path to the finalized python is /tmp/a2acarsNNNNN.py -# where NNNNN is a unix process id. This flag does nothing when using the -# DataAccessFramework. -# -# This outputs the following set of variables for each line: -# -# tailNumber,asciitime,latitude,longitude,acarsReceivingStation, -# pressure,flightPhase,rollQuality,temperature,windDir,windSpeed, -# relHumidity,mixRatio -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/23/2014 3392 nanbowle Initial modification. Passes parameters straight to DAF script instead of using sed. -# -# -# - - -set rmpy = yes -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' ) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -# -# Locate python stub that we will modify to create the final python logic. 
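Each line a2acars.csh emits is a comma-delimited record in exactly the field order documented above, with the literal 1e37 standing in for missing numeric values. A consumer sketch; parse_acars_line is a hypothetical helper and the sample line is illustrative:

    # Hypothetical parser for one line of a2acars.csh output.
    FIELDS = ["tailNumber", "asciitime", "latitude", "longitude",
              "acarsReceivingStation", "pressure", "flightPhase", "rollQuality",
              "temperature", "windDir", "windSpeed", "relHumidity", "mixRatio"]

    def parse_acars_line(line):
        record = dict(zip(FIELDS, line.rstrip("\n").split(",")))
        # 1e37 is the scripts' missing-data sentinel for numeric fields
        return dict((k, None if v == "1e37" else v) for k, v in record.items())

    print(parse_acars_line("N12345,2014-07-16_00:00:00,41.1234,-96.5678,DEN,"
                           "25000,5,0,-35.5,270,45.3,80,0.25"))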
-# -if ( -e ./a2acarsStub.py ) then - set stubpy = ./a2acarsStub.py -else if ( -e $mydir/a2acarsStub.py ) then - set stubpy = $mydir/a2acarsStub.py -else if ( -e $FXA_HOME/src/dm/bufr/acars/a2acarsStub.py ) then - set stubpy = $FXA_HOME/src/dm/bufr/acars/a2acarsStub.py -else if ( -e $FXA_HOME/bin/a2acarsStub.py ) then - set stubpy = $FXA_HOME/bin/a2acarsStub.py -else - bash -c "echo could not find a2acarsStub.py 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" -else - # - # Set up the environment we need to run the UEngine. - # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - set specpy = /tmp/a2acars${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy - - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | grep -v Response - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/acars/a2acarsStub.py b/pythonPackages/msaslaps/acars/a2acarsStub.py deleted file mode 100644 index 66504ea3a2..0000000000 --- a/pythonPackages/msaslaps/acars/a2acarsStub.py +++ /dev/null @@ -1,184 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Gets all available acars data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. Each line is one time/platform -# combination. The individual data items are comma delimited. -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/23/2014 3392 nabowle Initial modification. Replaces UEngine with DAF. -# 07/28/2014 3392 nabowle Strip tail and receiver to match original formatting. 
-# 09/04/2014 3405 nabowle NO_DATA and date to string extracted to a2dafcommon -# -# - - -import a2dafcommon -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - return parser.parse_args() - -def main(): - user_args = get_args() - - # Set the host in the DataAcessLayer if supplied - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - - req = DataAccessLayer.newDataRequest("acars") - req.setParameters("tailNumber", "receiver", "pressure", "flightPhase", - "rollAngleQuality", "temp", "windDirection", "windSpeed", - "humidity", "mixingRatio", "icing") - - - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if len(geometries) == 0: -# print("No data available.") - return - - for geoData in geometries: - mytail = geoData.getString("tailNumber") - if a2dafcommon.is_no_data(mytail): - mytail = "" - else: - mytail = mytail.strip() - - mytime = geoData.getDataTime() - if mytime == None: - continue - #2014-07-16 00:00:00 (0) => 2014-07-16_00:00:00 - mytime = a2dafcommon.datatime_to_string(mytime) - - geo = geoData.getGeometry() - if geo == None: - continue - mylon = geo.x - mylat = geo.y - if a2dafcommon.is_no_data(mylat) or a2dafcommon.is_no_data(mylon): - continue - mylat = "%.4f"%float(mylat) - mylon = "%.4f"%float(mylon) - - myrec = geoData.getString("receiver") - if a2dafcommon.is_no_data(myrec): - myrec = "" - else: - myrec = myrec.strip() - - mypres = geoData.getNumber("pressure") - if a2dafcommon.is_no_data(mypres): - mypres = "1e37" - else : - mypres = "%.0f"%mypres - - myphs = geoData.getString("flightPhase") - if a2dafcommon.is_no_data(myphs): - myphs = "7" - else : - myphs = "%d"%int(myphs) - - myrol = geoData.getString("rollAngleQuality") - if a2dafcommon.is_no_data(myrol): - myrol = "3" - else : - myrol = "%d"%int(myrol) - - mytemp = geoData.getNumber("temp") - if a2dafcommon.is_no_data(mytemp): - mytemp = "1e37" - else : - mytemp = "%.1f"%mytemp - - mydir = geoData.getString("windDirection") - if a2dafcommon.is_no_data(mydir): - mydir = "1e37" - else : - mydir = "%d"%int(mydir) - - myspd = geoData.getNumber("windSpeed") - if a2dafcommon.is_no_data(myspd): - myspd = "1e37" - else : - myspd = "%.1f"%myspd - - myhum = geoData.getNumber("humidity") - if a2dafcommon.is_no_data(myhum): - myhum = "1e37" - else : - myhum = "%.0f"%myhum - - mymix = geoData.getNumber("mixingRatio") - if a2dafcommon.is_no_data(mymix): - mymix = "1e37" - else : - mymix = "%.2f"%mymix - -# Icing was commented out of the uengine version -# myicg = geoData.getString("icing") -# if a2dafcommon.is_no_data(myicg): -# myicg = "1e37" -# else : -# myicg = 
"%d"%int(myicg) - - msg = mytail + "," + mytime + "," + mylat + "," + mylon + "," + \ - myrec + "," + mypres + "," + myphs + "," + myrol + "," + \ - mytemp + "," + mydir + "," + myspd + "," + myhum + "," + mymix - print msg - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/aireppirep/a2aircraft.csh b/pythonPackages/msaslaps/aireppirep/a2aircraft.csh deleted file mode 100644 index 53fcb36034..0000000000 --- a/pythonPackages/msaslaps/aireppirep/a2aircraft.csh +++ /dev/null @@ -1,228 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# A script wrapper around a UEngine call that is meant to get all available -# maritime data in the A-II database over a specified range of times. -# The data is output to stdout as ASCII. Each line is one time/platform -# combination. The individual data items are comma delimited. -# This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2aircraft.csh {p} {t} yyyy-mm-dd hh:mm yyyy-mm-dd hh:mm -# -# The literal p flag is optional. The p flag means preserve -# the final version of the python submitted to the UEngine instead of -# cleaning it up. The path to the finalized python is /tmp/a2aircraftNNNNN.py -# where NNNNN is a unix process id. This flag does nothing when using the DAF -# version. -# -# The optional literal t mean change some of the ascii phenomena descriptions -# for pireps into codes suitable for D-2D format aircraft data files. -# -# This outputs the following set of variables for each line: -# -# lat|lon,asciitime,flightLevel,reportType,aircraftType, -# temperature,windDir,windSpeed,visibility, -# FlightWeather*,FlightHazard*,FlightConditions*,WeatherGroup*, -# numCloudLayers,cloudBaseHeight,cloudTopHeight,cloudAmount, -# numIcingLayers,heightBaseIcing,heightTopIcing,typeIcing,intensityOfIcing, -# numTurbulenceLayers,heightBaseTurb,heightTopTurb, -# typeTurbulence,intensityOfTurbulence -# -# Asterisk (*) means variable that does not directly correspond to a -# variable in the D-2D format aircraft data files. -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/02/2014 3405 nabowle Initial modification to better handle daf. -# -# -set rmpy = yes -set typcod = qwertyuiop -if ( "$1" == "t" ) then - set typcod = "typecode = 'no'" - shift -endif -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -set repType = "" -set rt = `echo $1 | tr 'a-z' 'A-Z' | grep '^.IREP$'` -if ( "$rt" != "" ) then - set repType = $rt - shift -endif -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -if ( "$1" == "t" ) then - set typcod = "typecode = 'no'" - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. 
-# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' ) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -# -# Locate python stub that we will modify to create the final python logic. -# -if ( -e ./a2airepStub.py ) then - set stubpy = ./a2airepStub.py -else if ( -e $mydir/a2airepStub.py ) then - set stubpy = $mydir/a2airepStub.py -else if ( -e $FXA_HOME/src/dm/bufr/acars/a2airepStub.py ) then - set stubpy = $FXA_HOME/src/dm/bufr/acars/a2airepStub.py -else if ( -e $FXA_HOME/bin/a2airepStub.py ) then - set stubpy = $FXA_HOME/bin/a2airepStub.py -else - bash -c "echo could not find a2airepStub.py 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set method = "daf" - if ( "$repType" != "PIREP" ) then - /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" --match-legacy -# /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" - endif -else - # - # Set up the environment we need to run the UEngine. - # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - # - set specpy = /tmp/a2airep${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy - if ( "$repType" != "PIREP" ) then - set here = `pwd` - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | grep -v Response - cd $here - endif - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - -# -# Locate python stub that we will modify to create the final python logic. -# -if ( -e ./a2pirepStub.py ) then - set stubpy = ./a2pirepStub.py -else if ( -e $mydir/a2pirepStub.py ) then - set stubpy = $mydir/a2pirepStub.py -else if ( -e $FXA_HOME/src/dm/bufr/acars/a2pirepStub.py ) then - set stubpy = $FXA_HOME/src/dm/bufr/acars/a2pirepStub.py -else if ( -e $FXA_HOME/bin/a2pirepStub.py ) then - set stubpy = $FXA_HOME/bin/a2pirepStub.py -else - bash -c "echo could not find a2pirepStub.py 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set method = "daf" - if ( "$repType" != "AIREP" ) then - # By default, match the legacy output. - set pyargs = "--match-legacy" -# set pyargs = "" - if ( "$typcod" != "qwertyuiop" ) then - set pyargs = "$pyargs -t" - endif - - /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" $pyargs - endif -else - if ( "$method" == "daf" ) then - # - # Set up the environment we need to run the UEngine. 
- # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - endif - - # - set specpy = /tmp/a2pirep${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" | \ - grep -v "$typcod" > $specpy - cd $UE_BIN_PATH - if ( "$repType" != "AIREP" ) then - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | grep -v Response - endif - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/aireppirep/a2airepStub.py b/pythonPackages/msaslaps/aireppirep/a2airepStub.py deleted file mode 100644 index 44f9168dae..0000000000 --- a/pythonPackages/msaslaps/aireppirep/a2airepStub.py +++ /dev/null @@ -1,168 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Gets all available pirep data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. Each line is one record. -# The individual data items are comma delimited. -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/25/2014 3405 nabowle Initial modification. Replaces UEngine with DAF. 
-# -# - - -import a2dafcommon -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - parser.add_argument("--match-legacy", action="store_true", dest="match", - help="If set, the legacy script output will be matched.", - default=False) - return parser.parse_args() - - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("airep") - req.setParameters("id", "flightLevel", "temp", "windDirection", "windSpeed", - "flightWeather", "flightHazard", "flightConditions") - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "No data available." - return - - msg = ""; - - geometries.sort(key=lambda geo: str(geo.getDataTime())) - for geoData in geometries : - mytime = geoData.getDataTime() - if not mytime: - continue - mytime = a2dafcommon.datatime_to_string(mytime) - - geo = geoData.getGeometry() - if not geo: - continue - - mylon = geo.x - mylat = geo.y - if a2dafcommon.is_no_data(mylat) or a2dafcommon.is_no_data(mylon): - continue - mylat = "%.4f"%mylat - mylon = "%.4f"%mylon - - myflvl = geoData.getNumber("flightLevel") - if a2dafcommon.is_no_data(myflvl) : - myflvl = "1e37" - else : - myflvl = "%d"%myflvl - -# Temp is not stored. -# mytemp = geoData.getString("temp") -# if a2dafcommon.is_no_data(mytemp) : -# mytemp = "1e37" -# else : -# mytemp = "%.1f"%float(mytemp) - mytemp = "1e37" - -# Wind Direction is not stored. -# mydir = geoData.getString("windDirection") -# if a2dafcommon.is_no_data(mydir) : -# mydir = "1e37" -# else : -# mydir = "%d"%int(mydir) - mydir = "1e37" - - myspd = geoData.getNumber("windSpeed") - if a2dafcommon.is_no_data(myspd) : - myspd = "1e37" - else : - myspd = "%.1f"%myspd - - myfwx = geoData.getNumber("flightWeather") - if a2dafcommon.is_no_data(myfwx) : - myfwx = "-1" - else : - myfwx = "%d"%myfwx - - myhaz = geoData.getNumber("flightHazard") - if a2dafcommon.is_no_data(myhaz) : - myhaz = "-1" - else : - myhaz = "%d"%myhaz - - mycond = geoData.getNumber("flightConditions") - if a2dafcommon.is_no_data(mycond) : - mycond = "-1" - else : - mycond = "%d"%mycond - - if user_args.match: -# Wind Speed not returned to legacy script.
- myspd = "1e37" - - msg += mylat + "|" + mylon + "," + mytime + "," + myflvl + ",AIREP,," + \ - mytemp + "," + mydir + "," + myspd + ",1e37," + \ - myfwx + "," + myhaz + "," + mycond + ",,0,,,,0,,,,,0,,,,\n" - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/aireppirep/a2pirepStub.py b/pythonPackages/msaslaps/aireppirep/a2pirepStub.py deleted file mode 100644 index 6b87ae8326..0000000000 --- a/pythonPackages/msaslaps/aireppirep/a2pirepStub.py +++ /dev/null @@ -1,385 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Gets all available pirep data in the A-II database over a specified time -# range. The data is output to stdout as ASCII. Each line is one record. The -# individual data items are comma delimited. -# -# The legacy script does not retrieve any values not stored in the postgres db. -# To compensate for this in side-by-side comparison, a --match-legacy flag is -# provided that will ignore these fields. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/25/2014 3405 nabowle Initial modification. Replaces UEngine with DAF. 
-# -# - - -import a2dafcommon -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - - - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - parser.add_argument("--match-legacy", action="store_true", dest="match", - help="If set, the legacy script output will be matched.", - default=False) - parser.add_argument("-t", action="store_true", dest="typecode", - help="If set, more type information is displayed.", - default=False) - return parser.parse_args() - - -def main(): - MULTI_DIM_PARAMS = set(["hazardType", - "turbType", "turbBaseHeight", "turbTopHeight", - "iceType", "iceBaseHeight", "iceTopHeight", - "skyCover1", "skyCover2", "skyBaseHeight", "skyTopHeight" - ]) - - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("pirep") - req.setParameters('id', 'flightLevel', 'temp', 'windDirection', 'windSpeed', - 'horzVisibility', 'aircraftType', 'weatherGroup') - req.getParameters().extend(MULTI_DIM_PARAMS) - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "No data available." - return - - typecode = user_args.typecode - match = user_args.match - msg = "" - layerData = [] - combinedGeos = [] - for geoData in geometries : -# The DAF returns multi-dimensional parameters as separate results before -# the base result that contains the single-dimensional parameters. -# Because of the separation of parameters and known ordering of result -# types, we can easily figure out what each result is and correlate the -# ancillary data with the base data. - if set(geoData.getParameters()) & MULTI_DIM_PARAMS : - layerData.append(geoData) - continue - - combinedGeos.append({"base":geoData, "layers":layerData}) - layerData = [] - - combinedGeos.sort(key=lambda geoMap: str(geoMap['base'].getDataTime())) - - for geoMap in combinedGeos : - geoData = geoMap['base'] - layerData = geoMap['layers'] - - mytime = geoData.getDataTime() - if not mytime: - continue - mytime = a2dafcommon.datatime_to_string(mytime) - - geo = geoData.getGeometry() - if not geo: - continue - - mylon = geo.x - mylat = geo.y - if a2dafcommon.is_no_data(mylat) or a2dafcommon.is_no_data(mylon) : - continue - mylat = "%.4f"%mylat - mylon = "%.4f"%mylon - - myflvl = geoData.getNumber("flightLevel") - if a2dafcommon.is_no_data(myflvl) : - myflvl = "1e37" - else : - myflvl = "%d"%myflvl - -# Temp is not stored. -# mytemp = geoData.getNumber("temp") -# if a2dafcommon.is_no_data(mytemp) : -# mytemp = "1e37" -# else : -# mytemp = "%.1f"%float(mytemp) - mytemp = "1e37" - -# Wind Direction is not stored.
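The inline comment above records the DAF behavior this stub depends on: multi-dimensional (per-layer) parameters arrive as separate geometry results ahead of the base result holding the single-dimensional parameters, so layer results are buffered and attached to the next base record. The same grouping step restated generically:

    # Generic restatement of the buffering logic above: layer results queue up
    # until the next base result arrives, then attach to it.
    def correlate(results, layer_params):
        combined, pending = [], []
        for result in results:
            if set(result.getParameters()) & layer_params:
                pending.append(result)    # ancillary per-layer result
            else:
                combined.append({"base": result, "layers": pending})
                pending = []
        return combined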
-# mydir = geoData.getString("windDirection") -# if a2dafcommon.is_no_data(mydir) : -# mydir = "1e37" -# else : -# mydir = "%d"%int(mydir) - mydir = "1e37" - - myspd = geoData.getNumber("windSpeed") - if a2dafcommon.is_no_data(myspd) : - myspd = "1e37" - else : - myspd = "%.1f"%myspd - - myvis = geoData.getNumber("horzVisibility") - if a2dafcommon.is_no_data(myvis) : - myvis = "1e37" - else : - myvis = "%.1f"%myvis - - mycraft = geoData.getString("aircraftType") - if a2dafcommon.is_no_data(mycraft) : - mycraft = "" - - mywx = geoData.getString("weatherGroup") - if a2dafcommon.is_no_data(mywx) : - mywx = "" - - - cc = 0 - cldBas = "" - cldTop = "" - cldVal = "" - ii = 0 - icgBas = "" - icgTop = "" - icgTyp = "" - icgVal = "" - tt = 0 - trbBas = "" - trbTop = "" - trbTyp = "" - trbVal = "" - - if match : -# Speed, Horizontal Visibility, aircraft type, and weather group are -# not returned to the legacy script. Flightlevel of -9999 is output -# as such instead of being replaced by "1e37" - myspd = "1e37" - myvis = "-9999998.0" - mycraft = "" - mywx = "" - if geoData.getNumber("flightLevel") == -9999: - myflvl = "-9999" - else : - for pld in layerData: - sep = "" - ltyp = pld.getString("hazardType") - if a2dafcommon.is_no_data(ltyp) : - continue - - fval = pld.getString("skyCover1") - if fval == "None" : - fval = "" - - sval = pld.getString("skyCover2") - if sval == "None" : - sval = "" - if ltyp == "CLOUD" : - if fval == "TOP" : - fval = "" - if sval == "TOP" : - sval = "" - if sval != "" : - fval += "-"+sval - if typecode : - if fval == "CLR" : - fval = "0" - elif fval == "OVC" : - fval = "8" - elif fval == "SCT" : - fval = "11" - elif fval == "BKN" : - fval = "12" - elif fval == "FEW" : - fval = "13" - else : - continue - if cldBas != "" : - sep = "|" - - base = pld.getNumber("skyBaseHeight") - if a2dafcommon.is_no_data(base) or base == 99999 : - base = "1e37" - else : - base = "%.0f"%float(base) - - top = pld.getNumber("skyTopHeight") - if a2dafcommon.is_no_data(top) or top == 99999 : - top = "1e37" - else : - top = "%.0f"%float(top) - - cldBas += sep+base - cldTop += sep+top - cldVal += sep+fval - cc += 1 - elif ltyp == "ICING" : - dtyp = pld.getString("iceType") - if a2dafcommon.is_no_data(dtyp) : - dtyp = "" - - if sval != "" : - fval += "-"+sval - if icgBas != "" : - sep = "|" - if typecode : - if dtyp == "RIME" : - dtyp = "1" - elif dtyp == "CLR" : - dtyp = "2" - elif dtyp == "MXD" : - dtyp = "3" - else : - dtyp = "-9999" - if fval == "NEG" : - fval = "0"; - elif fval == "TRACE" : - fval = "1" - elif fval == "TRACE-LGT" : - fval = "2" - elif fval == "LGT" : - fval = "3" - elif fval == "LGT-MOD" : - fval = "4" - elif fval == "MOD" : - fval = "5" - elif fval == "MOD-SEV" : - fval = "7" - elif fval == "SEV" : - fval = "8" - else : - fval = "-9999" - if fval == "-9999" and dtyp == "-9999" : - continue - - base = pld.getNumber("iceBaseHeight") - if a2dafcommon.is_no_data(base) or base == 99999 : - base = "1e37" - else : - base = "%.0f"%float(base) - - top = pld.getNumber("iceTopHeight") - if a2dafcommon.is_no_data(top) or top == 99999 : - top = "1e37" - else : - top = "%.0f"%float(top) - - icgBas += sep+base - icgTop += sep+top - icgTyp += sep+dtyp - icgVal += sep+fval - ii += 1 - elif ltyp == "TURBC" : - dtyp = pld.getString("turbType") - if a2dafcommon.is_no_data(dtyp) : - dtyp = "" - - if sval != "" : - fval += "-"+sval - if typecode : - if dtyp == "CAT" : - dtyp = "1" - elif dtyp == "CHOP" : - dtyp = "2" - else : - dtyp = "-9999" - if fval == "NEG" : - fval = "0"; - elif fval == "LGT" 
: - fval = "2" - elif fval == "LGT-MOD" : - fval = "3" - elif fval == "MOD" : - fval = "4" - elif fval == "MOD-SEV" : - fval = "5" - elif fval == "SEV" : - fval = "6" - elif fval == "EXTRM" : - fval = "8" - else : - fval = "-9999" - if fval == "-9999" and dtyp == "-9999" : - continue - if trbBas != "" : - sep = "|" - - base = pld.getNumber("turbBaseHeight") - if a2dafcommon.is_no_data(base) or base == 99999 : - base = "1e37" - else : - base = "%.0f"%float(base) - - top = pld.getNumber("turbTopHeight") - if a2dafcommon.is_no_data(top) or top == 99999 : - top = "1e37" - else : - top = "%.0f"%float(top) - - trbBas += sep+base - trbTop += sep+top - trbTyp += sep+dtyp - trbVal += sep+fval - tt += 1 - - msg += mylat + "|" + mylon + "," + mytime + "," + myflvl + ",PIREP," + \ - mycraft + "," + mytemp + "," + mydir + "," + myspd + "," + \ - myvis + ",-1,-1,-1," + mywx + "," + \ - str(cc) + "," + cldBas + "," + cldTop + "," + cldVal + "," + \ - str(ii) + "," + icgBas + "," + icgTop + "," + \ - icgTyp + "," + icgVal + "," + \ - str(tt) + "," + trbBas + "," + trbTop + "," + \ - trbTyp + "," + trbVal + "\n" - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/common/UEngine.cshsrc b/pythonPackages/msaslaps/common/UEngine.cshsrc deleted file mode 100755 index 59c98da46f..0000000000 --- a/pythonPackages/msaslaps/common/UEngine.cshsrc +++ /dev/null @@ -1,28 +0,0 @@ -# -# Set up the environment we need to run the UEngine. -# -#set release = "" -#if ( -e /awips2/edex/conf/banner.txt ) then -# set release = `grep Version /awips2/edex/conf/banner.txt | tr '*' ' ' | \ -# sed 's/ *$//g' | sed 's/^.* //g'` -#endif -# -while (1) - if ( $?INSTALL_BASE ) then - if ( -d $INSTALL_BASE/python ) break - endif - setenv INSTALL_BASE /awips2 - break -end -if ( -e $INSTALL_BASE/bin/uengine ) then - setenv UE_BIN_PATH $INSTALL_BASE/bin -else - setenv UE_BIN_PATH $INSTALL_BASE/fxa/bin -endif -if ( ! $?PRE_ADE_PATH ) then - setenv PRE_ADE_PATH ${PATH} -endif -setenv JAVA_HOME $INSTALL_BASE/java -setenv LD_LIBRARY_PATH /usr/lib -setenv PYTHONPATH .:$INSTALL_BASE/bin/src -setenv PATH ${JAVA_HOME}/bin:${UE_BIN_PATH}:${PRE_ADE_PATH} diff --git a/pythonPackages/msaslaps/common/a2dafcommon.py b/pythonPackages/msaslaps/common/a2dafcommon.py deleted file mode 100644 index ee3a7b0a84..0000000000 --- a/pythonPackages/msaslaps/common/a2dafcommon.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Common functionality for MSAS/LAPS scripts using the Data Acess Framework. -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/02/2014 3405 nabowle Initial creation. 
-# -# - -NO_DATA = [None, "None", -9999, -9999.0, "-9999", "-9999.0", ""] - -def is_no_data(val): - return val in NO_DATA - - -def datatime_to_string(datatime): - return str(datatime)[0:19].replace(" ","_") diff --git a/pythonPackages/msaslaps/common/gtasUtil b/pythonPackages/msaslaps/common/gtasUtil deleted file mode 100644 index aa73c56fcf..0000000000 Binary files a/pythonPackages/msaslaps/common/gtasUtil and /dev/null differ diff --git a/pythonPackages/msaslaps/common/msgCodeSeds.txt b/pythonPackages/msaslaps/common/msgCodeSeds.txt deleted file mode 100644 index 9ea71eba41..0000000000 --- a/pythonPackages/msaslaps/common/msgCodeSeds.txt +++ /dev/null @@ -1,108 +0,0 @@ -# 35 36 37 38 41 50 51 57 65 66 67 81 85 86 90 -s/^0 *$/0|STImotion/g -s/^2 *$/2|GSM/g -s/^3 *$/3|PRR/g -s/^8 *$/8|PTL/g -s/^12 *$/12|CPM/g -s/^14 *$/14|CCM/g -#s/^16 *$/16|Z 1km 3bit/g -#s/^17 *$/17|Z 2km 3bit/g -#s/^18 *$/18|Z 4km 3bit/g -s/^19 *$/19|Z 1km 4bit/g -s/^20 *$/20|Z 2km 4bit/g -#s/^21 *$/21|Z 4km 4bit/g -#s/^22 *$/22|V 0.25km 3bit/g -#s/^23 *$/23|V 0.5km 3bit/g -#s/^24 *$/24|V 1km 3bit/g -s/^25 *$/25|V 0.25km 4bit/g -s/^26 *$/26|V 0.5km 4bit/g -s/^27 *$/27|V 1km 4bit/g -s/^28 *$/28|SW 0.25km 3bit/g -s/^29 *$/29|SW 0.5km 3bit/g -s/^30 *$/30|SW 1km 3bit/g -s/^31 *$/31|USP 2km 4bit/g -s/^32 *$/32|DHR 1km 8bit/g -s/^33 *$/33|HSR 1km 4bit/g -s/^34 *$/34|CFC 1km 3bit/g -#s/^35 *$/35|CZ 1km 3bit/g -#s/^36 *$/36|CZ 4km 3bit/g -s/^37 *$/37|CZ 1km 4bit/g -s/^38 *$/38|CZ 4km 4bit/g -s/^41 *$/41|ET 4km 4bit/g -s/^48 *$/48|VWP/g -s/^50 *$/50|RCS 1km 4bit/g -s/^51 *$/51|VCS 0.5km 4bit/g -#s/^55 *$/55|SRR 0.5km 4bit/g -s/^56 *$/56|SRM 1km 4bit/g -s/^57 *$/57|VIL 4km 4bit/g -s/^58 *$/58|STI/g -s/^59 *$/59|HI/g -s/^60 *$/60|M/g -s/^61 *$/61|TVS/g -s/^62 *$/62|SS/g -s/^65 *$/65|LRM 4km 3bit/g -s/^66 *$/66|LRM 4km 3bit/g -s/^67 *$/67|APR 4km 3bit/g -s/^74 *$/74|RCM/g -s/^75 *$/75|FTM/g -s/^78 *$/78|OHP 2km 4bit/g -s/^79 *$/79|THP 2km 4bit/g -s/^80 *$/80|STP 2km 4bit/g -s/^81 *$/81|DPA 4km 8bit/g -s/^82 *$/82|SPD 40km 3bit/g -s/^84 *$/84|VAD/g -s/^85 *$/85|RCS 1km 3bit/g -s/^86 *$/86|VCS 0.5km 3bit/g -s/^90 *$/90|LRM 4km 3bit/g -s/^93 *$/93|DBV 1km 8bit/g -s/^94 *$/94|Z 1km 8bit/g -s/^99 *$/99|V 0.25km 8bit/g -s/^100 *$/100|VSDT/g -s/^101 *$/101|STIT/g -s/^102 *$/102|HIT/g -s/^103 *$/103|MT/g -s/^104 *$/104|TVST/g -s/^107 *$/107|OHPT/g -s/^108 *$/108|THPT/g -s/^109 *$/109|STPT/g -s/^132 *$/132|CLR 1km 4bit/g -s/^133 *$/133|CLD 1km 4bit/g -s/^134 *$/134|DVL 1km 8bit/g -s/^135 *$/135|EET 1km 8bit/g -s/^136 *$/136|SO/g -s/^137 *$/137|ULR 1km 4bit/g -s/^138 *$/138|STP 2km 8bit/g -s/^139 *$/139|MRU/g -s/^140 *$/140|GFM/g -s/^141 *$/141|MD/g -s/^143 *$/143|TRU/g -s/^144 *$/144|OSW 2km 4bit/g -s/^145 *$/145|OSD 2km 4bit/g -s/^146 *$/146|SSW 2km 4bit/g -s/^147 *$/147|SSD 2km 4bit/g -s/^149 *$/149|DMD/g -s/^150 *$/150|USW 2km 4bit/g -s/^151 *$/151|USD 2km 4bit/g -s/^152 *$/152|RSS/g -s/^153 *$/153|Z 0.25km 0.5Deg 8bit/g -s/^154 *$/154|V 0.25km 0.5Deg 8bit/g -s/^155 *$/155|SW 0.25km 0.5Deg 8bit/g -#s/^158 *$/158|ZDR 1km 4bit/g -s/^159 *$/159|ZDR 0.25km 8bit/g -#s/^160 *$/160|CC 1km 4bit/g -s/^161 *$/161|CC 0.25km 8bit/g -#s/^162 *$/162|KDP 1km 4bit/g -s/^163 *$/163|KDP 0.25km 8bit/g -#s/^164 *$/164|HC 1km 4bit/g -s/^165 *$/165|HC 0.25km 8bit/g -s/^166 *$/166|ML/g -s/^169 *$/169|OHA 2km 4bit/g -s/^170 *$/170|DAA 0.25km 8bit/g -s/^171 *$/171|STA 2km 4bit/g -s/^172 *$/172|STA 0.25km 8bit/g -s/^173 *$/173|DUA 0.25km 8bit/g -s/^174 *$/174|DOD 0.25km 8bit/g -s/^175 *$/175|DSD 0.25km 8bit/g -s/^176 *$/176|DPR 0.25km gen/g -s/^177 *$/177|HHC 
0.25km 8bit/g -s/^202 *$/202|SCC/g diff --git a/pythonPackages/msaslaps/common/wmoToNameRaob.txt b/pythonPackages/msaslaps/common/wmoToNameRaob.txt deleted file mode 100755 index 425a31b3ca..0000000000 --- a/pythonPackages/msaslaps/common/wmoToNameRaob.txt +++ /dev/null @@ -1,290 +0,0 @@ -s/^04202,/BGTL,/g -s/^08594,/GVAC,/g -s/^21824,/RUTS,/g -s/^25563,/UHMA,/g -s/^25594,/UHMD,/g -s/^25913,/UHMM,/g -s/^31329,/UHPB,/g -s/^31735,/RUHB,/g -s/^31960,/UHWW,/g -s/^32150,/RUSH,/g -s/^32540,/UHPP,/g -s/^46692,/RCSS,/g -s/^46697,/RCGM,/g -s/^46747,/RCMJ,/g -s/^47058,/ZKKK,/g -s/^47110,/RKSS,/g -s/^47122,/RKSO,/g -s/^47138,/RKTH,/g -s/^47158,/RKJJ,/g -s/^47187,/RKPM,/g -s/^47401,/RJCW,/g -s/^47412,/RJCO,/g -s/^47580,/RJSM,/g -s/^47582,/RJSK,/g -s/^47590,/RJSS,/g -s/^47678,/RJTH,/g -s/^47681,/RJNH,/g -s/^47808,/RJFF,/g -s/^47827,/RJFK,/g -s/^47881,/RJOS,/g -s/^47918,/ROIG,/g -s/^47931,/RODN,/g -s/^47936,/ROAH,/g -s/^47945,/ROMD,/g -s/^47971,/RJAO,/g -s/^47981,/RJAW,/g -s/^47991,/RJAM,/g -s/^50745,/ZYQQ,/g -s/^50953,/ZYHB,/g -s/^54161,/ZYCC,/g -s/^54342,/ZYYY,/g -s/^54662,/ZYTL,/g -s/^54857,/ZSQD,/g -s/^58457,/ZSHC,/g -s/^61641,/GOOY,/g -s/^61902,/FHAW,/g -s/^61967,/DJDG,/g -s/^70026,/PABR,/g -s/^70086,/PABA,/g -s/^70133,/PAOT,/g -s/^70200,/PAOM,/g -s/^70219,/PABE,/g -s/^70231,/PAMC,/g -s/^70261,/PAFA,/g -s/^70273,/PAFC,/g -s/^70308,/PASN,/g -s/^70316,/PACB,/g -s/^70326,/PAKN,/g -s/^70350,/PADQ,/g -s/^70361,/PAYA,/g -s/^70398,/PANT,/g -s/^70414,/PASY,/g -s/^71043,/CYVQ,/g -s/^71051,/CWSY,/g -s/^71081,/CYUX,/g -s/^71082,/CYLT,/g -s/^71109,/CYZT,/g -s/^71119,/CWEG,/g -s/^71120,/CYOD,/g -s/^71121,/CYED,/g -s/^71123,/CYEG,/g -s/^71203,/CYLW,/g -s/^71396,/CYHZ,/g -s/^71399,/CWOS,/g -s/^71600,/CWSA,/g -s/^71603,/CYQI,/g -s/^71624,/CYYZ,/g -s/^71627,/CYUL,/g -s/^71701,/CYCX,/g -s/^71722,/CWMW,/g -s/^71799,/CYYJ,/g -s/^71801,/CYYT,/g -s/^71807,/CWAR,/g -s/^71811,/CYZV,/g -s/^71815,/CYJT,/g -s/^71816,/CYYR,/g -s/^71823,/CYAH,/g -s/^71836,/CYMO,/g -s/^71845,/CWPL,/g -s/^71853,/CWLO,/g -s/^71867,/CYQD,/g -s/^71879,/CYXD,/g -s/^71896,/CYXS,/g -s/^71906,/CYVP,/g -s/^71907,/CYPH,/g -s/^71909,/CYFB,/g -s/^71913,/CYYQ,/g -s/^71915,/CYZS,/g -s/^71917,/CWEU,/g -s/^71924,/CYRB,/g -s/^71925,/CYCB,/g -s/^71926,/CYBK,/g -s/^71934,/CYSM,/g -s/^71945,/CYYE,/g -s/^71957,/CYEV,/g -s/^71964,/CYXY,/g -s/^72201,/KKEY,/g -s/^72202,/KMFL,/g -s/^72206,/KJAX,/g -s/^72208,/KCHS,/g -s/^72210,/KTBW,/g -s/^72214,/KTAE,/g -s/^72215,/KFFC,/g -s/^72221,/KVPS,/g -s/^72230,/KBMX,/g -s/^72233,/KLIX,/g -s/^72235,/KJAN,/g -s/^72240,/KLCH,/g -s/^72248,/KSHV,/g -s/^72249,/KFWD,/g -s/^72250,/KBRO,/g -s/^72251,/KCRP,/g -s/^72257,/KHLR,/g -s/^72261,/KDRT,/g -s/^72265,/KMAF,/g -s/^72269,/KWSD,/g -s/^72274,/KTWC,/g -s/^72280,/KYUM,/g -s/^72291,/KNSI,/g -s/^72293,/KNKX,/g -s/^72305,/KMHX,/g -s/^72317,/KGSO,/g -s/^72318,/KRNK,/g -s/^72327,/KOHX,/g -s/^72340,/KLZK,/g -s/^72357,/KOUN,/g -s/^72363,/KAMA,/g -s/^72364,/KEPZ,/g -s/^72365,/KABQ,/g -s/^72376,/KFGZ,/g -s/^72381,/KEDW,/g -s/^72386,/KLAS,/g -s/^72387,/KDRA,/g -s/^72391,/KNTD,/g -s/^72393,/KVBG,/g -s/^72402,/KWAL,/g -s/^72403,/KLWX,/g -s/^72426,/KILN,/g -s/^72440,/KSGF,/g -s/^72451,/KDDC,/g -s/^72456,/KTOP,/g -s/^72469,/KDNR,/g -s/^72476,/KGJT,/g -s/^72489,/KREV,/g -s/^72493,/KOAK,/g -s/^72501,/KOKX,/g -s/^72518,/KALY,/g -s/^72520,/KPBZ,/g -s/^72528,/KBUF,/g -s/^72558,/KOAX,/g -s/^72562,/KLBF,/g -s/^72572,/KSLC,/g -s/^72582,/KLKN,/g -s/^72597,/KMFR,/g -s/^72632,/KDTX,/g -s/^72634,/KAPX,/g -s/^72645,/KGRB,/g -s/^72649,/KMPX,/g -s/^72659,/KABR,/g -s/^72662,/KUNR,/g -s/^72672,/KRIW,/g 
-s/^72681,/KBOI,/g -s/^72694,/KSLE,/g -s/^72712,/KCAR,/g -s/^72747,/KINL,/g -s/^72764,/KBIS,/g -s/^72768,/KGGW,/g -s/^72776,/KTFX,/g -s/^72786,/KOTX,/g -s/^72797,/KUIL,/g -s/^74002,/KAPG,/g -s/^74004,/K1Y7,/g -s/^74005,/K1Y8,/g -s/^74006,/K1Y9,/g -s/^74389,/KGYX,/g -s/^74455,/KDVN,/g -s/^74494,/KCHH,/g -s/^74560,/KILX,/g -s/^74626,/KPSR,/g -s/^74794,/KXMR,/g -s/^76151,/MILS,/g -s/^76225,/MMCU,/g -s/^76256,/MMGM,/g -s/^76342,/MMOV,/g -s/^76361,/MLMM,/g -s/^76382,/MTRC,/g -s/^76394,/MMMY,/g -s/^76405,/MLAP,/g -s/^76458,/MMMZ,/g -s/^76491,/MCVM,/g -s/^76499,/MSOT,/g -s/^76548,/MTAM,/g -s/^76612,/MMGL,/g -s/^76644,/MMMD,/g -s/^76649,/MCZM,/g -s/^76654,/MMZO,/g -s/^76665,/MMOR,/g -s/^76679,/MMMX,/g -s/^76680,/MMDF,/g -s/^76692,/MMVR,/g -s/^76723,/MISC,/g -s/^76749,/MCME,/g -s/^76805,/MACA,/g -s/^76830,/MIZT,/g -s/^76840,/MARR,/g -s/^76904,/MTAP,/g -s/^78016,/TXKF,/g -s/^78073,/MYNN,/g -s/^78367,/MUGM,/g -s/^78384,/MKCG,/g -s/^78397,/MKJP,/g -s/^78486,/MDSD,/g -s/^78526,/TJSJ,/g -s/^78583,/MZBZ,/g -s/^78806,/MPHO,/g -s/^78808,/MABK,/g -s/^78866,/MACM,/g -s/^78897,/TFFR,/g -s/^78954,/MKPB,/g -s/^78970,/MKPP,/g -s/^78988,/MACC,/g -s/^80001,/MCSP,/g -s/^80035,/SKRH,/g -s/^81405,/SOCA,/g -s/^91066,/PMDY,/g -s/^91165,/PHLI,/g -s/^91212,/PGUM,/g -s/^91217,/PGAC,/g -s/^91218,/PGUA,/g -s/^91245,/PWAK,/g -s/^91250,/PKMA,/g -s/^91275,/PJON,/g -s/^91285,/PHTO,/g -s/^91334,/PTKK,/g -s/^91348,/PTPN,/g -s/^91366,/PKWA,/g -s/^91376,/PMKJ,/g -s/^91408,/PTRO,/g -s/^91413,/PTYA,/g -s/^91517,/AGGH,/g -s/^91558,/NHHH,/g -s/^91592,/NWWN,/g -s/^91610,/NGTA,/g -s/^91643,/NGFU,/g -s/^91680,/NFFN,/g -s/^91700,/PCIS,/g -s/^91765,/NSTU,/g -s/^91843,/NCRG,/g -s/^91925,/NTMN,/g -s/^91938,/NTTT,/g -s/^91944,/NTTO,/g -s/^93997,/NZRN,/g -s/^94027,/AYLA,/g -s/^94035,/AYPY,/g -s/^94120,/YPDN,/g -s/^94203,/YPBR,/g -s/^94212,/AHHC,/g -s/^94238,/YDTC,/g -s/^94287,/YBCS,/g -s/^94294,/YBTL,/g -s/^94326,/YBAS,/g -s/^94332,/YBMA,/g -s/^94346,/YBLR,/g -s/^94367,/YBMK,/g -s/^94380,/YBGL,/g -s/^94510,/YBCV,/g -s/^94578,/YBBN,/g -s/^94637,/YPKG,/g -s/^94646,/YPFT,/g -s/^94653,/YPCD,/g -s/^94659,/YPWR,/g -s/^94672,/YPAD,/g -s/^94693,/YMMI,/g -s/^94750,/YSNW,/g -s/^94767,/YSSY,/g -s/^94776,/YSWM,/g -s/^94791,/YSCH,/g -s/^94995,/ASLH,/g -s/^94996,/YSNF,/g -s/^98327,/RPMK,/g diff --git a/pythonPackages/msaslaps/grid/a2invmdl.csh b/pythonPackages/msaslaps/grid/a2invmdl.csh deleted file mode 100644 index b7f9df8413..0000000000 --- a/pythonPackages/msaslaps/grid/a2invmdl.csh +++ /dev/null @@ -1,254 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# A script wrapper that is meant to get inventories of gridded data -# from the A-II database. The data is output to stdout as ASCII. -# This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2invmdl.csh p srcid ctyp lval1 lval2 varAbrev -# p - A literal p. 
(optional) -# srcid - Unique alphanumeric name for gridded data source. -# ctyp - Level type id (optional) -# lval1 - First level value (optional) -# lval2 - Second level value (optional) -# varAbrev - Variable abreviation. (optional) -# -# Legacy usage, not supported in all cases: -# -# a2invmdl.csh p gproc ggid ctyp lval1 lval2 varAbrev -# -# p - A literal p. (optional) -# gproc - GRIB process number (can be multiple comma delimited) -# ggid - GRIB grid number -# ctyp - Level type id (optional) -# lval1 - First level value (optional) -# lval2 - Second level value (optional) -# varAbrev - Variable abreviation. (optional) -# -# With the new unified GRIB decoder, instead of gproc ggid, it is best -# to supply the srcid, which is like ETA or GFS254; e.g. the directory -# under /awips2/edex/data/hdf5/grid where the data is stored. -# -# Note that now for sources with no tag in the associated -# entry, the ggid must be supplied as a quoted empty string. -# -# With no arguments after the grid number, returns a list of variables for -# the data source specified by the process and grid id. With only a variable, -# returns information for the list of planes for that variable. With more -# arguments, returns a list of times for that variable and plane. -# -# Level value arguments are meaningless without the level type argument, -# but it is meaningful to provide only a level type. -# -# If the only argument after the process and grid is a literal at sign ('@') -# then what is returned is a list of all times for which there is data -# available for the given process/grid combination. -# -# If the only argument after the process and grid is a literal plus sign (+), -# then what will be returned will be a level inventory for all variables. -# -# The literal p option means preserve the final version of the python -# submitted to the UEngine instead of cleaning it up. The path to the -# finalized python is /tmp/a2rdmdlNNNNN.py where NNNNN is a unix process id. -# -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-22 3599 nabowle Initial modification. Changed to properly call DAF version. -# -set rmpy = yes -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' ) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( -x $mydir/$0 ) then - set me = $mydir/$0 -else - set me = $0 -endif -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -# -# Primarily base stuff on source name, but try to use the old interface. -# -set sss = "$1" -shift -set ids = `echo $sss | tr ',' ' '` -echo "$ids[1]" | grep '^[0-9][0-9]*$' >& /dev/null -if ( $status == 0 || $#ids > 1 ) then - set mroot = /awips2/edex/data/utility/edex_static/base/grib/models - set ids = `echo $ids | tr ' ' '\n' | grep -v '^ *$' | \ - sed 's#^##g' | sed 's#$#<|#g'` - set ids = `echo ${ids}quertyuiop | sed 's/ *//g'` - set ggg = "$1" - shift - if ( "$ggg" == "" ) then - set mmm = `find $mroot -name '*xml' ! 
-name '*ECMWF*' \ - -exec cat '{}' \; | sed 's|-->|~|g' | \ - tr '\t' ' ' | sed 's/ *//g' | sed 's||~|g' | \ - tr '\n' ' ' | tr '~' '\n' | grep -E "$ids" | \ - grep -v "" | sed 's/^.*//g' | \ - cut '-d<' -f1 | sort -u` - else - set mmm = `find $mroot -name '*xml' -exec cat '{}' \; | \ - sed 's|-->|~|g' | \ - tr '\t' ' ' | sed 's/ *//g' | sed 's||~|g' | \ - tr '\n' ' ' | tr '~' '\n' | grep -E "$ids" | \ - grep "$ggg<" | sed 's/^.*//g' | \ - cut '-d<' -f1 | sort -u` - endif - if ( $#mmm != 1 ) then - echo "$mmm" - echo "Could not look up model name based on $sss '$ggg'" - exit 1 - endif - set sss = $mmm -endif - -# -# Locate python stub that we will modify to create the final python logic. -# -if ( -e ./a2invmdlStub.py ) then - set stubpy = ./a2invmdlStub.py -else if ( -e $mydir/a2invmdlStub.py ) then - set stubpy = $mydir/a2invmdlStub.py -else if ( -e $FXA_HOME/src/dm/grid/a2invmdlStub.py ) then - set stubpy = $FXA_HOME/src/dm/grid/a2invmdlStub.py -else if ( -e $FXA_HOME/bin/a2invmdlStub.py ) then - set stubpy = $FXA_HOME/bin/a2invmdlStub.py -else - bash -c "echo could not find a2invmdlStub.py 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - if ( "$*" == "+" ) then - /awips2/python/bin/python $stubpy --mode fieldplane --srcId $sss - else if ( "$1" == "" ) then - /awips2/python/bin/python $stubpy --mode field --srcId $sss - else if ( "$1" == "@" ) then - /awips2/python/bin/python $stubpy --mode time --srcId $sss - else if ( "$2" == "" ) then - /awips2/python/bin/python $stubpy --mode plane --srcId $sss --varAbrev $1 - else if ( "$3" == "" ) then - /awips2/python/bin/python $stubpy --mode time --srcId $sss --lvlName $1 --varAbrev $2 - else if ( "$4" == "" ) then - /awips2/python/bin/python $stubpy --mode time --srcId $sss --lvlName $1 --lvlOne $2 --varAbrev $3 - else - /awips2/python/bin/python $stubpy --mode time --srcId $sss --lvlName $1 --lvlOne $2 --lvlTwo $3 --varAbrev $4 - endif -else - # - if ( "$*" == "+" ) then - set varList = `$me $sss` - foreach onevar ( $varList ) - echo ${onevar}: - $me $sss $onevar | tr '\n' ' ' - echo - end - exit - endif - - # - # Set up the environment we need to run the UEngine. - # - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - # - # Modify the text of special tags in stub to create finalized script. 
- # - set specpy = /tmp/a2invmdl${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - set plane = no - if ( "$1" == "" ) then - cat $stubpy | sed "s/MMMMM/field/g" | sed "s/SSSSS/$sss/g" | \ - sed 's/^.*TTTTT.*$//g' | sed 's/^.*LLLLL.*$//g' | \ - sed 's/^.*22222.*$//g' | sed 's/^.*VVVVV.*$//g' >> $specpy - else if ( "$1" == "@" ) then - cat $stubpy | sed "s/MMMMM/time/g" | sed "s/SSSSS/$sss/g" | \ - sed 's/^.*TTTTT.*$//g' | sed 's/^.*LLLLL.*$//g' | \ - sed 's/^.*22222.*$//g' | sed 's/^.*VVVVV.*$//g' >> $specpy - else if ( "$2" == "" ) then - set plane = yes - cat $stubpy | sed "s/MMMMM/plane/g" | sed "s/SSSSS/$sss/g" | \ - sed 's/^.*TTTTT.*$//g' | sed 's/^.*LLLLL.*$//g' | \ - sed 's/^.*22222.*$//g' | sed "s/VVVVV/$1/g" >> $specpy - else if ( "$3" == "" ) then - cat $stubpy | sed "s/MMMMM/time/g" | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed 's/^.*LLLLL.*$//g' | \ - sed 's/^.*22222.*$//g' | sed "s/VVVVV/$2/g" >> $specpy - else if ( "$4" == "" ) then - cat $stubpy | sed "s/MMMMM/time/g" | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed "s/LLLLL/$2/g"| \ - sed 's/^.*22222.*$//g' | sed "s/VVVVV/$3/g" >> $specpy - else - cat $stubpy | sed "s/MMMMM/time/g" | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed "s/LLLLL/$2/g" | \ - sed "s/22222/$3/g" | sed "s/VVVVV/$4/g" >> $specpy - endif - if ( "$plane" == "no" ) then - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) |& grep attributes | cut '-d"' -f4 - else - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) |& sed 's|.*.*|@|g' | \ - grep -E 'attributes|@' | cut '-d"' -f4 | tr '\n' ' ' | tr '@' '\n' | \ - sed 's/ -999999.0//g' | sed 's/^ *//g' | sed 's/ *$//g' - endif - #if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null - # -endif - diff --git a/pythonPackages/msaslaps/grid/a2invmdlStub.py b/pythonPackages/msaslaps/grid/a2invmdlStub.py deleted file mode 100644 index f7b1cf04bd..0000000000 --- a/pythonPackages/msaslaps/grid/a2invmdlStub.py +++ /dev/null @@ -1,124 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets inventories of gridded data from the A-II database. The data is output -# to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-22 3599 nabowle Initial modification. Convert to DAF. -# 2014-11-17 3599 nabowle Fix call to get_args(). 
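# The wrapper above maps its positional arguments onto the stub's flags; the
# lines below sketch the equivalent direct DAF calls for each --mode (GFS254
# and T are example values, not defaults of the script):

from awips.dataaccess import DataAccessLayer

req = DataAccessLayer.newDataRequest("grid")
req.addIdentifier("info.datasetId", "GFS254")        # --srcId
print(DataAccessLayer.getAvailableParameters(req))   # --mode field
req.setParameters("T")                               # --varAbrev
print(DataAccessLayer.getAvailableLevels(req))       # --mode plane
print(DataAccessLayer.getAvailableTimes(req))        # --mode time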
-# - -import argparse -import numpy -import sys - -from awips.dataaccess import DataAccessLayer - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", metavar="hostname") - parser.add_argument("--srcId", action="store", dest="srcId", - help="Unique alphanumeric name for gridded data source", - metavar="srcId") - parser.add_argument("--varAbrev", action="store", dest="varAbrev", - help="Variable abreviation", metavar="varAbrev") - parser.add_argument("--lvlOne", action="store", dest="lvlOne", - help="Level One value", metavar="lvlOne", type=float) - parser.add_argument("--lvlTwo", action="store", dest="lvlTwo", - help="Level Two value", metavar="lvlTwo", type=float) - parser.add_argument("--lvlName", action="store", dest="lvlName", - help="Master level name", metavar="lvlName") - parser.add_argument("--mode", action="store", dest="mode", default="time", - help="Mode - time, plane, field, or fieldplane") - return parser.parse_args() - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - req = DataAccessLayer.newDataRequest("grid") - - if not user_args.srcId: - print >> sys.stderr, "srcId not provided" - return - req.addIdentifier("info.datasetId", user_args.srcId) - - if user_args.varAbrev: - req.setParameters(user_args.varAbrev) - if user_args.lvlName is not None: - req.addIdentifier("info.level.masterLevel.name", user_args.lvlName) - if user_args.lvlOne is not None: - req.addIdentifier("info.level.levelonevalue", numpy.float64(user_args.lvlOne)) - if user_args.lvlTwo is not None: - req.addIdentifier("info.level.leveltwovalue", numpy.float64(user_args.lvlTwo)) - - mode = user_args.mode - if mode not in ["time", "plane", "field", "fieldplane"]: - print >> sys.stderr, "mode must be one of time, plane, field, or fieldplane." - return - - msg = ""; - if mode == "time": - times = DataAccessLayer.getAvailableTimes(req) - for time in times: - timeStr = str(time) - if "--" in timeStr: - timeStr = timeStr[0:-22] + ".0" + timeStr[-22:-1] + ".0" + timeStr[-1] - msg += timeStr[0:19] + ".0" + timeStr[19:] + "\n" - elif mode == "plane": - levels = DataAccessLayer.getAvailableLevels(req) - for level in levels: - msg += level_to_string(level) + "\n" - elif mode == "field": - params = DataAccessLayer.getAvailableParameters(req) - msg = "\n".join(params) - else: #fieldplane - params = DataAccessLayer.getAvailableParameters(req) - for param in params: - msg += param + ":\n" - req.setParameters(param) - levels = DataAccessLayer.getAvailableLevels(req) - if levels: - levelStr = [] - for level in levels: - levelStr.append(level_to_string(level)) - msg += " ".join(levelStr) + " \n" - - print msg.strip("\n") - - -def level_to_string(level): - name = level.getMasterLevel().getName() - lvlOne = str(level.getLevelonevalue()) - lvlTwo = str(level.getLeveltwovalue()) - msg = name + " " + lvlOne - if lvlTwo not in ["None", "-999999.0"]: - msg += " " + lvlTwo - return msg - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/grid/a2rdmdl.csh b/pythonPackages/msaslaps/grid/a2rdmdl.csh deleted file mode 100644 index 34b79a180e..0000000000 --- a/pythonPackages/msaslaps/grid/a2rdmdl.csh +++ /dev/null @@ -1,305 +0,0 @@ -#!/bin/csh -f -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-16 3598 nabowle Initial modification. Changed to handle DataAccessLayer. -# -# -# A script wrapper that is meant to get a single slab of gridded data -# from the A-II database. The data is output to stdout as ASCII. -# This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2rdmdl.csh p n x srcid ctyp lval1 lval2 varAbrev date hour fcst -# -# p - A literal p. (optional) -# n - A literal n. (optional) -# x - A literal x. (optional) -# srcid - Unique alphanumeric name for gridded data source. -# ctyp - Level type id (optional) -# lval1 - First level value (optional) -# lval2 - Second level value (optional) -# varAbrev - Variable abreviation. -# date - Date of model run time as yyyy-mm-dd -# hour - Hour of model run time as hh -# fcst - Forecast time in hours -# -# Legacy usage, not supported in all cases: -# -# a2rdmdl.csh p n x gproc ggid ctyp lval1 lval2 varAbrev date hour fcst -# -# p - A literal p. (optional) -# n - A literal n. (optional) -# x - A literal x. (optional) -# gproc - GRIB process number (can be multiple comma delimited) -# ggid - GRIB grid number -# ctyp - Level type id (optional) -# lval1 - First level value (optional) -# lval2 - Second level value (optional) -# varAbrev - Variable abreviation. -# date - Date of model run time as yyyy-mm-dd -# hour - Hour of model run time as hh -# fcst - Forecast time in hours -# -# With the new unified GRIB decoder, instead of gproc ggid, it is best -# to supply the srcid, which is like ETA or GFS254; e.g. the directory -# under /awips2/edex/data/hdf5/grid where the data is stored. -# -# Note that now for sources with no tag in the associated -# entry, the ggid must be supplied as a quoted empty string. -# -# The literal p option means preserve the final version of the python -# submitted to the UEngine instead of cleaning it up. The path to the -# finalized python is /tmp/a2rdmdlNNNNN.py where NNNNN is a unix process id. -# The literal n option means the first line of output is the dimension of -# the grid returned. The literal x option means return the data in xdr -# format; in this case the xdr data begins immediately after a tab-linefeed. -# -# Process any one character options. -# -set rmpy = yes -set dimStr = dimStr -set specpyName = a2rdmdlStub -while (1) - if ( "$1" == "p" ) then - set rmpy = no - else if ( "$1" == "n" ) then - set dimStr = qwertyuiop - else if ( "$1" == "x" ) then - set specpyName = a2rdmdlXdr - set dimStr = qwertyuiopx - else - break - endif - shift -end -# -# Identify directory this script is in, will be one of the directories we -# search for other files we need. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' 
) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -# -# Special case of topo, we will hard code it. -# -echo " $* " | grep " Topo " >& /dev/null -if ( $status == 0 ) then - set outfile = `find . -maxdepth 1 \ - -name "$1.Topo" -o -name "$1.$2.Topo" -o -name "*,$1.$2.Topo" -o \ - -name "$1,*.$2.Topo" -o -name "*,$1,*.$2.Topo"` >& /dev/null - if ( ( $#outfile != 1 ) && ( $mydir != $PWD ) ) then - set outfile = `find $mydir -maxdepth 1 \ - -name "$1.Topo" -o -name "$1.$2.Topo" -o -name "*,$1.$2.Topo" -o \ - -name "$1,*.$2.Topo" -o -name "*,$1,*.$2.Topo"` >& /dev/null - endif - if ( ( $#outfile != 1 ) && ( -d $FXA_HOME/data ) ) then - set outfile = `find $FXA_HOME/data -maxdepth 1 \ - -name "$1.Topo" -o -name "$1.$2.Topo" -o -name "*,$1.$2.Topo" -o \ - -name "$1,*.$2.Topo" -o -name "*,$1,*.$2.Topo"` >& /dev/null - endif - if ( $#outfile != 1 ) then - bash -c "echo No flat file available with topo for $1 $2 1>&2" - exit - endif - if ( "$dimStr" == "qwertyuiop" ) then - set nnn = `cat $outfile | wc` - set ny = $nnn[1] - @ nx = $nnn[2] / $ny - echo $nx $ny - endif - cat $outfile - exit -endif -# -# Primarily base stuff on source name, but try to use the old interface. -# -set sss = "$1" -shift -set ids = `echo $sss | tr ',' ' '` -echo "$ids[1]" | grep '^[0-9][0-9]*$' >& /dev/null -if ( $status == 0 || $#ids > 1 ) then - set mroot = /awips2/edex/data/utility/edex_static/base/grib/models - set ids = `echo $ids | tr ' ' '\n' | grep -v '^ *$' | \ - sed 's#^##g' | sed 's#$#<|#g'` - set ids = `echo ${ids}quertyuiop | sed 's/ *//g'` - set ggg = "$1" - shift - if ( "$ggg" == "" ) then - set mmm = `find $mroot -name '*xml' ! -name '*ECMWF*' \ - -exec cat '{}' \; | sed 's|-->|~|g' | \ - tr '\t' ' ' | sed 's/ *//g' | sed 's||~|g' | \ - tr '\n' ' ' | tr '~' '\n' | grep -E "$ids" | \ - grep -v "" | sed 's/^.*//g' | \ - cut '-d<' -f1 | sort -u` - else - set mmm = `find $mroot -name '*xml' -exec cat '{}' \; | \ - sed 's|-->|~|g' | \ - tr '\t' ' ' | sed 's/ *//g' | sed 's||~|g' | \ - tr '\n' ' ' | tr '~' '\n' | grep -E "$ids" | \ - grep "$ggg<" | sed 's/^.*//g' | \ - cut '-d<' -f1 | sort -u` - endif - if ( $#mmm != 1 ) then - echo "$mmm" - echo "Could not look up model name based on $sss '$ggg'" - exit 1 - endif - set sss = $mmm -endif - -@ i = $#argv - 3 -set vvvvv = $argv[$i] - -# -# Locate python stub that we will modify to create the final python logic. -# -if ( -e ./${specpyName}.py ) then - set stubpy = ./${specpyName}.py -else if ( -e $mydir/${specpyName}.py ) then - set stubpy = $mydir/${specpyName}.py -else if ( -e $FXA_HOME/src/dm/grid/${specpyName}.py ) then - set stubpy = $FXA_HOME/src/dm/grid/${specpyName}.py -else if ( -e $FXA_HOME/bin/${specpyName}.py ) then - set stubpy = $FXA_HOME/bin/${specpyName}.py -else - bash -c "echo could not find ${specpyName}.py 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. 
-# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set userArgs = "--srcId $sss --varAbrev $vvvvv" - if ( ( "$dimStr" != "dimStr" ) && ( "$specpyName" != "a2rdmdlXdr" ) ) then - set userArgs = "$userArgs --dimLine" - endif - - if ( "$5" == "" ) then - set userArgs = "$userArgs --date $2 --hour $3 --fcst $4" - else if ( "$6" == "" ) then - set userArgs = "$userArgs --lvlName $1 --date $3 --hour $4 --fcst $5" - else if ( "$7" == "" ) then - set userArgs = "$userArgs --lvlName $1 --lvlOne $2 --date $4 --hour $5 --fcst $6" - else - set userArgs = "$userArgs --lvlName $1 --lvlOne $2 --lvlTwo $3 --date $5 --hour $6 --fcst $7" - endif - python $stubpy $userArgs -else - # - # Set up the environment we need to run the UEngine. - # - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - set rrrrr = "" - set aaa = `echo $vvvvv | grep -E '^CP|^TP|^LgSP' | tr 'A-z' ' '` - set aaa = `echo $aaa` - # - # Special case of formatting the times for accumulated precip - # - if ( "$aaa" != "" ) then - if ( -x ./gtasUtil ) then - set gtasUtil = ./gtasUtil - else if ( -x $mydir/gtasUtil ) then - set gtasUtil = $mydir/gtasUtil - else if ( -x $FXA_HOME/src/dm/point/gtasUtil ) then - set gtasUtil = $FXA_HOME/src/dm/point/gtasUtil - else if ( -x $FXA_HOME/bin/gtasUtil ) then - set gtasUtil = $FXA_HOME/bin/gtasUtil - else - bash -c "echo could not find gtasUtil executable 1>&2" - exit - endif - @ i++ - set t = `echo $* | cut '-d ' -f${i}-$#argv` - @ fff = $t[3] * 3600 - set vt = `$gtasUtil = $t[1] ${t[2]}:00:00.0 $fff` - @ aaa = $aaa * 3600 - set bt = `$gtasUtil = $vt -$aaa` - set rrrrr = "[$bt--$vt]" - endif - - # - # Modify the text of special tags in stub to create finalized script. 
- # - set specpy = /tmp/a2rdmdl${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - if ( "$5" == "" ) then - cat $stubpy | grep -v $dimStr | sed "s/SSSSS/$sss/g" | \ - sed 's/^.*TTTTT.*$//g' | sed 's/^.*LLLLL.*$//g' | \ - sed 's/^.*22222.*$//g' | sed "s/VVVVV/$1/g" | sed "s/DDDDD/$2/g" | \ - sed "s/HHHHH/$3/g" | sed "s/FFFFF/$4/g" | sed "s/RRRRR/$rrrrr/g" >> \ - $specpy - else if ( "$6" == "" ) then - cat $stubpy | grep -v $dimStr | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed 's/^.*LLLLL.*$//g' | sed 's/^.*22222.*$//g' | \ - sed "s/VVVVV/$2/g" | sed "s/DDDDD/$3/g" | \ - sed "s/HHHHH/$4/g" | sed "s/FFFFF/$5/g" | sed "s/RRRRR/$rrrrr/g" >> \ - $specpy - else if ( "$7" == "" ) then - cat $stubpy | grep -v $dimStr | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed "s/LLLLL/$2/g" | sed 's/^.*22222.*$//g' | \ - sed "s/VVVVV/$3/g" | sed "s/DDDDD/$4/g" | \ - sed "s/HHHHH/$5/g" | sed "s/FFFFF/$6/g" | sed "s/RRRRR/$rrrrr/g" >> \ - $specpy - else - cat $stubpy | grep -v $dimStr | sed "s/SSSSS/$sss/g" | \ - sed "s/TTTTT/$1/g" | sed "s/LLLLL/$2/g" | sed "s/22222/$3/g" | \ - sed "s/VVVVV/$4/g" | sed "s/DDDDD/$5/g" | \ - sed "s/HHHHH/$6/g" | sed "s/FFFFF/$7/g" | sed "s/RRRRR/$rrrrr/g" >> \ - $specpy - endif - # - # Submit the temporary python script stripping any xml stuff, then remove it - # - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | grep -v Response - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif diff --git a/pythonPackages/msaslaps/grid/a2rdmdlCommon.py b/pythonPackages/msaslaps/grid/a2rdmdlCommon.py deleted file mode 100644 index 117f9090fb..0000000000 --- a/pythonPackages/msaslaps/grid/a2rdmdlCommon.py +++ /dev/null @@ -1,134 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-15 3598 nabowle Initial creation. Extracted common code from a2rdmdl*.py -# 2014-12-15 3598 nabowle Fix retrieval when fcst is 0. 
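# Worked example of the accumulation-window arithmetic in do_request() below:
# for a variable abbreviation ending in "hr", the trailing number is the window
# length, anchored so the window ends at run time plus forecast hour. The
# inputs here (TP24hr, 2014-10-15 12Z, fcst 6) are illustrative only:

import re
from datetime import datetime, timedelta

varAbrev = "TP24hr"
runTime = datetime(2014, 10, 15, 12)
fcst = 6  # hours

if varAbrev.endswith("hr"):
    matches = re.findall(r'\d+', varAbrev)
    hourRange = int(matches[-1])                       # 24
    endDate = runTime + timedelta(hours=fcst)          # 2014-10-15 18Z
    beginDate = endDate - timedelta(hours=hourRange)   # 2014-10-14 18Z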
-# 2016-11-10 5900 bsteffen Correct grid shape, simplify -# - -import argparse -import numpy - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_parser(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", metavar="hostname") - parser.add_argument("--date", action="store", dest="date", - help="The date in YYYY-MM-DD", metavar="date") - parser.add_argument("--hour", action="store", dest="hour", - help="The hour in HH", metavar="hour") - parser.add_argument("--fcst", action="store", dest="fcst", - help="The forecast time in hours", metavar="fcst") - parser.add_argument("--srcId", action="store", dest="srcId", - help="Unique alphanumeric name for gridded data source", - metavar="srcId") - parser.add_argument("--varAbrev", action="store", dest="varAbrev", - help="Variable abreviation", metavar="varAbrev") - parser.add_argument("--lvlOne", action="store", dest="lvlOne", - help="Level One value", metavar="lvlOne", type=float) - parser.add_argument("--lvlTwo", action="store", dest="lvlTwo", - help="Level Two value", metavar="lvlTwo", type=float) - parser.add_argument("--lvlName", action="store", dest="lvlName", - help="Master level name", metavar="lvlName") - return parser - -def do_request(user_args): - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - srcId = user_args.srcId - varAbrev = user_args.varAbrev - - if not srcId or not varAbrev: - raise Exception("srcId or varAbrev not provided") - return - - date = user_args.date - hour = user_args.hour - fcst = user_args.fcst - - if not date or not hour or not fcst: - raise Exception("date, hour, or fcst not provided") - return - - dt = datetime.strptime( str(date) + " " + str(hour) + ":00:00.0", "%Y-%m-%d %H:%M:%S.%f") - -# check for and build date range if necessary - daterange = None - if varAbrev.endswith("hr"): - import re - matches = re.findall(r'\d+', varAbrev) - if matches: - from datetime import timedelta - hourRange = int(matches[-1]) - endDate = dt + timedelta(hours=int(fcst)) - beginDate = endDate - timedelta(hours=hourRange) - daterange = TimeRange(beginDate, endDate) - -# convert hours to seconds because DataTime does the reverse - time = DataTime(dt, int(fcst)*3600, daterange) - - - req = DataAccessLayer.newDataRequest("grid") - req.setParameters(varAbrev) - req.addIdentifier("info.datasetId", srcId) - -# To handle partial level matches, use identifiers instead of a Level. 
- if user_args.lvlName is not None: - req.addIdentifier("info.level.masterLevel.name", user_args.lvlName) - if user_args.lvlOne is not None: - req.addIdentifier("info.level.levelonevalue", numpy.float64(user_args.lvlOne)) - if user_args.lvlTwo is not None: - req.addIdentifier("info.level.leveltwovalue", numpy.float64(user_args.lvlTwo)) - - times = [time] - -# If fcst is 0, also query for times with FCST_USED flag - if fcst == '0': - time = DataTime(dt, int(fcst)*3600, daterange) - time.utilityFlags.add("FCST_USED") - times.append(time) - - grids = DataAccessLayer.getGridData(req, times) - - if not grids: -# print "Data not available" - raise Exception("") - - grid = grids[0] - rawData = grid.getRawData() - - yLen, xLen = rawData.shape - - return grid, xLen, yLen - - diff --git a/pythonPackages/msaslaps/grid/a2rdmdlStub.py b/pythonPackages/msaslaps/grid/a2rdmdlStub.py deleted file mode 100644 index 6eedb578aa..0000000000 --- a/pythonPackages/msaslaps/grid/a2rdmdlStub.py +++ /dev/null @@ -1,160 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-14 3598 nabowle Initial modification. Changed to use DataAccessLayer. 
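# Both readers below delegate to a2rdmdlCommon.do_request(), which takes an
# argparse namespace; a sketch of driving it programmatically (the srcId,
# variable, level, and time values are examples, and matching data must exist
# on the EDEX server for the call to succeed):

import a2rdmdlCommon

args = a2rdmdlCommon.get_parser().parse_args(
    ["--srcId", "GFS254", "--varAbrev", "T",
     "--lvlName", "MB", "--lvlOne", "500",
     "--date", "2014-10-15", "--hour", "12", "--fcst", "6"])
grid, xLen, yLen = a2rdmdlCommon.do_request(args)
print(grid.getRawData().shape)  # (yLen, xLen)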
-# 2016-11-10 5900 bsteffen Correct grid shape, simplify -# - -import a2rdmdlCommon -import argparse -import numpy -import sys - -def get_args(): - parser = a2rdmdlCommon.get_parser() - parser.add_argument("--dimLine", action="store_true", dest="dimLine", - help="Output dimensions", default=False) - return parser.parse_args() - -def main(): - user_args = get_args() - - try: - grid, xLen, yLen = a2rdmdlCommon.do_request(user_args) - except Exception as e: - print >> sys.stderr, str(e) - return - - rawData = grid.getRawData() - - msg = "" - if user_args.dimLine: - msg += str(xLen) + " " + str(yLen) + "\n" - - j = yLen - 1 - - while j>=0 : - i = 0 - while i<xLen : - k = rawData[j, i] - a = abs(k) - if a>=999998 : - msg += "1e37 " - elif a<0.00005 : - msg += "%g"%k + " " - elif a<0.0009 : - if round(k,8) == round(k,4) : - msg += "%.4f"%k + " " - elif round(k,8) == round(k,5) : - msg += "%.5f"%k + " " - elif round(k,8) == round(k,6) : - msg += "%.6f"%k + " " - elif round(k,8) == round(k,7) : - msg += "%.7f"%k + " " - else : - msg += "%.8f"%k + " " - elif a<0.009 : - if round(k,7) == round(k,3) : - msg += "%.3f"%k + " " - elif round(k,7) == round(k,4) : - msg += "%.4f"%k + " " - elif round(k,7) == round(k,5) : - msg += "%.5f"%k + " " - elif round(k,7) == round(k,6) : - msg += "%.6f"%k + " " - else : - msg += "%.7f"%k + " " - elif a<0.09 : - if round(k,6) == round(k,2) : - msg += "%.2f"%k + " " - elif round(k,6) == round(k,3) : - msg += "%.3f"%k + " " - elif round(k,6) == round(k,4) : - msg += "%.4f"%k + " " - elif round(k,6) == round(k,5) : - msg += "%.5f"%k + " " - else : - msg += "%.6f"%k + " " - elif a<0.9 : - if round(k,5) == round(k,1) : - msg += "%.1f"%k + " " - elif round(k,5) == round(k,2) : - msg += "%.2f"%k + " " - elif round(k,5) == round(k,3) : - msg += "%.3f"%k + " " - elif round(k,5) == round(k,4) : - msg += "%.4f"%k + " " - else : - msg += "%.5f"%k + " " - elif a<9 : - if round(k,4) == round(k,0) : - msg += "%.0f"%k + " " - elif round(k,4) == round(k,1) : - msg += "%.1f"%k + " " - elif round(k,4) == round(k,2) : - msg += "%.2f"%k + " " - elif round(k,4) == round(k,3) : - msg += "%.3f"%k + " " - else : - msg += "%.4f"%k + " " - elif a<99 : - if round(k,3) == round(k,0) : - msg += "%.0f"%k + " " - elif round(k,3) == round(k,1) : - msg += "%.1f"%k + " " - elif round(k,3) == round(k,2) : - msg += "%.2f"%k + " " - else : - msg += "%.3f"%k + " " - elif a<999 : - if round(k,2) == round(k,0) : - msg += "%.0f"%k + " " - elif round(k,2) == round(k,1) : - msg += "%.1f"%k + " " - else : - msg += "%.2f"%k + " " - elif a<9999 : - if round(k,1) == round(k,0) : - msg += "%.0f"%k + " " - else : - msg += "%.1f"%k + " " - else : - msg += "%.0f"%k + " " - i += 1 - - msg += "\n" - j -= 1 - - print msg.strip() + " " - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/grid/a2rdmdlXdr.py b/pythonPackages/msaslaps/grid/a2rdmdlXdr.py deleted file mode 100644 index 10070701bf..0000000000 --- a/pythonPackages/msaslaps/grid/a2rdmdlXdr.py +++ /dev/null @@ -1,84 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization.
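# The formatting ladder above emits each value with the fewest decimal places
# that keep roughly five significant digits, using 1e37 for the missing-data
# sentinel. A compact sketch of the same idea, as a reading aid only
# (trim_format is a hypothetical name, not the code that shipped):

import math

def trim_format(k, sig=5):
    a = abs(k)
    if a >= 999998:
        return "1e37"
    if a < 0.00005:
        return "%g" % k
    mag = int(math.floor(math.log10(a))) + 1   # digits left of the point
    prec = max(sig - mag, 0)
    # Drop trailing decimals that do not change the rounded value.
    while prec > 0 and round(k, prec - 1) == round(k, prec):
        prec -= 1
    return "%.*f" % (prec, k)

print(trim_format(12.30001))  # 12.3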
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-15 3598 nabowle Initial modification. Changed to use DataAccessLayer. -# 2016-11-10 5900 bsteffen Correct grid shape, simplify -# - -import a2rdmdlCommon -import argparse -import numpy -import xdrlib -import bz2 -import sys - -def get_args(): - return a2rdmdlCommon.get_parser().parse_args() - -def main(): - user_args = get_args() - - try: - grid, xLen, yLen = a2rdmdlCommon.do_request(user_args) - except Exception as e: - print >> sys.stderr, str(e) - return - - rawData = grid.getRawData() - - msg = str(xLen) + " " + str(yLen) + "\n" - - j = yLen - 1 - mypacker = xdrlib.Packer() - mypacker.reset() - while j>=0 : - i = 0 - while i> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("sfcobs") - req.setParameters("stationId","timeObs","elevation","reportType", - "wx_present","visibility","seaLevelPress","stationPress", - "pressChange3Hour","pressChangeChar","temperature", - "dewpoint","seaSurfaceTemp","wetBulb","windDir", - "windSpeed","equivWindSpeed10m","windGust","precip1Hour", - "precip6Hour","precip24Hour" ) - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "No data available." - return - - # Initialize conversion array for wx. 
- wxstr = [ " ", " ", " ", " ", "FU", "HZ", "DU", "BLSA", "PO", "VCSS", \ - "BR", "BCFG", "MIFG", "VCTS", "VCSH", "VCSH", "VCSH", " ", "SQ", "+FC", \ - "DZ", "RA", "SN", "RA SN", "FZRA", "SHRA", "SHRA SHSN", "SHGR", "FG FZFG", "TS", \ - "SS", "SS", "SS", "+SS", "+SS", "+SS", "DRSN", " ", "BLSN", "+BLSN", \ - "VCFG", "BCFG", "FG FZFG", "FG FZFG", "FG FZFG", "FG FZFG", "FG FZFG", "FG FZFG", "FZFG", "FZFG", \ - "-DZ", "-DZ", "DZ", "DZ", "+DZ", "+DZ", "-FZDZ", "FZDZ", "-DZ -RA", "DZ RA", \ - "-RA", "-RA", "RA", "RA", "+RA", "+RA", "-FZRA", "FZRA", "-RA -SN", "RA SN", \ - "-SN", "-SN", "SN", "SN", "+SN", "+SN", "IC", "SG", "IC", "PE", \ - "-SHRA", "SHRA", "+SHRA", "-SHSN -SHRA", "SHSN SHRA", "-SNSN", "SHSN", "-SHPE", "SHPE", " ", \ - "SHGR", "-RA", "+RA", "-RA -SN -GR", "+RA +SN +GR", "TSRA", "TSPE", "+TSRA", " ", "+TSPE" ] - - msg = "" - for geo in geometries : - lon = geo.getGeometry().x - lat = geo.getGeometry().y - - sName = geo.getString("stationId") - tobs = geo.getNumber("timeObs") - elev = geo.getNumber("elevation") - typ = geo.getNumber("reportType") - wx = geo.getNumber("wx_present") - vis = geo.getNumber("visibility") - msl = geo.getNumber("seaLevelPress") - p = geo.getNumber("stationPress") - pchg = geo.getNumber("pressChange3Hour") - pchr = geo.getNumber("pressChangeChar") - temp = geo.getNumber("temperature") - dpt = geo.getNumber("dewpoint") - th2o = geo.getNumber("seaSurfaceTemp") - tw = geo.getNumber("wetBulb") - dir = geo.getNumber("windDir") - spd = geo.getNumber("windSpeed") - s10 = geo.getNumber("equivWindSpeed10m") - gust = geo.getNumber("windGust") - pr1 = geo.getNumber("precip1Hour") - pr6 = geo.getNumber("precip6Hour") - pr24 = geo.getNumber("precip24Hour") - - msg += sName + "," - msg += str(tobs/1000) + "," - msg += "%.4f"%lat + "," - msg += "%.4f"%lon + "," - msg += "%.0f"%elev + "," - if typ < 1001 or typ > 1007 : - msg += "-32767," - elif typ == 1001 or typ == 1004 or typ == 1005 : - msg += "0," - else : - msg += "1," - if wx < 0 or wx > 99 : - msg += " ," - else : - msg += wxstr[wx] + "," - msg += str(vis) + "," - msg += "%.2f"%msl + "," - msg += "%.2f"%p + "," - msg += "%.0f"%pchg + "," - if pchr <= -9999 : - pchr = -32767 - msg += str(pchr) + " ," - msg += "%.1f"%temp + "," - msg += "%.1f"%dpt + "," - msg += "%.1f"%th2o + "," - msg += "%.1f"%tw + "," - msg += "%.0f"%dir + "," - msg += "%.1f"%spd + "," - msg += "%.1f"%s10 + "," - msg += "%.1f"%gust + "," - msg += "%.2f"%pr1 + "," - msg += "%.2f"%pr6 + "," - msg += "%.2f"%pr24 + "\n" - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/maritime/a2gtboy.csh b/pythonPackages/msaslaps/maritime/a2gtboy.csh deleted file mode 100644 index 0d7e7d9284..0000000000 --- a/pythonPackages/msaslaps/maritime/a2gtboy.csh +++ /dev/null @@ -1,134 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# -# A script wrapper around a UEngine call that is meant to get all available -# maritime data in the A-II database over a specified range of times. -# The data is output to stdout as ASCII. Each line is one time/platform -# combination. The individual data items are comma delimited. -# This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2gtboy.csh {p} {c} yyyy-mm-dd hh:mm yyyy-mm-dd hh:mm -# -# The literal p and c flags are optional. The p flag means preserve -# the final version of the python submitted to the UEngine instead of -# cleaning it up. The path to the finalized python is /tmp/a2gtboyNNNNN.py -# where NNNNN is a unix process id. The c flag means to retreive the -# Laps set of variables, instead of the default MSAS set. -# -# -# Not using the 'c' format, the MSAS set of variables, outputs the following -# variables for each line: -# -# stationId,timeObs,latitude,longitude,elevation,seaLevelPress, -# stationPress,temperature,dewpoint,windDir,windSpeed,pressChange3Hour -# -# Using the 'c' format, the Laps set of variables, outputs the following -# variables for each line: -# -# stationId,timeObs,latitude,longitude,elevation,reportType,wx_present, -# visibility,seaLevelPress,stationPress,pressChange3Hour,pressChangeChar, -# temperature,dewpoint,seaSurfaceTemp,wetBulb,windDir,windSpeed, -# equivWindSpeed10m,windGust,precip1Hour,precip6Hour,precip24Hour -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/22/2014 3591 nabowle Initial modification. Properly call DAF script. -# -# -set rmpy = yes -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' ) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -# -set stubbase = a2gtboyStub.py -if ( "$1" == "c" ) then - shift - set stubbase = a2cvboyStub.py -endif -# -# Locate python stub that we will modify to create the final python logic. -# -if ( -e ./$stubbase ) then - set stubpy = ./$stubbase -else if ( -e $mydir/$stubbase ) then - set stubpy = $mydir/$stubbase -else if ( -e $FXA_HOME/src/dm/maritime/$stubbase ) then - set stubpy = $FXA_HOME/src/dm/maritime/$stubbase -else if ( -e $FXA_HOME/bin/$stubbase ) then - set stubpy = $FXA_HOME/bin/$stubbase -else - bash -c "echo could not find $stubbase 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" -else - # - # Set up the environment we need to run the UEngine. 
- # - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - set specpy = /tmp/a2gtboy${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy - - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | sed -n '2,$p' - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif diff --git a/pythonPackages/msaslaps/maritime/a2gtboyStub.py b/pythonPackages/msaslaps/maritime/a2gtboyStub.py deleted file mode 100644 index 7dbd136ae3..0000000000 --- a/pythonPackages/msaslaps/maritime/a2gtboyStub.py +++ /dev/null @@ -1,114 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available MSAS maritime data in the A-II database over a specified -# range of times. The data is output to stdout as ASCII. Each line is one -# time/platform combination. The individual data items are comma delimited. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/18/2014 3591 nabowle Initial modification. Replace UEngine with DAF. 
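# All of these point-data stubs widen the minute-resolution command-line window
# to a whole-minute TimeRange before querying, exactly as main() does below
# (the start/end strings are example values):

from datetime import datetime
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange

start, end = "2014-09-18 00:00", "2014-09-18 06:00"
beginRange = datetime.strptime(start + ":00.0", "%Y-%m-%d %H:%M:%S.%f")
endRange = datetime.strptime(end + ":59.9", "%Y-%m-%d %H:%M:%S.%f")
timerange = TimeRange(beginRange, endRange)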
-# -# - -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - return parser.parse_args() - - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("sfcobs") - req.setParameters("stationId","timeObs","elevation","seaLevelPress", - "stationPress","temperature","dewpoint","windDir", - "windSpeed","pressChange3Hour" ) - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "No data available." - return - - msg = "" - for geo in geometries : - lon = geo.getGeometry().x - lat = geo.getGeometry().y - - sName = geo.getString("stationId") - tobs = geo.getNumber("timeObs") - elev = geo.getNumber("elevation") - msl = geo.getNumber("seaLevelPress") - p = geo.getNumber("stationPress") - temp = geo.getNumber("temperature") - dpt = geo.getNumber("dewpoint") - dir = geo.getNumber("windDir") - spd = geo.getNumber("windSpeed") - pchg = geo.getNumber("pressChange3Hour") - - - msg += sName + "," - msg += str(tobs/1000) + "," - msg += "%.4f"%lat + "," - msg += "%.4f"%lon + "," - msg += "%.0f"%elev + "," - msg += "%.2f"%msl + "," - msg += "%.2f"%p + "," - msg += "%.1f"%temp + "," - msg += "%.1f"%dpt + "," - msg += "%.0f"%dir + "," - msg += "%.1f"%spd + "," - msg += "%.0f"%pchg + "\n" - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/metar/a2cvmtrStub.py b/pythonPackages/msaslaps/metar/a2cvmtrStub.py deleted file mode 100644 index ea42e598f3..0000000000 --- a/pythonPackages/msaslaps/metar/a2cvmtrStub.py +++ /dev/null @@ -1,210 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available Laps metar data in the A-II database over a specified range -# of times within a specifed area. The data is output to stdout as ASCII. -# Each line is one time/station combination. The individual data items are comma -# delimited. 
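# When the request below includes the multi-dimensional parameters
# (presWeather, skyCover, skyLayerBase), getGeometryData() returns extra
# records carrying only those parameters, and main() buffers them until the
# matching station record arrives. A sketch of that grouping; group_records is
# a hypothetical name and records a pre-fetched list of DAF geometry records:

PRES_PARAMS = set(["presWeather"])
SKY_PARAMS = set(["skyCover", "skyLayerBase"])

def group_records(records):
    wx, cvr, bas = [], [], []
    for rec in records:
        params = set(rec.getParameters())
        if params & PRES_PARAMS:
            wx.append(rec.getString("presWeather"))
        elif params & SKY_PARAMS:
            cvr.append(rec.getString("skyCover"))
            bas.append(rec.getNumber("skyLayerBase"))
        else:
            yield rec, wx, cvr, bas   # station record plus buffered layers
            wx, cvr, bas = [], [], []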
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 09/15/2014   3593       nabowle     Initial modification. Fix index issues on 2D parameters.
-# 09/15/2014   3593       nabowle     Replace UEngine with DAF.
-#
-#
-
-import argparse
-import sys
-
-from datetime import datetime
-from awips.dataaccess import DataAccessLayer
-from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
-
-def get_args():
-    parser = argparse.ArgumentParser(conflict_handler="resolve")
-    parser.add_argument("-h", action="store", dest="host",
-                        help="EDEX server hostname (optional)",
-                        metavar="hostname")
-    parser.add_argument("-b", action="store", dest="start",
-                        help="The start of the time range in YYYY-MM-DD HH:MM",
-                        metavar="start")
-    parser.add_argument("-e", action="store", dest="end",
-                        help="The end of the time range in YYYY-MM-DD HH:MM",
-                        metavar="end")
-    parser.add_argument("--lat-min", action="store", dest="latMin", type=float,
-                        help="Minimum latitude", default=0.0, metavar="lat")
-    parser.add_argument("--lat-max", action="store", dest="latMax", type=float,
-                        help="Maximum latitude", default=90.0, metavar="lat")
-    parser.add_argument("--lon-min", action="store", dest="lonMin", type=float,
-                        help="Minimum longitude", default=-180.0, metavar="lon")
-    parser.add_argument("--lon-max", action="store", dest="lonMax", type=float,
-                        help="Maximum longitude", default=180.0, metavar="lon")
-    return parser.parse_args()
-
-
-def main():
-    # The multi-dimensional parameters.
-    PRES_PARAMS = set(["presWeather"])
-    SKY_PARAMS = set(["skyCover", "skyLayerBase"])
-
-    user_args = get_args()
-
-    if user_args.host:
-        DataAccessLayer.changeEDEXHost(user_args.host)
-
-    start = user_args.start
-    end = user_args.end
-
-    if not start or not end:
-        print >> sys.stderr, "Start or End date not provided"
-        return
-
-    latMin = user_args.latMin
-    latMax = user_args.latMax
-    lonMin = user_args.lonMin
-    lonMax = user_args.lonMax
-
-    beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f")
-    endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f")
-    timerange = TimeRange(beginRange, endRange)
-
-    req = DataAccessLayer.newDataRequest("obs")
-    req.setParameters("stationName","timeObs","wmoId","autoStationType",
-            "elevation","reportType","presWeather","visibility","skyCover",
-            "skyLayerBase","altimeter","seaLevelPress","pressChange3Hour",
-            "pressChangeChar","temperature","tempFromTenths","dewpoint",
-            "dpFromTenths","windDir","windSpeed","windGust","maxTemp24Hour",
-            "minTemp24Hour","precip1Hour","precip3Hour","precip6Hour",
-            "precip24Hour")
-    geometries = DataAccessLayer.getGeometryData(req, timerange)
-
-    if not geometries :
-#       print "No data available."
- return - - msg = "" - wx = [] - cvr = [] - bas = [] - for geoData in geometries: - if set(geoData.getParameters()) & PRES_PARAMS : - wx.append(geoData.getString("presWeather")) - continue - if set(geoData.getParameters()) & SKY_PARAMS : - cvr.append(geoData.getString("skyCover")) - bas.append(geoData.getNumber("skyLayerBase")) - continue - - lon = geoData.getGeometry().x - lat = geoData.getGeometry().y - if lon < lonMin or lon > lonMax or lat < latMin or lat > latMax: - wx = [] - cvr = [] - bas = [] - continue - - sName = geoData.getString("stationName") - tobs = geoData.getNumber("timeObs") - elev = geoData.getNumber("elevation") - ista = geoData.getNumber("wmoId") - atype = geoData.getString("autoStationType") - repTyp = geoData.getString("reportType") - vis = geoData.getNumber("visibility") - alt = geoData.getNumber("altimeter") - msl = geoData.getNumber("seaLevelPress") - pchg = geoData.getNumber("pressChange3Hour") - pchr = geoData.getString("pressChangeChar") - temp = geoData.getNumber("temperature") - t10 = geoData.getNumber("tempFromTenths") - dpt = geoData.getNumber("dewpoint") - td10 = geoData.getNumber("dpFromTenths") - dir = geoData.getNumber("windDir") - spd = geoData.getNumber("windSpeed") - gust = geoData.getNumber("windGust") - tmx = geoData.getNumber("maxTemp24Hour") - tmn = geoData.getNumber("minTemp24Hour") - pr1 = geoData.getNumber("precip1Hour") - pr3 = geoData.getNumber("precip3Hour") - pr6 = geoData.getNumber("precip6Hour") - pr24 = geoData.getNumber("precip24Hour") - - - msg += sName + "," - msg += str(tobs/1000) + "," - msg += "%.4f"%lat + "," - msg += "%.4f"%lon + "," - msg += "%.0f"%elev + "," - if ista < 0 : - msg += "-99," - else : - msg += str(ista) + "," - msg += atype + " ," - msg += repTyp + " ," - msg += wx[0] + " ," - msg += "%.3f"%vis + "," - - - msg += cvr[0]; - kk = 5 - while kk > 0 and cvr[0+kk] == "" : - kk -= 1 - k = 1 - while k <= kk : - msg += "|" + cvr[0+k]; - k += 1 - msg += " ," - msg += "%.1f"%bas[0]; - kk = 5 - while kk > 0 and bas[0+kk] < -9998 : - kk -= 1 - k = 1 - while k <= kk : - msg += "|" + "%.1f"%bas[0+k]; - k += 1 - msg += "," - - msg += "%.2f"%alt + "," - msg += "%.2f"%msl + "," - msg += "%.0f"%pchg + "," - msg += pchr + " ," - msg += "%.1f"%temp + "," - msg += "%.1f"%t10 + "," - msg += "%.1f"%dpt + "," - msg += "%.1f"%td10 + "," - msg += "%.0f"%dir + "," - msg += "%.1f"%spd + "," - msg += "%.1f"%gust + "," - msg += "%.1f"%tmx + "," - msg += "%.1f"%tmn + "," - msg += "%.2f"%pr1 + "," - msg += "%.2f"%pr3 + "," - msg += "%.2f"%pr6 + "," - msg += "%.2f"%pr24 + "\n" - - wx = [] - cvr = [] - bas = [] - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/metar/a2gtmtr.csh b/pythonPackages/msaslaps/metar/a2gtmtr.csh deleted file mode 100755 index 4da9251c60..0000000000 --- a/pythonPackages/msaslaps/metar/a2gtmtr.csh +++ /dev/null @@ -1,181 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# A script wrapper around a UEngine call that is meant to get all available
-# metar data in the A-II database over a specified range of times. The data
-# is output to stdout as ASCII. Each line is one time/station combination.
-# The individual data items are comma delimited. This version can adapt to
-# use a python stub that calls the data access framework.
-#
-# Usage:
-#
-#  a2gtmtr.csh {p} {c} yyyy-mm-dd hh:mm yyyy-mm-dd hh:mm
-#
-# The literal p and c flags are optional. The p flag means preserve
-# the final version of the python submitted to the UEngine instead of
-# cleaning it up. The path to the finalized python is /tmp/a2gtmtrNNNNN.py
-# where NNNNN is a unix process id. The c flag means to retrieve the
-# Laps set of variables, instead of the default MSAS set.
-#
-# Not using the 'c' format, the MSAS set of variables, outputs the following
-# variables for each line:
-#
-#  stationName,timeObs,latitude,longitude,elevation,wmoId,autoStationType
-#  seaLevelPress,temperature,dewpoint,windDir,windSpeed,altimeter
-#
-# Using the 'c' format, the Laps set of variables, outputs the following
-# variables for each line:
-#
-#  stationName,timeObs,latitude,longitude,elevation,wmoId,autoStationType
-#  reportType,presWeather,visibility,skyCover,skyLayerBase,altimeter
-#  seaLevelPress,pressChange3Hour,pressChangeChar,temperature,tempFromTenths
-#  dewpoint,dpFromTenths,windDir,windSpeed,windGust,maxTemp24Hour,
-#  minTemp24Hour,precip1Hour,precip3Hour,precip6Hour,precip24Hour
-#
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 09/15/2014   3593       nabowle     Initial modification to properly use DAF version of the script.
-#
-#
-
-set rmpy = yes
-if ( "$1" == "p" ) then
-    set rmpy = no
-    shift
-endif
-#
-# Identify directory this script is in, will be one of the directories we
-# search for other files in.
-#
-set mydir = `dirname $0`
-set d1 = `echo $mydir | cut -c1`
-if ( "$mydir" == '.' ) then
-     set mydir = $PWD
-else if ( "$d1" != "/" ) then
-     set mydir = $PWD/$mydir
-endif
-set mydir = `(cd $mydir ; pwd)`
-if ( ! $?FXA_HOME ) set FXA_HOME = xxxx
-if ( ! $?FXA_LOCAL_SITE ) set FXA_LOCAL_SITE = xxxx
-if ( ! $?FXA_INGEST_SITE ) set FXA_INGEST_SITE = $FXA_LOCAL_SITE
-#
-set stubbase = a2gtmtrStub.py
-if ( "$1" == "c" ) then
-    shift
-    set stubbase = a2cvmtrStub.py
-endif
-#
-# Locate python stub that we will modify to create the final python logic.
-#
-if ( -e ./$stubbase ) then
-    set stubpy = ./$stubbase
-else if ( -e $mydir/$stubbase ) then
-    set stubpy = $mydir/$stubbase
-else if ( -e $FXA_HOME/src/dm/metar/$stubbase ) then
-    set stubpy = $FXA_HOME/src/dm/metar/$stubbase
-else if ( -e $FXA_HOME/bin/$stubbase ) then
-    set stubpy = $FXA_HOME/bin/$stubbase
-else
-    bash -c "echo could not find $stubbase 1>&2"
-    exit
-endif
-
-#
-# Attempt to use current D-2D localization to determine lat/lon bounds.
-#
-set ltmn = 0
-set ltmx = 90
-set lnmn = -180
-set lnmx = 180
-set locDir = $FXA_HOME/data/localizationDataSets/$FXA_INGEST_SITE
-while ( -e $locDir/CenterPoint.dat )
-    grep conusScale $locDir/scaleInfo.txt >& /dev/null
-    if ( $status == 0 ) then
-        set ltmn = 15
-        set ltmx = 60
-        set lnmn = -145
-        set lnmx = -55
-        break
-    endif
-    set cenLoc = `cat $locDir/CenterPoint.dat`
-    if ( $#cenLoc != 2 ) break
-    set cenlat = `echo $cenLoc[1] | cut '-d.' -f1`
-    set cenlat = `( @ x = $cenlat + 0 >& /dev/null ; echo $x )`
-    if ( "$cenlat" == "" ) break
-    set cenlon = `echo $cenLoc[2] | cut '-d.' -f1`
-    set cenlon = `( @ x = $cenlon + 0 >& /dev/null ; echo $x )`
-    if ( "$cenlon" == "" ) break
-    if ( $cenlat > 75 ) then
-        set ltmn = 55
-        break
-    endif
-    if ( $cenlat > 50 ) then
-        @ ltmn = $cenlat - 20
-        break
-    endif
-    @ ltmn = $cenlat - 20
-    @ ltmx = $cenlat + 20
-    @ lnmn = $cenlon - 20
-    @ lnmx = $cenlon + 20
-    break
-end
-#
-#
-# Determine if we are using the data access framework or the uEngine.
-#
-grep DataAccessLayer $stubpy >& /dev/null
-if ( $status == 0 ) then
-    /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" --lat-min $ltmn --lat-max $ltmx --lon-min $lnmn --lon-max $lnmx
-else
-    #
-    # Set up the environment we need to run the UEngine.
-    #
-    set method = "uengine"
-    if ( -e ./UEngine.cshsrc ) then
-        set ueenv = ./UEngine.cshsrc
-    else if ( -e $mydir/UEngine.cshsrc ) then
-        set ueenv = $mydir/UEngine.cshsrc
-    else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then
-        set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc
-    else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then
-        set ueenv = $FXA_HOME/bin/UEngine.cshsrc
-    else
-        bash -c "echo could not find UEngine.cshsrc 1>&2"
-        exit
-    endif
-    source $ueenv
-
-    set specpy = /tmp/a2gtmtr${$}.py
-    rm -rf $specpy >& /dev/null
-    touch $specpy
-    chmod 775 $specpy
-    cat $stubpy | sed "s/LTMN/$ltmn/g" | sed "s/LTMX/$ltmx/g" | \
-        sed "s/LNMN/$lnmn/g" | sed "s/LNMX/$lnmx/g" | \
-        sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy
-    cd $UE_BIN_PATH
-    ( uengine -r python < $specpy ) | grep -v '<' | sed -n '3,$p'
-    if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null
-endif
-#
diff --git a/pythonPackages/msaslaps/metar/a2gtmtrStub.py b/pythonPackages/msaslaps/metar/a2gtmtrStub.py
deleted file mode 100644
index 894808e992..0000000000
--- a/pythonPackages/msaslaps/metar/a2gtmtrStub.py
+++ /dev/null
@@ -1,132 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-# Gets all available metar data in the A-II database over a specified range of
-# times within a specified area. The data is output to stdout as ASCII.
-# Each line is one time/station combination. The individual data items are comma
-# delimited.
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 09/15/2014   3593       nabowle     Initial modification. Fix losing first record.
-# 09/15/2014 3593 nabowle Replace UEngine with DAF. -# -# - -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - parser.add_argument("--lat-min", action="store", dest="latMin", type=float, - help="Minimum latitude", default=0.0, metavar="lat") - parser.add_argument("--lat-max", action="store", dest="latMax", type=float, - help="Maximum latitude", default=90.0, metavar="lat") - parser.add_argument("--lon-min", action="store", dest="lonMin", type=float, - help="Minimum longitude", default=-180.0, metavar="lon") - parser.add_argument("--lon-max", action="store", dest="lonMax", type=float, - help="Maximum longitude", default=180.0, metavar="lon") - return parser.parse_args() - - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - latMin = user_args.latMin - latMax = user_args.latMax - lonMin = user_args.lonMin - lonMax = user_args.lonMax - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("obs") - req.setParameters("stationName","timeObs","wmoId","autoStationType", - "elevation","seaLevelPress","temperature","dewpoint", - "windDir","windSpeed","altimeter" ) - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "No data available." - return - - msg = "" - for geo in geometries : - lon = geo.getGeometry().x - lat = geo.getGeometry().y - if lon < lonMin or lon > lonMax or lat < latMin or lat > latMax: - continue - - sName = geo.getString("stationName") - tobs = geo.getNumber("timeObs") - elev = geo.getNumber("elevation") - ista = geo.getString("wmoId") - atype = geo.getString("autoStationType") - msl = geo.getNumber("seaLevelPress") - temp = geo.getNumber("temperature") - dpt = geo.getNumber("dewpoint") - dir = geo.getNumber("windDir") - spd = geo.getNumber("windSpeed") - alt = geo.getNumber("altimeter") - - msg += sName + "," - msg += str(tobs/1000) + "," - msg += "%.4f"%lat + "," - msg += "%.4f"%lon + "," - msg += "%.0f"%elev + "," - msg += str(ista) + "," - msg += atype + " ," - msg += "%.2f"%msl + "," - msg += "%.1f"%temp + "," - msg += "%.1f"%dpt + "," - msg += "%.0f"%dir + "," - msg += "%.1f"%spd + "," - msg += "%.2f"%alt + "\n" - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/profiler/a2gtprof.csh b/pythonPackages/msaslaps/profiler/a2gtprof.csh deleted file mode 100644 index 855eb64f45..0000000000 --- a/pythonPackages/msaslaps/profiler/a2gtprof.csh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. 
-# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# A script wrapper around a UEngine call that is meant to get all available -# profiler data in the A-II database over a specified range of times. The -# data is output to stdout as ASCII. Each line is one time/station combination. -# The individual data variables are comma delimited, and when what is returned -# for a data item is a profile, each item in the profile is vertical bar -# delimited. This version can adapt to use a python stub that calls the -# data access framework. -# -# Usage: -# -# a2gtprof.csh {p} yyyy-mm-dd hh:mm yyyy-mm-dd hh:mm -# -# The literal p flag is optional. The p flag means preserve -# the final version of the python submitted to the UEngine instead of -# cleaning it up. The path to the finalized python is /tmp/a2gtprofNNNNN.py -# where NNNNN is a unix process id. -# -# The following data variables are output for each line: -# -# profilerId,validTime,latitude,longitude,elevation,pressure, -# temperature,relHumidity,windDirSfc,windSpeedSfc,rainRate,submode, -# height,levelMode,uComponent,vComponent,wComponent, -# peakPower,HorizSpStdDev,VertSpStdDev,uvQualityCode,consensusNum -# -# Everything from height onward are profiles. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 6, 2014 3594 nabowle Initial modification. Handle DAF version. -# -# -set rmpy = yes -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' ) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -if ( ! $?FXA_LOCAL_SITE ) set FXA_LOCAL_SITE = xxxx -if ( ! $?FXA_INGEST_SITE ) set FXA_INGEST_SITE = $FXA_LOCAL_SITE -# -# Locate python stub that we will modify to create the final python logic. -# -set stubbase = a2gtprofStub.py -if ( -e ./$stubbase ) then - set stubpy = ./$stubbase -else if ( -e $mydir/$stubbase ) then - set stubpy = $mydir/$stubbase -else if ( -e $FXA_HOME/src/dm/profiler/$stubbase ) then - set stubpy = $FXA_HOME/src/dm/profiler/$stubbase -else if ( -e $FXA_HOME/bin/$stubbase ) then - set stubpy = $FXA_HOME/bin/$stubbase -else - bash -c "echo could not find $stubbase 1>&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" -else - # - # Set up the environment we need to run the UEngine. 
- # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - set specpy = /tmp/a2gtprof${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy - - cd $UE_BIN_PATH - #uengine -r python < $specpy - ( uengine -r python < $specpy ) | grep -v '<' | sed -n '2,$p' - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/profiler/a2gtprofStub.py b/pythonPackages/msaslaps/profiler/a2gtprofStub.py deleted file mode 100644 index 14b2121a4a..0000000000 --- a/pythonPackages/msaslaps/profiler/a2gtprofStub.py +++ /dev/null @@ -1,184 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available profiler data in the A-II database over a specified range -# of times. The data is output to stdout as ASCII. Each line is one -# time/station combination. The individual data variables are comma delimited, -# and when what is returned for a data item is a profile, each item in the -# profile is vertical bar delimited -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 6, 2014 3594 nabowle Initial modification. Replace UEngine with DAF. -# -# - -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - return parser.parse_args() - -def main(): - # The multi-dimensional parameters. 
- MULTI_DIM_PARAMS = set(['vComponent', 'uComponent', 'peakPower', - 'levelMode', 'uvQualityCode', 'consensusNum', - 'HorizSpStdDev', 'wComponent', 'height', - 'VertSpStdDev']) - - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("profiler") - req.setParameters('numProfLvls', 'elevation', 'windDirSfc', 'validTime', - 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', - 'profilerId', 'rainRate', 'temperature') - req.getParameters().extend(MULTI_DIM_PARAMS) - - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "couldn't get data" - return - - - subgeos = [] - msg = "" - for geoData in geometries : - if set(geoData.getParameters()) & MULTI_DIM_PARAMS : - subgeos.append(geoData) - continue - - elev = geoData.getNumber("elevation") - msg += geoData.getString("profilerId") + "," - msg += str(geoData.getNumber("validTime")/1000) + "," - msg += "%.4f"%geoData.getGeometry().y + "," - msg += "%.4f"%geoData.getGeometry().x + "," - msg += "%.0f"%elev + "," - msg += "%.1f"%geoData.getNumber("pressure") + "," - msg += "%.1f"%geoData.getNumber("temperature") + "," - msg += "%.1f"%geoData.getNumber("relHumidity") + "," - msg += "%.0f"%geoData.getNumber("windDirSfc") + "," - msg += "%.1f"%geoData.getNumber("windSpeedSfc") + "," - msg += "%.1f"%geoData.getNumber("rainRate") + "," - msg += str(geoData.getNumber("submode")) + "," - - kk = len(subgeos) -# msg += str(kk) + "," - - subgeos[0].getString("consensusNum") - if kk>0 : msg += "%.0f"%(subgeos[0].getNumber("height")-elev) - k = 1 - while k < kk : - msg += "|" + "%.0f"%(subgeos[k].getNumber("height")-elev) - k += 1 - msg += "," - if kk>0 : msg += str(subgeos[0].getNumber("levelMode")) - k = 1 - while k < kk : - msg += "|" + str(subgeos[k].getNumber("levelMode")) - k += 1 - msg += "," - - if kk>0 : msg += "%.1f"%subgeos[0].getNumber("uComponent") - k = 1 - while k < kk : - msg += "|" + "%.1f"%subgeos[k].getNumber("uComponent") - k += 1 - msg += "," - if kk>0 : msg += "%.1f"%subgeos[0].getNumber("vComponent") - k = 1 - while k < kk : - msg += "|" + "%.1f"%subgeos[k].getNumber("vComponent") - k += 1 - msg += "," - if kk>0 : msg += "%.2f"%subgeos[0].getNumber("wComponent") - k = 1 - while k < kk : - msg += "|" + "%.2f"%subgeos[k].getNumber("wComponent") - k += 1 - msg += "," - - if kk>0 : msg += "%.1f"%subgeos[0].getNumber("peakPower") - k = 1 - while k < kk : - msg += "|" + "%.1f"%subgeos[k].getNumber("peakPower") - k += 1 - msg += "," - if kk>0 : msg += "%.1f"%subgeos[0].getNumber("HorizSpStdDev") - k = 1 - while k < kk : - msg += "|" + "%.1f"%subgeos[k].getNumber("HorizSpStdDev") - k += 1 - msg += "," - if kk>0 : msg += "%.1f"%subgeos[0].getNumber("VertSpStdDev") - k = 1 - while k < kk : - msg += "|" + "%.1f"%subgeos[k].getNumber("VertSpStdDev") - k += 1 - msg += "," - - if kk>0 : msg += subgeos[0].getString("uvQualityCode") - k = 1 - while k < kk : - msg += "|" + subgeos[k].getString("uvQualityCode") - k += 1 - msg += "," - if kk>0 : msg += subgeos[0].getString("consensusNum") - k = 1 - while k < kk : - msg += "|" + subgeos[k].getString("consensusNum") - k += 1 - msg += "\n" - subgeos = [] - 
- print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/radar/a2advrad.csh b/pythonPackages/msaslaps/radar/a2advrad.csh deleted file mode 100644 index f9f0bfc93d..0000000000 --- a/pythonPackages/msaslaps/radar/a2advrad.csh +++ /dev/null @@ -1,232 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# A script wrapper that is meant to get data for a single radar product -# from the A-II database. The result is output to stdout as ASCII. -# The first line has the dimensions of the data, the volume scan time, -# 'radial' or 'raster', elevation number, a product description, the tilt -# angle or layer, and the VCP. The second line contains the level labels, -# and the third line has a partial list of the product dependent parameters. -# If the product is radial, the fourth line contains a list of the -# radial angles. Otherwise each line after that has data for one -# radial/row, as a sequence of space delimited plain decimal integers. -# -# The product description includes the mnemonic, the resolution, and -# the bit depth. If the azimuthal resolution is finer than 1 degree, -# the product description will additionally include a note of that. -# The product dependent parameters as defined in an 88D RPG product -# are 28 signed two byte integers. The list here includes those -# with indices from 17 to 26 (C indexing). -# -# Note that for now, this is only designed to return data for image -# products. -# -# Usage: -# -# a2advrad.csh {p} {x} radar msgcode {elev} date time {slop} -# -# p - A literal p. (optional) -# x - A literal x. (optional) Expanded set of header information. -# radar - four character radar id -# msgcode - RPG message code -# elev - tilt angle/layer value. defaults to 0. -# date - yyyy-mm-dd -# time - hh:mm -# slop - seconds of slop either side, defaults to 60 -# -# The tilt angles specified are primary tilt angles for a range of tilts. -# -# The literal p option means preserve the final version of the python -# submitted to the UEngine instead of cleaning it up. The path to the -# finalized python is /tmp/a2advradNNNNN.py where NNNNN is a unix process id. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/07/2014 3393 nabowle Initial modification. Passes parameters straight to -# non-uengine script instead of sed. Remove use of gtasUtil -# if not using the uengine stub. -# -# -set rmpy = yes -set fff = "" -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -if ( "$1" == "x" ) then - set fff = "x" - shift -endif -if ( "$1" == "p" ) then - set rmpy = no - shift -endif -# -# Identify directory this script is in, will be one of the directories we -# search for other files in. -# -set mydir = `dirname $0` -set d1 = `echo $mydir | cut -c1` -if ( "$mydir" == '.' 
) then - set mydir = $PWD -else if ( "$d1" != "/" ) then - set mydir = $PWD/$mydir -endif -set mydir = `(cd $mydir ; pwd)` -if ( ! $?FXA_HOME ) set FXA_HOME = xxxx -set fxa_home = $FXA_HOME -if ( $?STAGING ) then - set fxa_home = $STAGING/D-2D -endif - -# -# Locate python stub that we will modify to create the final UEngine script -# -if ( -e ./a2advradStub.py ) then - set stubpy = ./a2advradStub.py -else if ( -e $mydir/a2advradStub.py ) then - set stubpy = $mydir/a2advradStub.py -else if ( -e $fxa_home/src/dm/radar/a2advradStub.py ) then - set stubpy = $fxa_home/src/dm/radar/a2advradStub.py -else if ( -e $FXA_HOME/bin/a2advradStub.py ) then - set stubpy = $FXA_HOME/bin/a2advradStub.py -else - bash -c "echo could not find a2advradStub.py 1>&2" - exit -endif - -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set method = "daf" -else - # - # Set up the environment we need to run the UEngine. - # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv -endif - -# -# Locate file that lets us provide a description of the data set. -# -if ( -e ./msgCodeSeds.txt ) then - set mctrans = $PWD/msgCodeSeds.txt -else if ( -e $mydir/msgCodeSeds.txt ) then - set mctrans = $mydir/msgCodeSeds.txt -else if ( -e $fxa_home/src/dm/radar/msgCodeSeds.txt ) then - set mctrans = $fxa_home/src/dm/radar/msgCodeSeds.txt -else if ( -e $FXA_HOME/data/msgCodeSeds.txt ) then - set mctrans = $FXA_HOME/data/msgCodeSeds.txt -else - bash -c "echo could not find msgCodeSeds.txt 1>&2" - exit -endif -# -# -set rrr = $1 -set mmm = $2 -shift -shift -set ddd = `echo $mmm | sed -f $mctrans | cut '-d|' -f2 ` - -set eee = `echo $1 | grep -v '.*-'` -if ( "$eee" != "" ) shift -set slop = `echo $3 | grep '[0-9]'` -if ( "$slop" == "" ) set slop = 60 - - -if ( "$method" == "daf" ) then - set datetime = $1' '$2 - set opts = "" - - if ( "$eee" != "" ) then - set opts = "$opts --angle $eee" - endif - - if ( "$fff" == "x" ) then - set opts = "$opts --extended" - endif - - /awips2/python/bin/python $stubpy --radar $rrr --code $mmm --datetime="${datetime}" --slop $slop --description="${ddd}" $opts -else - # - # Get program that can do math with ascii time string, then use this to - # properly encode range of times for which we look for data. - # - if ( -x ./gtasUtil ) then - set gtasUtil = ./gtasUtil - else if ( -x $mydir/gtasUtil ) then - set gtasUtil = $mydir/gtasUtil - else if ( -x $fxa_home/src/dm/point/gtasUtil ) then - set gtasUtil = $fxa_home/src/dm/point/gtasUtil - else if ( -x $FXA_HOME/bin/gtasUtil ) then - set gtasUtil = $FXA_HOME/bin/gtasUtil - else - bash -c "echo could not find gtasUtil executable 1>&2" - exit - endif - - set aaa = `$gtasUtil = $1 $2 -$slop` - set bbb = `$gtasUtil = $1 $2 $slop` - - # - # Modify the text of special tags in stub to create finalized script. 
-    #
-    set specpy = /tmp/a2advrad${$}.py
-    rm -rf $specpy >& /dev/null
-    touch $specpy
-    chmod 775 $specpy
-    if ( "$eee" == "" ) then
-        cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$mmm/g" | \
-            sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | sed "s/FFF/$fff/g" | \
-            sed "s/DDDDD/$ddd/g" | sed 's/^.*EEEE.*$//g' >> $specpy
-    else
-        cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$mmm/g" | \
-            sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | sed "s/FFF/$fff/g" | \
-            sed "s/DDDDD/$ddd/g" | sed "s/EEEE/$eee/g" >> $specpy
-    endif
-    #
-    # Submit the temporary python script stripping xml stuff, then remove it
-    #
-    cd $UE_BIN_PATH
-    ( uengine -r python < $specpy ) | grep -v '<' | sed 's/&gt;/>/g' | \
-        sed 's/&lt;/</g'
-
-    if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null
-endif
-#
diff --git a/pythonPackages/msaslaps/radar/a2advradStub.py b/pythonPackages/msaslaps/radar/a2advradStub.py
deleted file mode 100644
index dddfadffd3..0000000000
--- a/pythonPackages/msaslaps/radar/a2advradStub.py
+++ /dev/null
@@ -1,129 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Gets data for a single radar product from the A-II database. The result is
-# output to stdout as ASCII. This uses a data-specific Request/Response instead
-# of the DataAccessLayer in order to preserve data-genericness of the interface.
-#
-#
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 08/13/2014   3393       nabowle     Initial modification. Replaces UEngine
-#                                     with a custom Request/Response.
-#
-#
-
-import argparse
-import a2radcommon
-import sys
-
-def get_args():
-    parser = a2radcommon.get_args_parser()
-    parser.add_argument("--extended", action='store_true', default=False,
-                        dest="extended", help="Output the extended header.")
-    return parser.parse_args()
-
-
-def main():
-    user_args = get_args()
-
-    records = a2radcommon.send_request(user_args)
-
-    if not records:
-        # print "Data not available"
-        return
-
-    description = user_args.description
-    if not description:
-        print >> sys.stderr, "Description not provided"
-        return
-
-    format = user_args.extended
-
-    print_record(records[0], format, description)
-
-
-def print_record(record, format, description):
-    idra = record.getHdf5Data()
-
-    rdat,azdat,depVals,threshVals = a2radcommon.get_hdf5_data(idra)
-
-    if not rdat:
-        # Graphic, XY
-        # print "Unsupported radar format"
-        return
-
-    dim = rdat.getDimension()
-    if dim != 2:
-        # print "Data not available"
-        return
-
-    yLen = rdat.getSizes()[0]
-    xLen = rdat.getSizes()[1]
-
-    # byte[] -- the raw data
-    array = rdat.getByteData()
-    arraySize = len(array)
-    if xLen * yLen != arraySize:
-        # print "Data not available"
-        return
-
-    # get data for azimuth angles if we have them.
- if azdat : - azVals = azdat.getFloatData() - azValsLen = len(azVals) - if yLen != azValsLen: - # print "Data not available" - return - - msg = a2radcommon.get_header(record, format, xLen, yLen, azdat, description) - - msg += a2radcommon.encode_thresh_vals(threshVals) - msg += a2radcommon.encode_dep_vals(depVals) - if azdat : - msg += a2radcommon.encode_radial(azVals) - msg += encode_data(yLen, xLen, array) - - print msg - - -def encode_data(yLen, xLen, array): - nxy = yLen*xLen - j = 0 - msg = "" - while j&2" - exit -endif -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set method = "daf" -else - # - # Set up the environment we need to run the UEngine. - # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv -endif -# -# Locate file that lets us provide a description of the data set. -# -if ( -e ./msgCodeSeds.txt ) then - set mctrans = $PWD/msgCodeSeds.txt -else if ( -e $mydir/msgCodeSeds.txt ) then - set mctrans = $mydir/msgCodeSeds.txt -else if ( -e $fxa_home/src/dm/radar/msgCodeSeds.txt ) then - set mctrans = $fxa_home/src/dm/radar/msgCodeSeds.txt -else if ( -e $FXA_HOME/data/msgCodeSeds.txt ) then - set mctrans = $FXA_HOME/data/msgCodeSeds.txt -else - bash -c "echo could not find msgCodeSeds.txt 1>&2" - exit -endif -# -# -set rrr = $1 -set mmm = $2 -shift -shift -set ddd = `echo $mmm | sed -f $mctrans | cut '-d|' -f2 ` - -set eee = `echo $1 | grep -v '.*-'` -if ( "$eee" != "" ) shift -set slop = `echo $3 | grep '[0-9]'` -if ( "$slop" == "" ) set slop = 60 - -if ( "$method" == "daf" ) then - set datetime = $1' '$2 - set opts = "" - - if ( "$eee" != "" ) then - set opts = "$opts --angle $eee" - endif - - if ( "$fff" == "x" ) then - set opts = "$opts --extended" - endif - if ( "$encoding" == "1" ) then - set opts = "$opts --hex" - else if ( "$encoding" == "0" ) then - set opts = "$opts --int" - endif - - /awips2/python/bin/python $stubpy --radar $rrr --code $mmm --datetime="${datetime}" --slop $slop --description="${ddd}" $opts -else - # - # Get program that can do math with ascii time string, then use this to - # properly encode range of times for which we look for data. - # - if ( -x ./gtasUtil ) then - set gtasUtil = ./gtasUtil - else if ( -x $mydir/gtasUtil ) then - set gtasUtil = $mydir/gtasUtil - else if ( -x $fxa_home/src/dm/point/gtasUtil ) then - set gtasUtil = $fxa_home/src/dm/point/gtasUtil - else if ( -x $FXA_HOME/bin/gtasUtil ) then - set gtasUtil = $FXA_HOME/bin/gtasUtil - else - bash -c "echo could not find gtasUtil executable 1>&2" - exit - endif - - set aaa = `$gtasUtil = $1 $2 -$slop` - set bbb = `$gtasUtil = $1 $2 $slop` - # - # Modify the text of special tags in stub to create finalized script. 
-    #
-    set specpy = /tmp/a2gtrad${$}.py
-    rm -rf $specpy >& /dev/null
-    touch $specpy
-    chmod 775 $specpy
-    if ( "$eee" == "" ) then
-        cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$mmm/g" | \
-            sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | sed "s/FFF/$fff/g" | \
-            sed "s/DDDDD/$ddd/g" | sed 's/^.*EEEE.*$//g' | \
-            sed "s/XXXXX/$encoding/g" >> $specpy
-    else
-        cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$mmm/g" | \
-            sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | sed "s/FFF/$fff/g" | \
-            sed "s/DDDDD/$ddd/g" | sed "s/EEEE/$eee/g" | \
-            sed "s/XXXXX/$encoding/g" >> $specpy
-    endif
-    #
-    # Submit the temporary python script stripping xml stuff, then remove it
-    #
-
-    cd $UE_BIN_PATH
-    ( uengine -r python < $specpy ) | grep -v '<' | sed 's/&gt;/>/g' | \
-        sed 's/&lt;/</g'
-
-    if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null
-endif
-
-#
diff --git a/pythonPackages/msaslaps/radar/a2gtradStub.py b/pythonPackages/msaslaps/radar/a2gtradStub.py
deleted file mode 100644
index 4c2c32f867..0000000000
--- a/pythonPackages/msaslaps/radar/a2gtradStub.py
+++ /dev/null
@@ -1,168 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Gets data for a single radar product from the A-II database. The result is
-# output to stdout as ASCII. This uses a data-specific Request/Response instead
-# of the DataAccessLayer in order to preserve data-genericness of the interface.
-#
-#
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 08/11/2014   3393       nabowle     Initial modification. Replaces UEngine
-#                                     with a custom Request/Response.
-# 12/16/2014   3393       nabowle     Fix negative k values.
-#
-#
-
-import argparse
-import a2radcommon
-import sys
-
-def get_args():
-    parser = a2radcommon.get_args_parser()
-    parser.add_argument("--hex", action='store_const', dest="encoding",
-                        const=1, help="Hex encoding.", metavar="encoding")
-    parser.add_argument("--int", action='store_const', dest="encoding",
-                        const=0, help="Delimited integer encoding.",
-                        metavar="encoding")
-    parser.add_argument("--extended", action='store_true', default=False,
-                        dest="extended", help="Output the extended header.")
-    return parser.parse_args()
-
-
-def main():
-    user_args = get_args()
-
-    records = a2radcommon.send_request(user_args)
-
-    if not records:
-        # print "Data not available"
-        return
-
-    description = user_args.description
-    if not description:
-        print >> sys.stderr, "Description not provided"
-        return
-
-    format = user_args.extended
-    encoding = user_args.encoding
-
-    print_record(records[0], format, description, encoding)
-
-
-def print_record(record, format, description, encoding):
-
-    idra = record.getHdf5Data()
-
-    rdat,azdat,depVals,threshVals = a2radcommon.get_hdf5_data(idra)
-
-    if not rdat:
-        # Graphic, XY
-        # print "Unsupported radar format"
-        return
-
-    dim = rdat.getDimension()
-    if dim != 2:
-        # print "Data not available"
-        return
-
-    yLen = rdat.getSizes()[0]
-    xLen = rdat.getSizes()[1]
-
-    # byte[] -- the raw data
-    array = rdat.getByteData()
-    arraySize = len(array)
-    if xLen * yLen != arraySize:
-        # print "Data not available"
-        return
-
-    # get data for azimuth angles if we have them.
-    if azdat :
-        azVals = azdat.getFloatData()
-        azValsLen = len(azVals)
-        if yLen != azValsLen:
-            # print "Data not available"
-            return
-
-    msg = a2radcommon.get_header(record, format, xLen, yLen, azdat, description)
-
-    msg += a2radcommon.encode_thresh_vals(threshVals)
-    msg += a2radcommon.encode_dep_vals(depVals)
-    if azdat :
-        msg += a2radcommon.encode_radial(azVals)
-    msg += encode_data(yLen, xLen, array, encoding)
-
-    print msg.strip()
-
-
-def encode_data(yLen, xLen, array, encoding):
-    plus = " ghijklmnopqrstuvwxyz"
-    minus = " GHIJKLMNOPQRSTUVWXYZ"
-    nxy = yLen*xLen
-    j = 0
-    msg = ""
-    while j<nxy :
-        i = 0
-        kk = 0
-        while i<xLen :
-            k = array[i+j]
-            if k<0 : k += 256
-            if encoding == 1 :
-                msg += "%2.2x"%k
-            elif encoding == 0 :
-                msg += str(k) + " "
-            elif k==kk :
-                msg += "."
-            elif k>kk+20 or k<kk-20 :
-                msg += str(k) + "|"
-            elif k>kk :
-                msg += plus[k-kk]
-            else :
-                msg += minus[kk-k]
-            kk = k
-            i += 1
-        msg += "\n"
-        j += xLen
-    return msg
-
-
-if __name__ == '__main__':
-    main()
-
diff --git a/pythonPackages/msaslaps/radar/a2gtradStubAll.py b/pythonPackages/msaslaps/radar/a2gtradStubAll.py
deleted file mode 100644
index 69909efe6c..0000000000
--- a/pythonPackages/msaslaps/radar/a2gtradStubAll.py
+++ /dev/null
@@ -1,268 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Gets data for a single radar product from the A-II database. The result is
-# output to stdout as ASCII. This uses a data-specific Request/Response instead
-# of the DataAccessLayer in order to preserve data-genericness of the interface.
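The encode_data above packs each radial/row of bytes into printable text: "." repeats the previous value, a lowercase letter adds its index in the plus table (+1 to +20), an uppercase letter subtracts via the minus table, and any jump larger than 20 restarts with a literal integer terminated by "|". A sketch of a matching decoder for that delta mode, assuming the reconstruction of encode_data above; it ignores the --hex and --int forms, and the example row is invented:

    # Decoder sketch for the printable delta encoding written by encode_data.
    def decode_row(row):
        plus = " ghijklmnopqrstuvwxyz"   # index 1..20 encodes +1..+20
        minus = " GHIJKLMNOPQRSTUVWXYZ"  # index 1..20 encodes -1..-20
        vals = []
        kk = 0
        i = 0
        while i < len(row):
            c = row[i]
            if c == '.':                 # repeat the previous value
                vals.append(kk)
            elif c in plus[1:]:          # small positive delta
                kk += plus.index(c)
                vals.append(kk)
            elif c in minus[1:]:         # small negative delta
                kk -= minus.index(c)
                vals.append(kk)
            else:                        # literal value terminated by '|'
                bar = row.index('|', i)
                kk = int(row[i:bar])
                vals.append(kk)
                i = bar
            i += 1
        return vals

    # "120|k.U240|" decodes to [120, 125, 125, 110, 240]
    print(decode_row("120|k.U240|"))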
-# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/11/2014 3393 nabowle Initial modification. Replaces UEngine -# with a custom Request/Response. -# -# - -import argparse -import a2radcommon -import sys - -def get_args(): - parser = a2radcommon.get_args_parser() - return parser.parse_args() - - -def main(): - user_args = get_args() - - records = a2radcommon.send_request(user_args) - - if not records: - # print "Data not available" - return - - description = user_args.description - if not description: - print >> sys.stderr, "Description not provided" - return - - print_record(records[0], description) - - -def print_record(record, description): - - idra = record.getHdf5Data() - - rdat,azdat,depVals,prodVals,recVals,stormVals,symVals,symData,threshVals = get_hdf5_data(idra) - - if not rdat: - # Graphic, XY - # print "Unsupported radar format" - return - - dim = rdat.getDimension() - if dim != 2: - # print "Data not available" - return - - yLen = rdat.getSizes()[0] - xLen = rdat.getSizes()[1] - - # byte[] -- the raw data - array = rdat.getByteData() - arraySize = len(array) - if xLen * yLen != arraySize: - # print "Data not available" - return - - # get data for azimuth angles if we have them. - if azdat : - azVals = azdat.getFloatData() - azValsLen = len(azVals) - if yLen != azValsLen: - # print "Data not available" - return - - msg = get_header(record, xLen, yLen, azdat, description) - msg += encode_dep_vals(depVals) - msg += encode_prod_vals(prodVals) - msg += encode_rec_vals(recVals) - msg += encode_storm_vals(stormVals) - msg += encode_sym_vals(symVals) - msg += encode_sym_data(symData) - msg += encode_thresh_vals(threshVals) - - if azdat : - msg += a2radcommon.encode_radial(azVals) - - msg += encode_data(yLen, xLen, array) - - print msg - - -def get_hdf5_data(idra): - rdat = [] - azdat = [] - depVals = [] - prodVals = [] - recVals = [] - stormVals = [] - symVals = [] - symData = [] - threshVals = [] - if len(idra) > 0: - for ii in range(len(idra)): - if idra[ii].getName() == "Data": - rdat = idra[ii] - elif idra[ii].getName() == "Angles": - azdat = idra[ii] - dattyp = "radial" - elif idra[ii].getName() == "DependentValues": - depVals = idra[ii].getShortData() - elif idra[ii].getName() == "ProductVals": - prodVals = idra[ii].getByteData() - elif idra[ii].getName() == "RecordVals": - recVals = idra[ii].getByteData() - elif idra[ii].getName() == "StormIds": - stormVals = idra[ii].getByteData() - elif idra[ii].getName() == "Symbology": - symVals = idra[ii].getByteData() - elif idra[ii].getName() == "SymbologyData": - symData = idra[ii].getByteData() - elif idra[ii].getName() == "Thresholds": - threshVals = idra[ii].getShortData() - - return rdat,azdat,depVals,prodVals,recVals,stormVals,symVals,symData,threshVals - - -def get_header(record, xLen, yLen, azdat, description): - # Encode dimensions, time, mapping, description, tilt, and VCP - mytime = a2radcommon.get_datetime_str(record) - dattyp = a2radcommon.get_data_type(azdat) - - msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + dattyp + \ - " " + description + "\n" + \ - str(record.getTrueElevationAngle()) + " " + \ - str(record.getVolumeCoveragePattern()) + "\n" - return msg - - -def encode_dep_vals(depVals): - nnn = len(depVals) - msg = str(nnn) - j = 0 - while j&2" - exit -endif - -set rrr = `echo $1 | grep '[a-z][a-z][a-z][a-z]'` -if ( "$rrr" != "" ) shift -set lastcmd = cat - -# -# Determine if we are using the data access framework or the 
uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set mydate = `echo "$1" | grep '.*-.*-'` - set mytime = `echo "$2" | grep ':'` - if ( "$mydate" != "" && "$mytime" != "" ) then - shift - shift - set userargs = "--date ${mydate} --time ${mytime}" - else - set userargs = "" - endif - - if ( "$rrr" == "" ) then - #done - else if ( "$1" == "" ) then - set userargs = "$userargs --icao $rrr" - else if ( "$1" == "+" ) then - set userargs = "$userargs --icao $rrr" - if ( -e ./msgCodeSeds.txt ) then - set mctrans = $PWD/msgCodeSeds.txt - else if ( -e $mydir/msgCodeSeds.txt ) then - set mctrans = $mydir/msgCodeSeds.txt - else if ( -e $fxa_home/src/dm/radar/msgCodeSeds.txt ) then - set mctrans = $fxa_home/src/dm/radar/msgCodeSeds.txt - else if ( -e $FXA_HOME/data/msgCodeSeds.txt ) then - set mctrans = $FXA_HOME/data/msgCodeSeds.txt - else - bash -c "echo could not find msgCodeSeds.txt 1>&2" - exit - endif - set lastcmd = "sed -f $mctrans" - else if ( "$2" == "" ) then - set userargs = "$userargs --icao $rrr --productCode $1 --angle 0.0 --outputDate" - else if ( "$2" == "+" ) then - set userargs = "$userargs --icao $rrr --productCode $1 --outputPrimary" - else - set userargs = "$userargs --icao $rrr --productCode $1 --angle $2 --outputTrue --outputDate" - endif - /awips2/python/bin/python $stubpy ${userargs} | $lastcmd -else - # - # Set up the environment we need to run the UEngine. - # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - # - # Set range of time we will request this information over, will default to - # essentially forever. - # - set aaa = "1970-01-01 00:00:00.0" - set bbb = "2038-01-01 00:00:00.0" - set mydate = `echo "$1" | grep '.*-.*-'` - set mytime = `echo "$2" | grep ':'` - if ( "$mydate" != "" && "$mytime" != "" ) then - shift - shift - if ( -x ./gtasUtil ) then - set gtasUtil = ./gtasUtil - else if ( -x $mydir/gtasUtil ) then - set gtasUtil = $mydir/gtasUtil - else if ( -x $fxa_home/src/dm/point/gtasUtil ) then - set gtasUtil = $fxa_home/src/dm/point/gtasUtil - else if ( -x $FXA_HOME/bin/gtasUtil ) then - set gtasUtil = $FXA_HOME/bin/gtasUtil - else - bash -c "echo could not find gtasUtil executable 1>&2" - exit - endif - set aaa = `$gtasUtil = $mydate $mytime -60` - set bbb = `$gtasUtil = $mydate $mytime 60` - endif - - - # - # Modify the text of special tags in stub to create finalized script. 
- # - set binary = no - set specpy = /tmp/a2invrad${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - if ( "$rrr" == "" ) then - cat $stubpy | grep -v "KKKK" | grep -v "MMMM" | grep -v "EEEE" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | \ - grep -v 'Code")' | grep -v 'Time")' | grep -v 'Angle")' \ - >> $specpy - else if ( "$1" == "" ) then - cat $stubpy | sed "s/KKKK/$rrr/g" | grep -v "MMMM" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | grep -v 'icao")' | \ - sed 's/^.*EEEE.*$//g' | grep -v 'Time")' | grep -v 'Angle")' \ - >> $specpy - else if ( "$1" == "+" ) then - cat $stubpy | sed "s/KKKK/$rrr/g" | grep -v "MMMM" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | grep -v 'icao")' | \ - sed 's/^.*EEEE.*$//g' | grep -v 'Time")' | grep -v 'Angle")' \ - >> $specpy - if ( -e ./msgCodeSeds.txt ) then - set mctrans = $PWD/msgCodeSeds.txt - else if ( -e $mydir/msgCodeSeds.txt ) then - set mctrans = $mydir/msgCodeSeds.txt - else if ( -e $fxa_home/src/dm/radar/msgCodeSeds.txt ) then - set mctrans = $fxa_home/src/dm/radar/msgCodeSeds.txt - else if ( -e $FXA_HOME/data/msgCodeSeds.txt ) then - set mctrans = $FXA_HOME/data/msgCodeSeds.txt - else - bash -c "echo could not find msgCodeSeds.txt 1>&2" - exit - endif - set lastcmd = "sed -f $mctrans" - else if ( "$2" == "" ) then - cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$1/g" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | grep -v 'icao")' | \ - sed 's/EEEE/0.0/g' | grep -v 'Angle")' | grep -v 'Code")' \ - >> $specpy - else if ( "$2" == "+" ) then - cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$1/g" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | grep -v 'icao")' | \ - sed 's/^.*EEEE.*$//g' | grep -v 'Time")' | grep -v 'Code")' | \ - sed 's/true/primary/g' >> $specpy - else - cat $stubpy | sed "s/KKKK/$rrr/g" | sed "s/MMMM/$1/g" | \ - sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | grep -v 'icao")' | \ - sed "s/EEEE/$2/g" | grep -v 'Code")' >> $specpy - set binary = yes - endif - # - # Submit the temporary python script stripping xml stuff, then remove it - # - if ( "$binary" == "yes" ) then - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) |& sed 's|.*.*|@|g' | \ - grep -E 'attributes|@' | cut '-d"' -f4 | tr '\n' ' ' | tr '@' '\n' | \ - sed 's/00000.*$//g' | sed 's/^ *//g' | sed 's/ *$//g' - else - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) |& grep attributes | cut '-d"' -f4 | \ - $lastcmd - endif - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/radar/a2invradStub.py b/pythonPackages/msaslaps/radar/a2invradStub.py deleted file mode 100644 index 5b7c698040..0000000000 --- a/pythonPackages/msaslaps/radar/a2invradStub.py +++ /dev/null @@ -1,195 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets inventories of radar grid data from the A-II database. The data is -# output to stdout as ASCII. 
Inventories are limited to Radial and Raster -# formats. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 2014-10-27 3600 nabowle Initial modification. Convert to DAF. -# 2014-12-18 3600 nabowle Use new getAvailableLevels() to speed up retrieval. -# - -import argparse -import numpy -import sys - -from datetime import datetime -from datetime import timedelta - -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level - - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", metavar="hostname") - - parser.add_argument("--icao", action="store", dest="icao", - help="The ICAO (optional)", metavar="icao") - - parser.add_argument("--productCode", action="store", dest="productCode", - help="Product Code (optional)", metavar="productCode") - - parser.add_argument("--angle", action="store", dest="angle", type=float, - help="The Primary Elevation Angle (optional)", - metavar="angle") - - parser.add_argument("--date", action="store", dest="date", - help="A date to find data within a minute of. (optional, --time required if provided)", - metavar="YYYY-MM-DD") - - parser.add_argument("--time", action="store", dest="time", - help="A time to find data within a minute of. (optional, --date required if provided)", - metavar="HH:MM") - - parser.add_argument("--outputDate", action="store_true", - dest="outputDate", help="Output the datetime (optional)") - - parser.add_argument("--outputTrue", action="store_const", - dest="outputAngle", const="true", - help="Output true elevation angle, if relevant. (optional," - + " exclusive with --outputPrimary)") - - parser.add_argument("--outputPrimary", action="store_const", - dest="outputAngle", const="primary", - help="Output primary elevation angle, if relevant. 
" - + "(optional, exclusive with --outputTrue)") - return parser.parse_args() - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - if (user_args.date and not user_args.time) or (user_args.time and not user_args.date): - print >> sys.stderr, "date and time must be provided together" - return - - # If a time range is provided, results will be filtered based on available times - timeRange = None - if user_args.date: - midRange = datetime.strptime( user_args.date + " " + user_args.time, "%Y-%m-%d %H:%M") - beginRange = midRange - timedelta(0, 60) - endRange = midRange + timedelta(0, 60) - timeRange = TimeRange(beginRange, endRange) - - req = create_request(user_args) - if user_args.icao: - if user_args.productCode: # retrieve available times and/or true or primary elevation angles - if timeRange: - tr = timeRange - else: - tr = None - lines = set() - - if user_args.outputAngle: - levels = DataAccessLayer.getAvailableLevels(req) - for level in levels: - line = "" - req.setLevels(level) - if user_args.outputDate: - times = DataAccessLayer.getAvailableTimes(req) - for time in times: - if not tr or tr.contains(time.getValidPeriod()): - line = str(time) + ".0" - line += " " - if user_args.outputAngle == "true": - line += "%.1f"%level.getLeveltwovalue() - else: - line += "%.1f"%level.getLevelonevalue() - lines.add(line) - else: - if not tr or data_in_time_range(req, tr): - if user_args.outputAngle == "true": - line = "%.1f"%level.getLeveltwovalue() - else: - line = "%.1f"%level.getLevelonevalue() - lines.add(line) - else : # just output time - times = DataAccessLayer.getAvailableTimes(req) - for time in times: - if not tr or tr.contains(time.getValidPeriod()): - lines.add(str(time) + ".0") - msg = "\n".join(lines) - else: #retrieve available product codes - unfiltered = DataAccessLayer.getAvailableParameters(req) - productCodes = [] - for parameter in unfiltered: #filter to just productCodes - if parameter.isdigit(): - productCodes.append(parameter) - if timeRange: - unfiltered = productCodes - productCodes = [] - for productCode in unfiltered: - req = create_request(user_args) - req.setParameters(productCode) - if data_in_time_range(req, timeRange): - productCodes.append(productCode) - msg = "\n".join(productCodes) - - else: # retrieve available icaos - icaos = DataAccessLayer.getAvailableLocationNames(req) - if timeRange: - unfiltered = icaos - icaos = [] - for icao in unfiltered: - req = create_request(user_args) - req.addIdentifier("icao", icao) - if data_in_time_range(req, timeRange): - icaos.append(icao) - - msg = "\n".join(icaos) - - print msg.strip() - -def create_request(user_args): - req = DataAccessLayer.newDataRequest("radar") - if user_args.icao: - req.addIdentifier("icao", user_args.icao) - if user_args.productCode: - req.setParameters(user_args.productCode) - if user_args.angle is not None: - level = Level() - level.setLevelonevalue(user_args.angle) - req.setLevels(level) - # Indicate that when providing or requesting levels, the Levelonevalue - # is the primaryElevationAngle and the Leveltwovalue value is the - # trueElevationAngle - req.addIdentifier("level.one.field", "primaryElevationAngle") - req.addIdentifier("level.two.field", "trueElevationAngle") - - return req - -def data_in_time_range(req, timeRange): - times = DataAccessLayer.getAvailableTimes(req) - for time in times: - if timeRange.contains(time.getValidPeriod()): - return True - return False - -if __name__ == '__main__': - main() diff --git 
a/pythonPackages/msaslaps/radar/a2radcommon.py b/pythonPackages/msaslaps/radar/a2radcommon.py deleted file mode 100644 index 0199c20a6e..0000000000 --- a/pythonPackages/msaslaps/radar/a2radcommon.py +++ /dev/null @@ -1,251 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Common methods for the a2gtrad and a2advrad scripts. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/13/2014 3393 nabowle Initial creation to contain common -# code for a2*radStub scripts. -# -# - -import argparse -import sys - -from datetime import datetime -from datetime import timedelta -from awips import ThriftClient - -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.radar.request import GetRadarDataRecordRequest - -def get_default_host(): - from awips.dataaccess import DataAccessLayer - return DataAccessLayer.THRIFT_HOST - - -def get_args_parser(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("--host", action="store", dest="host", - help="EDEX server hostname", - metavar="hostname") - parser.add_argument("--datetime", action="store", dest="datetime", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="datetime") - parser.add_argument("--radar", action="store", dest="radar", - help="The ICAO code for the radar", - metavar="radar") - parser.add_argument("--code", action="store", dest="code", - help="The product code.", type=int, - metavar="code") - parser.add_argument("--angle", action="store", dest="angle", default=0, - help="The Elevation Angle", metavar="angle") - parser.add_argument("--description", action="store", dest="description", - help="The description.", - metavar="desc") - parser.add_argument("--slop", action="store", dest="slop", default=60, - help="The amount of slop, in seconds, to allow around the datetime.", - metavar="slop", type=int) - return parser - - -def send_request(user_args): - slop = user_args.slop - - dateTimeStr = user_args.datetime - if not dateTimeStr: - print >> sys.stderr, "DateTime not provided" - return - dateTime = datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M") - beginRange = dateTime - timedelta(0, slop) - endRange = dateTime + timedelta(0, slop) - - timerange = TimeRange(beginRange, endRange) - - radar = user_args.radar - if not radar: - print >> sys.stderr, "Radar code not provided" - return - - code = user_args.code - if not code: - print >> sys.stderr, "Product code not provided" - return - - angle = user_args.angle - - slop = int(user_args.slop) - - host = user_args.host - if not host: - host = get_default_host() - - client = ThriftClient.ThriftClient(host) - - # Perform a GetRadarHDF5Request - req = 
GetRadarDataRecordRequest()
- req.setRadarId(radar)
- req.setPrimaryElevationAngle(float(angle))
- req.setTimeRange(timerange)
- req.setProductCode(int(code))
-
- response = client.sendRequest(req)
-
- if response is None:
- # print "Data not available"
- return
-
- records = response.getData()
- return records
-
-
-def get_datetime_str(record):
- #2014-07-16 00:00:00 (0) => 2014-07-16_00:03:00.0
- return str(record.getDataTime())[0:19].replace(" ","_") + ".0"
-
-
-def get_data_type(azdat):
- if azdat:
- dattyp = "radial"
- else :
- dattyp = "raster"
- return dattyp
-
-
-def get_hdf5_data(idra):
- rdat = []
- azdat = []
- depVals = []
- threshVals = []
- if len(idra) > 0:
- for ii in range(len(idra)):
- if idra[ii].getName() == "Data":
- rdat = idra[ii]
- elif idra[ii].getName() == "Angles":
- azdat = idra[ii]
- dattyp = "radial"
- elif idra[ii].getName() == "DependentValues":
- depVals = idra[ii].getShortData()
-## Commented out from the original. May not be available.
-# elif idra[ii].getName() == "ProductVals":
-# prodVals = idra[ii].getByteData()
-# elif idra[ii].getName() == "RecordVals":
-# recVals = idra[ii].getByteData()
-# elif idra[ii].getName() == "StormIds":
-# stormVals = idra[ii].getByteData()
-# elif idra[ii].getName() == "Symbology":
-# symVals = idra[ii].getByteData()
-# elif idra[ii].getName() == "SymbologyData":
-# symData = idra[ii].getByteData()
-##
- elif idra[ii].getName() == "Thresholds":
- threshVals = idra[ii].getShortData()
-
- return rdat,azdat,depVals,threshVals
-
-
-def get_header(record, format, xLen, yLen, azdat, description):
- # Encode dimensions, time, mapping, description, tilt, and VCP
- mytime = get_datetime_str(record)
- dattyp = get_data_type(azdat)
-
- if format :
- msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \
- dattyp + " " + str(record.getLatitude()) + " " + \
- str(record.getLongitude()) + " " + \
- str(record.getElevation()) + " " + \
- str(record.getElevationNumber()) + " " + \
- description + " " + str(record.getTrueElevationAngle()) + " " + \
- str(record.getVolumeCoveragePattern()) + "\n"
-#"%.1f"%
- else :
- msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \
- dattyp + " " + description + " " + \
- str(record.getTrueElevationAngle()) + " " + \
- str(record.getVolumeCoveragePattern()) + "\n"
-
- return msg
-
-
-def encode_thresh_vals(threshVals):
- spec = [".", "TH", "ND", "RF", "BI", "GC", "IC", "GR", "WS", "DS",
- "RA", "HR", "BD", "HA", "UK"]
- nnn = len(threshVals)
- j = 0
- msg = ""
- while j<nnn :
- lo = threshVals[j]%256
- hi = threshVals[j]/256
- msg += " "
- j += 1
- if hi<0 :
- if lo>14 :
- msg += "."
- else :
- msg += spec[lo]
- continue
- if hi % 16 >= 8 :
- msg += ">"
- elif hi % 8 >= 4 :
- msg += "<"
- if hi % 4 >= 2 :
- msg += "+"
- elif hi % 2 >= 1 :
- msg += "-"
- if hi >= 64 :
- msg += "%.2f"%(lo*0.01)
- elif hi % 64 >= 32 :
- msg += "%.2f"%(lo*0.05)
- elif hi % 32 >= 16 :
- msg += "%.1f"%(lo*0.1)
- else :
- msg += str(lo)
- msg += "\n"
- return msg
-
-
-def encode_dep_vals(depVals):
- nnn = len(depVals)
- j = 0
- msg = ""
- while j<nnn :
- msg += " " + str(depVals[j])
- j += 1
- msg += "\n"
- return msg
diff --git a/pythonPackages/msaslaps/raobs/a2gtraob.csh b/pythonPackages/msaslaps/raobs/a2gtraob.csh
deleted file mode 100644
--- a/pythonPackages/msaslaps/raobs/a2gtraob.csh
+++ /dev/null
-grep DataAccessLayer $stubpy >& /dev/null
-if ( $status == 0 ) then
-# Replace first field with station name substitutions in wmoToNameRaob.txt or
-# with what's stored in the database. They are similar but do not match 100%.
- /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" | sed -f $staInf
-# /awips2/python/bin/python $stubpy -b "$1 $2" -e "$3 $4" --use-station-name
-else
- #
- # Set up the environment we need to run the UEngine.
- # - set method = "uengine" - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - - set specpy = /tmp/a2gtraob${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - cat $stubpy | sed "s/BBBBB/$1 $2/g" | sed "s/EEEEE/$3 $4/g" > $specpy - - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) | grep -v '<' | sed -n '2,$p' | \ - sed -f $staInf - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/raobs/a2gtraobStub.py b/pythonPackages/msaslaps/raobs/a2gtraobStub.py deleted file mode 100644 index c6faa24513..0000000000 --- a/pythonPackages/msaslaps/raobs/a2gtraobStub.py +++ /dev/null @@ -1,338 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets all available raob data in the A-II database over a specified range of -# times. The data is output to stdout as ASCII. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 10, 2014 3595 nabowle Initial modification. Fix Man and SigW indices. -# Oct 10, 2014 3595 nabowle Replace UEngine with DAF. -# -# - -import argparse -import sys - -from datetime import datetime -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("-b", action="store", dest="start", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="start") - parser.add_argument("-e", action="store", dest="end", - help="The end of the time range in YYYY-MM-DD HH:MM", - metavar="end") - parser.add_argument("--use-station-name", action='store_true', default=False, - dest="stationname", help="Output the station name instead of station id for the first output field.") - return parser.parse_args() - -def main(): - # The multi-dimensional parameters. 
- MAN_PARAMS = set(['prMan', 'htMan', 'tpMan', 'tdMan', 'wdMan', 'wsMan']) - TROP_PARAMS = set(['prTrop', 'tpTrop', 'tdTrop', 'wdTrop', 'wsTrop']) - MAXW_PARAMS = set(['prMaxW', 'wdMaxW', 'wsMaxW']) - SIGT_PARAMS = set(['prSigT', 'tpSigT', 'tdSigT']) - SIGW_PARAMS = set(['htSigW', 'wdSigW', 'wsSigW']) - - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - start = user_args.start - end = user_args.end - - if not start or not end: - print >> sys.stderr, "Start or End date not provided" - return - - beginRange = datetime.strptime( start + ":00.0", "%Y-%m-%d %H:%M:%S.%f") - endRange = datetime.strptime( end + ":59.9", "%Y-%m-%d %H:%M:%S.%f") - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("bufrua") - req.setParameters("wmoStaNum", "validTime", "rptType", "staElev", "numMand", - "numSigT", "numSigW", "numTrop", "numMwnd", "staName") - req.getParameters().extend(MAN_PARAMS) - req.getParameters().extend(TROP_PARAMS) - req.getParameters().extend(MAXW_PARAMS) - req.getParameters().extend(SIGT_PARAMS) - req.getParameters().extend(SIGW_PARAMS) - - geometries = DataAccessLayer.getGeometryData(req, timerange) - - if not geometries : -# print "couldn't get data" - return - - - manGeos = [] - tropGeos = [] - maxwGeos = [] - sigtGeos = [] - sigwGeos = [] - # D-2D format files expect depression - tdman = [] - tdsigt = [] - tdtrop = [] - msg = "" - for geoData in geometries : - if set(geoData.getParameters()) & MAN_PARAMS : - manGeos.append(geoData) - td = geoData.getNumber("tdMan") - tp = geoData.getNumber("tpMan") - if td >150 and td<=tp : - tdman.append(tp-td) - else : - tdman.append(-9999.0) - continue - if set(geoData.getParameters()) & TROP_PARAMS : - tropGeos.append(geoData) - td = geoData.getNumber("tdTrop") - tp = geoData.getNumber("tpTrop") - if td>150 and td<=tp : - tdtrop.append(tp-td) - else : - tdtrop.append(-9999.0) - continue - if set(geoData.getParameters()) & MAXW_PARAMS : - maxwGeos.append(geoData) - continue - if set(geoData.getParameters()) & SIGT_PARAMS : - sigtGeos.append(geoData) - td = geoData.getNumber("tdSigT") - tp = geoData.getNumber("tpSigT") - if td>150 and td<=tp : - tdsigt.append(tp-td) - else : - tdsigt.append(-9999.0) - continue - if set(geoData.getParameters()) & SIGW_PARAMS : - sigwGeos.append(geoData) - continue - - if len(manGeos) == 0 and len(sigtGeos) == 0 and len(sigwGeos) == 0 or geoData.getNumber("rptType") > 2022 : - manGeos = [] - tropGeos = [] - maxwGeos = [] - sigtGeos = [] - sigwGeos = [] - tdman = [] - tdsigt = [] - tdtrop = [] - continue - - if user_args.stationname and geoData.getString("staName") : - msg += geoData.getString("staName") + "," - else : - msg += geoData.getString("wmoStaNum") + "," - msg += str(geoData.getNumber("validTime")/1000) + "," - msg += "%.4f"%geoData.getGeometry().y + "," - msg += "%.4f"%geoData.getGeometry().x + "," - msg += "%.0f"%geoData.getNumber("staElev") + "," - msg += geoData.getString("wmoStaNum") + "," - - kk = len(manGeos) - msg += str(kk) + "," - if kk>0 : - msg += "%.1f"%manGeos[0].getNumber("prMan") - k = 1 - while k < kk : - msg += "|" + "%.1f"%manGeos[k].getNumber("prMan") - k += 1 - msg += "," - - msg += "%.1f"%geoData.getNumber("staElev") - k = 1 - while k < kk : - msg += "|" + "%.1f"%manGeos[k].getNumber("htMan") - k += 1 - msg += "," - - msg += "%.1f"%manGeos[0].getNumber("tpMan") - k = 1 - while k < kk : - msg += "|" + "%.1f"%manGeos[k].getNumber("tpMan") - k += 1 - msg += "," - - msg += "%.1f"%tdman[0] - k = 1 - while k < kk 
: - msg += "|" + "%.1f"%tdman[k]; - k += 1 - msg += "," - - msg += "%.1f"%manGeos[0].getNumber("wsMan") - k = 1 - while k < kk : - msg += "|" + "%.1f"%manGeos[k].getNumber("wsMan") - k += 1 - msg += "," - - msg += "%.1f"%manGeos[0].getNumber("wdMan") - k = 1 - while k < kk : - msg += "|" + "%.1f"%manGeos[k].getNumber("wdMan") - k += 1 - msg += "," - else : - msg += ",,,,,," - - kk = len(sigtGeos) - msg += str(kk) + "," - if kk>0 : - msg += "%.1f"%sigtGeos[0].getNumber("prSigT") - k = 1 - while k < kk : - msg += "|" + "%.1f"%sigtGeos[k].getNumber("prSigT") - k += 1 - msg += "," - - msg += "%.1f"%sigtGeos[0].getNumber("tpSigT") - k = 1 - while k < kk : - msg += "|" + "%.1f"%sigtGeos[k].getNumber("tpSigT") - k += 1 - msg += "," - - msg += "%.1f"%tdsigt[0] - k = 1 - while k < kk : - msg += "|" + "%.1f"%tdsigt[k] - k += 1 - msg += "," - else : - msg += ",,," - - kk = len(sigwGeos) - msg += str(kk) + "," - if kk>0 : - msg += "%.1f"%sigwGeos[0].getNumber("htSigW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%sigwGeos[k].getNumber("htSigW") - k += 1 - msg += "," - - msg += "%.1f"%sigwGeos[0].getNumber("wsSigW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%sigwGeos[k].getNumber("wsSigW") - k += 1 - msg += "," - - msg += "%.1f"%sigwGeos[0].getNumber("wdSigW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%sigwGeos[k].getNumber("wdSigW") - k += 1 - msg += "," - else : - msg += ",,," - - kk = len(tropGeos) - msg += str(kk) + "," - if kk>0 : - msg += "%.1f"%tropGeos[0].getNumber("prTrop") - k = 1 - while k < kk : - msg += "|" + "%.1f"%tropGeos[k].getNumber("prTrop") - k += 1 - msg += "," - - msg += "%.1f"%tropGeos[0].getNumber("tpTrop") - k = 1 - while k < kk : - msg += "|" + "%.1f"%tropGeos[k].getNumber("tpTrop") - k += 1 - msg += "," - - msg += "%.1f"%tdtrop[0] - k = 1 - while k < kk : - msg += "|" + "%.1f"%tdtrop[k] - k += 1 - msg += "," - - msg += "%.1f"%tropGeos[0].getNumber("wsTrop") - k = 1 - while k < kk : - msg += "|" + "%.1f"%tropGeos[k].getNumber("wsTrop") - k += 1 - msg += "," - - msg += "%.1f"%tropGeos[0].getNumber("wdTrop") - k = 1 - while k < kk : - msg += "|" + "%.1f"%tropGeos[k].getNumber("wdTrop") - k += 1 - msg += "," - else : - msg += ",,,,," - - kk = len(maxwGeos) - msg += str(kk) + "," - if kk>0 : - msg += "%.1f"%maxwGeos[0].getNumber("prMaxW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%maxwGeos[k].getNumber("prMaxW") - k += 1 - msg += "," - - msg += "%.1f"%maxwGeos[0].getNumber("wsMaxW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%maxwGeos[k].getNumber("wsMaxW") - k += 1 - msg += "," - - msg += "%.1f"%maxwGeos[0].getNumber("wdMaxW") - k = 1 - while k < kk : - msg += "|" + "%.1f"%maxwGeos[k].getNumber("wdMaxW") - k += 1 - else : - msg += ",," - msg += "\n" - - manGeos = [] - tropGeos = [] - maxwGeos = [] - sigtGeos = [] - sigwGeos = [] - tdman = [] - tdsigt = [] - tdtrop = [] - - print msg.strip() - -if __name__ == '__main__': - main() diff --git a/pythonPackages/msaslaps/satellite/a2invsat.csh b/pythonPackages/msaslaps/satellite/a2invsat.csh deleted file mode 100644 index e3c47f8e66..0000000000 --- a/pythonPackages/msaslaps/satellite/a2invsat.csh +++ /dev/null @@ -1,174 +0,0 @@ -#!/bin/csh -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-# Mail Stop B8
-# Omaha, NE 68106
-# 402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-# A script wrapper that is meant to get inventories of satellite data
-# from the A-II database. The data is output to stdout as ASCII.
-# This version can adapt to use a python stub that calls the
-# data access framework.
-#
-# Usage:
-#
-# a2invsat.csh {p} sector channel {satid}
-#
-# p - A literal p. (optional)
-# sector - sector id
-# channel - channel id
-# satid - (optional) satellite id
-#
-# Returns a list of times with data for the specified sector/channel.
-#
-# The ids can be either D-2D integer ids, or AWIPS-II ascii ids, in which
-# case they need to be quoted on the command line.
-#
-# Integer ids can be looked up in a2satInfo.txt, channel id corresponds to
-# the physicalElement, and satid corresponds to the creatingEntity.
-#
-# The literal p option means preserve the final version of the python
-# submitted to the UEngine instead of cleaning it up. The path to the
-# finalized python is /tmp/a2invsatNNNNN.py where NNNNN is a unix process id.
-#
-#
-# SOFTWARE HISTORY
-#
-# Date Ticket# Engineer Description
-# ------------ ---------- ----------- --------------------------
-# 2014-10-23 3601 nabowle Initial modification. Properly calls DAF. Make a2satInfo.txt optional.
-#
-
-set rmpy = yes
-if ( "$1" == "p" ) then
- set rmpy = no
- shift
-endif
-#
-# Identify directory this script is in, will be one of the directories we
-# search for other files in.
-#
-set mydir = `dirname $0`
-set d1 = `echo $mydir | cut -c1`
-if ( "$mydir" == '.' ) then
- set mydir = $PWD
-else if ( "$d1" != "/" ) then
- set mydir = $PWD/$mydir
-endif
-set mydir = `(cd $mydir ; pwd)`
-if ( ! $?FXA_HOME ) set FXA_HOME = xxxx
-#
-# Locate python stub that we will modify to create the final python logic.
-#
-if ( -e ./a2invsatStub.py ) then
- set stubpy = ./a2invsatStub.py
-else if ( -e $mydir/a2invsatStub.py ) then
- set stubpy = $mydir/a2invsatStub.py
-else if ( -e $FXA_HOME/src/dm/sat/a2invsatStub.py ) then
- set stubpy = $FXA_HOME/src/dm/sat/a2invsatStub.py
-else if ( -e $FXA_HOME/bin/a2invsatStub.py ) then
- set stubpy = $FXA_HOME/bin/a2invsatStub.py
-else
- bash -c "echo could not find a2invsatStub.py 1>&2"
- exit
-endif
-
-#
-# Locate file containing mapping between D-2D integer ids and AWIPS-II ascii
-# ids for sectors, channels, and satellites.
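The lookup that follows resolves each D-2D integer id to its AWIPS-II ascii equivalent with a grep/cut pipeline. A minimal Python sketch of the same resolution, assuming only the three-column "id|fieldName|asciiId" pipe-delimited layout implied by those grep and cut calls (the exact file format is not reproduced in this diff, and the function name is illustrative):

    # Sketch: resolve a D-2D integer id via a2satInfo.txt, falling back to
    # the raw id when no row matches, as the csh wrapper does.
    def lookup_ascii_id(path, d2d_id, field):
        with open(path) as f:
            for line in f:
                cols = [c.strip() for c in line.split("|")]
                if len(cols) >= 3 and cols[0] == str(d2d_id) and field in cols[1]:
                    return cols[2]
        return str(d2d_id)

    # e.g. lookup_ascii_id("a2satInfo.txt", 13, "physicalElement");
    # the id value 13 here is purely illustrative.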
-# -if ( -e ./a2satInfo.txt ) then - set satInf = ./a2satInfo.txt -else if ( -e $mydir/a2satInfo.txt ) then - set satInf = $mydir/a2satInfo.txt -else if ( -e $FXA_HOME/src/dm/sat/a2satInfo.txt ) then - set satInf = $FXA_HOME/src/dm/sat/a2satInfo.txt -else if ( -e $FXA_HOME/data/a2satInfo.txt ) then - set satInf = $FXA_HOME/data/a2satInfo.txt -else - set satInf = "" - set sss = "$1" - set ccc = "$2" - if ( "$3" == "" ) then - set eee = "" - else - set eee = "$3" - endif -endif -# -# -if ( $satInf != "" ) then - set sss = `grep "^ *$1|.*sectorID" $satInf | cut '-d|' -f3` - if ( "$sss" == "" ) set sss = "$1" - set ccc = `grep "^ *$2|.*physicalElement" $satInf | cut '-d|' -f3` - if ( "$ccc" == "" ) set ccc = "$2" - if ( "$3" == "" ) then - set eee = "" - else - set eee = `grep "^ *$3|.*creatingEntity" $satInf | cut '-d|' -f3` - if ( "$eee" == "" ) set eee = "$3" - endif -endif - -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - if ( "$eee" == "" ) then - /awips2/python/bin/python $stubpy --sectorID "$sss" --physicalElement "$ccc" - else - /awips2/python/bin/python $stubpy --sectorID "$sss" --physicalElement "$ccc" --creatingEntity "$eee" - endif -else - # - # Set up the environment we need to run the UEngine. - # - if ( -e ./UEngine.cshsrc ) then - set ueenv = ./UEngine.cshsrc - else if ( -e $mydir/UEngine.cshsrc ) then - set ueenv = $mydir/UEngine.cshsrc - else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc - else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then - set ueenv = $FXA_HOME/bin/UEngine.cshsrc - else - bash -c "echo could not find UEngine.cshsrc 1>&2" - exit - endif - source $ueenv - # - # Modify the text of special tags in stub to create finalized script. - # - set specpy = /tmp/a2invsat${$}.py - rm -rf $specpy >& /dev/null - touch $specpy - chmod 775 $specpy - if ( "$eee" == "" ) then - cat $stubpy | sed "s/SSSSS/$sss/g" | sed "s/CCCCC/$ccc/g" | \ - sed 's/^.*EEEEE.*$//g' >> $specpy - else - cat $stubpy | sed "s/SSSSS/$sss/g" | sed "s/CCCCC/$ccc/g" | \ - sed "s/EEEEE/$eee/g" >> $specpy - endif - - cd $UE_BIN_PATH - ( uengine -r python < $specpy ) |& grep attributes | cut '-d"' -f4 - - if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null -endif - diff --git a/pythonPackages/msaslaps/satellite/a2invsatStub.py b/pythonPackages/msaslaps/satellite/a2invsatStub.py deleted file mode 100644 index 95dfd9b83f..0000000000 --- a/pythonPackages/msaslaps/satellite/a2invsatStub.py +++ /dev/null @@ -1,76 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# Gets inventories of satellite data from the A-II database. The data is output -# to stdout as ASCII. 
-#
-# SOFTWARE HISTORY
-#
-# Date Ticket# Engineer Description
-# ------------ ---------- ----------- --------------------------
-# 2014-10-23 3601 nabowle Initial modification. Convert to DAF.
-#
-
-import argparse
-import numpy
-import sys
-
-from awips.dataaccess import DataAccessLayer
-
-def get_args():
- parser = argparse.ArgumentParser(conflict_handler="resolve")
- parser.add_argument("-h", action="store", dest="host",
- help="EDEX server hostname (optional)", metavar="hostname")
- parser.add_argument("--sectorID", action="store", dest="sectorID",
- help="The sector ID.", metavar="sectorID")
- parser.add_argument("--physicalElement", action="store", dest="physicalElement",
- help="The physical element.", metavar="physicalElement")
- parser.add_argument("--creatingEntity", action="store", dest="creatingEntity",
- help="(optional) The creating entity", metavar="creatingEntity")
-
- return parser.parse_args()
-
-def main():
- user_args = get_args()
-
- if user_args.host:
- DataAccessLayer.changeEDEXHost(user_args.host)
-
- req = DataAccessLayer.newDataRequest("satellite")
-
- if not user_args.sectorID or not user_args.physicalElement:
- print >> sys.stderr, "sectorID or physicalElement not provided"
- return
- req.setParameters(user_args.physicalElement)
- req.addIdentifier("sectorID", user_args.sectorID)
-
- if user_args.creatingEntity:
- req.addIdentifier("creatingEntity", user_args.creatingEntity)
-
- msg = ""
- times = DataAccessLayer.getAvailableTimes(req)
- for time in times:
- timeStr = str(time)
- msg += timeStr[0:19] + ".0" + timeStr[19:] + "\n"
-
- print msg.strip()
-
-if __name__ == '__main__':
- main()
diff --git a/pythonPackages/msaslaps/satellite/a2rdsat.csh b/pythonPackages/msaslaps/satellite/a2rdsat.csh
deleted file mode 100644
index 96bab07b58..0000000000
--- a/pythonPackages/msaslaps/satellite/a2rdsat.csh
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/bin/csh
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-# Mail Stop B8
-# Omaha, NE 68106
-# 402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-# A script wrapper that is meant to get data for a single satellite sector
-# from the A-II database. The result is output to stdout as ASCII.
-# The first line returned has the dimensions of the image, the time, and the
-# source satellite of the data set returned. The rest is one line per row
-# of satellite data. The data for each row undergoes second order compression.
-# Each pixel value of 0 or 255 is encoded as @ or #, respectively. Otherwise
-# the first pixel on the row and any pixel that is more than 20 counts
-# different than the previous one is encoded as two hex digits. Pixels the
-# same as the previous are encoded as a period, pixels from 1 to 20 counts less
-# than the previous are encoded as G through Z, and pixels from 1 to 20 counts
-# more than the previous are encoded as g through z. There are no delimiters
-# between the encoding for each pixel.
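For reference, the row encoding described above can be inverted mechanically. A hedged sketch of a decoder for one encoded row, assuming exactly the conventions just listed (absolute pixels as two lowercase hex digits, '@'/'#' for 0/255, '.' for a repeat, G-Z for 1-20 counts less, g-z for 1-20 counts more); this helper is illustrative and was not part of the deleted scripts:

    def decode_row(row):
        # Decode one line of second-order-compressed satellite pixels (0-255).
        pixels = []
        prev = None
        i = 0
        while i < len(row):
            c = row[i]
            if c == '@':
                prev = 0
            elif c == '#':
                prev = 255
            elif c == '.':
                pass                          # same as the previous pixel
            elif 'G' <= c <= 'Z':
                prev -= ord(c) - ord('F')     # 1 to 20 counts less
            elif 'g' <= c <= 'z':
                prev += ord(c) - ord('f')     # 1 to 20 counts more
            else:
                prev = int(row[i:i+2], 16)    # two hex digits, absolute value
                i += 1
            pixels.append(prev)
            i += 1
        return pixels

For example, decode_row("1f.g@") yields [31, 31, 32, 0].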
-#
-# This version can adapt to use a python stub that calls the
-# data access framework.
-#
-# Usage:
-#
-# a2rdsat.csh {p} {h|i} sector channel {satid} date time {slop} {partition}
-#
-# p - (optional) A literal p.
-# h|i - (optional) A literal h or literal i.
-# Output pure undelimited hex or delimited integer values.
-# sector - sector id
-# channel - channel id
-# satid - (optional) satellite id
-# date - yyyy-mm-dd
-# time - hh:mm
-# slop - seconds of slop either side, defaults to 180
-# partition - (optional) upper case letter indicating partition to get. For
-# very large images data may need to be returned in pieces.
-# Allowable partitions are A through D.
-#
-# The ids can be either D-2D integer ids, or AWIPS-II ascii ids, in which
-# case they may need to be quoted on the command line.
-#
-# Integer ids can be looked up in a2satInfo.txt, channel id corresponds to
-# the physicalElement, and satid corresponds to the creatingEntity.
-#
-# The literal p option means preserve the final version of the python
-# submitted to the UEngine instead of cleaning it up. The path to the
-# finalized python is /tmp/a2rdsatNNNNN.py where NNNNN is a unix process id.
-# The literal n option means the first line of output is the dimension of
-# the grid returned.
-#
-#
-# SOFTWARE HISTORY
-#
-# Date Ticket# Engineer Description
-# ------------ ---------- ----------- --------------------------
-# Sep 29, 2014 3596 nabowle Initial modification. Handle daf conversion.
-#
-#
-#
-set rmpy = yes
-if ( "$1" == "p" ) then
- set rmpy = no
- shift
-endif
-set encoding = 2
-if ( "$1" == "h" ) then
- set encoding = 1
- shift
-endif
-if ( "$1" == "i" ) then
- set encoding = 0
- shift
-endif
-if ( "$1" == "p" ) then
- set rmpy = no
- shift
-endif
-#
-# Identify directory this script is in, will be one of the directories we
-# search for other files in.
-#
-set mydir = `dirname $0`
-set d1 = `echo $mydir | cut -c1`
-if ( "$mydir" == '.' ) then
- set mydir = $PWD
-else if ( "$d1" != "/" ) then
- set mydir = $PWD/$mydir
-endif
-set mydir = `(cd $mydir ; pwd)`
-if ( ! $?FXA_HOME ) set FXA_HOME = xxxx
-#
-# Locate python stub that we will modify to create the final python logic.
-#
-if ( -e ./a2rdsatStub.py ) then
- set stubpy = ./a2rdsatStub.py
-else if ( -e $mydir/a2rdsatStub.py ) then
- set stubpy = $mydir/a2rdsatStub.py
-else if ( -e $FXA_HOME/src/dm/sat/a2rdsatStub.py ) then
- set stubpy = $FXA_HOME/src/dm/sat/a2rdsatStub.py
-else if ( -e $FXA_HOME/bin/a2rdsatStub.py ) then
- set stubpy = $FXA_HOME/bin/a2rdsatStub.py
-else
- bash -c "echo could not find a2rdsatStub.py 1>&2"
- exit
-endif
-
-
-#
-# Locate file containing mapping between D-2D integer ids and AWIPS-II ascii
-# ids for sectors, channels, and satellites.
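The A-through-D partition scheme described in the usage notes above maps onto flat pixel offsets inside the stub. A small sketch of that arithmetic, following the element-count convention visible in a2rdsatStub.py further down (function and variable names here are illustrative, and integer division is written as // for clarity):

    # Sketch: start/end element offsets for one quarter-image partition.
    # Partition D runs to the true end so leftover rows are not dropped.
    def partition_bounds(xLen, yLen, part):
        quarter = xLen * (yLen // 4)
        start = {"A": 0, "B": 1, "C": 2, "D": 3}[part] * quarter
        end = xLen * yLen if part == "D" else start + quarter
        return start, end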
-# -if ( -e ./a2satInfo.txt ) then - set satInf = ./a2satInfo.txt -else if ( -e $mydir/a2satInfo.txt ) then - set satInf = $mydir/a2satInfo.txt -else if ( -e $FXA_HOME/src/dm/sat/a2satInfo.txt ) then - set satInf = $FXA_HOME/src/dm/sat/a2satInfo.txt -else if ( -e $FXA_HOME/data/a2satInfo.txt ) then - set satInf = $FXA_HOME/data/a2satInfo.txt -else - set satInf = "" - set sss = "$1" - set ccc = "$2" - set eee = `echo $3 | grep -v '.*-'` -endif -# -# -if ( $satInf != "" ) then - set sss = `grep "^ *$1|.*sectorID" $satInf | cut '-d|' -f3` - if ( "$sss" == "" ) set sss = "$1" - set ccc = `grep "^ *$2|.*physicalElement" $satInf | cut '-d|' -f3` - if ( "$ccc" == "" ) set ccc = "$2" - set eee = `echo $3 | grep -v '.*-'` - if ( "$eee" != "" ) then - set eee = `grep "^ *$eee|.*creatingEntity" $satInf | cut '-d|' -f3` - if ( "$eee" == "" ) set eee = "$3" - endif -endif - -shift -shift - -if ( "$eee" != "" ) shift -set slop = `echo $3 | grep '[0-9]'` -if ( "$slop" == "" ) set slop = 180 - -set ppp = `echo $argv[$#argv] | grep '^[A-Z]$'` -if ( "$ppp" == "" ) set ppp = 0 - -# -# Determine if we are using the data access framework or the uEngine. -# -grep DataAccessLayer $stubpy >& /dev/null -if ( $status == 0 ) then - set opts = "" - if ( "$eee" != "" ) then - set opts = "--entity ${eee}" - endif - - if ( "$encoding" == "1" ) then - set opts = "$opts --hex" - else if ( "$encoding" == "0" ) then - set opts = "$opts --int" - endif - - /awips2/python/bin/python $stubpy --sector "${sss}" --physical "${ccc}" --datetime "$1 $2" --part $ppp --slop $slop $opts -else - # - # Get program that can do math with ascii time string, then use this to - # properly encode range of times for which we look for data. - # - if ( -x ./gtasUtil ) then - set gtasUtil = ./gtasUtil - else if ( -x $mydir/gtasUtil ) then - set gtasUtil = $mydir/gtasUtil - else if ( -x $FXA_HOME/src/dm/point/gtasUtil ) then - set gtasUtil = $FXA_HOME/src/dm/point/gtasUtil - else if ( -x $FXA_HOME/bin/gtasUtil ) then - set gtasUtil = $FXA_HOME/bin/gtasUtil - else - bash -c "echo could not find gtasUtil executable 1>&2" - exit - endif - - # - # Set up the environment we need to run the UEngine. 
- #
- set method = "uengine"
- if ( -e ./UEngine.cshsrc ) then
- set ueenv = ./UEngine.cshsrc
- else if ( -e $mydir/UEngine.cshsrc ) then
- set ueenv = $mydir/UEngine.cshsrc
- else if ( -e $FXA_HOME/src/dm/point/UEngine.cshsrc ) then
- set ueenv = $FXA_HOME/src/dm/point/UEngine.cshsrc
- else if ( -e $FXA_HOME/bin/UEngine.cshsrc ) then
- set ueenv = $FXA_HOME/bin/UEngine.cshsrc
- else
- bash -c "echo could not find UEngine.cshsrc 1>&2"
- exit
- endif
- source $ueenv
-
- set aaa = `$gtasUtil = $1 $2 -$slop`
- set bbb = `$gtasUtil = $1 $2 $slop`
-
- set specpy = /tmp/a2rdsat${$}.py
- rm -rf $specpy >& /dev/null
- touch $specpy
- chmod 775 $specpy
- if ( "$eee" == "" ) then
- cat $stubpy | sed "s/SSSSS/$sss/g" | sed "s/CCCCC/$ccc/g" | \
- sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | \
- sed 's/^.*EEEEE.*$//g'| sed "s/PPPPP/$ppp/g" | \
- sed "s/XXXXX/$encoding/g" >> $specpy
- else
- cat $stubpy | sed "s/SSSSS/$sss/g" | sed "s/CCCCC/$ccc/g" | \
- sed "s/AAAAA/$aaa/g" | sed "s/BBBBB/$bbb/g" | \
- sed "s/EEEEE/$eee/g" | sed "s/PPPPP/$ppp/g" | \
- sed "s/XXXXX/$encoding/g" >> $specpy
- endif
-
- cd $UE_BIN_PATH
- ( uengine -r python < $specpy ) | grep -v '<' | grep -v Response
-
- if ( "$rmpy" == "yes" ) rm -rf $specpy >& /dev/null
- #
-endif
diff --git a/pythonPackages/msaslaps/satellite/a2rdsatStub.py b/pythonPackages/msaslaps/satellite/a2rdsatStub.py
deleted file mode 100644
index 6283bfa2fb..0000000000
--- a/pythonPackages/msaslaps/satellite/a2rdsatStub.py
+++ /dev/null
@@ -1,212 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-# Mail Stop B8
-# Omaha, NE 68106
-# 402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-# Gets data for a single satellite sector from the A-II database. The result is
-# output to stdout as ASCII. The first line returned has the dimensions of the
-# image, the time, and the source satellite of the data set returned. The rest
-# is one line per row of satellite data. The data for each row undergoes second
-# order compression. Each pixel value of 0 or 255 is encoded as @ or #,
-# respectively. Otherwise the first pixel on the row and any pixel that is more
-# than 20 counts different than the previous one is encoded as two hex digits.
-# Pixels the same as the previous are encoded as a period, pixels from 1 to 20
-# counts less than the previous are encoded as G through Z, and pixels from 1 to
-# 20 counts more than the previous are encoded as g through z. There are no
-# delimiters between the encoding for each pixel.
-#
-# SOFTWARE HISTORY
-#
-# Date Ticket# Engineer Description
-# ------------ ---------- ----------- --------------------------
-# Sep 29, 2014 3596 nabowle Initial modification. Replace UEngine with DAF.
-# Nov 10, 2016 5900 bsteffen Correct grid shape, simplify - -# -# - -import a2dafcommon -import argparse -import sys -import numpy - -from datetime import datetime -from datetime import timedelta -from awips.dataaccess import DataAccessLayer -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - -def get_args(): - parser = argparse.ArgumentParser(conflict_handler="resolve") - parser.add_argument("-h", action="store", dest="host", - help="EDEX server hostname (optional)", - metavar="hostname") - parser.add_argument("--datetime", action="store", dest="datetime", - help="The start of the time range in YYYY-MM-DD HH:MM", - metavar="datetime") - parser.add_argument("--slop", action="store", dest="slop", default=180, - help="The amount of slop, in seconds, to allow around the datetime.", - metavar="slop", type=int) - parser.add_argument("--sector", action="store", dest="sector", - help="The sector ID.", metavar="sectorID") - parser.add_argument("--physical", action="store", dest="physical", - help="The physical element.", metavar="physicalElement") - parser.add_argument("--entity", action="store", dest="entity", - help="The creating entity (optional)", - metavar="creatingEntity") - parser.add_argument("--partition", action="store", dest="partition", - help="Upper case letter indicating partition to get.", - metavar="partition", default="0") - parser.add_argument("--hex", action='store_const', dest="encoding", - const=1, help="Hex encoding.", metavar="encoding") - parser.add_argument("--int", action='store_const', dest="encoding", - const=0, help="Delimited integer encoding.", - metavar="encoding") - return parser.parse_args() - - -def main(): - user_args = get_args() - - if user_args.host: - DataAccessLayer.changeEDEXHost(user_args.host) - - slop = user_args.slop - - dateTimeStr = user_args.datetime - if not dateTimeStr: - print >> sys.stderr, "DateTime not provided" - return - - physicalElement = user_args.physical - if not physicalElement: - print >> sys.stderr, "PhysicalElement not provided" - return - - sectorID = user_args.sector - if not sectorID: - print >> sys.stderr, "SectorID not provided" - return - - creatingEntity = user_args.entity - part = user_args.partition - encoding = user_args.encoding - - dateTime = datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M") - beginRange = dateTime - timedelta(0, slop) - endRange = dateTime + timedelta(0, slop) - - timerange = TimeRange(beginRange, endRange) - - req = DataAccessLayer.newDataRequest("satellite") - req.setParameters(physicalElement) - req.setLocationNames(sectorID) - - if creatingEntity: - req.addIdentifier("creatingEntity", creatingEntity) - - grids = DataAccessLayer.getGridData(req, timerange) - - if not grids: -# print "Data not available" - return - - grid = grids[0] - data = grid.getRawData() - myent = grid.getAttribute("creatingEntity") - mytime = a2dafcommon.datatime_to_string(grid.getDataTime()) + ".0" - - if data is None or len(data) == 0: -# print "No data." 
- return
-
- data[numpy.isnan(data)] = 0
- yLen, xLen = data.shape
-
- plus = " ghijklmnopqrstuvwxyz"
- minus = " GHIJKLMNOPQRSTUVWXYZ"
- limit = 10000000
- if encoding == 1 :
- limit = limit/2
- elif encoding == 0 :
- limit = limit/8
-
- k = xLen * ( yLen / 4 )
- j = 0
- nxy = yLen*xLen
- if part=="D" :
- j = k+k+k
- elif part=="C" :
- j = k+k
- nxy = j+k
- elif part=="B" :
- j = k
- nxy = j+k
- elif part=="A" or nxy>limit :
- nxy = k
-
- msg = ""
- if part<="A" :
- msg += str(xLen) + " " + str(yLen) + " "
- msg += mytime + " " + myent + "\n"
-
- i = 0
- kk = None
- while j<nxy :
- k = int(data[j / xLen, j % xLen])
- if k<0 : k += 256
- if encoding == 0 :
- msg += str(k) + " "
- elif encoding == 1 :
- msg += "%2.2x"%k
- elif k == 0 :
- msg += "@"
- elif k == 255 :
- msg += "#"
- elif kk is None or k>kk+20 or k<kk-20 :
- msg += "%2.2x"%k
- elif k == kk :
- msg += "."
- elif k>kk :
- msg += plus[k-kk]
- else :
- msg += minus[kk-k]
- kk = k
- i+=1
-
- if i >= xLen :
- msg += "\n"
- i = 0
- j+= 1
-
- print msg.strip()
-
-if __name__ == '__main__':
- main()
diff --git a/pythonPackages/pypies/pypies/test/timingTest.py b/pythonPackages/pypies/pypies/test/timingTest.py
deleted file mode 100644
index 8e7a866114..0000000000
--- a/pythonPackages/pypies/pypies/test/timingTest.py
+++ /dev/null
@@ -1,118 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-# Mail Stop B8
-# Omaha, NE 68106
-# 402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-#
-# Timing tests for various storage plugins for the
-# Python Process Isolated Enhanced Storage
-#
-#
-# SOFTWARE HISTORY
-#
-# Date Ticket# Engineer Description
-# ------------ ---------- ----------- --------------------------
-# 05/20/10 njensen Initial Creation.
-# -# -# - -import numpy, os - -ntrials = 10 - -from timeit import Timer -import random - -import TablesTiming -import H5pyTiming -import Netcdf4Timing - -sampleIndex = [] -for i in range(200): - sampleIndex.append(random.randint(0, 1499)) -oneDimData = numpy.random.random((1000)) -twoDimData = numpy.random.random((642, 320)) - - -fname = "/home/njensen/pypies/testfiles/test.h5" - - -def createAndAppendTest(interface, array): - g = interface.createGroup("abc") - ds = interface.createDataset(g, "Data", array) - for i in range(500): - interface.appendValue(ds, random.random()) - -def test1D(clz): - try: - t = clz(fname, "w") - createAndAppendTest(t, oneDimData) - t.close() - t = clz(fname, "r") - sampleTest(t) - finally: - t.close() - os.remove(fname) - -def test2D(clz): - try: - t = clz(fname, "w") - create2DTest(t, twoDimData) - t.close() - t = clz(fname, "r") - retrieve2DTest(t) - finally: - t.close() - os.remove(fname) - -def create2DTest(interface, twoDimData): - g = interface.createGroup('abc2') - ds = interface.createDataset(g, "Data", twoDimData, 2) - -def sampleTest(interface): - ds = interface.getDataset('/abc/Data') - for index in sampleIndex: - interface.sampleValue(ds, index) - -def retrieve2DTest(interface): - ds = interface.getDataset('/abc2/Data') - - -def main(): - timer = Timer("test1D(TablesTiming.TablesTiming)", "from __main__ import test1D; from __main__ import TablesTiming") - print "pytables 1D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - - timer = Timer("test1D(H5pyTiming.H5pyTiming)", "from __main__ import test1D; from __main__ import H5pyTiming") - print "h5py 1D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - - timer = Timer("test1D(Netcdf4Timing.Netcdf4Timing)", "from __main__ import test1D; from __main__ import Netcdf4Timing") - print "netcdf4 1D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - - timer = Timer("test2D(TablesTiming.TablesTiming)", "from __main__ import test2D; from __main__ import TablesTiming") - print "pytables 2D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - - timer = Timer("test2D(H5pyTiming.H5pyTiming)", "from __main__ import test2D; from __main__ import H5pyTiming") - print "h5py 2D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - - timer = Timer("test2D(Netcdf4Timing.Netcdf4Timing)", "from __main__ import test2D; from __main__ import Netcdf4Timing") - print "netcdf4 2D took", sum(timer.repeat(ntrials,1))/ntrials,'seconds' - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/pythonPackages/ufpy/AlertVizHandler.py b/pythonPackages/ufpy/AlertVizHandler.py deleted file mode 100644 index aeb2ab0628..0000000000 --- a/pythonPackages/ufpy/AlertVizHandler.py +++ /dev/null @@ -1,70 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Pure python logging mechanism for logging to AlertViz from -# pure python (ie not JEP). 
DO NOT USE IN PYTHON CALLED -# FROM JAVA. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/18/10 njensen Initial Creation. -# -# -# - -import logging -import NotificationMessage - -class AlertVizHandler(logging.Handler): - - def __init__(self, host='localhost', port=61999, category='LOCAL', source='ANNOUNCER', level=logging.NOTSET): - logging.Handler.__init__(self, level) - self._category = category - self._host = host - self._port = port - self._source = source - - - def emit(self, record): - "Implements logging.Handler's interface. Record argument is a logging.LogRecord." - priority = None - if record.levelno >= 50: - priority = 'CRITICAL' - elif record.levelno >= 40: - priority = 'SIGNIFICANT' - elif record.levelno >= 30: - priority = 'PROBLEM' - elif record.levelno >= 20: - priority = 'EVENTA' - elif record.levelno >= 10: - priority = 'EVENTB' - else: - priority = 'VERBOSE' - - msg = self.format(record) - - notify = NotificationMessage.NotificationMessage(self._host, self._port, msg, priority, self._category, self._source) - notify.send() - \ No newline at end of file diff --git a/pythonPackages/ufpy/ConfigFileUtil.py b/pythonPackages/ufpy/ConfigFileUtil.py deleted file mode 100644 index 58f8d6727f..0000000000 --- a/pythonPackages/ufpy/ConfigFileUtil.py +++ /dev/null @@ -1,56 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# A set of utility functions for dealing with configuration files. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/27/10 dgilling Initial Creation. -# -# -# - - -def parseKeyValueFile(fileName): - propDict= dict() - - try: - propFile= open(fileName, "rU") - for propLine in propFile: - propDef= propLine.strip() - if len(propDef) == 0: - continue - if propDef[0] in ( '#' ): - continue - punctuation= [ propDef.find(c) for c in ':= ' ] + [ len(propDef) ] - found= min( [ pos for pos in punctuation if pos != -1 ] ) - name= propDef[:found].rstrip() - value= propDef[found:].lstrip(":= ").rstrip() - propDict[name]= value - propFile.close() - except: - pass - - return propDict \ No newline at end of file diff --git a/pythonPackages/ufpy/DateTimeConverter.py b/pythonPackages/ufpy/DateTimeConverter.py deleted file mode 100644 index 3295ac5f39..0000000000 --- a/pythonPackages/ufpy/DateTimeConverter.py +++ /dev/null @@ -1,107 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Functions for converting between the various "Java" dynamic serialize types -# used by EDEX to the native python time datetime. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/24/15 #4480 dgilling Initial Creation. -# - -import datetime -import time - -from dynamicserialize.dstypes.java.util import Date -from dynamicserialize.dstypes.java.sql import Timestamp -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange - - -MAX_TIME = pow(2, 31) - 1 -MICROS_IN_SECOND = 1000000 - - -def convertToDateTime(timeArg): - """ - Converts the given object to a python datetime object. Supports native - python representations like datetime and struct_time, but also - the dynamicserialize types like Date and Timestamp. Raises TypeError - if no conversion can be performed. - - Args: - timeArg: a python object representing a date and time. Supported - types include datetime, struct_time, float, int, long and the - dynamicserialize types Date and Timestamp. - - Returns: - A datetime that represents the same date/time as the passed in object. - """ - if isinstance(timeArg, datetime.datetime): - return timeArg - elif isinstance(timeArg, time.struct_time): - return datetime.datetime(*timeArg[:6]) - elif isinstance(timeArg, float): - # seconds as float, should be avoided due to floating point errors - totalSecs = long(timeArg) - micros = int((timeArg - totalSecs) * MICROS_IN_SECOND) - return _convertSecsAndMicros(totalSecs, micros) - elif isinstance(timeArg, (int, long)): - # seconds as integer - totalSecs = timeArg - return _convertSecsAndMicros(totalSecs, 0) - elif isinstance(timeArg, (Date, Timestamp)): - totalSecs = timeArg.getTime() - return _convertSecsAndMicros(totalSecs, 0) - else: - objType = str(type(timeArg)) - raise TypeError("Cannot convert object of type " + objType + " to datetime.") - -def _convertSecsAndMicros(seconds, micros): - if seconds < MAX_TIME: - rval = datetime.datetime.utcfromtimestamp(seconds) - else: - extraTime = datetime.timedelta(seconds=(seconds - MAX_TIME)) - rval = datetime.datetime.utcfromtimestamp(MAX_TIME) + extraTime - return rval.replace(microsecond=micros) - -def constructTimeRange(*args): - """ - Builds a python dynamicserialize TimeRange object from the given - arguments. - - Args: - args*: must be a TimeRange or a pair of objects that can be - converted to a datetime via convertToDateTime(). - - Returns: - A TimeRange. - """ - - if len(args) == 1 and isinstance(args[0], TimeRange): - return args[0] - if len(args) != 2: - raise TypeError("constructTimeRange takes exactly 2 arguments, " + str(len(args)) + " provided.") - startTime = convertToDateTime(args[0]) - endTime = convertToDateTime(args[1]) - return TimeRange(startTime, endTime) diff --git a/pythonPackages/ufpy/NotificationMessage.py b/pythonPackages/ufpy/NotificationMessage.py deleted file mode 100755 index bf22c738a5..0000000000 --- a/pythonPackages/ufpy/NotificationMessage.py +++ /dev/null @@ -1,183 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from string import Template - -import ctypes -import stomp -import socket -import sys -import time -import threading -import xml.etree.ElementTree as ET - -import ThriftClient -from dynamicserialize.dstypes.com.raytheon.uf.common.alertviz import AlertVizRequest -from dynamicserialize import DynamicSerializationManager - -# -# Provides a capability of constructing notification messages and sending -# them to a STOMP data source. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/30/08 chammack Initial Creation. -# 11/03/10 5849 cjeanbap Moved to ufpy package from -# com.raytheon.uf.tools.cli -# 01/07/11 5645 cjeanbap Added audio file to Status Message. -# 05/27/11 3050 cjeanbap Added if-statement to check Priority -# value -# 07/27/15 4654 skorolev Added filters -# 11/11/15 5120 rferrel Cannot serialize empty filters. -# -class NotificationMessage: - - priorityMap = { - 0: 'CRITICAL', - 1: 'SIGNIFICANT', - 2: 'PROBLEM', - 3: 'EVENTA', - 4: 'EVENTB', - 5: 'VERBOSE'} - - def __init__(self, host='localhost', port=61999, message='', priority='PROBLEM', category="LOCAL", source="ANNOUNCER", audioFile="NONE", filters=None): - self.host = host - self.port = port - self.message = message - self.audioFile = audioFile - self.source = source - self.category = category - self.filters = filters - - priorityInt = None - - try: - priorityInt = int(priority) - except: - pass - - if priorityInt is None: - #UFStatus.java contains mapping of Priority to Logging level mapping - if priority == 'CRITICAL' or priority == 'FATAL': - priorityInt = int(0) - elif priority == 'SIGNIFICANT' or priority == 'ERROR': - priorityInt = int(1) - elif priority == 'PROBLEM' or priority == 'WARN': - priorityInt = int(2) - elif priority == 'EVENTA' or priority == 'INFO': - priorityInt = int(3) - elif priority == 'EVENTB': - priorityInt = int(4) - elif priority == 'VERBOSE' or priority == 'DEBUG': - priorityInt = int(5) - - if (priorityInt < 0 or priorityInt > 5): - print "Error occurred, supplied an invalid Priority value: " + str(priorityInt) - print "Priority values are 0, 1, 2, 3, 4 and 5." 
- sys.exit(1) - - if priorityInt is not None: - self.priority = self.priorityMap[priorityInt] - else: - self.priority = priority - - def connection_timeout(self, connection): - if (connection is not None and not connection.is_connected()): - print "Connection Retry Timeout" - for tid, tobj in threading._active.items(): - if tobj.name is "MainThread": - res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(SystemExit)) - if res != 0 and res != 1: - # problem, reset state - ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, 0) - - def send(self): - # depending on the value of the port number indicates the distribution - # of the message to AlertViz - # 9581 is global distribution thru ThriftClient to Edex - # 61999 is local distribution - if (int(self.port) == 61999): - # use stomp.py - conn = stomp.Connection(host_and_ports=[(self.host, 61999)]) - timeout = threading.Timer(5.0, self.connection_timeout, [conn]) - - try: - timeout.start(); - conn.start() - finally: - timeout.cancel() - - conn.connect() - - sm = ET.Element("statusMessage") - sm.set("machine", socket.gethostname()) - sm.set("priority", self.priority) - sm.set("category", self.category) - sm.set("sourceKey", self.source) - sm.set("audioFile", self.audioFile) - if self.filters is not None and len(self.filters) > 0: - sm.set("filters", self.filters) - msg = ET.SubElement(sm, "message") - msg.text = self.message - details = ET.SubElement(sm, "details") - msg = ET.tostring(sm, "UTF-8") - - try : - conn.send(msg, destination='/queue/messages') - time.sleep(2) - finally: - conn.stop() - else: - # use ThriftClient - alertVizRequest = createRequest(self.message, self.priority, self.source, self.category, self.audioFile, self.filters) - thriftClient = ThriftClient.ThriftClient(self.host, self.port, "/services") - - serverResponse = None - try: - serverResponse = thriftClient.sendRequest(alertVizRequest) - except Exception, ex: - print "Caught exception submitting AlertVizRequest: ", str(ex) - - if (serverResponse != "None"): - print "Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse - sys.exit(1) - else: - print "Response: " + str(serverResponse) - -def createRequest(message, priority, source, category, audioFile, filters): - obj = AlertVizRequest() - - obj.setMachine(socket.gethostname()) - obj.setPriority(priority) - obj.setCategory(category) - obj.setSourceKey(source) - obj.setMessage(message) - if (audioFile is not None): - obj.setAudioFile(audioFile) - else: - obj.setAudioFile('\0') - obj.setFilters(filters) - return obj - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/pythonPackages/ufpy/QpidSubscriber.py b/pythonPackages/ufpy/QpidSubscriber.py deleted file mode 100644 index 67d9eba06f..0000000000 --- a/pythonPackages/ufpy/QpidSubscriber.py +++ /dev/null @@ -1,122 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
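A short usage sketch for the NotificationMessage class above; the argument values are illustrative, but the port convention (61999 for local STOMP delivery, 9581 for global delivery through ThriftClient to EDEX) comes from send() itself:

    # Sketch: send a local AlertViz notification over STOMP (port 61999).
    import NotificationMessage

    msg = NotificationMessage.NotificationMessage(
        host="localhost", port=61999,
        message="example status text",   # illustrative message body
        priority="EVENTA", category="LOCAL", source="ANNOUNCER")
    msg.send()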
-## - -# -# Provides a Python-based interface for subscribing to qpid queues and topics. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/17/10 njensen Initial Creation. -# 08/15/13 2169 bkowal Optionally gzip decompress any data that is read. -# 08/04/16 2416 tgurney Add queueStarted property -# 02/16/17 6084 bsteffen Support ssl connections -# 09/07/17 6175 tgurney Remove "decompressing" log message -# -# - -import os -import os.path -import qpid -import zlib - -from Queue import Empty -from qpid.exceptions import Closed - -class QpidSubscriber: - - def __init__(self, host='127.0.0.1', port=5672, decompress=False, ssl=None): - self.host = host - self.port = port - self.decompress = decompress; - socket = qpid.util.connect(host, port) - if "QPID_SSL_CERT_DB" in os.environ: - certdb = os.environ["QPID_SSL_CERT_DB"] - else: - certdb = os.path.expanduser("~/.qpid/") - if "QPID_SSL_CERT_NAME" in os.environ: - certname = os.environ["QPID_SSL_CERT_NAME"] - else: - certname = "guest" - certfile = os.path.join(certdb, certname + ".crt") - if ssl or (ssl is None and os.path.exists(certfile)): - keyfile = os.path.join(certdb, certname + ".key") - trustfile = os.path.join(certdb, "root.crt") - socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile) - self.__connection = qpid.connection.Connection(sock=socket, username='guest', password='guest') - self.__connection.start() - self.__session = self.__connection.session(str(qpid.datatypes.uuid4())) - self.subscribed = True - self.__queueStarted = False - - def topicSubscribe(self, topicName, callback): - # if the queue is edex.alerts, set decompress to true always for now to - # maintain compatibility with existing python scripts. 
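A typical (hypothetical) use of QpidSubscriber registers a callback and then blocks inside topicSubscribe() until close() is called from another thread; the broker hostname below is an assumption:

from ufpy.QpidSubscriber import QpidSubscriber

def on_message(body):
    print(body)  # raw payload, already gzip-decompressed when decompress=True

sub = QpidSubscriber(host='edex-broker', port=5672, decompress=True)
sub.topicSubscribe('edex.alerts', on_message)  # loops until sub.close()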
- if (topicName == 'edex.alerts'): - self.decompress = True - - print "Establishing connection to broker on", self.host - queueName = topicName + self.__session.name - self.__session.queue_declare(queue=queueName, exclusive=True, auto_delete=True, arguments={'qpid.max_count':100, 'qpid.policy_type':'ring'}) - self.__session.exchange_bind(exchange='amq.topic', queue=queueName, binding_key=topicName) - self.__innerSubscribe(queueName, callback) - - def __innerSubscribe(self, serverQueueName, callback): - local_queue_name = 'local_queue_' + serverQueueName - queue = self.__session.incoming(local_queue_name) - self.__session.message_subscribe(serverQueueName, destination=local_queue_name) - queue.start() - print "Connection complete to broker on", self.host - self.__queueStarted = True - - while self.subscribed: - try: - message = queue.get(timeout=10) - content = message.body - self.__session.message_accept(qpid.datatypes.RangedSet(message.id)) - if (self.decompress): - try: - # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk - d = zlib.decompressobj(16+zlib.MAX_WBITS) - content = d.decompress(content) - except Exception: - # decompression failed, return the original content - pass - callback(content) - except Empty: - pass - except Closed: - self.close() - - def close(self): - self.__queueStarted = False - self.subscribed = False - try: - self.__session.close(timeout=10) - except Exception: - pass - - @property - def queueStarted(self): - return self.__queueStarted - diff --git a/pythonPackages/ufpy/ThriftClient.py b/pythonPackages/ufpy/ThriftClient.py deleted file mode 100644 index 2bd5ee71db..0000000000 --- a/pythonPackages/ufpy/ThriftClient.py +++ /dev/null @@ -1,102 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import httplib -from dynamicserialize import DynamicSerializationManager -from dynamicserialize.dstypes.com.raytheon.uf.common.serialization.comm.response import ServerErrorResponse -from dynamicserialize.dstypes.com.raytheon.uf.common.serialization import SerializableExceptionWrapper - -# -# Provides a Python-based interface for executing Thrift requests. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/20/10 dgilling Initial Creation. -# -# -# - - -class ThriftClient: - - # How to call this constructor: - # 1. Pass in all arguments separately (e.g., - # ThriftClient.ThriftClient("localhost", 9581, "/services")) - # will return a Thrift client pointed at http://localhost:9581/services. - # 2. Pass in all arguments through the host string (e.g., - # ThriftClient.ThriftClient("localhost:9581/services")) - # will return a Thrift client pointed at http://localhost:9581/services. - # 3. 
Pass in host/port arguments through the host string (e.g., - # ThriftClient.ThriftClient("localhost:9581", "/services")) - # will return a Thrift client pointed at http://localhost:9581/services. - def __init__(self, host, port=9581, uri="/services"): - hostParts = host.split("/", 1) - if (len(hostParts) > 1): - hostString = hostParts[0] - self.__uri = "/" + hostParts[1] - self.__httpConn = httplib.HTTPConnection(hostString) - else: - if (port is None): - self.__httpConn = httplib.HTTPConnection(host) - else: - self.__httpConn = httplib.HTTPConnection(host, port) - - self.__uri = uri - - self.__dsm = DynamicSerializationManager.DynamicSerializationManager() - - def sendRequest(self, request, uri="/thrift"): - message = self.__dsm.serializeObject(request) - - self.__httpConn.connect() - self.__httpConn.request("POST", self.__uri + uri, message) - - response = self.__httpConn.getresponse() - if (response.status != 200): - raise ThriftRequestException("Unable to post request to server") - - rval = self.__dsm.deserializeBytes(response.read()) - self.__httpConn.close() - - # let's verify we have an instance of ServerErrorResponse - # IF we do, throw an exception up to the caller along - # with the original Java stack trace - # ELSE: we have a valid response and pass it back - try: - forceError = rval.getException() - raise ThriftRequestException(forceError) - except AttributeError: - pass - - return rval - - -class ThriftRequestException(Exception): - def __init__(self, value): - self.parameter = value - - def __str__(self): - return repr(self.parameter) - - \ No newline at end of file diff --git a/pythonPackages/ufpy/TimeUtil.py b/pythonPackages/ufpy/TimeUtil.py deleted file mode 100644 index e4dc6311ae..0000000000 --- a/pythonPackages/ufpy/TimeUtil.py +++ /dev/null @@ -1,108 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# offsetTime.py -# Handles Displaced Real Time for various applications -# -# Author: hansen/romberg -# ---------------------------------------------------------------------------- - -import string -import time - -# Given the timeStr, return the offset (in seconds) -# from the current time. -# Also return the launchStr i.e. Programs launched from this -# offset application will use the launchStr as the -z argument. -# The offset will be positive for time in the future, -# negative for time in the past. -# -# May still want it to be normalized to the most recent midnight. -# -# NOTES about synchronizing: -# --With synchronizing on, the "current time" for all processes started -# within a given hour will be the same.
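The ThriftClient shown earlier in this hunk is the transport behind most of the modules that follow. A minimal request cycle, sketched with a placeholder request object:

from ufpy import ThriftClient

client = ThriftClient.ThriftClient('localhost:9581/services')
try:
    # someRequest stands in for any dynamicserialize request object
    response = client.sendRequest(someRequest)
except ThriftClient.ThriftRequestException as e:
    print(e)  # server returned a ServerErrorResponse or a non-200 status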
-# This guarantees that GFE's have the same current time and ISC grid -# time stamps are synchronized and can be exchanged. -# Formatters launched from the GFE in this mode will be synchronized as -# well by setting the launchStr to use the time difference format -# (YYYYMMDD_HHMM,YYYYMMDD_HHMM). -# --This does not solve the problem in the general case. -# For example, if someone starts the GFE at 12:59 and someone -# else starts it at 1:01, they will have different offsets and -# current times. -# --With synchronizing off, when the process starts, the current time -# matches the drtTime in the command line. However, with synchronizing -# on, the current time will be offset by the fraction of the hour at -# which the process was started. Examples: -# Actual Starting time: 20040617_1230 -# drtTime 20040616_0000 -# Synchronizing off: -# GFE Spatial Editor at StartUp: 20040616_0000 -# Synchronizing on: -# GFE Spatial Editor at StartUp: 20040616_0030 -# -def determineDrtOffset(timeStr): - launchStr = timeStr - # Check for time difference - if timeStr.find(",") >=0: - times = timeStr.split(",") - t1 = makeTime(times[0]) - t2 = makeTime(times[1]) - #print "time offset", t1-t2, (t1-t2)/3600 - return t1-t2, launchStr - # Check for synchronized mode - synch = 0 - if timeStr[0] == "S": - timeStr = timeStr[1:] - synch = 1 - drt_t = makeTime(timeStr) - #print "input", year, month, day, hour, minute - gm = time.gmtime() - cur_t = time.mktime(gm) - - # Synchronize to most recent hour - # i.e. "truncate" cur_t to most recent hour. - #print "gmtime", gm - if synch: - cur_t = time.mktime((gm[0], gm[1], gm[2], gm[3], 0, 0, 0, 0, 0)) - curStr = '%4s%2s%2s_%2s00\n' % (`gm[0]`,`gm[1]`,`gm[2]`,`gm[3]`) - curStr = curStr.replace(' ','0') - launchStr = timeStr + "," + curStr - - #print "drt, cur", drt_t, cur_t - offset = drt_t - cur_t - #print "offset", offset, offset/3600, launchStr - return int(offset), launchStr - -def makeTime(timeStr): - year = string.atoi(timeStr[0:4]) - month = string.atoi(timeStr[4:6]) - day = string.atoi(timeStr[6:8]) - hour = string.atoi(timeStr[9:11]) - minute = string.atoi(timeStr[11:13]) - # Do not use daylight savings because gmtime is not in daylight - # savings time. - return time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) - diff --git a/pythonPackages/ufpy/UsageArgumentParser.py b/pythonPackages/ufpy/UsageArgumentParser.py deleted file mode 100644 index 313d206216..0000000000 --- a/pythonPackages/ufpy/UsageArgumentParser.py +++ /dev/null @@ -1,81 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information.
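A worked example of determineDrtOffset(), assuming a wall-clock time of 2004-06-17 12:30 UTC as in the notes above:

from ufpy.TimeUtil import determineDrtOffset

# Plain displaced real time: offset is (drt - now) in seconds.
offset, launchStr = determineDrtOffset("20040616_0000")
# offset is about -131400 (-36.5 hours); launchStr == "20040616_0000"

# Synchronized mode ("S" prefix) truncates "now" to the top of the hour,
# so every process started within the same hour computes the same offset.
offset, launchStr = determineDrtOffset("S20040616_0000")
# launchStr becomes "20040616_0000,<current hour>" for downstream -z arguments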
-## -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 13, 2017 6092 randerso Added StoreTimeAction -# -## - -import argparse -import sys -import time - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID - -TIME_FORMAT = "%Y%m%d_%H%M" - -class UsageArgumentParser(argparse.ArgumentParser): - """ - A subclass of ArgumentParser that overrides error() to print the - whole help text, rather than just the usage string. - """ - def error(self, message): - sys.stderr.write('%s: error: %s\n' % (self.prog, message)) - self.print_help() - sys.exit(2) - -## Custom actions for ArgumentParser objects ## -class StoreDatabaseIDAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): - did = DatabaseID(values) - if did.isValid(): - setattr(namespace, self.dest, did) - else: - parser.error("DatabaseID [" + values + "] not a valid identifier") - -class AppendParmNameAndLevelAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): - tx = ParmID.parmNameAndLevel(values) - comp = tx[0] + '_' + tx[1] - if (hasattr(namespace, self.dest)) and \ - (getattr(namespace, self.dest) is not None): - currentValues = getattr(namespace, self.dest) - currentValues.append(comp) - setattr(namespace, self.dest, currentValues) - else: - setattr(namespace, self.dest, [comp]) - -class StoreTimeAction(argparse.Action): - """ - argparse.Action subclass to validate GFE formatted time strings - and parse them to time.struct_time - """ - def __call__(self, parser, namespace, values, option_string=None): - try: - timeStruct = time.strptime(values, TIME_FORMAT) - except: - parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm") - - setattr(namespace, self.dest, timeStruct) - diff --git a/pythonPackages/ufpy/UsageOptionParser.py b/pythonPackages/ufpy/UsageOptionParser.py deleted file mode 100644 index 0f2938feb6..0000000000 --- a/pythonPackages/ufpy/UsageOptionParser.py +++ /dev/null @@ -1,38 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import sys -from optparse import OptionParser - -class UsageOptionParser(OptionParser): - """ - A subclass of OptionParser that overrides error() to print the - whole help text, rather than just the usage string. - """ - def error(self, msg): - """ - Print the help text and exit.
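The custom actions above plug into a parser like any argparse action. A small sketch using StoreTimeAction; the program and option names are illustrative:

from ufpy.UsageArgumentParser import UsageArgumentParser, StoreTimeAction

parser = UsageArgumentParser(prog='sampleCli')
parser.add_argument('-t', '--time', action=StoreTimeAction, dest='startTime',
                    help='time in YYYYMMDD_hhmm format')
args = parser.parse_args(['-t', '20180101_1200'])
# args.startTime is a time.struct_time; bad input prints the full help text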
- """ - self.print_help(sys.stderr) - sys.stderr.write("\n") - sys.stderr.write(msg) - sys.stderr.write("\n") - sys.exit(2) - diff --git a/pythonPackages/ufpy/__init__.py b/pythonPackages/ufpy/__init__.py deleted file mode 100644 index 6ed9ac6af6..0000000000 --- a/pythonPackages/ufpy/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for ufpy package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/21/10 dgilling Initial Creation. -# -# -# - - -__all__ = [ - ] diff --git a/pythonPackages/ufpy/dataaccess/CombinedTimeQuery.py b/pythonPackages/ufpy/dataaccess/CombinedTimeQuery.py deleted file mode 100644 index fcf9ba4e90..0000000000 --- a/pythonPackages/ufpy/dataaccess/CombinedTimeQuery.py +++ /dev/null @@ -1,100 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Method for performing a DAF time query where all parameter/level/location -# combinations must be available at the same time. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/22/16 #5591 bsteffen Initial Creation. 
-# - -from ufpy.dataaccess import DataAccessLayer - -def getAvailableTimes(request, refTimeOnly=False): - return __getAvailableTimesForEachParameter(request, refTimeOnly) - -def __getAvailableTimesForEachParameter(request, refTimeOnly=False): - parameters = request.getParameters() - if parameters: - times = None - for parameter in parameters: - specificRequest = __cloneRequest(request) - specificRequest.setParameters(parameter) - specificTimes = __getAvailableTimesForEachLevel(specificRequest, refTimeOnly) - if times is None: - times = specificTimes - else: - times.intersection_update(specificTimes) - if not times: - break - return times - else: - return __getAvailableTimesForEachLevel(request, refTimeOnly) - -def __getAvailableTimesForEachLevel(request, refTimeOnly=False): - levels = request.getLevels() - if levels: - times = None - for level in levels: - specificRequest = __cloneRequest(request) - specificRequest.setLevels(level) - specificTimes = __getAvailableTimesForEachLocation(specificRequest, refTimeOnly) - if times is None: - times = specificTimes - else: - times.intersection_update(specificTimes) - if not times: - break - return times - else: - return __getAvailableTimesForEachLocation(request, refTimeOnly) - -def __getAvailableTimesForEachLocation(request, refTimeOnly=False): - locations = request.getLocationNames() - if locations: - times = None - for location in locations: - specificRequest = __cloneRequest(request) - specificRequest.setLocationNames(location) - specificTimes = DataAccessLayer.getAvailableTimes(specificRequest, refTimeOnly) - if times is None: - times = set(specificTimes) - else: - times.intersection_update(specificTimes) - if not times: - break - return times - else: - return DataAccessLayer.getAvailableTimes(request, refTimeOnly) - - -def __cloneRequest(request): - return DataAccessLayer.newDataRequest(datatype = request.getDatatype(), - parameters = request.getParameters(), - levels = request.getLevels(), - locationNames = request.getLocationNames(), - envelope = request.getEnvelope(), - **request.getIdentifiers()) \ No newline at end of file diff --git a/pythonPackages/ufpy/dataaccess/DataAccessLayer.py b/pythonPackages/ufpy/dataaccess/DataAccessLayer.py deleted file mode 100644 index 868d871011..0000000000 --- a/pythonPackages/ufpy/dataaccess/DataAccessLayer.py +++ /dev/null @@ -1,276 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - - -# -# Published interface for ufpy.dataaccess package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 12/10/12 njensen Initial Creation. -# Feb 14, 2013 1614 bsteffen refactor data access framework -# to use single request. -# 04/10/13 1871 mnash move getLatLonCoords to JGridData and add default args -# 05/29/13 2023 dgilling Hook up ThriftClientRouter. 
-# 03/03/14 2673 bsteffen Add ability to query only ref times. -# 07/22/14 3185 njensen Added optional/default args to newDataRequest -# 07/30/14 3185 njensen Renamed valid identifiers to optional -# Apr 26, 2015 4259 njensen Updated for new JEP API -# Apr 13, 2016 5379 tgurney Add getIdentifierValues() -# Jun 01, 2016 5587 tgurney Add new signatures for -# getRequiredIdentifiers() and -# getOptionalIdentifiers() -# Oct 18, 2016 5916 bsteffen Add setLazyLoadGridLatLon -# -# - - -import sys -import subprocess -import warnings - -THRIFT_HOST = subprocess.check_output( - "source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST", - shell=True).strip() - - -USING_NATIVE_THRIFT = False - - -if sys.modules.has_key('jep'): - # intentionally do not catch if this fails to import, we want it to - # be obvious that something is configured wrong when running from within - # Java instead of allowing false confidence and fallback behavior - import JepRouter - router = JepRouter -else: - from ufpy.dataaccess import ThriftClientRouter - router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) - USING_NATIVE_THRIFT = True - - - -def getAvailableTimes(request, refTimeOnly=False): - """ - Get the times of available data to the request. - - Args: - request: the IDataRequest to get data for - refTimeOnly: optional, use True if only unique refTimes should be - returned (without a forecastHr) - - Returns: - a list of DataTimes - """ - return router.getAvailableTimes(request, refTimeOnly) - - -def getGridData(request, times=[]): - """ - Gets the grid data that matches the request at the specified times. Each - combination of parameter, level, and dataTime will be returned as a - separate IGridData. - - Args: - request: the IDataRequest to get data for - times: a list of DataTimes, a TimeRange, or None if the data is time - agnostic - - Returns: - a list of IGridData - """ - return router.getGridData(request, times) - - -def getGeometryData(request, times=[]): - """ - Gets the geometry data that matches the request at the specified times. - Each combination of geometry, level, and dataTime will be returned as a - separate IGeometryData. - - Args: - request: the IDataRequest to get data for - times: a list of DataTimes, a TimeRange, or None if the data is time - agnostic - - Returns: - a list of IGeometryData - """ - return router.getGeometryData(request, times) - - -def getAvailableLocationNames(request): - """ - Gets the available location names that match the request without actually - requesting the data. - - Args: - request: the request to find matching location names for - - Returns: - a list of strings of available location names. - """ - return router.getAvailableLocationNames(request) - - -def getAvailableParameters(request): - """ - Gets the available parameters names that match the request without actually - requesting the data. - - Args: - request: the request to find matching parameter names for - - Returns: - a list of strings of available parameter names. - """ - return router.getAvailableParameters(request) - - -def getAvailableLevels(request): - """ - Gets the available levels that match the request without actually - requesting the data. - - Args: - request: the request to find matching levels for - - Returns: - a list of strings of available levels. - """ - return router.getAvailableLevels(request) - - -def getRequiredIdentifiers(request): - """ - Gets the required identifiers for this request. These identifiers - must be set on a request for the request of this datatype to succeed. 
- - Args: - request: the request to find required identifiers for - - Returns: - a list of strings of required identifiers - """ - if str(request) == request: - warnings.warn("Use getRequiredIdentifiers(IDataRequest) instead", - DeprecationWarning) - return router.getRequiredIdentifiers(request) - - -def getOptionalIdentifiers(request): - """ - Gets the optional identifiers for this request. - - Args: - request: the request to find optional identifiers for - - Returns: - a list of strings of optional identifiers - """ - if str(request) == request: - warnings.warn("Use getOptionalIdentifiers(IDataRequest) instead", - DeprecationWarning) - return router.getOptionalIdentifiers(request) - - -def getIdentifierValues(request, identifierKey): - """ - Gets the allowed values for a particular identifier on this datatype. - - Args: - request: the request to find identifier values for - identifierKey: the identifier to find values for - - Returns: - a list of strings of allowed values for the specified identifier - """ - return router.getIdentifierValues(request, identifierKey) - -def newDataRequest(datatype=None, **kwargs): - """" - Creates a new instance of IDataRequest suitable for the runtime environment. - All args are optional and exist solely for convenience. - - Args: - datatype: the datatype to create a request for - parameters: a list of parameters to set on the request - levels: a list of levels to set on the request - locationNames: a list of locationNames to set on the request - envelope: an envelope to limit the request - **kwargs: any leftover kwargs will be set as identifiers - - Returns: - a new IDataRequest - """ - return router.newDataRequest(datatype, **kwargs) - -def getSupportedDatatypes(): - """ - Gets the datatypes that are supported by the framework - - Returns: - a list of strings of supported datatypes - """ - return router.getSupportedDatatypes() - - -def changeEDEXHost(newHostName): - """ - Changes the EDEX host the Data Access Framework is communicating with. Only - works if using the native Python client implementation, otherwise, this - method will throw a TypeError. - - Args: - newHostHame: the EDEX host to connect to - """ - if USING_NATIVE_THRIFT: - global THRIFT_HOST - THRIFT_HOST = newHostName - global router - router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) - else: - raise TypeError("Cannot call changeEDEXHost when using JepRouter.") - -def setLazyLoadGridLatLon(lazyLoadGridLatLon): - """ - Provide a hint to the Data Access Framework indicating whether to load the - lat/lon data for a grid immediately or wait until it is needed. This is - provided as a performance tuning hint and should not affect the way the - Data Access Framework is used. Depending on the internal implementation of - the Data Access Framework this hint might be ignored. Examples of when this - should be set to True are when the lat/lon information is not used or when - it is used only if certain conditions within the data are met. It could be - set to False if it is guaranteed that all lat/lon information is needed and - it would be better to get any performance overhead for generating the - lat/lon data out of the way during the initial request. - - - Args: - lazyLoadGridLatLon: Boolean value indicating whether to lazy load. - """ - try: - router.setLazyLoadGridLatLon(lazyLoadGridLatLon) - except AttributeError: - # The router is not required to support this capability. 
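Taken together, the functions above support the canonical DAF retrieval pattern. A minimal sketch against a hypothetical EDEX host; the parameter, level, and model names are assumptions:

from ufpy.dataaccess import DataAccessLayer

DataAccessLayer.changeEDEXHost('edex-server')  # example hostname
request = DataAccessLayer.newDataRequest('grid', parameters=['T'],
                                         levels=['500MB'],
                                         locationNames=['GFS40'])
times = DataAccessLayer.getAvailableTimes(request, refTimeOnly=True)
grids = DataAccessLayer.getGridData(request, times[-1:])
print(grids[0].getParameter())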
- pass \ No newline at end of file diff --git a/pythonPackages/ufpy/dataaccess/DataNotificationLayer.py b/pythonPackages/ufpy/dataaccess/DataNotificationLayer.py deleted file mode 100644 index 5ec788d843..0000000000 --- a/pythonPackages/ufpy/dataaccess/DataNotificationLayer.py +++ /dev/null @@ -1,157 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Published interface for retrieving data updates via ufpy.dataaccess package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# May 26, 2016 2416 rjpeter Initial Creation. -# Aug 1, 2016 2416 tgurney Finish implementation -# -# - -""" -Interface for the DAF's data notification feature, which allows continuous -retrieval of new data as it is coming into the system. - -There are two ways to access this feature: - -1. The DataQueue module (ufpy.dataaccess.DataQueue) offers a collection that -automatically fills up with new data as it receives notifications. See that -module for more information. - -2. Depending on the type of data you want, use either getGridDataUpdates() or -getGeometryDataUpdates() in this module. Either one will give you back an -object that will retrieve new data for you and will call a function you specify -each time new data is received. - -Example code follows. This example prints temperature as observed from KOMA -each time a METAR is received from there. 
- - from ufpy.dataaccess import DataAccessLayer as DAL - from ufpy.dataaccess import DataNotificationLayer as DNL - - def process_obs(list_of_data): - for item in list_of_data: - print(item.getNumber('temperature')) - - request = DAL.newDataRequest('obs') - request.setParameters('temperature') - request.setLocationNames('KOMA') - - notifier = DNL.getGeometryDataUpdates(request) - notifier.subscribe(process_obs) - # process_obs will called with a list of data each time new data comes in - -""" - -import re -import sys -import subprocess -from ufpy.dataaccess.PyGeometryNotification import PyGeometryNotification -from ufpy.dataaccess.PyGridNotification import PyGridNotification - - -THRIFT_HOST = subprocess.check_output( - "source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST", - shell=True).strip() - - -USING_NATIVE_THRIFT = False - -JMS_HOST_PATTERN=re.compile('tcp://([^:]+):([0-9]+)') - -if sys.modules.has_key('jep'): - # intentionally do not catch if this fails to import, we want it to - # be obvious that something is configured wrong when running from within - # Java instead of allowing false confidence and fallback behavior - import JepRouter - router = JepRouter -else: - from ufpy.dataaccess import ThriftClientRouter - router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) - USING_NATIVE_THRIFT = True - - -def _getJmsConnectionInfo(notifFilterResponse): - serverString = notifFilterResponse.getJmsConnectionInfo() - try: - host, port = JMS_HOST_PATTERN.match(serverString).groups() - except AttributeError as e: - raise RuntimeError('Got bad JMS connection info from server: ' + serverString) - return {'host': host, 'port': port} - - -def getGridDataUpdates(request): - """ - Get a notification object that receives updates to grid data. - - Args: - request: the IDataRequest specifying the data you want to receive - - Returns: - an update request object that you can listen for updates to by - calling its subscribe() method - """ - response = router.getNotificationFilter(request) - filter = response.getNotificationFilter() - jmsInfo = _getJmsConnectionInfo(response) - notifier = PyGridNotification(request, filter, requestHost=THRIFT_HOST, **jmsInfo) - return notifier - - -def getGeometryDataUpdates(request): - """ - Get a notification object that receives updates to geometry data. - - Args: - request: the IDataRequest specifying the data you want to receive - - Returns: - an update request object that you can listen for updates to by - calling its subscribe() method - """ - response = router.getNotificationFilter(request) - filter = response.getNotificationFilter() - jmsInfo = _getJmsConnectionInfo(response) - notifier = PyGeometryNotification(request, filter, requestHost=THRIFT_HOST, **jmsInfo) - return notifier - - -def changeEDEXHost(newHostName): - """ - Changes the EDEX host the Data Access Framework is communicating with. Only - works if using the native Python client implementation, otherwise, this - method will throw a TypeError. 
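The regular expression above does the heavy lifting in _getJmsConnectionInfo. For a concrete, made-up broker string:

import re

JMS_HOST_PATTERN = re.compile('tcp://([^:]+):([0-9]+)')
host, port = JMS_HOST_PATTERN.match('tcp://edex-broker:5672').groups()
# host == 'edex-broker', port == '5672' (the port stays a string)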
- - Args: - newHostHame: the EDEX host to connect to - """ - if USING_NATIVE_THRIFT: - global THRIFT_HOST - THRIFT_HOST = newHostName - global router - router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) - else: - raise TypeError("Cannot call changeEDEXHost when using JepRouter.") diff --git a/pythonPackages/ufpy/dataaccess/DataQueue.py b/pythonPackages/ufpy/dataaccess/DataQueue.py deleted file mode 100644 index f6babd5b93..0000000000 --- a/pythonPackages/ufpy/dataaccess/DataQueue.py +++ /dev/null @@ -1,213 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Convenience class for using the DAF's notifications feature. This is a -# collection that, once connected to EDEX by calling start(), fills with -# data as notifications come in. Runs on a separate thread to allow -# non-blocking data retrieval. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/29/16 2416 tgurney Initial creation -# - -from ufpy.dataaccess import DataNotificationLayer as DNL - -import time -from threading import Thread -import sys - - -if sys.version_info.major == 2: - from Queue import Queue, Empty -else: # Python 3 module renamed to 'queue' - from queue import Queue, Empty - - -"""Used to indicate a DataQueue that will produce geometry data.""" -GEOMETRY = object() - - -"""Used to indicate a DataQueue that will produce grid data.""" -GRID = object() - - -"""Default maximum queue size.""" -_DEFAULT_MAXSIZE = 100 - - -class Closed(Exception): - """Raised when attempting to get data from a closed queue.""" - pass - - -class DataQueue(object): - - """ - Convenience class for using the DAF's notifications feature. This is a - collection that, once connected to EDEX by calling start(), fills with - data as notifications come in. - - Example for getting obs data: - - from DataQueue import DataQueue, GEOMETRY - request = DataAccessLayer.newDataRequest('obs') - request.setParameters('temperature') - request.setLocationNames('KOMA') - q = DataQueue(GEOMETRY, request) - q.start() - for item in q: - print(item.getNumber('temperature')) - """ - - def __init__(self, dtype, request, maxsize=_DEFAULT_MAXSIZE): - """ - Create a new DataQueue. - - Args: - dtype: Either GRID or GEOMETRY; must match the type of data - requested. - request: IDataRequest describing the data you want. It must at - least have datatype set. All data produced will satisfy the - constraints you specify. - maxsize: Maximum number of data objects the queue can hold at - one time. If the limit is reached, any data coming in after - that will not appear until one or more items are removed using - DataQueue.get(). 
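Beyond the iterator idiom shown in the class docstring, the queue can also be polled without blocking indefinitely. A sketch, reusing the obs request from the docstring example:

from ufpy.dataaccess import DataAccessLayer
from ufpy.dataaccess.DataQueue import DataQueue, GEOMETRY

request = DataAccessLayer.newDataRequest('obs')
request.setParameters('temperature')
request.setLocationNames('KOMA')

q = DataQueue(GEOMETRY, request, maxsize=50)
q.start()
item = q.get(timeout=30)  # None if nothing arrives within 30 seconds
backlog = q.get_all()     # drains whatever else is waiting, never blocks
q.close()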
- """ - assert maxsize > 0 - assert dtype in (GEOMETRY, GRID) - self._maxsize = maxsize - self._queue = Queue(maxsize=maxsize) - self._thread = None - if dtype is GEOMETRY: - self._notifier = DNL.getGeometryDataUpdates(request) - elif dtype is GRID: - self._notifier = DNL.getGridDataUpdates(request) - - def start(self): - """Start listening for notifications and requesting data.""" - if self._thread is not None: - # Already started - return - kwargs = {'callback': self._data_received} - self._thread = Thread(target=self._notifier.subscribe, kwargs=kwargs) - self._thread.daemon = True - self._thread.start() - timer = 0 - while not self._notifier.subscribed: - time.sleep(0.1) - timer += 1 - if timer >= 100: # ten seconds - raise RuntimeError('timed out when attempting to subscribe') - - def _data_received(self, data): - for d in data: - if not isinstance(d, list): - d = [d] - for item in d: - self._queue.put(item) - - def get(self, block=True, timeout=None): - """ - Get and return the next available data object. By default, if there is - no data yet available, this method will not return until data becomes - available. - - Args: - block: Specifies behavior when the queue is empty. If True, wait - until an item is available before returning (the default). If - False, return None immediately if the queue is empty. - timeout: If block is True, wait this many seconds, and return None - if data is not received in that time. - Returns: - IData - """ - if self.closed: - raise Closed - try: - return self._queue.get(block, timeout) - except Empty: - return None - - def get_all(self): - """ - Get all data waiting for processing, in a single list. Always returns - immediately. Returns an empty list if no data has arrived yet. - - Returns: - List of IData - """ - data = [] - for _ in range(self._maxsize): - next_item = self.get(False) - if next_item is None: - break - data.append(next_item) - return data - - def close(self): - """Close the queue. May not be re-opened after closing.""" - if not self.closed: - self._notifier.close() - self._thread.join() - - def qsize(self): - """Return number of items in the queue.""" - return self._queue.qsize() - - def empty(self): - """Return True if the queue is empty.""" - return self._queue.empty() - - def full(self): - """Return True if the queue is full.""" - return self._queue.full() - - @property - def closed(self): - """True if the queue has been closed.""" - return not self._notifier.subscribed - - @property - def maxsize(self): - """ - Maximum number of data objects the queue can hold at one time. - If this limit is reached, any data coming in after that will not appear - until one or more items are removed using get(). - """ - return self._maxsize - - def __iter__(self): - if self._thread is not None: - while not self.closed: - yield self.get() - - def __enter__(self): - self.start() - return self - - def __exit__(self, *unused): - self.close() \ No newline at end of file diff --git a/pythonPackages/ufpy/dataaccess/PyData.py b/pythonPackages/ufpy/dataaccess/PyData.py deleted file mode 100644 index 0d1a1effe0..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyData.py +++ /dev/null @@ -1,57 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Implements IData for use by native Python clients to the Data Access -# Framework. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/03/13 dgilling Initial Creation. -# -# - -from ufpy.dataaccess import IData - -class PyData(IData): - - def __init__(self, dataRecord): - self.__time = dataRecord.getTime() - self.__level = dataRecord.getLevel() - self.__locationName = dataRecord.getLocationName() - self.__attributes = dataRecord.getAttributes() - - def getAttribute(self, key): - return self.__attributes[key] - - def getAttributes(self): - return self.__attributes.keys() - - def getDataTime(self): - return self.__time - - def getLevel(self): - return self.__level - - def getLocationName(self): - return self.__locationName diff --git a/pythonPackages/ufpy/dataaccess/PyGeometryData.py b/pythonPackages/ufpy/dataaccess/PyGeometryData.py deleted file mode 100644 index d1b426a87b..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyGeometryData.py +++ /dev/null @@ -1,80 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Implements IGeometryData for use by native Python clients to the Data Access -# Framework. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/03/13 dgilling Initial Creation. -# 01/06/14 2537 bsteffen Share geometry WKT. -# 03/19/14 2882 dgilling Raise an exception when getNumber() -# is called for data that is not a -# numeric Type. -# 06/09/16 5574 mapeters Handle 'SHORT' type in getNumber(). 
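getNumber(), defined just below, converts the stored value according to the record's declared type. Reduced to a standalone sketch (the _coerce name is hypothetical; long is the Python 2 builtin this package targets):

def _coerce(value, typeName):
    # Mirrors the type dispatch in PyGeometryData.getNumber()
    if typeName in ('INT', 'SHORT'):
        return int(value)
    elif typeName == 'LONG':
        return long(value)  # Python 2 builtin; int() suffices on Python 3
    elif typeName in ('FLOAT', 'DOUBLE'):
        return float(value)
    raise TypeError("Data for parameter is not a numeric type: " + typeName)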
-# -# - -from ufpy.dataaccess import IGeometryData -from ufpy.dataaccess import PyData - -class PyGeometryData(IGeometryData, PyData.PyData): - - def __init__(self, geoDataRecord, geometry): - PyData.PyData.__init__(self, geoDataRecord) - self.__geometry = geometry - self.__dataMap = {} - tempDataMap = geoDataRecord.getDataMap() - for key, value in tempDataMap.items(): - self.__dataMap[key] = (value[0], value[1], value[2]) - - def getGeometry(self): - return self.__geometry - - def getParameters(self): - return self.__dataMap.keys() - - def getString(self, param): - value = self.__dataMap[param][0] - return str(value) - - def getNumber(self, param): - value = self.__dataMap[param][0] - t = self.getType(param) - if t == 'INT' or t == 'SHORT': - return int(value) - elif t == 'LONG': - return long(value) - elif t == 'FLOAT': - return float(value) - elif t == 'DOUBLE': - return float(value) - else: - raise TypeError("Data for parameter " + param + " is not a numeric type.") - - def getUnit(self, param): - return self.__dataMap[param][2] - - def getType(self, param): - return self.__dataMap[param][1] diff --git a/pythonPackages/ufpy/dataaccess/PyGeometryNotification.py b/pythonPackages/ufpy/dataaccess/PyGeometryNotification.py deleted file mode 100644 index 92592c2def..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyGeometryNotification.py +++ /dev/null @@ -1,54 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Notification object that produces geometry data -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/22/16 2416 tgurney Initial creation -# 09/07/17 6175 tgurney Override messageReceived -# - -import dynamicserialize -from ufpy.dataaccess.PyNotification import PyNotification -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -class PyGeometryNotification(PyNotification): - - def messageReceived(self, msg): - dataUriMsg = dynamicserialize.deserialize(msg) - dataUris = dataUriMsg.getDataURIs() - dataTimes = set() - for dataUri in dataUris: - if self.notificationFilter.accept(dataUri): - dataTimes.add(self.getDataTime(dataUri)) - if dataTimes: - try: - data = self.getData(self.request, list(dataTimes)) - self.callback(data) - except Exception as e: - traceback.print_exc() - - def getData(self, request, dataTimes): - return self.DAL.getGeometryData(request, dataTimes) diff --git a/pythonPackages/ufpy/dataaccess/PyGridData.py b/pythonPackages/ufpy/dataaccess/PyGridData.py deleted file mode 100644 index 991e1d944e..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyGridData.py +++ /dev/null @@ -1,81 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Implements IGridData for use by native Python clients to the Data Access -# Framework. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/03/13 #2023 dgilling Initial Creation. -# 10/13/16 #5916 bsteffen Correct grid shape, allow lat/lon -# 11/10/16 #5900 bsteffen Correct grid shape -# to be requested by a delegate -# -# - - -import numpy -import warnings - -from ufpy.dataaccess import IGridData -from ufpy.dataaccess import PyData - -NO_UNIT_CONVERT_WARNING = """ -The ability to unit convert grid data is not currently available in this version of the Data Access Framework. -""" - - -class PyGridData(IGridData, PyData.PyData): - - def __init__(self, gridDataRecord, nx, ny, latLonGrid = None, latLonDelegate = None): - PyData.PyData.__init__(self, gridDataRecord) - nx = nx - ny = ny - self.__parameter = gridDataRecord.getParameter() - self.__unit = gridDataRecord.getUnit() - self.__gridData = numpy.reshape(numpy.array(gridDataRecord.getGridData()), (ny, nx)) - self.__latLonGrid = latLonGrid - self.__latLonDelegate = latLonDelegate - - - def getParameter(self): - return self.__parameter - - def getUnit(self): - return self.__unit - - def getRawData(self, unit=None): - # TODO: Find a proper python library that deals will with numpy and - # javax.measure style unit strings and hook it in to this method to - # allow end-users to perform unit conversion for grid data. - if unit is not None: - warnings.warn(NO_UNIT_CONVERT_WARNING, stacklevel=2) - return self.__gridData - - def getLatLonCoords(self): - if self.__latLonGrid is not None: - return self.__latLonGrid - elif self.__latLonDelegate is not None: - return self.__latLonDelegate() - return self.__latLonGrid diff --git a/pythonPackages/ufpy/dataaccess/PyGridNotification.py b/pythonPackages/ufpy/dataaccess/PyGridNotification.py deleted file mode 100644 index 365872f71e..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyGridNotification.py +++ /dev/null @@ -1,59 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Notification object that produces grid data -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/03/16 2416 rjpeter Initial Creation. 
-# 09/06/17 6175 tgurney Override messageReceived -# - -import dynamicserialize -from ufpy.dataaccess.PyNotification import PyNotification -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -class PyGridNotification(PyNotification): - - def messageReceived(self, msg): - dataUriMsg = dynamicserialize.deserialize(msg) - dataUris = dataUriMsg.getDataURIs() - for dataUri in dataUris: - if not self.notificationFilter.accept(dataUri): - continue - try: - # This improves performance over requesting by datatime since it requests only the - # parameter that the notification was received for (instead of this and all previous - # parameters for the same forecast hour) - # TODO: This utterly fails for derived requests - newReq = self.DAL.newDataRequest(self.request.getDatatype()) - newReq.addIdentifier("dataURI", dataUri) - newReq.setParameters(self.request.getParameters()) - data = self.getData(newReq, []) - self.callback(data) - except Exception as e: - traceback.print_exc() - - def getData(self, request, dataTimes): - return self.DAL.getGridData(request, dataTimes) diff --git a/pythonPackages/ufpy/dataaccess/PyNotification.py b/pythonPackages/ufpy/dataaccess/PyNotification.py deleted file mode 100644 index 5d95fe6528..0000000000 --- a/pythonPackages/ufpy/dataaccess/PyNotification.py +++ /dev/null @@ -1,110 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Implements IData for use by native Python clients to the Data Access -# Framework. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jun 22, 2016 2416 rjpeter Initial creation -# Jul 22, 2016 2416 tgurney Finish implementation -# Sep 07, 2017 6175 tgurney Override messageReceived in subclasses -# - - -import abc -import time -import traceback - -import dynamicserialize -from ufpy.dataaccess import DataAccessLayer -from ufpy.dataaccess import INotificationSubscriber -from ufpy.QpidSubscriber import QpidSubscriber -from ufpy.ThriftClient import ThriftRequestException -from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime - - -class PyNotification(INotificationSubscriber): - """ - Receives notifications for new data and retrieves the data that meets - specified filtering criteria. - """ - - __metaclass__ = abc.ABCMeta - - def __init__(self, request, filter, host='localhost', port=5672, requestHost='localhost'): - self.DAL = DataAccessLayer - self.DAL.changeEDEXHost(requestHost) - self.request = request - self.notificationFilter = filter - self.__topicSubscriber = QpidSubscriber(host, port, decompress=True) - self.__topicName = "edex.alerts" - self.callback = None - - def subscribe(self, callback): - """ - Start listening for notifications. - - Args: - callback: Function to call with a list of received data objects. 
- Will be called once for each request made for data. - """ - assert hasattr(callback, '__call__'), 'callback arg must be callable' - self.callback = callback - self.__topicSubscriber.topicSubscribe(self.__topicName, self.messageReceived) - # Blocks here - - def close(self): - if self.__topicSubscriber.subscribed: - self.__topicSubscriber.close() - - def getDataTime(self, dataURI): - dataTimeStr = dataURI.split('/')[2] - return DataTime(dataTimeStr) - - @abc.abstractmethod - def messageReceived(self, msg): - """Called when a message is received from QpidSubscriber. - - This method must call self.callback once for each request made for data - """ - pass - - @abc.abstractmethod - def getData(self, request, dataTimes): - """ - Retrieve and return data - - Args: - request: IDataRequest to send to the server - dataTimes: list of data times - Returns: - list of IData - """ - pass - - @property - def subscribed(self): - """True if currently subscribed to notifications.""" - return self.__topicSubscriber.queueStarted diff --git a/pythonPackages/ufpy/dataaccess/SoundingsSupport.py b/pythonPackages/ufpy/dataaccess/SoundingsSupport.py deleted file mode 100644 index 2f0bb2043e..0000000000 --- a/pythonPackages/ufpy/dataaccess/SoundingsSupport.py +++ /dev/null @@ -1,283 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Classes for retrieving soundings based on gridded data from the Data Access -# Framework -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/24/15 #4480 dgilling Initial Creation. -# - -from collections import defaultdict -from shapely.geometry import Point - -from ufpy import DateTimeConverter -from ufpy.dataaccess import DataAccessLayer - -from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level - - -def getSounding(modelName, weatherElements, levels, samplePoint, refTime=None, timeRange=None): - """" - Performs a series of Data Access Framework requests to retrieve a sounding object - based on the specified request parameters. - - Args: - modelName: the grid model datasetid to use as the basis of the sounding. - weatherElements: a list of parameters to return in the sounding. - levels: a list of levels to sample the given weather elements at - samplePoint: a lat/lon pair to perform the sampling of data at. - refTime: (optional) the grid model reference time to use for the sounding. - If not specified, the latest ref time in the system will be used. - timeRange: (optional) a TimeRange to specify which forecast hours to use. - If not specified, will default to all forecast hours. - - Returns: - A _SoundingCube instance, which acts a 3-tiered dictionary, keyed - by DataTime, then by level and finally by weather element. 
If no - data is available for the given request parameters, None is returned. - """ - - (locationNames, parameters, levels, envelope, refTime, timeRange) = \ - __sanitizeInputs(modelName, weatherElements, levels, samplePoint, refTime, timeRange) - - requestArgs = { 'datatype' : 'grid', - 'locationNames' : locationNames, - 'parameters' : parameters, - 'levels' : levels, - 'envelope' : envelope, - } - - req = DataAccessLayer.newDataRequest(**requestArgs) - - forecastHours = __determineForecastHours(req, refTime, timeRange) - if not forecastHours: - return None - - response = DataAccessLayer.getGeometryData(req, forecastHours) - soundingObject = _SoundingCube(response) - - return soundingObject - -def setEDEXHost(host): - """ - Changes the EDEX host the Data Access Framework is communicating with. - - Args: - host: the EDEX host to connect to - """ - - if host: - DataAccessLayer.changeEDEXHost(str(host)) - -def __sanitizeInputs(modelName, weatherElements, levels, samplePoint, refTime, timeRange): - locationNames = [str(modelName)] - parameters = __buildStringList(weatherElements) - levels = __buildStringList(levels) - envelope = Point(samplePoint) - if refTime is not None: - refTime = DataTime(refTime=DateTimeConverter.convertToDateTime(refTime)) - if timeRange is not None: - timeRange = DateTimeConverter.constructTimeRange(*timeRange) - return (locationNames, parameters, levels, envelope, refTime, timeRange) - -def __determineForecastHours(request, refTime, timeRange): - dataTimes = DataAccessLayer.getAvailableTimes(request, False) - timesGen = [(DataTime(refTime=dataTime.getRefTime()), dataTime) for dataTime in dataTimes] - dataTimesMap = defaultdict(list) - for baseTime, dataTime in timesGen: - dataTimesMap[baseTime].append(dataTime) - - if refTime is None: - refTime = max(dataTimesMap.keys()) - - forecastHours = dataTimesMap[refTime] - if timeRange is None: - return forecastHours - else: - return [forecastHour for forecastHour in forecastHours if timeRange.contains(forecastHour.getValidPeriod())] - -def __buildStringList(param): - if __notStringIter(param): - return [str(item) for item in param] - else: - return [str(param)] - -def __notStringIter(iterable): - if not isinstance(iterable, basestring): - try: - iter(iterable) - return True - except TypeError: - return False - - - -class _SoundingCube(object): - """ - The top-level sounding object returned when calling SoundingsSupport.getSounding. - - This object acts as a 3-tiered dict which is keyed by time then level - then parameter name. Calling times() will return all valid keys into this - object. - """ - - def __init__(self, geometryDataObjects): - self._dataDict = {} - self._sortedTimes = [] - if geometryDataObjects: - for geometryData in geometryDataObjects: - dataTime = geometryData.getDataTime() - level = geometryData.getLevel() - for parameter in geometryData.getParameters(): - self.__addItem(parameter, dataTime, level, geometryData.getNumber(parameter)) - - def __addItem(self, parameter, dataTime, level, value): - timeLayer = self._dataDict.get(dataTime, _SoundingTimeLayer(dataTime)) - self._dataDict[dataTime] = timeLayer - timeLayer._addItem(parameter, level, value) - if dataTime not in self._sortedTimes: - self._sortedTimes.append(dataTime) - self._sortedTimes.sort() - - def __getitem__(self, key): - return self._dataDict[key] - - def __len__(self): - return len(self._dataDict) - - def times(self): - """ - Returns the valid times for this sounding. 
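Traversing the cube returned by getSounding() runs time, then level, then parameter. An illustrative walk; the model name, parameter, levels, and sample point are all assumptions:

from ufpy.dataaccess.SoundingsSupport import getSounding

cube = getSounding('GFS40', ['T'], ['500MB', '700MB'], (-95.94, 41.26))
if cube is not None:
    for dataTime in cube.times():
        timeLayer = cube[dataTime]
        for level in timeLayer.levels():
            print(timeLayer[level]['T'])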
- - Returns: - A list containing the valid DataTimes for this sounding in order. - """ - return self._sortedTimes - - -class _SoundingTimeLayer(object): - """ - The second-level sounding object returned when calling SoundingsSupport.getSounding. - - This object acts as a 2-tiered dict which is keyed by level then parameter - name. Calling levels() will return all valid keys into this - object. Calling time() will return the DataTime for this particular layer. - """ - - def __init__(self, dataTime): - self._dataTime = dataTime - self._dataDict = {} - - def _addItem(self, parameter, level, value): - asString = str(level) - levelLayer = self._dataDict.get(asString, _SoundingTimeAndLevelLayer(self._dataTime, asString)) - levelLayer._addItem(parameter, value) - self._dataDict[asString] = levelLayer - - def __getitem__(self, key): - asString = str(key) - if asString in self._dataDict: - return self._dataDict[asString] - else: - raise KeyError("Level " + str(key) + " is not a valid level for this sounding.") - - def __len__(self): - return len(self._dataDict) - - def time(self): - """ - Returns the DataTime for this sounding cube layer. - - Returns: - The DataTime for this sounding layer. - """ - return self._dataTime - - def levels(self): - """ - Returns the valid levels for this sounding. - - Returns: - A list containing the valid levels for this sounding in order of - closest to surface to highest from surface. - """ - sortedLevels = [Level(level) for level in self._dataDict.keys()] - sortedLevels.sort() - return [str(level) for level in sortedLevels] - - -class _SoundingTimeAndLevelLayer(object): - """ - The bottom-level sounding object returned when calling SoundingsSupport.getSounding. - - This object acts as a dict which is keyed by parameter name. Calling - parameters() will return all valid keys into this object. Calling time() - will return the DataTime for this particular layer. Calling level() will - return the level for this layer. - """ - - def __init__(self, time, level): - self._time = time - self._level = level - self._parameters = {} - - def _addItem(self, parameter, value): - self._parameters[parameter] = value - - def __getitem__(self, key): - return self._parameters[key] - - def __len__(self): - return len(self._parameters) - - def level(self): - """ - Returns the level for this sounding cube layer. - - Returns: - The level for this sounding layer. - """ - return self._level - - def parameters(self): - """ - Returns the valid parameters for this sounding. - - Returns: - A list containing the valid parameter names. - """ - return list(self._parameters.keys()) - - def time(self): - """ - Returns the DataTime for this sounding cube layer. - - Returns: - The DataTime for this sounding layer. - """ - return self._time diff --git a/pythonPackages/ufpy/dataaccess/ThriftClientRouter.py b/pythonPackages/ufpy/dataaccess/ThriftClientRouter.py deleted file mode 100644 index 2fb4565c60..0000000000 --- a/pythonPackages/ufpy/dataaccess/ThriftClientRouter.py +++ /dev/null @@ -1,247 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
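For reviewers tracking the SoundingsSupport.py removal above: a minimal usage sketch of the deleted getSounding() helper. The model name, parameters, levels, and sample point are illustrative assumptions, not values taken from this changeset, and must exist on the target EDEX server.

    from ufpy.dataaccess import SoundingsSupport

    # Hypothetical inputs: any grid datasetid/parameters/levels known
    # to the server would do.
    cube = SoundingsSupport.getSounding(
        'RAP13', ['T', 'DpT'], ['700MB', '500MB'], (-97.08, 41.24))
    if cube is not None:
        for dataTime in cube.times():          # outer key: DataTime
            timeLayer = cube[dataTime]
            for level in timeLayer.levels():   # middle key: level
                value = timeLayer[level]['T']  # inner key: parameter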
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# # - -# -# Routes requests to the Data Access Framework through Python Thrift. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 05/21/13 2023 dgilling Initial Creation. -# 01/06/14 2537 bsteffen Share geometry WKT. -# 03/03/14 2673 bsteffen Add ability to query only ref times. -# 07/22/14 3185 njensen Added optional/default args to newDataRequest -# 07/23/14 3185 njensen Added new methods -# 07/30/14 3185 njensen Renamed valid identifiers to optional -# 06/30/15 4569 nabowle Use hex WKB for geometries. -# 04/13/15 5379 tgurney Add getIdentifierValues() -# 06/01/16 5587 tgurney Add new signatures for -# getRequiredIdentifiers() and -# getOptionalIdentifiers() -# 08/01/16 2416 tgurney Add getNotificationFilter() -# 10/13/16 5916 bsteffen Correct grid shape, allow lazy grid lat/lon -# 10/26/16 5919 njensen Speed up geometry creation in getGeometryData() -# - - -import numpy -import shapely.wkb - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultDataRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLocationNamesRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGeometryDataRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridDataRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridLatLonRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableParametersRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLevelsRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetRequiredIdentifiersRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetOptionalIdentifiersRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetIdentifierValuesRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetSupportedDatatypesRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetNotificationFilterRequest - -from ufpy import ThriftClient -from ufpy.dataaccess import PyGeometryData -from ufpy.dataaccess import PyGridData - - -class LazyGridLatLon(object): - - def __init__(self, client, nx, ny, envelope, crsWkt): - self._latLonGrid = None - self._client = client - self._request = GetGridLatLonRequest() - self._request.setNx(nx) - self._request.setNy(ny) - self._request.setEnvelope(envelope) - self._request.setCrsWkt(crsWkt) - - def __call__(self): - # Its important that the data is cached internally so that if multiple - # GridData are sharing the same delegate then they can also share a - # single request for the LatLon information. 
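The comment above describes a memoized zero-argument callable; a standalone sketch of the same caching pattern, with illustrative names:

    class CachedCall(object):
        """Cache the result of one expensive call (e.g. a server trip)."""
        def __init__(self, fetch):
            self._fetch = fetch
            self._result = None
        def __call__(self):
            # The first caller pays for the request; later callers (or
            # other grids sharing this delegate) reuse the cached result.
            if self._result is None:
                self._result = self._fetch()
            return self._result

Because several PyGridData objects can share one LazyGridLatLon delegate, only a single lat/lon request is sent per location.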
- if self._latLonGrid is None: - response = self._client.sendRequest(self._request) - nx = response.getNx() - ny = response.getNy() - latData = numpy.reshape(numpy.array(response.getLats()), (ny, nx)) - lonData = numpy.reshape(numpy.array(response.getLons()), (ny, nx)) - self._latLonGrid = (lonData, latData) - return self._latLonGrid - - -class ThriftClientRouter(object): - - def __init__(self, host='localhost'): - self._client = ThriftClient.ThriftClient(host) - self._lazyLoadGridLatLon = False - - def setLazyLoadGridLatLon(self, lazyLoadGridLatLon): - self._lazyLoadGridLatLon = lazyLoadGridLatLon - - def getAvailableTimes(self, request, refTimeOnly): - timesRequest = GetAvailableTimesRequest() - timesRequest.setRequestParameters(request) - timesRequest.setRefTimeOnly(refTimeOnly) - response = self._client.sendRequest(timesRequest) - return response - - def getGridData(self, request, times): - gridDataRequest = GetGridDataRequest() - gridDataRequest.setIncludeLatLonData(not self._lazyLoadGridLatLon) - gridDataRequest.setRequestParameters(request) - # if we have an iterable times instance, then the user must have asked - # for grid data with the List of DataTime objects - # else, we assume it was a single TimeRange that was meant for the - # request - try: - iter(times) - gridDataRequest.setRequestedTimes(times) - except TypeError: - gridDataRequest.setRequestedPeriod(times) - response = self._client.sendRequest(gridDataRequest) - - locSpecificData = {} - locNames = response.getSiteNxValues().keys() - for location in locNames: - nx = response.getSiteNxValues()[location] - ny = response.getSiteNyValues()[location] - if self._lazyLoadGridLatLon: - envelope = response.getSiteEnvelopes()[location] - crsWkt = response.getSiteCrsWkt()[location] - delegate = LazyGridLatLon( - self._client, nx, ny, envelope, crsWkt) - locSpecificData[location] = (nx, ny, delegate) - else: - latData = numpy.reshape(numpy.array( - response.getSiteLatGrids()[location]), (ny, nx)) - lonData = numpy.reshape(numpy.array( - response.getSiteLonGrids()[location]), (ny, nx)) - locSpecificData[location] = (nx, ny, (lonData, latData)) - retVal = [] - for gridDataRecord in response.getGridData(): - locationName = gridDataRecord.getLocationName() - locData = locSpecificData[locationName] - if self._lazyLoadGridLatLon: - retVal.append(PyGridData.PyGridData(gridDataRecord, locData[ - 0], locData[1], latLonDelegate=locData[2])) - else: - retVal.append(PyGridData.PyGridData( - gridDataRecord, locData[0], locData[1], locData[2])) - return retVal - - def getGeometryData(self, request, times): - geoDataRequest = GetGeometryDataRequest() - geoDataRequest.setRequestParameters(request) - # if we have an iterable times instance, then the user must have asked - # for geometry data with the List of DataTime objects - # else, we assume it was a single TimeRange that was meant for the - # request - try: - iter(times) - geoDataRequest.setRequestedTimes(times) - except TypeError: - geoDataRequest.setRequestedPeriod(times) - response = self._client.sendRequest(geoDataRequest) - geometries = [] - for wkb in response.getGeometryWKBs(): - # the wkb is a numpy.ndarray of dtype int8 - # convert the bytearray to a byte string and load it - geometries.append(shapely.wkb.loads(wkb.tostring())) - - retVal = [] - for geoDataRecord in response.getGeoData(): - geom = geometries[geoDataRecord.getGeometryWKBindex()] - retVal.append(PyGeometryData.PyGeometryData(geoDataRecord, geom)) - return retVal - - def getAvailableLocationNames(self, request): - 
locNamesRequest = GetAvailableLocationNamesRequest() - locNamesRequest.setRequestParameters(request) - response = self._client.sendRequest(locNamesRequest) - return response - - def getAvailableParameters(self, request): - paramReq = GetAvailableParametersRequest() - paramReq.setRequestParameters(request) - response = self._client.sendRequest(paramReq) - return response - - def getAvailableLevels(self, request): - levelReq = GetAvailableLevelsRequest() - levelReq.setRequestParameters(request) - response = self._client.sendRequest(levelReq) - return response - - def getRequiredIdentifiers(self, request): - if str(request) == request: - # Handle old version getRequiredIdentifiers(str) - request = self.newDataRequest(request) - idReq = GetRequiredIdentifiersRequest() - idReq.setRequest(request) - response = self._client.sendRequest(idReq) - return response - - def getOptionalIdentifiers(self, request): - if str(request) == request: - # Handle old version getOptionalIdentifiers(str) - request = self.newDataRequest(request) - idReq = GetOptionalIdentifiersRequest() - idReq.setRequest(request) - response = self._client.sendRequest(idReq) - return response - - def getIdentifierValues(self, request, identifierKey): - idValReq = GetIdentifierValuesRequest() - idValReq.setIdentifierKey(identifierKey) - idValReq.setRequestParameters(request) - response = self._client.sendRequest(idValReq) - return response - - def newDataRequest(self, datatype, parameters=[], levels=[], locationNames=[], envelope=None, **kwargs): - req = DefaultDataRequest() - if datatype: - req.setDatatype(datatype) - if parameters: - req.setParameters(*parameters) - if levels: - req.setLevels(*levels) - if locationNames: - req.setLocationNames(*locationNames) - if envelope: - req.setEnvelope(envelope) - if kwargs: - # any args leftover are assumed to be identifiers - req.identifiers = kwargs - return req - - def getSupportedDatatypes(self): - response = self._client.sendRequest(GetSupportedDatatypesRequest()) - return response - - def getNotificationFilter(self, request): - notifReq = GetNotificationFilterRequest() - notifReq.setRequestParameters(request) - response = self._client.sendRequest(notifReq) - return response diff --git a/pythonPackages/ufpy/dataaccess/__init__.py b/pythonPackages/ufpy/dataaccess/__init__.py deleted file mode 100644 index 898f7949ad..0000000000 --- a/pythonPackages/ufpy/dataaccess/__init__.py +++ /dev/null @@ -1,389 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for ufpy.dataaccess package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 12/10/12 njensen Initial Creation. -# Feb 14, 2013 1614 bsteffen refactor data access framework -# to use single request. 
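Both getGridData() and getGeometryData() in the deleted router use the same duck-typing dispatch on the times argument; a condensed sketch of the idiom, with method names mirroring the deleted code:

    def _applyTimes(dataRequest, times):
        try:
            iter(times)                # a list of DataTime objects
        except TypeError:
            dataRequest.setRequestedPeriod(times)   # a single TimeRange
        else:
            dataRequest.setRequestedTimes(times)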
-# Apr 09, 2013 1871 njensen Add doc strings -# Jun 03, 2013 2023 dgilling Add getAttributes to IData, add -# getLatLonGrids() to IGridData. -# Aug 01, 2016 2416 tgurney Add INotificationSubscriber -# and INotificationFilter -# -# - -__all__ = [ - - ] - -import abc - -class IDataRequest(object): - """ - An IDataRequest to be submitted to the DataAccessLayer to retrieve data. - """ - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def setDatatype(self, datatype): - """ - Sets the datatype of the request. - - Args: - datatype: A string of the datatype, such as "grid", "radar", "gfe", "obs" - """ - return - - @abc.abstractmethod - def addIdentifier(self, key, value): - """ - Adds an identifier to the request. Identifiers are specific to the - datatype being requested. - - Args: - key: the string key of the identifier - value: the value of the identifier - """ - return - - @abc.abstractmethod - def setParameters(self, params): - """ - Sets the parameters of data to request. - - Args: - params: a list of strings of parameters to request - """ - return - - @abc.abstractmethod - def setLevels(self, levels): - """ - Sets the levels of data to request. Not all datatypes support levels. - - Args: - levels: a list of strings of level abbreviations to request - """ - return - - @abc.abstractmethod - def setEnvelope(self, env): - """ - Sets the envelope of the request. If supported by the datatype factory, - the data returned for the request will be constrained to only the data - within the envelope. - - Args: - env: a shapely geometry - """ - return - - @abc.abstractmethod - def setLocationNames(self, locationNames): - """ - Sets the location names of the request. - - Args: - locationNames: a list of strings of location names to request - """ - return - - @abc.abstractmethod - def getDatatype(self): - """ - Gets the datatype of the request - - Returns: - the datatype set on the request - """ - return - - @abc.abstractmethod - def getIdentifiers(self): - """ - Gets the identifiers on the request - - Returns: - a dictionary of the identifiers - """ - return - - @abc.abstractmethod - def getLevels(self): - """ - Gets the levels on the request - - Returns: - a list of strings of the levels - """ - return - - @abc.abstractmethod - def getLocationNames(self): - """ - Gets the location names on the request - - Returns: - a list of strings of the location names - """ - return - - @abc.abstractmethod - def getEnvelope(self): - """ - Gets the envelope on the request - - Returns: - a rectangular shapely geometry - """ - return - - - -class IData(object): - """ - An IData representing data returned from the DataAccessLayer. - """ - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def getAttribute(self, key): - """ - Gets an attribute of the data. - - Args: - key: the key of the attribute - - Returns: - the value of the attribute - """ - return - - @abc.abstractmethod - def getAttributes(self): - """ - Gets the valid attributes for the data. - - Returns: - a list of strings of the attribute names - """ - return - - @abc.abstractmethod - def getDataTime(self): - """ - Gets the data time of the data. - - Returns: - the data time of the data, or None if no time is associated - """ - return - - @abc.abstractmethod - def getLevel(self): - """ - Gets the level of the data. - - Returns: - the level of the data, or None if no level is associated - """ - return - - @abc.abstractmethod - def getLocationName(self, param): - """ - Gets the location name of the data. 
- - Returns: - the location name of the data, or None if no location name is - associated - """ - return - - - -class IGridData(IData): - """ - An IData representing grid data that is returned by the DataAccessLayer. - """ - - @abc.abstractmethod - def getParameter(self): - """ - Gets the parameter of the data. - - Returns: - the parameter of the data - """ - return - - @abc.abstractmethod - def getUnit(self): - """ - Gets the unit of the data. - - Returns: - the string abbreviation of the unit, or None if no unit is associated - """ - return - - @abc.abstractmethod - def getRawData(self): - """ - Gets the grid data as a numpy array. - - Returns: - a numpy array of the data - """ - return - - @abc.abstractmethod - def getLatLonCoords(self): - """ - Gets the lat/lon coordinates of the grid data. - - Returns: - a tuple where the first element is a numpy array of lons, and the - second element is a numpy array of lats - """ - return - - - -class IGeometryData(IData): - """ - An IData representing geometry data that is returned by the DataAccessLayer. - """ - - @abc.abstractmethod - def getGeometry(self): - """ - Gets the geometry of the data. - - Returns: - a shapely geometry - """ - return - - @abc.abstractmethod - def getParameters(self): - """Gets the parameters of the data. - - Returns: - a list of strings of the parameter names - """ - return - - @abc.abstractmethod - def getString(self, param): - """ - Gets the string value of the specified param. - - Args: - param: the string name of the param - - Returns: - the string value of the param - """ - return - - @abc.abstractmethod - def getNumber(self, param): - """ - Gets the number value of the specified param. - - Args: - param: the string name of the param - - Returns: - the number value of the param - """ - return - - @abc.abstractmethod - def getUnit(self, param): - """ - Gets the unit of the specified param. - - Args: - param: the string name of the param - - Returns: - the string abbreviation of the unit of the param - """ - return - - @abc.abstractmethod - def getType(self, param): - """ - Gets the type of the param. - - Args: - param: the string name of the param - - Returns: - a string of the type of the parameter, such as - "STRING", "INT", "LONG", "FLOAT", or "DOUBLE" - """ - return - - -class INotificationSubscriber(object): - """ - An INotificationSubscriber representing a notification filter returned from - the DataNotificationLayer. - """ - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def subscribe(self, callback): - """ - Subscribes to the requested data. Method will not return until close is - called in a separate thread. - - Args: - callback: the method to call with the IGridData/IGeometryData - - """ - pass - - @abc.abstractmethod - def close(self): - """Closes the notification subscriber""" - pass - -class INotificationFilter(object): - """ - Represents data required to filter a set of URIs and - return a corresponding list of IDataRequest to retrieve data for. - """ - __metaclass__ = abc.ABCMeta - @abc.abstractmethod - def accept(dataUri): - pass diff --git a/pythonPackages/ufpy/gfe/IFPClient.py b/pythonPackages/ufpy/gfe/IFPClient.py deleted file mode 100644 index 1288094df6..0000000000 --- a/pythonPackages/ufpy/gfe/IFPClient.py +++ /dev/null @@ -1,173 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from ufpy import ThriftClient - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import CommitGridsRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetGridInventoryRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetParmListRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetSelectTimeRangeRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.request import CommitGridRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId -from dynamicserialize.dstypes.com.raytheon.uf.common.site.requests import GetActiveSitesRequest -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.message import ServerResponse - - -# -# Provides a Python-based interface for executing GFE requests. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/26/12 dgilling Initial Creation. -# -# -# - - -class IFPClient(object): - def __init__(self, host, port, user, site=None, progName=None): - self.__thrift = ThriftClient.ThriftClient(host, port) - self.__wsId = WsId(userName=user, progName=progName) - # retrieve default site - if site is None: - sr = self.getSiteID() - if len(sr.getPayload()) > 0: - site = sr.getPayload()[0] - self.__siteId = site - - def commitGrid(self, request): - if type(request) is CommitGridRequest: - return self.__commitGrid([request]) - elif self.__isHomogenousIterable(request, CommitGridRequest): - return self.__commitGrid([cgr for cgr in request]) - raise TypeError("Invalid type: " + str(type(request)) + " specified to commitGrid(). Only accepts CommitGridRequest or lists of CommitGridRequest.") - - def __commitGrid(self, requests): - ssr = ServerResponse() - request = CommitGridsRequest() - request.setCommits(requests) - sr = self.__makeRequest(request) - ssr.setMessages(sr.getMessages()) - return ssr - - def getParmList(self, id): - argType = type(id) - if argType is DatabaseID: - return self.__getParmList([id]) - elif self.__isHomogenousIterable(id, DatabaseID): - return self.__getParmList([dbid for dbid in id]) - raise TypeError("Invalid type: " + str(argType) + " specified to getParmList(). 
Only accepts DatabaseID or lists of DatabaseID.") - - def __getParmList(self, ids): - ssr = ServerResponse() - request = GetParmListRequest() - request.setDbIds(ids) - sr = self.__makeRequest(request) - ssr.setMessages(sr.getMessages()) - list = sr.getPayload() if sr.getPayload() is not None else [] - ssr.setPayload(list) - return ssr - - def __isHomogenousIterable(self, iterable, classType): - try: - iterator = iter(iterable) - for item in iterator: - if not isinstance(item, classType): - return False - except TypeError: - return False - return True - - def getGridInventory(self, parmID): - if type(parmID) is ParmID: - sr = self.__getGridInventory([parmID]) - list = [] - try: - list = sr.getPayload()[parmID] - except KeyError: - # no-op, we've already default the TimeRange list to empty - pass - sr.setPayload(list) - return sr - elif self.__isHomogenousIterable(parmID, ParmID): - return self.__getGridInventory([id for id in parmID]) - raise TypeError("Invalid type: " + str(type(parmID)) + " specified to getGridInventory(). Only accepts ParmID or lists of ParmID.") - - def __getGridInventory(self, parmIDs): - ssr = ServerResponse() - request = GetGridInventoryRequest() - request.setParmIds(parmIDs) - sr = self.__makeRequest(request) - ssr.setMessages(sr.getMessages()) - trs = sr.getPayload() if sr.getPayload() is not None else {} - ssr.setPayload(trs) - return ssr - - def getSelectTR(self, name): - request = GetSelectTimeRangeRequest() - request.setName(name) - sr = self.__makeRequest(request) - ssr = ServerResponse() - ssr.setMessages(sr.getMessages()) - ssr.setPayload(sr.getPayload()) - return ssr - - def getSiteID(self): - ssr = ServerResponse() - request = GetActiveSitesRequest() - sr = self.__makeRequest(request) - ssr.setMessages(sr.getMessages()) - ids = sr.getPayload() if sr.getPayload() is not None else [] - sr.setPayload(ids) - return sr - - def __makeRequest(self, request): - try: - request.setSiteID(self.__siteId) - except AttributeError: - pass - try: - request.setWorkstationID(self.__wsId) - except AttributeError: - pass - - sr = ServerResponse() - response = None - try: - response = self.__thrift.sendRequest(request) - except ThriftClient.ThriftRequestException as e: - sr.setMessages([str(e)]) - try: - sr.setPayload(response.getPayload()) - except AttributeError: - sr.setPayload(response) - try: - sr.setMessages(response.getMessages()) - except AttributeError: - # not a server response, nothing else to do - pass - - return sr diff --git a/pythonPackages/ufpy/gfe/__init__.py b/pythonPackages/ufpy/gfe/__init__.py deleted file mode 100644 index f2a16d5c47..0000000000 --- a/pythonPackages/ufpy/gfe/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
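Review note on the deleted IFPClient above: getSiteID() builds a ServerResponse ssr and copies the messages into it, but then mutates and returns sr, leaving ssr unused. The sibling methods suggest the following was intended (an assumption; behavior is nearly identical since sr already carries its messages):

    def getSiteID(self):
        ssr = ServerResponse()
        sr = self.__makeRequest(GetActiveSitesRequest())
        ssr.setMessages(sr.getMessages())
        ids = sr.getPayload() if sr.getPayload() is not None else []
        ssr.setPayload(ids)   # populate the response actually returned
        return ssr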
-##
-
-
-#
-# __init__.py for ufpy.gfe package
-#
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- --------------------------
-# 07/26/12                dgilling    Initial Creation.
-#
-#
-#
-
-
-__all__ = [
-    ]
diff --git a/pythonPackages/ufpy/localization/LocalizationFileManager.py b/pythonPackages/ufpy/localization/LocalizationFileManager.py
deleted file mode 100644
index 56ce08222d..0000000000
--- a/pythonPackages/ufpy/localization/LocalizationFileManager.py
+++ /dev/null
@@ -1,470 +0,0 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-#                     Mail Stop B8
-#                     Omaha, NE 68106
-#                     402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Library for accessing localization files from python.
-#
-# SOFTWARE HISTORY
-#
-# Date      Ticket#  Engineer  Description
-# --------- -------- --------- --------------------------
-# 08/09/17  5731     bsteffen  Initial Creation.
-
-
-import urllib2
-from json import load as loadjson
-from xml.etree.ElementTree import parse as parseXml
-from base64 import b64encode
-from StringIO import StringIO
-from getpass import getuser
-import dateutil.parser
-import contextlib
-import os
-from urlparse import urlunparse, urljoin
-
-NON_EXISTENT_CHECKSUM = 'NON_EXISTENT_CHECKSUM'
-DIRECTORY_CHECKSUM = 'DIRECTORY_CHECKSUM'
-
-class LocalizationFileVersionConflictException(Exception):
-    pass
-
-class LocalizationFileDoesNotExistException(Exception):
-    pass
-
-class LocalizationFileIsNotDirectoryException(Exception):
-    pass
-
-class LocalizationContext(object):
-    """A localization context defines the scope of a localization file.
-
-    For example the base localization context includes all the default files
-    installed with EDEX, while a particular user context has custom files for
-    that user.
-
-    A localization context consists of a level and name. The level defines what
-    kind of entity this context is valid for, such as 'base', 'site', or 'user'.
-    The name identifies the specific entity, for example the name of a 'user'
-    level context is usually the username. The 'base' level does not have a name
-    because there can only be one 'base' context.
-
-    Attributes:
-        level: the localization level
-        name: the context name
-    """
-    def __init__(self, level="base", name=None, type="common_static"):
-        if level != "base":
-            assert name is not None
-        self.level = level
-        self.name = name
-        self.type = type
-    def isBase(self):
-        return self.level == "base"
-    def _getUrlComponent(self):
-        if self.isBase():
-            return self.type + '/' + "base/"
-        else:
-            return self.type + '/' + self.level + '/' + self.name + '/'
-    def __str__(self):
-        if self.isBase():
-            return self.type + ".base"
-        else:
-            return self.type + "." + self.level + "." + self.name
-    def __eq__(self, other):
-        return self.level == other.level and \
-               self.name == other.name and \
-               self.type == other.type
-    def __hash__(self):
-        return hash((self.level, self.name, self.type))
-
-class _LocalizationOutput(StringIO):
-    """A file-like object for writing a localization file.
-
-    The contents being written are stored in memory and written to a
-    localization server only when the writing is finished.
-
-    This object should be used as a context manager, a save operation will be
-    executed if the context exits with no errors. If errors occur the partial
-    contents are abandoned and the server is unchanged.
-
-    It is also possible to save the contents to the server with the save()
-    method.
-    """
-    def __init__(self, manager, file):
-        StringIO.__init__(self)
-        self._manager = manager
-        self._file = file
-    def save(self):
-        """Send the currently written contents to the server."""
-        request = self._manager._buildRequest(self._file.context, self._file.path, method="PUT")
-
-        request.add_data(self.getvalue())
-        request.add_header("If-Match", self._file.checksum)
-        try:
-            urllib2.urlopen(request)
-        except urllib2.HTTPError as e:
-            if e.code == 409:
-                raise LocalizationFileVersionConflictException, e.read()
-            else:
-                raise e
-    def __enter__(self):
-        return self
-    def __exit__(self, exc_type, exc_value, traceback):
-        if exc_type is None:
-            self.save()
-    def __str__(self):
-        return '<' + self.__class__.__name__ + " for " + str(self._file) + '>'
-
-class LocalizationFile(object):
-    """A specific file stored in localization.
-
-    A localization file is uniquely defined by the context and path. There can
-    only be one valid file for that path and localization at a time. To access
-    the contents of the file use the open method.
-
-    Attributes:
-        context: A LocalizationContext
-        path: A path to this file
-        checksum: A string representation of a checksum generated from the file contents.
-        timestamp: A datetime.datetime object indicating when the file was last modified.
-    """
-    def __init__(self, manager, context, path, checksum, timestamp):
-        """Initialize a LocalizationFile with the given manager and attributes.
-
-        Args:
-            manager: A LocalizationFileManager to assist with server communication
-            context: A LocalizationContext
-            path: A path to this file
-            checksum: A string representation of a checksum generated from the file contents.
-            timestamp: A datetime.datetime object indicating when the file was last modified.
-        """
-        self._manager = manager
-        self.context = context
-        self.path = path
-        self.checksum = checksum
-        self.timestamp = timestamp
-    def open(self, mode='r'):
-        """Open the file.
-
-        This should always be called as part of a with statement. When
-        writing the content is not saved on the server until leaving the with
-        statement normally, if an error occurs the server is left unchanged.
-
-        Example:
-            with locFile.open('w') as output:
-                output.write('some content')
-
-        Args:
-            mode: 'r' for reading the file, 'w' for writing
-
-        Returns:
-            A file like object that can be used for reads or writes.
-        """
-        if mode == 'r':
-            request = self._manager._buildRequest(self.context, self.path)
-            response = urllib2.urlopen(request)
-            # Not the recommended way of reading directories.
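The save() and delete() methods above implement optimistic concurrency: the file's checksum travels in an If-Match header and the server replies 409 when the file changed underneath the caller. A generic urllib2 sketch of that pattern, with a placeholder URL and payload:

    import urllib2

    def putIfUnchanged(url, data, checksum):
        request = urllib2.Request(url)
        request.add_data(data)
        request.add_header("If-Match", checksum)   # expected current version
        request.get_method = lambda: "PUT"
        try:
            urllib2.urlopen(request)
        except urllib2.HTTPError as e:
            if e.code == 409:                      # conflict: file changed
                raise LocalizationFileVersionConflictException(e.read())
            raise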
-            if not(self.isDirectory()):
-                checksum = response.headers["Content-MD5"]
-                if self.checksum != checksum:
-                    raise RuntimeError, "Localization checksum mismatch " + self.checksum + " " + checksum
-            return contextlib.closing(response)
-        elif mode == 'w':
-            return _LocalizationOutput(self._manager, self)
-        else:
-            raise ValueError, "mode string must be 'r' or 'w' not " + str(mode)
-    def delete(self):
-        """Delete this file from the server"""
-        request = self._manager._buildRequest(self.context, self.path, method='DELETE')
-        request.add_header("If-Match", self.checksum)
-        try:
-            urllib2.urlopen(request)
-        except urllib2.HTTPError as e:
-            if e.code == 409:
-                raise LocalizationFileVersionConflictException, e.read()
-            else:
-                raise e
-    def exists(self):
-        """Check if this file actually exists.
-
-        Returns:
-            boolean indicating existence of this file
-        """
-        return self.checksum != NON_EXISTENT_CHECKSUM
-    def isDirectory(self):
-        """Check if this file is a directory.
-
-        A file must exist to be considered a directory.
-
-        Returns:
-            boolean indicating whether this file is a directory
-        """
-        return self.checksum == DIRECTORY_CHECKSUM
-    def getCheckSum(self):
-        return self.checksum
-    def getContext(self):
-        return self.context
-    def getPath(self):
-        return self.path
-    def getTimeStamp(self):
-        return self.timestamp
-    def __str__(self):
-        return str(self.context) + "/" + self.path
-    def __eq__(self, other):
-        return self.context == other.context and \
-               self.path == other.path and \
-               self.checksum == other.checksum \
-               and self.timestamp == other.timestamp
-    def __hash__(self):
-        return hash((self.context, self.path, self.checksum, self.timestamp))
-
-def _getHost():
-    import subprocess
-    host = subprocess.check_output(
-        "source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST",
-        shell=True).strip()
-    if host:
-        return host
-    return 'localhost'
-
-def _getSiteFromServer(host):
-    try:
-        from ufpy import ThriftClient
-        from dynamicserialize.dstypes.com.raytheon.uf.common.site.requests import GetPrimarySiteRequest
-        client = ThriftClient.ThriftClient(host)
-        return client.sendRequest(GetPrimarySiteRequest())
-    except:
-        # Servers that don't have GFE installed will not return a site
-        pass
-
-def _getSiteFromEnv():
-    site = os.environ.get('FXA_LOCAL_SITE')
-    if site is None:
-        site = os.environ.get('SITE_IDENTIFIER')
-    return site
-
-def _getSite(host):
-    site = _getSiteFromEnv()
-    if not(site):
-        site = _getSiteFromServer(host)
-    return site
-
-def _parseJsonList(manager, response, context, path):
-    fileList = []
-    jsonResponse = loadjson(response)
-    for name, jsonData in jsonResponse.items():
-        checksum = jsonData["checksum"]
-        timestampString = jsonData["timestamp"]
-        timestamp = dateutil.parser.parse(timestampString)
-        newpath = urljoin(path, name)
-        fileList.append(LocalizationFile(manager, context, newpath, checksum, timestamp))
-    return fileList
-
-def _parseXmlList(manager, response, context, path):
-    fileList = []
-    for xmlData in parseXml(response).getroot().findall('file'):
-        name = xmlData.get("name")
-        checksum = xmlData.get("checksum")
-        timestampString = xmlData.get("timestamp")
-        timestamp = dateutil.parser.parse(timestampString)
-        newpath = urljoin(path, name)
-        fileList.append(LocalizationFile(manager, context, newpath, checksum, timestamp))
-    return fileList
-
-class LocalizationFileManager(object):
-    """Connects to a server and retrieves LocalizationFiles."""
-    def __init__(self, host=None, port=9581, path="/services/localization/", contexts=None, site=None, type="common_static"):
"""Initializes a LocalizationFileManager with connection parameters and context information - - All arguments are optional and will use defaults or attempt to figure out appropriate values form the environment. - - Args: - host: A hostname of the localization server, such as 'ec'. - port: A port to use to connect to the localization server, usually 9581. - path: A path to reach the localization file service on the server. - contexts: A list of contexts to check for files, the order of the contexts will be used - for the order of incremental results and the priority of absolute results. - site: A site identifier to use for site specific contexts. This is only used if the contexts arg is None. - type: A localization type for contexts. This is only used if the contexts arg is None. - - """ - if host is None: - host = _getHost() - if contexts is None: - if site is None : - site = _getSite(host) - contexts = [LocalizationContext("base", None, type)] - if site: - contexts.append(LocalizationContext("configured", site, type)) - contexts.append(LocalizationContext("site", site, type)) - contexts.append(LocalizationContext("user", getuser(), type)) - netloc = host + ':' + str(port) - self._baseUrl = urlunparse(('http', netloc, path, None, None, None)) - self._contexts = contexts - def _buildRequest(self, context, path, method='GET'): - url = urljoin(self._baseUrl, context._getUrlComponent()) - url = urljoin(url, path) - request = urllib2.Request(url) - username = getuser() - # Currently password is ignored in the server - # this is the defacto standard for not providing one to this service. - password = username - base64string = b64encode('%s:%s' % (username, password)) - request.add_header("Authorization", "Basic %s" % base64string) - if method != 'GET': - request.get_method = lambda: method - return request - def _normalizePath(self, path): - if path == '' or path == '/': - path = '.' - if path[0] == '/': - path = path[1:] - return path - def _list(self, path): - path = self._normalizePath(path) - if path[-1] != '/': - path += '/' - fileList = [] - exists = False - for context in self._contexts: - try: - request = self._buildRequest(context, path) - request.add_header("Accept", "application/json, application/xml") - response = urllib2.urlopen(request) - exists = True - if not(response.geturl().endswith("/")): - # For ordinary files the server sends a redirect to remove the slash. 
- raise LocalizationFileIsNotDirectoryException, "Not a directory: " + path - elif response.headers["Content-Type"] == "application/xml": - fileList += _parseXmlList(self, response, context, path) - else: - fileList += _parseJsonList(self, response, context, path) - except urllib2.HTTPError as e: - if e.code != 404: - raise e - if not(exists): - raise LocalizationFileDoesNotExistException, "No such file or directory: " + path - return fileList - def _get(self, context, path): - path = self._normalizePath(path) - try: - request = self._buildRequest(context, path, method='HEAD') - resp = urllib2.urlopen(request) - if (resp.geturl().endswith("/")): - checksum = DIRECTORY_CHECKSUM; - else: - if "Content-MD5" not in resp.headers: - raise RuntimeError, "Missing Content-MD5 header in response from " + resp.geturl() - checksum = resp.headers["Content-MD5"] - if "Last-Modified" not in resp.headers: - raise RuntimeError, "Missing Last-Modified header in response from " + resp.geturl() - timestamp = dateutil.parser.parse(resp.headers["Last-Modified"]) - return LocalizationFile(self, context, path, checksum, timestamp) - except urllib2.HTTPError as e: - if e.code != 404: - raise e - else: - return LocalizationFile(self, context, path, NON_EXISTENT_CHECKSUM, None) - def listAbsolute(self, path): - """List the files in a localization directory, only a single file is returned for each unique path. - - If a file exists in more than one context then the highest level(furthest from base) is used. - - Args: - path: A path to a directory that should be the root of the listing - - Returns: - A list of LocalizationFiles - """ - merged = dict() - for file in self._list(path): - merged[file.path] = file - return sorted(merged.values(), key=lambda file: file.path) - def listIncremental(self, path): - """List the files in a localization directory, this includes all files for all contexts. - - Args: - path: A path to a directory that should be the root of the listing - - Returns: - A list of tuples, each tuple will contain one or more files for the - same paths but different contexts. Each tuple will be ordered the - same as the contexts in this manager, generally with 'base' first - and 'user' last. - """ - merged = dict() - for file in self._list(path): - if file.path in merged: - merged[file.path] += (file,) - else: - merged[file.path] = (file, ) - return sorted(merged.values(), key=lambda t: t[0].path) - def getAbsolute(self, path): - """Get a single localization file from the highest level context where it exists. - - Args: - path: A path to a localization file - - Returns: - A Localization File with the specified path or None if the file does not exist in any context. - - """ - for context in reversed(self._contexts): - f = self._get(context, path) - if f.exists(): - return f - def getIncremental(self, path): - """Get all the localization files that exist in any context for the provided path. - - Args: - path: A path to a localization file - - Returns: - A tuple containing all the files that exist for this path in any context. The tuple - will be ordered the same as the contexts in this manager, generally with 'base' first - and 'user' last. - """ - result = () - for context in self._contexts: - f = self._get(context, path) - if f.exists(): - result += (f,) - return result - def getSpecific(self, level, path): - """Get a specific localization file at a given level, the file may not exist. - - The file is returned for whichever context is valid for the provided level in this manager. 
- - For writing new files this is the only way to get access to a file that - does not exist in order to create it. - - Args: - level: the name of a localization level, such as "base", "site", "user" - path: A path to a localization file - - Returns: - A Localization File with the specified path and a context for the specified level. - """ - for context in self._contexts: - if context.level == level: - return self._get(context, path) - raise ValueError, "No context defined for level " + level - def __str__(self): - contextsStr = '[' + ' '.join((str(c) for c in self._contexts)) + ']' - return '<' + self.__class__.__name__ + " for " + self._baseUrl + ' ' + contextsStr + '>' diff --git a/pythonPackages/ufpy/localization/__init__.py b/pythonPackages/ufpy/localization/__init__.py deleted file mode 100644 index e138466349..0000000000 --- a/pythonPackages/ufpy/localization/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# __init__.py for ufpy.localization package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# --------- -------- --------- -------------------------- -# 08/10/17 5731 bsteffen Initial Creation. - -__all__ = [ - ] \ No newline at end of file diff --git a/pythonPackages/ufpy/qpidingest.py b/pythonPackages/ufpy/qpidingest.py deleted file mode 100644 index 8a7b2404ad..0000000000 --- a/pythonPackages/ufpy/qpidingest.py +++ /dev/null @@ -1,129 +0,0 @@ -#=============================================================================== -# qpidingest.py -# -# @author: Aaron Anderson -# @organization: NOAA/WDTB OU/CIMMS -# @version: 1.0 02/19/2010 -# @requires: QPID Python Client available from http://qpid.apache.org/download.html -# The Python Client is located under Single Component Package/Client -# -# From the README.txt Installation Instructions -# = INSTALLATION = -# Extract the release archive into a directory of your choice and set -# your PYTHONPATH accordingly: -# -# tar -xzf qpid-python-.tar.gz -C -# export PYTHONPATH=/qpid-/python -# -# ***EDEX and QPID must be running for this module to work*** -# -# DESCRIPTION: -# This module is used to connect to QPID and send messages to the external.dropbox queue -# which tells EDEX to ingest a data file from a specified path. This avoids having to copy -# a data file into an endpoint. Each message also contains a header which is used to determine -# which plugin should be used to decode the file. Each plugin has an xml file located in -# $EDEX_HOME/data/utility/edex_static/base/distribution that contains regular expressions -# that the header is compared to. When the header matches one of these regular expressions -# the file is decoded with that plugin. If you make changes to one of these xml files you -# must restart EDEX for the changes to take effect. 
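The distribution files mentioned above contain ordinary regular expressions, so the advice about spaces can be checked directly; both patterns below are hypothetical stand-ins for what a distribution XML might hold:

    import re

    header = 'SAUS46 KLOX'
    spaced   = r'^SAUS\d{2} KLOX'    # requires the space
    optional = r'^SAUS\d{2} ?KLOX'   # space is optional
    assert re.match(spaced, header) and re.match(optional, header)
    # 'SAUS46KLOX' (no space) would match only the second pattern,
    # which is why the spaced header is the safer choice.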
-# -# NOTE: If the message is being sent but you do not see it being ingested in the EDEX log -# check the xml files to make sure the header you are passing matches one of the regular -# expressions. Beware of spaces, some regular expressions require spaces while others use -# a wildcard character so a space is optional. It seems you are better off having the space -# as this will be matched to both patterns. For the file in the example below, -# 20100218_185755_SAUS46KLOX.metar, I use SAUS46 KLOX as the header to make sure it matches. -# -# -# EXAMPLE: -# Simple example program: -# -#------------------------------------------------------------------------------ -# import qpidingest -# #Tell EDEX to ingest a metar file from data_store. The filepath is -# #/data_store/20100218/metar/00/standard/20100218_005920_SAUS46KSEW.metar -# -# conn=qpidingest.IngestViaQPID() #defaults to localhost port 5672 -# -# #If EDEX is not on the local machine you can make the connection as follows -# #conn=qpidingest.IngestViaQPID(host='',port=) -# -# conn.sendmessage('/data_store/20100218/metar/18/standard/20100218_185755_SAUS46KLOX.metar','SAUS46 KLOX') -# conn.close() -#------------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# .... -# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info -# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569 -# 02/16/2017 DR 6084 bsteffen Support ssl connections -# -#=============================================================================== - -import os -import os.path - -import qpid -from qpid.util import connect -from qpid.connection import Connection -from qpid.datatypes import Message, uuid4 - -QPID_USERNAME = 'guest' -QPID_PASSWORD = 'guest' - -class IngestViaQPID: - def __init__(self, host='localhost', port=5672, ssl=None): - ''' - Connect to QPID and make bindings to route message to external.dropbox queue - @param host: string hostname of computer running EDEX and QPID (default localhost) - @param port: integer port used to connect to QPID (default 5672) - @param ssl: boolean to determine whether ssl is used, default value of None will use ssl only if a client certificate is found. - ''' - - try: - # - socket = connect(host, port) - if "QPID_SSL_CERT_DB" in os.environ: - certdb = os.environ["QPID_SSL_CERT_DB"] - else: - certdb = os.path.expanduser("~/.qpid/") - if "QPID_SSL_CERT_NAME" in os.environ: - certname = os.environ["QPID_SSL_CERT_NAME"] - else: - certname = QPID_USERNAME - certfile = os.path.join(certdb, certname + ".crt") - if ssl or (ssl is None and os.path.exists(certfile)): - keyfile = os.path.join(certdb, certname + ".key") - trustfile = os.path.join(certdb, "root.crt") - socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile) - self.connection = Connection (sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD) - self.connection.start() - self.session = self.connection.session(str(uuid4())) - self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox') - print 'Connected to Qpid' - except: - print 'Unable to connect to Qpid' - - def sendmessage(self, filepath, header): - ''' - This function sends a message to the external.dropbox queue providing the path - to the file to be ingested and a header to determine the plugin to be used to - decode the file. 
- @param filepath: string full path to file to be ingested - @param header: string header used to determine plugin decoder to use - ''' - props = self.session.delivery_properties(routing_key='external.dropbox') - head = self.session.message_properties(application_headers={'header':header}, - user_id=QPID_USERNAME) # For issue QPID-5569. Fixed in Qpid 0.27 - self.session.message_transfer(destination='amq.direct', message=Message(props, head, filepath)) - - def close(self): - ''' - After all messages are sent call this function to close connection and make sure - there are no threads left open - ''' - self.session.close(timeout=10) - print 'Connection to Qpid closed' diff --git a/pythonPackages/ufpy/stomp.py b/pythonPackages/ufpy/stomp.py deleted file mode 100644 index 9356af9fdc..0000000000 --- a/pythonPackages/ufpy/stomp.py +++ /dev/null @@ -1,934 +0,0 @@ -#!/usr/bin/env python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -"""Stomp Protocol Connectivity - - This provides basic connectivity to a message broker supporting the 'stomp' protocol. - At the moment ACK, SEND, SUBSCRIBE, UNSUBSCRIBE, BEGIN, ABORT, COMMIT, CONNECT and DISCONNECT operations - are supported. - - This changes the previous version which required a listener per subscription -- now a listener object - just calls the 'addlistener' method and will receive all messages sent in response to all/any subscriptions. - (The reason for the change is that the handling of an 'ack' becomes problematic unless the listener mechanism - is decoupled from subscriptions). - - Note that you must 'start' an instance of Connection to begin receiving messages. For example: - - conn = stomp.Connection([('localhost', 62003)], 'myuser', 'mypass') - conn.start() - - Meta-Data - --------- - Author: Jason R Briggs - License: http://www.apache.org/licenses/LICENSE-2.0 - Start Date: 2005/12/01 - Last Revision Date: $Date: 2008/09/11 00:16 $ - - Notes/Attribution - ----------------- - * uuid method courtesy of Carl Free Jr: - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/213761 - * patch from Andreas Schobel - * patches from Julian Scheid of Rising Sun Pictures (http://open.rsp.com.au) - * patch from Fernando - * patches from Eugene Strulyov - - Updates - ------- - * 2007/03/31 : (Andreas Schobel) patch to fix newlines problem in ActiveMQ 4.1 - * 2007/09 : (JRB) updated to get stomp.py working in Jython as well as Python - * 2007/09/05 : (Julian Scheid) patch to allow sending custom headers - * 2007/09/18 : (JRB) changed code to use logging instead of just print. added logger for jython to work - * 2007/09/18 : (Julian Scheid) various updates, including: - - change incoming message handling so that callbacks are invoked on the listener not only for MESSAGE, but also for - CONNECTED, RECEIPT and ERROR frames. 
- - callbacks now get not only the payload but any headers specified by the server - - all outgoing messages now sent via a single method - - only one connection used - - change to use thread instead of threading - - sends performed on the calling thread - - receiver loop now deals with multiple messages in one received chunk of data - - added reconnection attempts and connection fail-over - - changed defaults for "user" and "passcode" to None instead of empty string (fixed transmission of those values) - - added readline support - * 2008/03/26 : (Fernando) added cStringIO for faster performance on large messages - * 2008/09/10 : (Eugene) remove lower() on headers to support case-sensitive header names - * 2008/09/11 : (JRB) fix incompatibilities with RabbitMQ, add wait for socket-connect - * 2008/10/28 : (Eugene) add jms map (from stomp1.1 ideas) - * 2008/11/25 : (Eugene) remove superfluous (incorrect) locking code - * 2009/02/05 : (JRB) remove code to replace underscores with dashes in header names (causes a problem in rabbit-mq) - * 2009/03/29 : (JRB) minor change to add logging config file - (JRB) minor change to add socket timeout, suggested by Israel - * 2009/04/01 : (Gavin) patch to change md5 to hashlib (for 2.6 compatibility) - * 2009/04/02 : (Fernando Ciciliati) fix overflow bug when waiting too long to connect to the broker - -""" - -import hashlib -import math -import random -import re -import socket -import sys -import thread -import threading -import time -import types -import xml.dom.minidom -from cStringIO import StringIO - -# -# stomp.py version number -# -_version = 1.8 - - -def _uuid( *args ): - """ - uuid courtesy of Carl Free Jr: - (http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/213761) - """ - - t = long( time.time() * 1000 ) - r = long( random.random() * 100000000000000000L ) - - try: - a = socket.gethostbyname( socket.gethostname() ) - except: - # if we can't get a network address, just imagine one - a = random.random() * 100000000000000000L - data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args) - md5 = hashlib.md5() - md5.update(data) - data = md5.hexdigest() - return data - - -class DevNullLogger(object): - """ - dummy logging class for environments without the logging module - """ - def log(self, msg): - print msg - - def devnull(self, msg): - pass - - debug = devnull - info = devnull - warning = log - error = log - critical = log - exception = log - - def isEnabledFor(self, lvl): - return False - - -# -# add logging if available -# -try: - import logging - import logging.config - logging.config.fileConfig("stomp.log.conf") - log = logging.getLogger('root') -except: - log = DevNullLogger() - - -class ConnectionClosedException(Exception): - """ - Raised in the receiver thread when the connection has been closed - by the server. - """ - pass - - -class NotConnectedException(Exception): - """ - Raised by Connection.__send_frame when there is currently no server - connection. - """ - pass - - -class ConnectionListener(object): - """ - This class should be used as a base class for objects registered - using Connection.add_listener(). - """ - def on_connecting(self, host_and_port): - """ - Called by the STOMP connection once a TCP/IP connection to the - STOMP server has been established or re-established. Note that - at this point, no connection has been established on the STOMP - protocol level. For this, you need to invoke the "connect" - method on the connection. 
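Per the notes above, a single listener receives callbacks for all subscriptions. A minimal registration sketch; add_listener is the method named in the docstrings here, and the broker address and credentials are illustrative:

    class LoggingListener(ConnectionListener):
        def on_message(self, headers, body):
            # Invoked for MESSAGE frames from any subscription.
            log.info('message on %s' % headers.get('destination'))

    conn = Connection([('localhost', 61613)], 'myuser', 'mypass')
    conn.add_listener(LoggingListener())
    conn.start()   # no frames are received until start() is called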
- - \param host_and_port a tuple containing the host name and port - number to which the connection has been established. - """ - pass - - def on_connected(self, headers, body): - """ - Called by the STOMP connection when a CONNECTED frame is - received, that is after a connection has been established or - re-established. - - \param headers a dictionary containing all headers sent by the - server as key/value pairs. - - \param body the frame's payload. This is usually empty for - CONNECTED frames. - """ - pass - - def on_disconnected(self): - """ - Called by the STOMP connection when a TCP/IP connection to the - STOMP server has been lost. No messages should be sent via - the connection until it has been reestablished. - """ - pass - - def on_message(self, headers, body): - """ - Called by the STOMP connection when a MESSAGE frame is - received. - - \param headers a dictionary containing all headers sent by the - server as key/value pairs. - - \param body the frame's payload - the message body. - """ - pass - - def on_receipt(self, headers, body): - """ - Called by the STOMP connection when a RECEIPT frame is - received, sent by the server if requested by the client using - the 'receipt' header. - - \param headers a dictionary containing all headers sent by the - server as key/value pairs. - - \param body the frame's payload. This is usually empty for - RECEIPT frames. - """ - pass - - def on_error(self, headers, body): - """ - Called by the STOMP connection when an ERROR frame is - received. - - \param headers a dictionary containing all headers sent by the - server as key/value pairs. - - \param body the frame's payload - usually a detailed error - description. - """ - pass - - -class Connection(object): - """ - Represents a STOMP client connection. - """ - - def __init__(self, - host_and_ports = [ ('localhost', 61613) ], - user = None, - passcode = None, - prefer_localhost = True, - try_loopback_connect = True, - reconnect_sleep_initial = 0.1, - reconnect_sleep_increase = 0.5, - reconnect_sleep_jitter = 0.1, - reconnect_sleep_max = 60.0): - """ - Initialize and start this connection. - - \param host_and_ports - a list of (host, port) tuples. - - \param prefer_localhost - if True and the local host is mentioned in the (host, - port) tuples, try to connect to this first - - \param try_loopback_connect - if True and the local host is found in the host - tuples, try connecting to it using loopback interface - (127.0.0.1) - - \param reconnect_sleep_initial - - initial delay in seconds to wait before reattempting - to establish a connection if connection to any of the - hosts fails. - - \param reconnect_sleep_increase - - factor by which the sleep delay is increased after - each connection attempt. For example, 0.5 means - to wait 50% longer than before the previous attempt, - 1.0 means wait twice as long, and 0.0 means keep - the delay constant. - - \param reconnect_sleep_max - - maximum delay between connection attempts, regardless - of the reconnect_sleep_increase. - - \param reconnect_sleep_jitter - - random additional time to wait (as a percentage of - the time determined using the previous parameters) - between connection attempts in order to avoid - stampeding. For example, a value of 0.1 means to wait - an extra 0%-10% (randomly determined) of the delay - calculated using the previous three parameters. 
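For reference, these four reconnect parameters combine into a single delay inside __attempt_connection further below. A minimal standalone sketch of that arithmetic (the function and argument names here are illustrative, not part of the module; defaults match the constructor above):

    import math
    import random

    def reconnect_delay(attempt, initial=0.1, increase=0.5, jitter=0.1, maximum=60.0):
        # Exponential growth: initial, initial*(1+increase), ..., capped at maximum
        base = (initial / (1.0 + increase)) * math.pow(1.0 + increase, attempt)
        # Add 0%..(jitter*100)% random slack to avoid stampeding reconnect attempts
        return min(maximum, base) * (1.0 + random.random() * jitter)

    # First attempt waits ~0.1s, each retry ~50% longer, never more than 60s
    print [round(reconnect_delay(n, jitter=0.0), 3) for n in range(1, 6)]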
- """ - - sorted_host_and_ports = [] - sorted_host_and_ports.extend(host_and_ports) - - # If localhost is preferred, make sure all (host, port) tuples - # that refer to the local host come first in the list - if prefer_localhost: - def is_local_host(host): - return host in Connection.__localhost_names - - sorted_host_and_ports.sort(lambda x, y: (int(is_local_host(y[0])) - - int(is_local_host(x[0])))) - - # If the user wishes to attempt connecting to local ports - # using the loopback interface, for each (host, port) tuple - # referring to a local host, add an entry with the host name - # replaced by 127.0.0.1 if it doesn't exist already - loopback_host_and_ports = [] - if try_loopback_connect: - for host_and_port in sorted_host_and_ports: - if is_local_host(host_and_port[0]): - port = host_and_port[1] - if (not ("127.0.0.1", port) in sorted_host_and_ports - and not ("localhost", port) in sorted_host_and_ports): - loopback_host_and_ports.append(("127.0.0.1", port)) - - # Assemble the final, possibly sorted list of (host, port) tuples - self.__host_and_ports = [] - self.__host_and_ports.extend(loopback_host_and_ports) - self.__host_and_ports.extend(sorted_host_and_ports) - - self.__recvbuf = '' - - self.__listeners = [ ] - - self.__reconnect_sleep_initial = reconnect_sleep_initial - self.__reconnect_sleep_increase = reconnect_sleep_increase - self.__reconnect_sleep_jitter = reconnect_sleep_jitter - self.__reconnect_sleep_max = reconnect_sleep_max - - self.__connect_headers = {} - if user is not None and passcode is not None: - self.__connect_headers['login'] = user - self.__connect_headers['passcode'] = passcode - - self.__socket = None - self.__current_host_and_port = None - - self.__receiver_thread_exit_condition = threading.Condition() - self.__receiver_thread_exited = False - - # - # Manage the connection - # - - def start(self): - """ - Start the connection. This should be called after all - listeners have been registered. If this method is not called, - no frames will be received by the connection. - """ - self.__running = True - self.__attempt_connection() - thread.start_new_thread(self.__receiver_loop, ()) - - def stop(self): - """ - Stop the connection. This is equivalent to calling - disconnect() but will do a clean shutdown by waiting for the - receiver thread to exit. - """ - self.disconnect() - - self.__receiver_thread_exit_condition.acquire() - if not self.__receiver_thread_exited: - self.__receiver_thread_exit_condition.wait() - self.__receiver_thread_exit_condition.release() - - def get_host_and_port(self): - """ - Return a (host, port) tuple indicating which STOMP host and - port is currently connected, or None if there is currently no - connection. 
- """ - return self.__current_host_and_port - - def is_connected(self): - try: - return self.__socket is not None and self.__socket.getsockname()[1] != 0 - except socket.error: - return False - - # - # Manage objects listening to incoming frames - # - - def add_listener(self, listener): - self.__listeners.append(listener) - - def remove_listener(self, listener): - self.__listeners.remove(listener) - - # - # STOMP transmissions - # - - def subscribe(self, headers={}, **keyword_headers): - self.__send_frame_helper('SUBSCRIBE', '', self.__merge_headers([headers, keyword_headers]), [ 'destination' ]) - - def unsubscribe(self, headers={}, **keyword_headers): - self.__send_frame_helper('UNSUBSCRIBE', '', self.__merge_headers([headers, keyword_headers]), [ ('destination', 'id') ]) - - def send(self, message='', headers={}, **keyword_headers): - if '\x00' in message: - content_length_headers = {'content-length': len(message)} - else: - content_length_headers = {} - self.__send_frame_helper('SEND', message, self.__merge_headers([headers, - keyword_headers, - content_length_headers]), [ 'destination' ]) - - def ack(self, headers={}, **keyword_headers): - self.__send_frame_helper('ACK', '', self.__merge_headers([headers, keyword_headers]), [ 'message-id' ]) - - def begin(self, headers={}, **keyword_headers): - use_headers = self.__merge_headers([headers, keyword_headers]) - if not 'transaction' in use_headers.keys(): - use_headers['transaction'] = _uuid() - self.__send_frame_helper('BEGIN', '', use_headers, [ 'transaction' ]) - return use_headers['transaction'] - - def abort(self, headers={}, **keyword_headers): - self.__send_frame_helper('ABORT', '', self.__merge_headers([headers, keyword_headers]), [ 'transaction' ]) - - def commit(self, headers={}, **keyword_headers): - self.__send_frame_helper('COMMIT', '', self.__merge_headers([headers, keyword_headers]), [ 'transaction' ]) - - def connect(self, headers={}, **keyword_headers): - if keyword_headers.has_key('wait') and keyword_headers['wait']: - while not self.is_connected(): time.sleep(0.1) - del keyword_headers['wait'] - self.__send_frame_helper('CONNECT', '', self.__merge_headers([self.__connect_headers, headers, keyword_headers]), [ ]) - - def disconnect(self, headers={}, **keyword_headers): - self.__send_frame_helper('DISCONNECT', '', self.__merge_headers([self.__connect_headers, headers, keyword_headers]), [ ]) - self.__running = False - if hasattr(socket, 'SHUT_RDWR'): - self.__socket.shutdown(socket.SHUT_RDWR) - if self.__socket: - self.__socket.close() - self.__current_host_and_port = None - - # ========= PRIVATE MEMBERS ========= - - - # List of all host names (unqualified, fully-qualified, and IP - # addresses) that refer to the local host (both loopback interface - # and external interfaces). This is used for determining - # preferred targets. - __localhost_names = [ "localhost", - "127.0.0.1", - socket.gethostbyname(socket.gethostname()), - socket.gethostname(), - socket.getfqdn(socket.gethostname()) ] - # - # Used to parse STOMP header lines in the format "key:value", - # - __header_line_re = re.compile('(?P[^:]+)[:](?P.*)') - - # - # Used to parse the STOMP "content-length" header lines, - # - __content_length_re = re.compile('^content-length[:]\\s*(?P[0-9]+)', re.MULTILINE) - - def __merge_headers(self, header_map_list): - """ - Helper function for combining multiple header maps into one. - - Any underscores ('_') in header names (keys) will be replaced by dashes ('-'). 
- """ - headers = {} - for header_map in header_map_list: - for header_key in header_map.keys(): - headers[header_key] = header_map[header_key] - return headers - - def __convert_dict(self, payload): - """ - Encode python dictionary as ... structure. - """ - - xmlStr = "\n" - for key in payload: - xmlStr += "\n" - xmlStr += "%s" % key - xmlStr += "%s" % payload[key] - xmlStr += "\n" - xmlStr += "" - - return xmlStr - - def __send_frame_helper(self, command, payload, headers, required_header_keys): - """ - Helper function for sending a frame after verifying that a - given set of headers are present. - - \param command the command to send - - \param payload the frame's payload - - \param headers a dictionary containing the frame's headers - - \param required_header_keys a sequence enumerating all - required header keys. If an element in this sequence is itself - a tuple, that tuple is taken as a list of alternatives, one of - which must be present. - - \throws ArgumentError if one of the required header keys is - not present in the header map. - """ - for required_header_key in required_header_keys: - if type(required_header_key) == tuple: - found_alternative = False - for alternative in required_header_key: - if alternative in headers.keys(): - found_alternative = True - if not found_alternative: - raise KeyError("Command %s requires one of the following headers: %s" % (command, str(required_header_key))) - elif not required_header_key in headers.keys(): - raise KeyError("Command %s requires header %r" % (command, required_header_key)) - self.__send_frame(command, headers, payload) - - def __send_frame(self, command, headers={}, payload=''): - """ - Send a STOMP frame. - """ - if type(payload) == dict: - headers["transformation"] = "jms-map-xml" - payload = self.__convert_dict(payload) - - if self.__socket is not None: - frame = '%s\n%s\n%s\x00' % (command, - reduce(lambda accu, key: accu + ('%s:%s\n' % (key, headers[key])), headers.keys(), ''), - payload) - self.__socket.sendall(frame) - log.debug("Sent frame: type=%s, headers=%r, body=%r" % (command, headers, payload)) - else: - raise NotConnectedException() - - def __receiver_loop(self): - """ - Main loop listening for incoming data. 
- """ - try: - try: - threading.currentThread().setName("StompReceiver") - while self.__running: - log.debug('starting receiver loop') - - if self.__socket is None: - break - - try: - try: - for listener in self.__listeners: - if hasattr(listener, 'on_connecting'): - listener.on_connecting(self.__current_host_and_port) - - while self.__running: - frames = self.__read() - - for frame in frames: - (frame_type, headers, body) = self.__parse_frame(frame) - log.debug("Received frame: result=%r, headers=%r, body=%r" % (frame_type, headers, body)) - frame_type = frame_type.lower() - if frame_type in [ 'connected', - 'message', - 'receipt', - 'error' ]: - for listener in self.__listeners: - if hasattr(listener, 'on_%s' % frame_type): - eval('listener.on_%s(headers, body)' % frame_type) - else: - log.debug('listener %s has no such method on_%s' % (listener, frame_type)) - else: - log.warning('Unknown response frame type: "%s" (frame length was %d)' % (frame_type, len(frame))) - finally: - try: - self.__socket.close() - except: - pass # ignore errors when attempting to close socket - self.__socket = None - self.__current_host_and_port = None - except ConnectionClosedException: - if self.__running: - log.error("Lost connection") - # Notify listeners - for listener in self.__listeners: - if hasattr(listener, 'on_disconnected'): - listener.on_disconnected() - # Clear out any half-received messages after losing connection - self.__recvbuf = '' - continue - else: - break - except: - log.exception("An unhandled exception was encountered in the stomp receiver loop") - - finally: - self.__receiver_thread_exit_condition.acquire() - self.__receiver_thread_exited = True - self.__receiver_thread_exit_condition.notifyAll() - self.__receiver_thread_exit_condition.release() - - def __read(self): - """ - Read the next frame(s) from the socket. - """ - fastbuf = StringIO() - while self.__running: - try: - c = self.__socket.recv(1024) - except: - c = '' - if len(c) == 0: - raise ConnectionClosedException - fastbuf.write(c) - if '\x00' in c: - break - self.__recvbuf += fastbuf.getvalue() - fastbuf.close() - result = [] - - if len(self.__recvbuf) > 0 and self.__running: - while True: - pos = self.__recvbuf.find('\x00') - if pos >= 0: - frame = self.__recvbuf[0:pos] - preamble_end = frame.find('\n\n') - if preamble_end >= 0: - content_length_match = Connection.__content_length_re.search(frame[0:preamble_end]) - if content_length_match: - content_length = int(content_length_match.group('value')) - content_offset = preamble_end + 2 - frame_size = content_offset + content_length - if frame_size > len(frame): - # Frame contains NUL bytes, need to - # read more - if frame_size < len(self.__recvbuf): - pos = frame_size - frame = self.__recvbuf[0:pos] - else: - # Haven't read enough data yet, - # exit loop and wait for more to - # arrive - break - result.append(frame) - self.__recvbuf = self.__recvbuf[pos+1:] - else: - break - return result - - - def __transform(self, body, transType): - """ - Perform body transformation. Currently, the only supported transformation is - 'jms-map-xml', which converts a map into python dictionary. This can be extended - to support other transformation types. 
- - The body has the following format: - <map> - <entry> - <string>name</string> - <string>Dejan</string> - </entry> - <entry> - <string>city</string> - <string>Belgrade</string> - </entry> - </map> - - (see http://docs.codehaus.org/display/STOMP/Stomp+v1.1+Ideas) - """ - - if transType != 'jms-map-xml': - return body - - try: - entries = {} - doc = xml.dom.minidom.parseString(body) - rootElem = doc.documentElement - for entryElem in rootElem.getElementsByTagName("entry"): - pair = [] - for node in entryElem.childNodes: - if not isinstance(node, xml.dom.minidom.Element): continue - pair.append(node.firstChild.nodeValue) - assert len(pair) == 2 - entries[pair[0]] = pair[1] - return entries - except Exception, ex: - # unable to parse message. return original - return body - - - def __parse_frame(self, frame): - """ - Parse a STOMP frame into a (frame_type, headers, body) tuple, - where frame_type is the frame type as a string (e.g. MESSAGE), - headers is a map containing all header key/value pairs, and - body is a string containing the frame's payload. - """ - preamble_end = frame.find('\n\n') - preamble = frame[0:preamble_end] - preamble_lines = preamble.split('\n') - body = frame[preamble_end+2:] - - # Skip any leading newlines - first_line = 0 - while first_line < len(preamble_lines) and len(preamble_lines[first_line]) == 0: - first_line += 1 - - # Extract frame type - frame_type = preamble_lines[first_line] - - # Put headers into a key/value map - headers = {} - for header_line in preamble_lines[first_line+1:]: - header_match = Connection.__header_line_re.match(header_line) - if header_match: - headers[header_match.group('key')] = header_match.group('value') - - if 'transformation' in headers: - body = self.__transform(body, headers['transformation']) - - return (frame_type, headers, body) - - def __attempt_connection(self): - """ - Try connecting to the (host, port) tuples specified at construction time.
- """ - - sleep_exp = 1 - while self.__running and self.__socket is None: - for host_and_port in self.__host_and_ports: - try: - log.debug("Attempting connection to host %s, port %s" % host_and_port) - self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.__socket.settimeout(None) - self.__socket.connect(host_and_port) - self.__current_host_and_port = host_and_port - log.info("Established connection to host %s, port %s" % host_and_port) - break - except socket.error: - self.__socket = None - if type(sys.exc_info()[1]) == types.TupleType: - exc = sys.exc_info()[1][1] - else: - exc = sys.exc_info()[1] - log.warning("Could not connect to host %s, port %s: %s" % (host_and_port[0], host_and_port[1], exc)) - - if self.__socket is None: - sleep_duration = (min(self.__reconnect_sleep_max, - ((self.__reconnect_sleep_initial / (1.0 + self.__reconnect_sleep_increase)) - * math.pow(1.0 + self.__reconnect_sleep_increase, sleep_exp))) - * (1.0 + random.random() * self.__reconnect_sleep_jitter)) - sleep_end = time.time() + sleep_duration - log.debug("Sleeping for %.1f seconds before attempting reconnect" % sleep_duration) - while self.__running and time.time() < sleep_end: - time.sleep(0.2) - - if sleep_duration < self.__reconnect_sleep_max: - sleep_exp += 1 - -# -# command line testing -# -if __name__ == '__main__': - - # If the readline module is available, make command input easier - try: - import readline - def stomp_completer(text, state): - commands = [ 'subscribe', 'unsubscribe', - 'send', 'ack', - 'begin', 'abort', 'commit', - 'connect', 'disconnect' - ] - for command in commands[state:]: - if command.startswith(text): - return "%s " % command - return None - - readline.parse_and_bind("tab: complete") - readline.set_completer(stomp_completer) - readline.set_completer_delims("") - except ImportError: - pass # ignore unavailable readline module - - class StompTester(object): - def __init__(self, host='localhost', port=61613, user='', passcode=''): - self.c = Connection([(host, port)], user, passcode) - self.c.add_listener(self) - self.c.start() - - def __print_async(self, frame_type, headers, body): - print "\r \r", - print frame_type - for header_key in headers.keys(): - print '%s: %s' % (header_key, headers[header_key]) - print - print body - print '> ', - sys.stdout.flush() - - def on_connecting(self, host_and_port): - self.c.connect(wait=True) - - def on_disconnected(self): - print "lost connection" - - def on_message(self, headers, body): - self.__print_async("MESSAGE", headers, body) - - def on_error(self, headers, body): - self.__print_async("ERROR", headers, body) - - def on_receipt(self, headers, body): - self.__print_async("RECEIPT", headers, body) - - def on_connected(self, headers, body): - self.__print_async("CONNECTED", headers, body) - - def ack(self, args): - if len(args) < 3: - self.c.ack(message_id=args[1]) - else: - self.c.ack(message_id=args[1], transaction=args[2]) - - def abort(self, args): - self.c.abort(transaction=args[1]) - - def begin(self, args): - print 'transaction id: %s' % self.c.begin() - - def commit(self, args): - if len(args) < 2: - print 'expecting: commit ' - else: - print 'committing %s' % args[1] - self.c.commit(transaction=args[1]) - - def disconnect(self, args): - try: - self.c.disconnect() - except NotConnectedException: - pass # ignore if no longer connected - - def send(self, args): - if len(args) < 3: - print 'expecting: send ' - else: - self.c.send(destination=args[1], message=' '.join(args[2:])) - - def sendtrans(self, args): - if 
len(args) < 3: - print 'expecting: sendtrans ' - else: - self.c.send(destination=args[1], message="%s\n" % ' '.join(args[3:]), transaction=args[2]) - - def subscribe(self, args): - if len(args) < 2: - print 'expecting: subscribe [ack]' - elif len(args) > 2: - print 'subscribing to "%s" with acknowledge set to "%s"' % (args[1], args[2]) - self.c.subscribe(destination=args[1], ack=args[2]) - else: - print 'subscribing to "%s" with auto acknowledge' % args[1] - self.c.subscribe(destination=args[1], ack='auto') - - def unsubscribe(self, args): - if len(args) < 2: - print 'expecting: unsubscribe ' - else: - print 'unsubscribing from "%s"' % args[1] - self.c.unsubscribe(destination=args[1]) - - if len(sys.argv) > 5: - print 'USAGE: stomp.py [host] [port] [user] [passcode]' - sys.exit(1) - - if len(sys.argv) >= 2: - host = sys.argv[1] - else: - host = "localhost" - if len(sys.argv) >= 3: - port = int(sys.argv[2]) - else: - port = 61613 - - if len(sys.argv) >= 5: - user = sys.argv[3] - passcode = sys.argv[4] - else: - user = None - passcode = None - - st = StompTester(host, port, user, passcode) - try: - while True: - line = raw_input("\r> ") - if not line or line.lstrip().rstrip() == '': - continue - elif 'quit' in line or 'disconnect' in line: - break - split = line.split() - command = split[0] - if not command.startswith("on_") and hasattr(st, command): - getattr(st, command)(split) - else: - print 'unrecognized command' - finally: - st.disconnect(None) - - diff --git a/pythonPackages/ufpy/test/Record.py b/pythonPackages/ufpy/test/Record.py deleted file mode 100644 index ac69d51448..0000000000 --- a/pythonPackages/ufpy/test/Record.py +++ /dev/null @@ -1,48 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Pure python logging mechanism for logging to AlertViz from -# pure python (ie not JEP). DO NOT USE IN PYTHON CALLED -# FROM JAVA. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/03/10 5849 cjeanbap Initial Creation. -# -# -# - -import os -import sys - -class Record(): - def __init__(self, level=0, msg='Test Message'): - self.levelno=level - self.message=msg - self.exc_info=sys.exc_info() - self.exc_text="TEST" - - def getMessage(self): - return self.message \ No newline at end of file diff --git a/pythonPackages/ufpy/test/Test b/pythonPackages/ufpy/test/Test deleted file mode 100644 index 4ff1a0ba84..0000000000 --- a/pythonPackages/ufpy/test/Test +++ /dev/null @@ -1,48 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Pure python logging mechanism for logging to AlertViz from -# pure python (ie not JEP). DO NOT USE IN PYTHON CALLED -# FROM JAVA. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/03/10 5849 cjeanbap Initial Creation. -# -# -# - -## to execute type python Test - - -import os -import logging -from ufpy import AlertVizHandler -import Record - -avh = AlertVizHandler.AlertVizHandler(host=os.getenv("BROKER_ADDR","localhost"), port=9581, category='LOCAL', source='ANNOUNCER', level=logging.NOTSET) -record = Record.Record(10) -avh.emit(record) - \ No newline at end of file diff --git a/pythonPackages/ufpy/test/__init__.py b/pythonPackages/ufpy/test/__init__.py deleted file mode 100644 index ae5228cd62..0000000000 --- a/pythonPackages/ufpy/test/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for ufpy package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/03/10 5489 cjeanbap Initial Creation. -# -# -# - diff --git a/pythonPackages/ufpy/test/dafTests/__init__.py b/pythonPackages/ufpy/test/dafTests/__init__.py deleted file mode 100644 index 5ce6ada4e0..0000000000 --- a/pythonPackages/ufpy/test/dafTests/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for ufpy.test.dafTests package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 02/09/2016 4795 mapeters Initial creation. 
-# 04/12/2016 5548 tgurney Cleanup -# -# -# - -__all__ = [] diff --git a/pythonPackages/ufpy/test/dafTests/baseBufrMosTestCase.py b/pythonPackages/ufpy/test/dafTests/baseBufrMosTestCase.py deleted file mode 100644 index a9007a449f..0000000000 --- a/pythonPackages/ufpy/test/dafTests/baseBufrMosTestCase.py +++ /dev/null @@ -1,73 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from ufpy.dataaccess import DataAccessLayer as DAL -from shapely.geometry import box - -import baseDafTestCase -import params -import unittest - -# -# Base TestCase for BufrMos* tests. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 12/07/16 5981 tgurney Parameterize -# 12/15/16 5981 tgurney Add envelope test -# -# - - -class BufrMosTestCase(baseDafTestCase.DafTestCase): - """Base class for testing DAF support of bufrmos data""" - - data_params = "temperature", "dewpoint" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.OBS_STATION) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.OBS_STATION) - req.setParameters(*self.data_params) - self.runGeometryDataTest(req) - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters(*self.data_params) - req.setEnvelope(params.ENVELOPE) - data = self.runGeometryDataTest(req) - for item in data: - self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) diff --git a/pythonPackages/ufpy/test/dafTests/baseDafTestCase.py b/pythonPackages/ufpy/test/dafTests/baseDafTestCase.py deleted file mode 100644 index 8a7225ca4f..0000000000 --- a/pythonPackages/ufpy/test/dafTests/baseDafTestCase.py +++ /dev/null @@ -1,231 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -from __future__ import print_function - -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import os -import unittest - -# -# Base TestCase for DAF tests. This class provides helper methods and -# tests common to all DAF test cases. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/13/16 5379 tgurney Add identifier values tests -# 04/18/16 5548 tgurney More cleanup, plus new tests -# 04/26/16 5587 tgurney Move identifier values tests -# to subclasses -# 06/01/16 5587 tgurney Add testGet*Identifiers -# 06/07/16 5574 tgurney Make geometry/grid data tests -# return the retrieved data -# 06/10/16 5548 tgurney Make testDatatypeIsSupported -# case-insensitive -# 08/10/16 2416 tgurney Don't test identifier values -# for dataURI -# 10/05/16 5926 dgilling Better checks in runGeometryDataTest. -# 11/08/16 5985 tgurney Do not check data times on -# time-agnostic data -# 03/13/17 5981 tgurney Do not check valid period on -# data time -# -# - - -class DafTestCase(unittest.TestCase): - - sampleDataLimit = 5 - """ - Maximum number of levels, locations, times, and geometry/grid data to - display - """ - - numTimesToLimit = 3 - """ - When limiting geometry/grid data requests with times, only retrieve data - for this many times - """ - - datatype = None - """Name of the datatype""" - - @classmethod - def setUpClass(cls): - host = os.environ.get('DAF_TEST_HOST') - if host is None: - host = 'localhost' - DAL.changeEDEXHost(host) - - @staticmethod - def getTimesIfSupported(req): - """Return available times for req. If req refers to a time-agnostic - datatype, return an empty list instead. 
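This guard is the standard way to probe for time-agnostic datatypes; the same pattern in plain client code would look roughly like the sketch below (the datatype name is only an example, and a reachable EDEX server is assumed):

    from ufpy.dataaccess import DataAccessLayer as DAL
    from ufpy.ThriftClient import ThriftRequestException

    req = DAL.newDataRequest('topo')  # example of a possibly time-agnostic datatype
    try:
        times = DAL.getAvailableTimes(req)
    except ThriftRequestException as e:
        if 'TimeAgnosticDataException' not in str(e):
            raise
        times = []  # the datatype has no notion of data times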
- """ - times = [] - try: - times = DAL.getAvailableTimes(req) - except ThriftRequestException as e: - if not 'TimeAgnosticDataException' in str(e): - raise - return times - - def testDatatypeIsSupported(self): - allSupported = (item.lower() for item in DAL.getSupportedDatatypes()) - self.assertIn(self.datatype.lower(), allSupported) - - def testGetRequiredIdentifiers(self): - req = DAL.newDataRequest(self.datatype) - required = DAL.getRequiredIdentifiers(req) - self.assertIsNotNone(required) - print("Required identifiers:", required) - - def testGetOptionalIdentifiers(self): - req = DAL.newDataRequest(self.datatype) - optional = DAL.getOptionalIdentifiers(req) - self.assertIsNotNone(optional) - print("Optional identifiers:", optional) - - def runGetIdValuesTest(self, identifiers): - for id in identifiers: - if id.lower() == 'datauri': - continue - req = DAL.newDataRequest(self.datatype) - idValues = DAL.getIdentifierValues(req, id) - self.assertTrue(hasattr(idValues, '__iter__')) - - def runInvalidIdValuesTest(self): - badString = 'id from ' + self.datatype + '; select 1;' - with self.assertRaises(ThriftRequestException) as cm: - req = DAL.newDataRequest(self.datatype) - idValues = DAL.getIdentifierValues(req, badString) - - def runNonexistentIdValuesTest(self): - with self.assertRaises(ThriftRequestException) as cm: - req = DAL.newDataRequest(self.datatype) - idValues = DAL.getIdentifierValues(req, 'idthatdoesnotexist') - - def runParametersTest(self, req): - params = DAL.getAvailableParameters(req) - self.assertIsNotNone(params) - print(params) - - def runLevelsTest(self, req): - levels = DAL.getAvailableLevels(req) - self.assertIsNotNone(levels) - print("Number of levels: " + str(len(levels))) - strLevels = [str(t) for t in levels[:self.sampleDataLimit]] - print("Sample levels:\n" + str(strLevels)) - - def runLocationsTest(self, req): - locs = DAL.getAvailableLocationNames(req) - self.assertIsNotNone(locs) - print("Number of location names: " + str(len(locs))) - print("Sample location names:\n" + str(locs[:self.sampleDataLimit])) - - def runTimesTest(self, req): - times = DAL.getAvailableTimes(req) - self.assertIsNotNone(times) - print("Number of times: " + str(len(times))) - strTimes = [str(t) for t in times[:self.sampleDataLimit]] - print("Sample times:\n" + str(strTimes)) - - def runTimeAgnosticTest(self, req): - with self.assertRaises(ThriftRequestException) as cm: - times = DAL.getAvailableTimes(req) - self.assertIn('TimeAgnosticDataException', str(cm.exception)) - - def runGeometryDataTest(self, req, checkDataTimes=True): - """ - Test that we are able to successfully retrieve geometry data for the - given request. 
- """ - times = DafTestCase.getTimesIfSupported(req) - geomData = DAL.getGeometryData(req, times[:self.numTimesToLimit]) - self.assertIsNotNone(geomData) - if times: - self.assertNotEqual(len(geomData), 0) - if not geomData: - raise unittest.SkipTest("No data available") - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - if (checkDataTimes and times and - "PERIOD_USED" not in record.getDataTime().getUtilityFlags()): - self.assertIn(record.getDataTime(), times[:self.numTimesToLimit]) - print("geometry=" + str(record.getGeometry()), end="") - for p in req.getParameters(): - print(" " + p + "=" + record.getString(p), end="") - print() - return geomData - - def runGeometryDataTestWithTimeRange(self, req, timeRange): - """ - Test that we are able to successfully retrieve geometry data for the - given request. - """ - geomData = DAL.getGeometryData(req, timeRange) - self.assertIsNotNone(geomData) - if not geomData: - raise unittest.SkipTest("No data available") - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - self.assertGreaterEqual(record.getDataTime().getRefTime().getTime(), timeRange.getStartInMillis()) - self.assertLessEqual(record.getDataTime().getRefTime().getTime(), timeRange.getEndInMillis()) - print("geometry=" + str(record.getGeometry()), end="") - for p in req.getParameters(): - print(" " + p + "=" + record.getString(p), end="") - print() - return geomData - - def runGridDataTest(self, req, testSameShape=True): - """ - Test that we are able to successfully retrieve grid data for the given - request. - - Args: - testSameShape: whether or not to verify that all the retrieved data - have the same shape (most data don't change shape) - """ - times = DafTestCase.getTimesIfSupported(req) - gridData = DAL.getGridData(req, times[:self.numTimesToLimit]) - self.assertIsNotNone(gridData) - if not gridData: - raise unittest.SkipTest("No data available") - print("Number of grid records: " + str(len(gridData))) - if len(gridData) > 0: - print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") - print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") - print("Sample lat-lon data:\n" + str(gridData[0].getLatLonCoords()) + "\n") - - if testSameShape: - correctGridShape = gridData[0].getLatLonCoords()[0].shape - for record in gridData: - rawData = record.getRawData() - self.assertIsNotNone(rawData) - self.assertEqual(rawData.shape, correctGridShape) - return gridData diff --git a/pythonPackages/ufpy/test/dafTests/baseRadarTestCase.py b/pythonPackages/ufpy/test/dafTests/baseRadarTestCase.py deleted file mode 100644 index 085fea0203..0000000000 --- a/pythonPackages/ufpy/test/dafTests/baseRadarTestCase.py +++ /dev/null @@ -1,194 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from shapely.geometry import box -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import params -import unittest - -# -# Tests common to all radar factories -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/26/16 5587 tgurney Move identifier values tests -# out of base class -# 06/01/16 5587 tgurney Update testGetIdentifierValues -# 06/08/16 5574 mapeters Add advanced query tests -# 06/13/16 5574 tgurney Fix checks for None -# 06/14/16 5548 tgurney Undo previous change (broke -# test) -# 06/30/16 5725 tgurney Add test for NOT IN -# 08/25/16 2671 tgurney Rename to baseRadarTestCase -# and move factory-specific -# tests -# 12/07/16 5981 tgurney Parameterize -# -# - - -class BaseRadarTestCase(baseDafTestCase.DafTestCase): - """Tests common to all radar factories""" - - # datatype is specified by subclass - datatype = None - - radarLoc = params.RADAR.lower() - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableLevels(self): - req = DAL.newDataRequest(self.datatype) - self.runLevelsTest(req) - - def testGetAvailableLevelsWithInvalidLevelIdentifierThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('level.one.field', 'invalidLevelField') - with self.assertRaises(ThriftRequestException) as cm: - self.runLevelsTest(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - self.runTimesTest(req) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def runConstraintTest(self, key, operator, value): - raise NotImplementedError - - def testGetDataWithEqualsString(self): - gridData = self.runConstraintTest('icao', '=', self.radarLoc) - for record in gridData: - self.assertEqual(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithEqualsUnicode(self): - gridData = self.runConstraintTest('icao', '=', unicode(self.radarLoc)) - for record in gridData: - self.assertEqual(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithEqualsInt(self): - gridData = self.runConstraintTest('icao', '=', 1000) - for record in gridData: - self.assertEqual(record.getAttribute('icao'), 1000) - - def testGetDataWithEqualsLong(self): - gridData = self.runConstraintTest('icao', '=', 1000L) - for record in gridData: - self.assertEqual(record.getAttribute('icao'), 1000) - - def 
testGetDataWithEqualsFloat(self): - gridData = self.runConstraintTest('icao', '=', 1.0) - for record in gridData: - self.assertEqual(round(record.getAttribute('icao'), 1), 1.0) - - def testGetDataWithEqualsNone(self): - gridData = self.runConstraintTest('icao', '=', None) - for record in gridData: - self.assertIsNone(record.getAttribute('icao')) - - def testGetDataWithNotEquals(self): - gridData = self.runConstraintTest('icao', '!=', self.radarLoc) - for record in gridData: - self.assertNotEqual(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithNotEqualsNone(self): - gridData = self.runConstraintTest('icao', '!=', None) - for record in gridData: - self.assertIsNotNone(record.getAttribute('icao')) - - def testGetDataWithGreaterThan(self): - gridData = self.runConstraintTest('icao', '>', self.radarLoc) - for record in gridData: - self.assertGreater(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithLessThan(self): - gridData = self.runConstraintTest('icao', '<', self.radarLoc) - for record in gridData: - self.assertLess(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithGreaterThanEquals(self): - gridData = self.runConstraintTest('icao', '>=', self.radarLoc) - for record in gridData: - self.assertGreaterEqual(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithLessThanEquals(self): - gridData = self.runConstraintTest('icao', '<=', self.radarLoc) - for record in gridData: - self.assertLessEqual(record.getAttribute('icao'), self.radarLoc) - - def testGetDataWithInTuple(self): - gridData = self.runConstraintTest('icao', 'in', (self.radarLoc, 'tpbi')) - for record in gridData: - self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) - - def testGetDataWithInList(self): - gridData = self.runConstraintTest('icao', 'in', [self.radarLoc, 'tpbi']) - for record in gridData: - self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) - - def testGetDataWithInGenerator(self): - generator = (item for item in (self.radarLoc, 'tpbi')) - gridData = self.runConstraintTest('icao', 'in', generator) - for record in gridData: - self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) - - def testGetDataWithNotInList(self): - gridData = self.runConstraintTest('icao', 'not in', ['zzzz', self.radarLoc]) - for record in gridData: - self.assertNotIn(record.getAttribute('icao'), ('zzzz', self.radarLoc)) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self.runConstraintTest('icao', 'junk', self.radarLoc) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self.runConstraintTest('icao', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self.runConstraintTest('icao', 'in', []) diff --git a/pythonPackages/ufpy/test/dafTests/params.py b/pythonPackages/ufpy/test/dafTests/params.py deleted file mode 100644 index bdc6da7ed6..0000000000 --- a/pythonPackages/ufpy/test/dafTests/params.py +++ /dev/null @@ -1,43 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# Site-specific parameters for DAF tests -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 12/07/16 5981 tgurney Initial creation -# 12/15/16 5981 tgurney Add ENVELOPE -# -# - -from shapely.geometry import box - -AIRPORT = 'OMA' -OBS_STATION = 'KOMA' -SITE_ID = 'OAX' -STATION_ID = '72558' -RADAR = 'KOAX' -SAMPLE_AREA = (-97.0, 41.0, -96.0, 42.0) - -ENVELOPE = box(*SAMPLE_AREA) \ No newline at end of file diff --git a/pythonPackages/ufpy/test/dafTests/testAcars.py b/pythonPackages/ufpy/test/dafTests/testAcars.py deleted file mode 100644 index b43cb3f35d..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testAcars.py +++ /dev/null @@ -1,61 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseDafTestCase -import unittest - -# -# Test DAF support for ACARS data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class AcarsTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for ACARS data""" - - datatype = "acars" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("flightLevel", "tailNumber") - self.runGeometryDataTest(req) diff --git a/pythonPackages/ufpy/test/dafTests/testAirep.py b/pythonPackages/ufpy/test/dafTests/testAirep.py deleted file mode 100644 index a4fc5fd98f..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testAirep.py +++ /dev/null @@ -1,172 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import unittest - -# -# Test DAF support for airep data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 bsteffen Add getIdentifierValues tests -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# -# - - -class AirepTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for airep data""" - - datatype = "airep" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("flightLevel", "reportType") - self.runGeometryDataTest(req) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - self.runGetIdValuesTest(optionalIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.setParameters("flightLevel", "reportType") - req.addIdentifier(key, constraint) - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('reportType', '=', 'AIREP') - for record in geometryData: - self.assertEqual(record.getString('reportType'), 'AIREP') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('reportType', '=', u'AIREP') - for record in geometryData: - self.assertEqual(record.getString('reportType'), 'AIREP') - - # No numeric tests since no numeric identifiers are available. 
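The _runConstraintTest helper above bundles the general identifier-constraint pattern; written out on its own it is roughly the following sketch (constraint values are placeholders, and an EDEX server is assumed):

    from ufpy.dataaccess import DataAccessLayer as DAL
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    req = DAL.newDataRequest('airep')
    req.setParameters('flightLevel', 'reportType')
    # Restrict results to records whose reportType is in the given collection
    req.addIdentifier('reportType', RequestConstraint.new('in', ('AIREP', 'AMDAR')))
    geomData = DAL.getGeometryData(req, DAL.getAvailableTimes(req)[:3])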
- - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '=', None) - for record in geometryData: - self.assertEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('reportType', '!=', 'AIREP') - for record in geometryData: - self.assertNotEqual(record.getString('reportType'), 'AIREP') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('reportType', '>', 'AIREP') - for record in geometryData: - self.assertGreater(record.getString('reportType'), 'AIREP') - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('reportType', '<', 'AIREP') - for record in geometryData: - self.assertLess(record.getString('reportType'), 'AIREP') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('reportType', '>=', 'AIREP') - for record in geometryData: - self.assertGreaterEqual(record.getString('reportType'), 'AIREP') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('reportType', '<=', 'AIREP') - for record in geometryData: - self.assertLessEqual(record.getString('reportType'), 'AIREP') - - def testGetDataWithInTuple(self): - collection = ('AIREP', 'AMDAR') - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInList(self): - collection = ['AIREP', 'AMDAR'] - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInGenerator(self): - collection = ('AIREP', 'AMDAR') - generator = (item for item in collection) - geometryData = self._runConstraintTest('reportType', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithNotInList(self): - collection = ['AMDAR'] - geometryData = self._runConstraintTest('reportType', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('reportType'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'junk', 'AIREP') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('AIREP', 'AMDAR', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testBinLightning.py b/pythonPackages/ufpy/test/dafTests/testBinLightning.py deleted file mode 100644 index 468a6b13be..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBinLightning.py +++ /dev/null @@ -1,198 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - - -import baseDafTestCase -import unittest - -# -# Test DAF support for binlightning data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/21/16 5551 tgurney Add tests to verify #5551 -# 04/25/16 5587 tgurney Enable skipped test added in -# #5551 -# 04/26/16 5587 tgurney Move identifier values tests -# out of base class -# 06/01/16 5587 tgurney Update testGetIdentifierValues -# 06/03/16 5574 tgurney Add advanced query tests -# 06/13/16 5574 tgurney Typo -# 06/30/16 5725 tgurney Add test for NOT IN -# 11/08/16 5985 tgurney Do not check data times -# -# - - -class BinLightningTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for binlightning data""" - - datatype = "binlightning" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("source", "NLDN") - self.runTimesTest(req) - - def testGetGeometryDataSingleSourceSingleParameter(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("source", "NLDN") - req.setParameters('intensity') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataInvalidParamRaisesIncompatibleRequestException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("source", "NLDN") - req.setParameters('blahblahblah') - with self.assertRaises(ThriftRequestException) as cm: - self.runGeometryDataTest(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - - def testGetGeometryDataSingleSourceAllParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("source", "NLDN") - req.setParameters(*DAL.getAvailableParameters(req)) - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('intensity') - return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetDataWithEqualsString(self): - geomData = 
self._runConstraintTest('source', '=', 'NLDN') - for record in geomData: - self.assertEqual(record.getAttribute('source'), 'NLDN') - - def testGetDataWithEqualsUnicode(self): - geomData = self._runConstraintTest('source', '=', u'NLDN') - for record in geomData: - self.assertEqual(record.getAttribute('source'), 'NLDN') - - def testGetDataWithEqualsInt(self): - geomData = self._runConstraintTest('source', '=', 1000) - for record in geomData: - self.assertEqual(record.getAttribute('source'), 1000) - - def testGetDataWithEqualsLong(self): - geomData = self._runConstraintTest('source', '=', 1000L) - for record in geomData: - self.assertEqual(record.getAttribute('source'), 1000) - - def testGetDataWithEqualsFloat(self): - geomData = self._runConstraintTest('source', '=', 1.0) - for record in geomData: - self.assertEqual(round(record.getAttribute('source'), 1), 1.0) - - def testGetDataWithEqualsNone(self): - geomData = self._runConstraintTest('source', '=', None) - for record in geomData: - self.assertIsNone(record.getAttribute('source')) - - def testGetDataWithNotEquals(self): - geomData = self._runConstraintTest('source', '!=', 'NLDN') - for record in geomData: - self.assertNotEqual(record.getAttribute('source'), 'NLDN') - - def testGetDataWithNotEqualsNone(self): - geomData = self._runConstraintTest('source', '!=', None) - for record in geomData: - self.assertIsNotNone(record.getAttribute('source')) - - def testGetDataWithGreaterThan(self): - geomData = self._runConstraintTest('source', '>', 'NLDN') - for record in geomData: - self.assertGreater(record.getAttribute('source'), 'NLDN') - - def testGetDataWithLessThan(self): - geomData = self._runConstraintTest('source', '<', 'NLDN') - for record in geomData: - self.assertLess(record.getAttribute('source'), 'NLDN') - - def testGetDataWithGreaterThanEquals(self): - geomData = self._runConstraintTest('source', '>=', 'NLDN') - for record in geomData: - self.assertGreaterEqual(record.getAttribute('source'), 'NLDN') - - def testGetDataWithLessThanEquals(self): - geomData = self._runConstraintTest('source', '<=', 'NLDN') - for record in geomData: - self.assertLessEqual(record.getAttribute('source'), 'NLDN') - - def testGetDataWithInTuple(self): - geomData = self._runConstraintTest('source', 'in', ('NLDN', 'ENTLN')) - for record in geomData: - self.assertIn(record.getAttribute('source'), ('NLDN', 'ENTLN')) - - def testGetDataWithInList(self): - geomData = self._runConstraintTest('source', 'in', ['NLDN', 'ENTLN']) - for record in geomData: - self.assertIn(record.getAttribute('source'), ('NLDN', 'ENTLN')) - - def testGetDataWithInGenerator(self): - generator = (item for item in ('NLDN', 'ENTLN')) - geomData = self._runConstraintTest('source', 'in', generator) - for record in geomData: - self.assertIn(record.getAttribute('source'), ('NLDN', 'ENTLN')) - - def testGetDataWithNotInList(self): - geomData = self._runConstraintTest('source', 'not in', ['NLDN', 'blah']) - for record in geomData: - self.assertNotIn(record.getAttribute('source'), ('NLDN', 'blah')) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('source', 'junk', 'NLDN') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('source', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('source', 'in', []) diff --git 
a/pythonPackages/ufpy/test/dafTests/testBufrMosAvn.py b/pythonPackages/ufpy/test/dafTests/testBufrMosAvn.py deleted file mode 100644 index 38409bd268..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosAvn.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function - -import baseBufrMosTestCase -import unittest - -# -# Test DAF support for bufrmosAVN data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class BufrMosAvnTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosAVN data""" - - datatype = "bufrmosAVN" - - # All tests inherited from superclass diff --git a/pythonPackages/ufpy/test/dafTests/testBufrMosEta.py b/pythonPackages/ufpy/test/dafTests/testBufrMosEta.py deleted file mode 100644 index 9c7c8d3ec6..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosEta.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function - -import baseBufrMosTestCase -import unittest - -# -# Test DAF support for bufrmosETA data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class BufrMosEtaTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosETA data""" - - datatype = "bufrmosETA" - - # All tests inherited from superclass diff --git a/pythonPackages/ufpy/test/dafTests/testBufrMosGfs.py b/pythonPackages/ufpy/test/dafTests/testBufrMosGfs.py deleted file mode 100644 index 1b5819c3e4..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosGfs.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function - -import baseBufrMosTestCase -import unittest - -# -# Test DAF support for bufrmosGFS data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class BufrMosGfsTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosGFS data""" - - datatype = "bufrmosGFS" - - # All tests inherited from superclass diff --git a/pythonPackages/ufpy/test/dafTests/testBufrMosHpc.py b/pythonPackages/ufpy/test/dafTests/testBufrMosHpc.py deleted file mode 100644 index ab1b338b8f..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosHpc.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseBufrMosTestCase -import params -import unittest - -# -# Test DAF support for bufrmosHPC data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Inherit all tests -# -# - - -class BufrMosHpcTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosHPC data""" - - datatype = "bufrmosHPC" - data_params = "forecastHr", "maxTemp24Hour" - - # All tests inherited from superclass \ No newline at end of file diff --git a/pythonPackages/ufpy/test/dafTests/testBufrMosLamp.py b/pythonPackages/ufpy/test/dafTests/testBufrMosLamp.py deleted file mode 100644 index a3dc7236e7..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosLamp.py +++ /dev/null @@ -1,45 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function - -import baseBufrMosTestCase -import unittest - -# -# Test DAF support for bufrmosLAMP data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class BufrMosLampTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosLAMP data""" - - datatype = "bufrmosLAMP" - - # All tests inherited from superclass diff --git a/pythonPackages/ufpy/test/dafTests/testBufrMosMrf.py b/pythonPackages/ufpy/test/dafTests/testBufrMosMrf.py deleted file mode 100644 index 6ce3d102b7..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrMosMrf.py +++ /dev/null @@ -1,50 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseBufrMosTestCase -import params -import unittest - -# -# Test DAF support for bufrmosMRF data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Inherit all tests -# -# - - -class BufrMosMrfTestCase(baseBufrMosTestCase.BufrMosTestCase): - """Test DAF support for bufrmosMRF data""" - - datatype = "bufrmosMRF" - data_params = "forecastHr", "maxTempDay" - - # All tests inherited from superclass diff --git a/pythonPackages/ufpy/test/dafTests/testBufrUa.py b/pythonPackages/ufpy/test/dafTests/testBufrUa.py deleted file mode 100644 index 8263a2ba37..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testBufrUa.py +++ /dev/null @@ -1,221 +0,0 @@ -# # -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-# # - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for bufrua data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 bsteffen Add getIdentifierValues tests -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 12/07/16 5981 tgurney Parameterize -# 12/15/16 5981 tgurney Add envelope test -# -# - - -class BufrUaTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for bufrua data""" - - datatype = "bufrua" - - location = params.STATION_ID - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("reportType", "2020") - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(self.location) - req.addIdentifier("reportType", "2020") - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(self.location) - req.addIdentifier("reportType", "2020") - req.setParameters("sfcPressure", "staName", "rptType", "tdMan") - - print("Testing getGeometryData()") - - geomData = DAL.getGeometryData(req) - self.assertIsNotNone(geomData) - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - print("level=", record.getLevel(), end="") - # One dimensional parameters are reported on the 0.0UNKNOWN level. - # 2D parameters are reported on MB levels from pressure. - if record.getLevel() == "0.0UNKNOWN": - print(" sfcPressure=" + record.getString("sfcPressure") + record.getUnit("sfcPressure"), end="") - print(" staName=" + record.getString("staName"), end="") - print(" rptType=" + record.getString("rptType") + record.getUnit("rptType"), end="") - else: - print(" tdMan=" + str(record.getNumber("tdMan")) + record.getUnit("tdMan"), end="") - print(" geometry=", record.getGeometry()) - - print("getGeometryData() complete\n\n") - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("staName", "rptType") - req.setEnvelope(params.ENVELOPE) - data = self.runGeometryDataTest(req) - for item in data: - self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - self.runGetIdValuesTest(optionalIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - # As an identifier it is "reportType" but as a parameter it is - # "rptType"... this is weird... 
- req.setParameters("staName", "rptType") - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('reportType', '=', '2022') - for record in geometryData: - self.assertEqual(record.getString('rptType'), '2022') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('reportType', '=', u'2022') - for record in geometryData: - self.assertEqual(record.getString('rptType'), '2022') - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('reportType', '=', 2022) - for record in geometryData: - self.assertEqual(record.getString('rptType'), '2022') - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('reportType', '=', 2022L) - for record in geometryData: - self.assertEqual(record.getString('rptType'), '2022') - - # No float test because no float identifiers are available - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '=', None) - for record in geometryData: - self.assertEqual(record.getType('rptType'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('reportType', '!=', 2022) - for record in geometryData: - self.assertNotEqual(record.getString('rptType'), '2022') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('rptType'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('reportType', '>', 2022) - for record in geometryData: - self.assertGreater(record.getString('rptType'), '2022') - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('reportType', '<', 2022) - for record in geometryData: - self.assertLess(record.getString('rptType'), '2022') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('reportType', '>=', 2022) - for record in geometryData: - self.assertGreaterEqual(record.getString('rptType'), '2022') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('reportType', '<=', 2022) - for record in geometryData: - self.assertLessEqual(record.getString('rptType'), '2022') - - def testGetDataWithInTuple(self): - collection = ('2022', '2032') - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('rptType'), collection) - - def testGetDataWithInList(self): - collection = ['2022', '2032'] - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('rptType'), collection) - - def testGetDataWithInGenerator(self): - collection = ('2022', '2032') - generator = (item for item in collection) - geometryData = self._runConstraintTest('reportType', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('rptType'), collection) - - def testGetDataWithNotInList(self): - collection = ('2022', '2032') - geometryData = self._runConstraintTest('reportType', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('rptType'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'junk', '2022') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - 
self._runConstraintTest('reportType', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('rptType', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('2022', '2032', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('rptType', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testClimate.py b/pythonPackages/ufpy/test/dafTests/testClimate.py deleted file mode 100644 index 829691e396..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testClimate.py +++ /dev/null @@ -1,444 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -import datetime -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for climate data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/26/16 5587 tgurney Add identifier values tests -# 06/09/16 5574 mapeters Add advanced query tests, Short parameter test -# 06/13/16 5574 tgurney Fix checks for None -# 06/21/16 5548 tgurney Skip tests that cause errors -# 06/30/16 5725 tgurney Add test for NOT IN -# 10/06/16 5926 dgilling Add additional time and location tests. 
-# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Add envelope test -# 08/16/17 6388 tgurney Test for duplicate data -# -# - - -class ClimateTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for climate data""" - - datatype = 'climate' - obsStation = params.OBS_STATION - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - self.runLocationsTest(req) - - def testGetAvailableLocationsForRptTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.rpt') - self.runLocationsTest(req) - - def testGetAvailableLocationsForStationId(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.day_climate_norm') - self.runLocationsTest(req) - - def testGetAvailableLocationsForInformId(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_mon_season_yr') - self.runLocationsTest(req) - - def testGetAvailableLocationsWithConstraints(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.addIdentifier('maxtemp_mon', RequestConstraint.new('>', 95)) - self.runLocationsTest(req) - - def testGetAvailableLocationsWithInvalidTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.boolean_values') - with self.assertRaises(ThriftRequestException) as cm: - DAL.getAvailableLocationNames(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setParameters('maxtemp_mon', 'min_sea_press') - self.runTimesTest(req) - - def testGetAvailableTimesWithLocationNamesForYearMonth(self): - """ - Test retrieval of times for a climo table that uses year and - month columns to build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames(self.obsStation, 'KABR', 'KDMO') - req.setParameters('maxtemp_mon', 'min_sea_press') - self.runTimesTest(req) - - def testGetAvailableTimesWithLocationNamesForYearDayOfYear(self): - """ - Test retrieval of times for a climo table that uses year and - day_of_year columns to build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_daily') - req.setLocationNames(self.obsStation, 'KABR', 'KDMO') - req.setParameters('maxtemp_cal', 'min_press') - self.runTimesTest(req) - - def testGetAvailableTimesWithLocationNamesForPeriod(self): - """ - Test retrieval of times for a climo table that uses - period_start and period_end columns to build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_mon_season_yr') - req.setLocationNames(self.obsStation, 'KABR', 'KDMO') - req.setParameters('max_temp', 'precip_total') - self.runTimesTest(req) - - def testGetAvailableTimesWithLocationNamesForDate(self): - """ - Test retrieval of times for a climo table that uses a date - column to build DataTimes. 
- """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.daily_climate') - req.setLocationNames(self.obsStation, 'KABR', 'KDMO') - req.setParameters('max_temp', 'precip', 'avg_wind_speed') - self.runTimesTest(req) - - def testGetAvailableTimesWithConstraint(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.addIdentifier('maxtemp_mon', RequestConstraint.new('<', 75)) - req.setParameters('maxtemp_mon', 'min_sea_press') - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_mon', 'min_sea_press') - self.runGeometryDataTest(req) - - def testGetGeometryDataWithEnvelopeThrowsException(self): - # Envelope is not used - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setParameters('maxtemp_mon', 'min_sea_press') - req.setEnvelope(params.ENVELOPE) - with self.assertRaises(Exception): - data = self.runGeometryDataTest(req) - - def testGetGeometryDataForYearAndDayOfYearTable(self): - """ - Test retrieval of data for a climo table that uses year and - day_of_year columns to build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_daily') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_cal', 'min_press') - self.runGeometryDataTest(req) - - def testGetGeometryDataForPeriodTable(self): - """ - Test retrieval of data for a climo table that uses a period_start and - period_end columns to build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_mon_season_yr') - req.setLocationNames('KFNB') - req.setParameters('max_temp', 'precip_total') - self.runGeometryDataTest(req) - - def testGetGeometryDataForDateTable(self): - """ - Test retrieval of data for a climo table that uses a date column to - build DataTimes. - """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.daily_climate') - req.setLocationNames('KFNB') - req.setParameters('max_temp', 'precip', 'avg_wind_speed') - self.runGeometryDataTest(req) - - def testGetGeometryDataWithShortParameter(self): - """ - Test that a parameter that is stored in Java as a Short is correctly - retrieved as a number. 
- """ - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'cli_asos_monthly') - req.setParameters('month') - geometryData = self.runGeometryDataTest(req) - for record in geometryData: - self.assertIsNotNone(record.getNumber('month')) - - def testGetTableIdentifierValues(self): - self.runGetIdValuesTest(['table']) - - def testGetColumnIdValuesWithTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - idValues = DAL.getIdentifierValues(req, 'year') - self.assertTrue(hasattr(idValues, '__iter__')) - - def testGetColumnIdValuesWithoutTableThrowsException(self): - req = DAL.newDataRequest(self.datatype) - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'year') - - @unittest.skip('avoid EDEX error') - def testGetColumnIdValuesWithNonexistentTableThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'nonexistentjunk') - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'year') - - @unittest.skip('avoid EDEX error') - def testGetNonexistentColumnIdValuesThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'nonexistentjunk') - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'cli_asos_monthly') - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('station_code', 'avg_daily_max') - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('station_code', '=', self.obsStation) - for record in geometryData: - self.assertEqual(record.getString('station_code'), self.obsStation) - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('station_code', '=', unicode(self.obsStation)) - for record in geometryData: - self.assertEqual(record.getString('station_code'), self.obsStation) - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('avg_daily_max', '=', 70) - for record in geometryData: - self.assertEqual(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('avg_daily_max', '=', 70L) - for record in geometryData: - self.assertEqual(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithEqualsFloat(self): - geometryData = self._runConstraintTest('avg_daily_max', '=', 69.2) - for record in geometryData: - self.assertEqual(round(record.getNumber('avg_daily_max'), 1), 69.2) - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('station_code', '=', None) - self.assertEqual(len(geometryData), 0) - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('station_code', '!=', self.obsStation) - for record in geometryData: - self.assertNotEqual(record.getString('station_code'), self.obsStation) - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('station_code', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('station_code'), 'NULL') - - 
def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('avg_daily_max', '>', 70) - for record in geometryData: - self.assertGreater(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('avg_daily_max', '<', 70) - for record in geometryData: - self.assertLess(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('avg_daily_max', '>=', 70) - for record in geometryData: - self.assertGreaterEqual(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('avg_daily_max', '<=', 70) - for record in geometryData: - self.assertLessEqual(record.getNumber('avg_daily_max'), 70) - - def testGetDataWithInTuple(self): - collection = (self.obsStation, 'KABR') - geometryData = self._runConstraintTest('station_code', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('station_code'), collection) - - def testGetDataWithInList(self): - collection = [self.obsStation, 'KABR'] - geometryData = self._runConstraintTest('station_code', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('station_code'), collection) - - def testGetDataWithInGenerator(self): - collection = (self.obsStation, 'KABR') - generator = (item for item in collection) - geometryData = self._runConstraintTest('station_code', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('station_code'), collection) - - def testGetDataWithNotInList(self): - collection = ['KORD', 'KABR'] - geometryData = self._runConstraintTest('station_code', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('station_code'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('station_code', 'junk', self.obsStation) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('station_code', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('station_code', 'in', []) - - def testGetDataWithTimeRangeWithYearAndMonth1(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_mon', 'min_sea_press') - startTime = datetime.datetime(2009, 1, 1) - endTime = datetime.datetime(2009, 12, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithYearAndMonth2(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_mon', 'min_sea_press') - startTime = datetime.datetime(2008, 1, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithYearAndMonth3(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_mon', 'min_sea_press') - startTime = datetime.datetime(2007, 7, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - 
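# The time-range tests above and below all reduce to building a TimeRange and
# handing it to the geometry query. A condensed sketch, assuming (as the deleted
# runGeometryDataTestWithTimeRange helper suggests) that getGeometryData accepts a
# TimeRange in place of a list of DataTimes; the station and dates are illustrative.
import datetime
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
from ufpy.dataaccess import DataAccessLayer as DAL

req = DAL.newDataRequest('climate')
req.addIdentifier('table', 'public.cli_asos_monthly')
req.setLocationNames('KFNB')
req.setParameters('maxtemp_mon', 'min_sea_press')

# EDEX maps the requested window onto the table's year/month columns.
tr = TimeRange(datetime.datetime(2009, 1, 1), datetime.datetime(2009, 12, 31))
data = DAL.getGeometryData(req, tr)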
def testGetDataWithTimeRangeWithYearAndDayOfYear1(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_daily') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_cal', 'min_press') - startTime = datetime.datetime(2009, 1, 1) - endTime = datetime.datetime(2009, 7, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithYearAndDayOfYear2(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_daily') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_cal', 'min_press') - startTime = datetime.datetime(2008, 7, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithYearAndDayOfYear3(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_daily') - req.setLocationNames('KFNB') - req.setParameters('maxtemp_cal', 'min_press') - startTime = datetime.datetime(2007, 7, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithPeriodTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_mon_season_yr') - req.setLocationNames('KFNB') - req.setParameters('max_temp', 'precip_total') - startTime = datetime.datetime(2007, 7, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithTimeRangeWithForDateTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.daily_climate') - req.setLocationNames('KFNB') - req.setParameters('max_temp', 'precip', 'avg_wind_speed') - startTime = datetime.datetime(2007, 7, 1) - endTime = datetime.datetime(2009, 3, 31) - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testNoDuplicateData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.cli_asos_monthly') - req.setLocationNames('KOMA') - req.setParameters('maxtemp_day1') - rows = DAL.getGeometryData(req, DAL.getAvailableTimes(req)[0:5]) - for i in range(len(rows)): - for j in range(len(rows)): - if i != j: - self.assertNotEqual(rows[i].__dict__, rows[j].__dict__) diff --git a/pythonPackages/ufpy/test/dafTests/testCombinedTimeQuery.py b/pythonPackages/ufpy/test/dafTests/testCombinedTimeQuery.py deleted file mode 100644 index 9295871b1d..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testCombinedTimeQuery.py +++ /dev/null @@ -1,67 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -from ufpy.dataaccess import DataAccessLayer as DAL - -from ufpy.dataaccess import CombinedTimeQuery as CTQ - -import unittest -import os - -# -# Test the CombinedTimeQuery module -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/24/16 5591 bsteffen Initial Creation. -# 11/08/16 5895 tgurney Change grid model -# -# -# - -class CombinedTimeQueryTestCase(unittest.TestCase): - - @classmethod - def setUp(cls): - host = os.environ.get('DAF_TEST_HOST') - if host is None: - host = 'localhost' - DAL.changeEDEXHost(host) - - def testSuccessfulQuery(self): - req = DAL.newDataRequest('grid') - req.setLocationNames('RAP13') - req.setParameters('T','GH') - req.setLevels('300MB', '500MB','700MB') - times = CTQ.getAvailableTimes(req) - self.assertNotEqual(len(times), 0) - - def testNonIntersectingQuery(self): - """ - Test that no times are returned when a parameter is only available on one of the levels. - """ - req = DAL.newDataRequest('grid') - req.setLocationNames('RAP13') - req.setParameters('T','GH', 'LgSP1hr') - req.setLevels('300MB', '500MB','700MB','0.0SFC') - times = CTQ.getAvailableTimes(req) - self.assertEqual(len(times), 0) diff --git a/pythonPackages/ufpy/test/dafTests/testCommonObsSpatial.py b/pythonPackages/ufpy/test/dafTests/testCommonObsSpatial.py deleted file mode 100644 index 984c57fd27..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testCommonObsSpatial.py +++ /dev/null @@ -1,178 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from shapely.geometry import box -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for common_obs_spatial data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation.
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 05/26/16 5587 njensen Added testGetIdentifierValues() -# 06/01/16 5587 tgurney Move testIdentifiers() to -# superclass -# 06/13/16 5574 tgurney Add advanced query tests -# 06/21/16 5548 tgurney Skip tests that cause errors -# 06/30/16 5725 tgurney Add test for NOT IN -# 12/07/16 5981 tgurney Parameterize -# 01/06/17 5981 tgurney Do not check data times -# - - -class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for common_obs_spatial data""" - - datatype = "common_obs_spatial" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("country", ["US", "CN"]) - self.runLocationsTest(req) - - def testGetIdentifierValues(self): - self.runGetIdValuesTest(['country']) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setParameters("name", "stationid") - self.runGeometryDataTest(req, checkDataTimes=False) - - def testRequestingTimesThrowsTimeAgnosticDataException(self): - req = DAL.newDataRequest(self.datatype) - self.runTimeAgnosticTest(req) - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('catalogtype', 'elevation', 'state') - return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('state', '=', 'NE') - for record in geometryData: - self.assertEqual(record.getString('state'), 'NE') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('state', '=', u'NE') - for record in geometryData: - self.assertEqual(record.getString('state'), 'NE') - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('catalogtype', '=', 32) - for record in geometryData: - self.assertEqual(record.getNumber('catalogtype'), 32) - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('elevation', '=', 0L) - for record in geometryData: - self.assertEqual(record.getNumber('elevation'), 0) - - # No float test since there are no float identifiers available. Attempting - # to filter a non-float identifier on a float value raises an exception. 
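# common_obs_spatial is time-agnostic, so the spatial queries above key everything
# off the envelope. A sketch: the bounding box below stands in for params.ENVELOPE,
# whose actual coordinates live elsewhere in the test package.
from shapely.geometry import box
from ufpy.dataaccess import DataAccessLayer as DAL

req = DAL.newDataRequest('common_obs_spatial')
req.setEnvelope(box(-97.0, 41.0, -96.0, 42.0))  # illustrative lon/lat box
req.setParameters('name', 'stationid')

# Pass no times: asking for available times on this datatype raises
# TimeAgnosticDataException, per the test above.
for rec in DAL.getGeometryData(req, []):
    print(rec.getString('name'), rec.getGeometry())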
- - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('state', '=', None) - for record in geometryData: - self.assertEqual(record.getType('state'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('state', '!=', 'NE') - for record in geometryData: - self.assertNotEqual(record.getString('state'), 'NE') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('state', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('state'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('elevation', '>', 500) - for record in geometryData: - self.assertGreater(record.getNumber('elevation'), 500) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('elevation', '<', 100) - for record in geometryData: - self.assertLess(record.getNumber('elevation'), 100) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('elevation', '>=', 500) - for record in geometryData: - self.assertGreaterEqual(record.getNumber('elevation'), 500) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('elevation', '<=', 100) - for record in geometryData: - self.assertLessEqual(record.getNumber('elevation'), 100) - - def testGetDataWithInTuple(self): - collection = ('NE', 'TX') - geometryData = self._runConstraintTest('state', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithInList(self): - collection = ['NE', 'TX'] - geometryData = self._runConstraintTest('state', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithInGenerator(self): - collection = ('NE', 'TX') - generator = (item for item in collection) - geometryData = self._runConstraintTest('state', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithNotInList(self): - collection = ('NE', 'TX') - geometryData = self._runConstraintTest('state', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('state'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('state', 'junk', 'NE') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('state', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('state', 'in', []) diff --git a/pythonPackages/ufpy/test/dafTests/testDataTime.py b/pythonPackages/ufpy/test/dafTests/testDataTime.py deleted file mode 100644 index 4503235a28..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testDataTime.py +++ /dev/null @@ -1,134 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime - -import unittest - -# -# Unit tests for Python implementation of DataTime -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/02/16 2416 tgurney Initial creation -# -# - - -class DataTimeTestCase(unittest.TestCase): - - def testFromStrRefTimeOnly(self): - s = '2016-08-02 01:23:45' - expected = s - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrRefTimeOnlyZeroMillis(self): - s = '2016-08-02 01:23:45.0' - # result of str() will always drop trailing .0 milliseconds - expected = '2016-08-02 01:23:45' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrRefTimeOnlyWithMillis(self): - s = '2016-08-02 01:23:45.1' - expected = '2016-08-02 01:23:45.001000' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithFcstTimeHr(self): - s = '2016-08-02 01:23:45 (17)' - expected = s - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithFcstTimeHrZeroMillis(self): - s = '2016-08-02 01:23:45.0 (17)' - expected = '2016-08-02 01:23:45 (17)' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithFcstTimeHrAndMillis(self): - s = '2016-08-02 01:23:45.1 (17)' - expected = '2016-08-02 01:23:45.001000 (17)' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithFcstTimeHrMin(self): - s = '2016-08-02 01:23:45 (17:34)' - expected = s - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithFcstTimeHrMinZeroMillis(self): - s = '2016-08-02 01:23:45.0 (17:34)' - expected = '2016-08-02 01:23:45 (17:34)' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithPeriod(self): - s = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]' - expected = s - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithPeriodZeroMillis(self): - s = '2016-08-02 01:23:45.0[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]' - expected = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testFromStrWithEverything(self): - s = '2016-08-02 01:23:45.0_(17:34)[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]' - expected = '2016-08-02 01:23:45 (17:34)[2016-08-02 02:34:45--2016-08-02 03:45:56]' - self.assertEqual(expected, str(DataTime(s))) - s = s.replace(' ', '_') - self.assertEqual(expected, str(DataTime(s))) - - def testDataTimeReconstructItselfFromString(self): - times = [ - '2016-08-02 01:23:45', - '2016-08-02
01:23:45.0', - '2016-08-02 01:23:45.1', - '2016-08-02 01:23:45.123000', - '2016-08-02 01:23:45 (17)', - '2016-08-02 01:23:45.0 (17)', - '2016-08-02 01:23:45.1 (17)', - '2016-08-02 01:23:45 (17:34)', - '2016-08-02 01:23:45.0 (17:34)', - '2016-08-02 01:23:45.1 (17:34)', - '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]', - '2016-08-02 01:23:45.0[2016-08-02_02:34:45.123--2016-08-02_03:45:56.456]', - '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]' - ] - for time in times: - self.assertEqual(DataTime(time), DataTime(str(DataTime(time))), time) \ No newline at end of file diff --git a/pythonPackages/ufpy/test/dafTests/testFfmp.py b/pythonPackages/ufpy/test/dafTests/testFfmp.py deleted file mode 100644 index 5edc20c897..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testFfmp.py +++ /dev/null @@ -1,228 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for ffmp data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. 
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/18/16 5587 tgurney Add test for sane handling of -# zero records returned -# 06/20/16 5587 tgurney Add identifier values tests -# 07/01/16 5728 mapeters Add advanced query tests, -# include huc and accumHrs in -# id values tests, test that -# accumHrs id is never required -# 08/03/16 5728 mapeters Fixed minor bugs, replaced -# PRTM parameter since it isn't -# configured for ec-oma -# 11/08/16 5985 tgurney Do not check data times -# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Do not check data times -# -# - - -class FfmpTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for ffmp data""" - - datatype = 'ffmp' - location = params.RADAR.lower() - - @staticmethod - def addIdentifiers(req): - req.addIdentifier('wfo', params.SITE_ID) - req.addIdentifier('siteKey', 'hpe') - req.addIdentifier('dataKey', 'hpe') - req.addIdentifier('huc', 'ALL') - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.addIdentifiers(req) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - self.addIdentifiers(req) - req.setParameters('DHRMOSAIC') - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - self.addIdentifiers(req) - req.setParameters('DHRMOSAIC') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataEmptyResult(self): - req = DAL.newDataRequest(self.datatype) - self.addIdentifiers(req) - req.setParameters('blah blah blah') # force 0 records returned - result = self.runGeometryDataTest(req, checkDataTimes=False) - self.assertEqual(len(result), 0) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - ids = requiredIds | optionalIds - for id in ids: - req = DAL.newDataRequest(self.datatype) - if id == 'accumHrs': - req.setParameters('ARI6H2YR') - req.addIdentifier('wfo', params.SITE_ID) - req.addIdentifier('siteKey', self.location) - req.addIdentifier('huc', 'ALL') - idValues = DAL.getIdentifierValues(req, id) - self.assertTrue(hasattr(idValues, '__iter__')) - print(id + " values: " + str(idValues)) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.addIdentifier('wfo', params.SITE_ID) - req.addIdentifier('huc', 'ALL') - req.setParameters('QPFSCAN') - return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('siteKey', '=', self.location) - for record in geometryData: - self.assertEqual(record.getAttribute('siteKey'), self.location) - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('siteKey', '=', unicode(self.location)) - for record in geometryData: - self.assertEqual(record.getAttribute('siteKey'), self.location) - - # No numeric tests since no numeric identifiers are available that support - # RequestConstraints. 
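# Every FFMP request above carries the same identifier scaffolding before any
# constraint is applied. A sketch: the WFO and radar keys mirror params.SITE_ID
# and params.RADAR, which are site-specific placeholders.
from ufpy.dataaccess import DataAccessLayer as DAL

req = DAL.newDataRequest('ffmp')
req.addIdentifier('wfo', 'OAX')       # illustrative WFO, cf. params.SITE_ID
req.addIdentifier('siteKey', 'koax')  # illustrative radar, cf. params.RADAR
req.addIdentifier('dataKey', 'koax')
req.addIdentifier('huc', 'ALL')       # basin aggregation level
req.setParameters('QPFSCAN')

times = DAL.getAvailableTimes(req)
basins = DAL.getGeometryData(req, times[-1:])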
- - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('siteKey', '=', None) - for record in geometryData: - self.assertIsNone(record.getAttribute('siteKey')) - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('siteKey', '!=', self.location) - for record in geometryData: - self.assertNotEqual(record.getAttribute('siteKey'), self.location) - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('siteKey', '!=', None) - for record in geometryData: - self.assertIsNotNone(record.getAttribute('siteKey')) - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('siteKey', '>', self.location) - for record in geometryData: - self.assertGreater(record.getAttribute('siteKey'), self.location) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('siteKey', '<', self.location) - for record in geometryData: - self.assertLess(record.getAttribute('siteKey'), self.location) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('siteKey', '>=', self.location) - for record in geometryData: - self.assertGreaterEqual(record.getAttribute('siteKey'), self.location) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('siteKey', '<=', self.location) - for record in geometryData: - self.assertLessEqual(record.getAttribute('siteKey'), self.location) - - def testGetDataWithInList(self): - collection = [self.location, 'kuex'] - geometryData = self._runConstraintTest('siteKey', 'in', collection) - for record in geometryData: - self.assertIn(record.getAttribute('siteKey'), collection) - - def testGetDataWithNotInList(self): - collection = [self.location, 'kuex'] - geometryData = self._runConstraintTest('siteKey', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getAttribute('siteKey'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('siteKey', 'junk', self.location) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('siteKey', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('siteKey', 'in', []) - - def testGetDataWithSiteKeyAndDataKeyConstraints(self): - siteKeys = [self.location, 'hpe'] - dataKeys = ['kuex', 'kdmx'] - - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('wfo', params.SITE_ID) - req.addIdentifier('huc', 'ALL') - - siteKeysConstraint = RequestConstraint.new('in', siteKeys) - req.addIdentifier('siteKey', siteKeysConstraint) - dataKeysConstraint = RequestConstraint.new('in', dataKeys) - req.addIdentifier('dataKey', dataKeysConstraint) - - req.setParameters('QPFSCAN') - geometryData = self.runGeometryDataTest(req, checkDataTimes=False) - for record in geometryData: - self.assertIn(record.getAttribute('siteKey'), siteKeys) - # dataKey attr. 
is comma-separated list of dataKeys that had data - for dataKey in record.getAttribute('dataKey').split(','): - self.assertIn(dataKey, dataKeys) - - def testGetGuidanceDataWithoutAccumHrsIdentifierSet(self): - # Test that accumHrs identifier is not required for guidance data - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('wfo', params.SITE_ID) - req.addIdentifier('siteKey', self.location) - req.addIdentifier('huc', 'ALL') - req.setParameters('FFG0124hr') - self.runGeometryDataTest(req, checkDataTimes=False) \ No newline at end of file diff --git a/pythonPackages/ufpy/test/dafTests/testGfe.py b/pythonPackages/ufpy/test/dafTests/testGfe.py deleted file mode 100644 index 8ef9830621..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testGfe.py +++ /dev/null @@ -1,220 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from ufpy.dataaccess import DataAccessLayer as DAL -from shapely.geometry import box, Point - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for GFE data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. 
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 05/23/16 5637 bsteffen Test vectors -# 05/31/16 5587 tgurney Add getIdentifierValues tests -# 06/01/16 5587 tgurney Update testGetIdentifierValues -# 06/17/16 5574 mapeters Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 11/07/16 5991 bsteffen Improve vector tests -# 12/07/16 5981 tgurney Parameterize -# 12/15/16 6040 tgurney Add testGetGridDataWithDbType -# 12/20/16 5981 tgurney Add envelope test -# 10/19/17 6491 tgurney Add test for dbtype identifier -# 11/10/17 6491 tgurney Replace modelName with -# parmId.dbId.modelName -# -# - - -class GfeTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for GFE data""" - - datatype = 'gfe' - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('parmId.dbId.modelName', 'Fcst') - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('parmId.dbId.modelName', 'Fcst') - req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) - self.runTimesTest(req) - - def testGetGridData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('parmId.dbId.modelName', 'Fcst') - req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) - req.setParameters('T') - gridDatas = self.runGridDataTest(req) - for gridData in gridDatas: - self.assertEqual(gridData.getAttribute('parmId.dbId.dbType'), '') - - def testGetGridDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('parmId.dbId.modelName', 'Fcst') - req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) - req.setParameters('T') - req.setEnvelope(params.ENVELOPE) - gridData = self.runGridDataTest(req) - if not gridData: - raise unittest.SkipTest('no data available') - lons, lats = gridData[0].getLatLonCoords() - lons = lons.reshape(-1) - lats = lats.reshape(-1) - - # Ensure all points are within one degree of the original box - # to allow slight margin of error for reprojection distortion. - testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1, - params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1 ) - - for i in range(len(lons)): - self.assertTrue(testEnv.contains(Point(lons[i], lats[i]))) - - def testGetVectorGridData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('parmId.dbId.modelName', 'Fcst') - req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) - req.setParameters('Wind') - times = DAL.getAvailableTimes(req) - if not(times): - raise unittest.SkipTest('No Wind Data available for testing') - gridData = DAL.getGridData(req, [times[0]]) - rawWind = None - rawDir = None - for grid in gridData: - if grid.getParameter() == 'Wind': - self.assertEqual(grid.getUnit(),'kts') - rawWind = grid.getRawData() - elif grid.getParameter() == 'WindDirection': - self.assertEqual(grid.getUnit(),'deg') - rawDir = grid.getRawData() - self.assertIsNotNone(rawWind, 'Wind Magnitude grid is not present') - self.assertIsNotNone(rawDir, 'Wind Direction grid is not present') - # rawWind and rawDir are numpy.ndarrays so comparison will result in boolean ndarrays. 
- self.assertTrue((rawWind >= 0).all(), 'Wind Speed should not contain negative values') - self.assertTrue((rawDir >= 0).all(), 'Wind Direction should not contain negative values') - self.assertTrue((rawDir <= 360).all(), 'Wind Direction should be less than or equal to 360') - self.assertFalse((rawDir == rawWind).all(), 'Wind Direction should be different from Wind Speed') - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setLocationNames(params.SITE_ID) - req.setParameters('T') - return self.runGridDataTest(req) - - def testGetDataWithModelNameEqualsString(self): - gridData = self._runConstraintTest('parmId.dbId.modelName', '=', 'Fcst') - for record in gridData: - self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst') - - def testGetDataWithDbTypeEqualsString(self): - gridData = self._runConstraintTest('parmId.dbId.dbType', '=', 'Prac') - for record in gridData: - self.assertEqual(record.getAttribute('parmId.dbId.dbType'), 'Prac') - - def testGetDataWithEqualsUnicode(self): - gridData = self._runConstraintTest('parmId.dbId.modelName', '=', u'Fcst') - for record in gridData: - self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst') - - # No numeric tests since no numeric identifiers are available. 
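[Editor's note] As a companion to the GFE tests above, a minimal sketch of a plain GFE grid fetch using the parmId.dbId.* identifiers they demonstrate. 'OAX' is an example siteId (params.SITE_ID in the tests), and an EDEX host is assumed to have been set as in the earlier sketch.

    from __future__ import print_function
    from ufpy.dataaccess import DataAccessLayer as DAL

    req = DAL.newDataRequest('gfe')
    req.addIdentifier('parmId.dbId.modelName', 'Fcst')
    req.addIdentifier('parmId.dbId.siteId', 'OAX')  # example site
    req.setParameters('T')

    times = DAL.getAvailableTimes(req)
    if times:
        grid = DAL.getGridData(req, times[-1:])[0]
        lons, lats = grid.getLatLonCoords()
        # getRawData() returns a numpy array of the grid values
        print(grid.getParameter(), grid.getUnit(), grid.getRawData().shape)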
- - def testGetDataWithEqualsNone(self): - gridData = self._runConstraintTest('parmId.dbId.modelName', '=', None) - for record in gridData: - self.assertIsNone(record.getAttribute('parmId.dbId.modelName')) - - def testGetDataWithNotEquals(self): - gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', 'Fcst') - for record in gridData: - self.assertNotEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst') - - def testGetDataWithNotEqualsNone(self): - gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', None) - for record in gridData: - self.assertIsNotNone(record.getAttribute('parmId.dbId.modelName')) - - def testGetDataWithInTuple(self): - collection = ('Fcst', 'SAT') - gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection) - for record in gridData: - self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) - - def testGetDataWithInList(self): - collection = ['Fcst', 'SAT'] - gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection) - for record in gridData: - self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) - - def testGetDataWithInGenerator(self): - collection = ('Fcst', 'SAT') - generator = (item for item in collection) - gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', generator) - for record in gridData: - self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) - - def testGetDataWithNotInList(self): - collection = ('Fcst', 'SAT') - gridData = self._runConstraintTest('parmId.dbId.modelName', 'not in', collection) - for record in gridData: - self.assertNotIn(record.getAttribute('parmId.dbId.modelName'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('parmId.dbId.modelName', 'junk', 'Fcst') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('parmId.dbId.modelName', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('parmId.dbId.modelName', 'in', []) - diff --git a/pythonPackages/ufpy/test/dafTests/testGfeEditArea.py b/pythonPackages/ufpy/test/dafTests/testGfeEditArea.py deleted file mode 100644 index 853a18098f..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testGfeEditArea.py +++ /dev/null @@ -1,220 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -from __future__ import print_function -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import params - -# -# Test DAF support for GFE edit area data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/08/17 6298 mapeters Initial Creation. -# 09/27/17 6463 tgurney Remove GID site identifier -# -# - - -class GfeEditAreaTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for GFE edit area data""" - - datatype = 'gfeEditArea' - - siteIdKey = 'siteId' - - editAreaNames = ['ISC_NHA', 'SDZ066', 'StormSurgeWW_EditArea'] - - groupKey = 'group' - - groups = ['ISC', 'WFOs', 'FIPS_' + params.SITE_ID] - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - with self.assertRaises(ThriftRequestException): - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - with self.assertRaises(ThriftRequestException): - self.runTimesTest(req) - - def testGetGeometryDataWithoutSiteIdThrowsException(self): - req = DAL.newDataRequest(self.datatype) - with self.assertRaises(ThriftRequestException): - self.runGeometryDataTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - data = self.runGeometryDataTest(req) - for item in data: - self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey)) - - def testGetGeometryDataWithLocNames(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - req.setLocationNames(*self.editAreaNames) - data = self.runGeometryDataTest(req) - for item in data: - self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey)) - self.assertIn(item.getLocationName(), self.editAreaNames) - - def testGetGeometryDataWithGroups(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - req.addIdentifier(self.groupKey, RequestConstraint.new('in', self.groups)) - data = self.runGeometryDataTest(req) - for item in data: - self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey)) - self.assertIn(item.getAttribute(self.groupKey), self.groups) - - def testGetGeometryDataWithLocNamesAndGroupsThrowException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - req.setLocationNames(*self.editAreaNames) - req.addIdentifier(self.groupKey, RequestConstraint.new('in', self.groups)) - with self.assertRaises(ThriftRequestException): - self.runGeometryDataTest(req) - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier(self.siteIdKey, params.SITE_ID) - req.setEnvelope(params.ENVELOPE) - data = self.runGeometryDataTest(req) - for item in data: - self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey)) - self.assertTrue(params.ENVELOPE.intersects(item.getGeometry())) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = 
set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setLocationNames(*self.editAreaNames) - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geomData = self._runConstraintTest(self.siteIdKey, '=', params.SITE_ID) - for record in geomData: - self.assertEqual(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithEqualsUnicode(self): - geomData = self._runConstraintTest(self.siteIdKey, '=', params.SITE_ID.decode('unicode-escape')) - for record in geomData: - self.assertEqual(record.getAttribute(self.siteIdKey), params.SITE_ID) - - # No numeric tests since no numeric identifiers are available. - - def testGetDataWithEqualsNone(self): - geomData = self._runConstraintTest(self.siteIdKey, '=', None) - for record in geomData: - self.assertIsNone(record.getAttribute(self.siteIdKey)) - - def testGetDataWithNotEquals(self): - geomData = self._runConstraintTest(self.siteIdKey, '!=', params.SITE_ID) - for record in geomData: - self.assertNotEqual(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithNotEqualsNone(self): - geomData = self._runConstraintTest(self.siteIdKey, '!=', None) - for record in geomData: - self.assertIsNotNone(record.getAttribute(self.siteIdKey)) - - def testGetDataWithGreaterThan(self): - geomData = self._runConstraintTest(self.siteIdKey, '>', params.SITE_ID) - for record in geomData: - self.assertGreater(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithLessThan(self): - geomData = self._runConstraintTest(self.siteIdKey, '<', params.SITE_ID) - for record in geomData: - self.assertLess(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithGreaterThanEquals(self): - geomData = self._runConstraintTest(self.siteIdKey, '>=', params.SITE_ID) - for record in geomData: - self.assertGreaterEqual(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithLessThanEquals(self): - geomData = self._runConstraintTest(self.siteIdKey, '<=', params.SITE_ID) - for record in geomData: - self.assertLessEqual(record.getAttribute(self.siteIdKey), params.SITE_ID) - - def testGetDataWithInTuple(self): - collection = (params.SITE_ID,) - geomData = self._runConstraintTest(self.siteIdKey, 'in', collection) - for record in geomData: - self.assertIn(record.getAttribute(self.siteIdKey), collection) - - def testGetDataWithInList(self): - collection = [params.SITE_ID,] - geomData = self._runConstraintTest(self.siteIdKey, 'in', collection) - for record in geomData: - self.assertIn(record.getAttribute(self.siteIdKey), collection) - - def testGetDataWithInGenerator(self): - collection = (params.SITE_ID,) - generator = (item for item in collection) - geomData = self._runConstraintTest(self.siteIdKey, 'in', generator) - for record in geomData: - self.assertIn(record.getAttribute(self.siteIdKey), collection) - - def testGetDataWithNotInList(self): - collection = [params.SITE_ID,] - geomData = self._runConstraintTest(self.siteIdKey, 'not in', collection) - for record in geomData: - 
self.assertNotIn(record.getAttribute(self.siteIdKey), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest(self.siteIdKey, 'junk', params.SITE_ID) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest(self.siteIdKey, '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest(self.siteIdKey, 'in', []) diff --git a/pythonPackages/ufpy/test/dafTests/testGrid.py b/pythonPackages/ufpy/test/dafTests/testGrid.py deleted file mode 100644 index 34e8f72bf6..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testGrid.py +++ /dev/null @@ -1,288 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from shapely.geometry import box, Point -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for grid data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. 
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 tgurney Typo in id values test -# 07/06/16 5728 mapeters Add advanced query tests -# 08/03/16 5728 mapeters Add additional identifiers to testGetDataWith* -# tests to shorten run time and prevent EOFError -# 10/13/16 5942 bsteffen Test envelopes -# 11/08/16 5985 tgurney Skip certain tests when no -# data is available -# 12/07/16 5981 tgurney Parameterize -# 01/06/17 5981 tgurney Skip envelope test when no -# data is available -# - - -class GridTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for grid data""" - - datatype = 'grid' - - model = 'GFS160' - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - self.runLocationsTest(req) - - def testGetAvailableLevels(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - self.runLevelsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - req.setLevels('2FHAG') - self.runTimesTest(req) - - def testGetGridData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - req.setLevels('2FHAG') - req.setParameters('T') - self.runGridDataTest(req) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', 'ENSEMBLE') - req.setLevels('2FHAG') - req.setParameters('T') - idValues = DAL.getIdentifierValues(req, 'info.ensembleId') - self.assertTrue(hasattr(idValues, '__iter__')) - if idValues: - self.assertIn('ctl1', idValues) - self.assertIn('p1', idValues) - self.assertIn('n1', idValues) - else: - raise unittest.SkipTest("no data available") - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - - def testGetDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('info.datasetId', self.model) - req.setLevels('2FHAG') - req.setParameters('T') - req.setEnvelope(params.ENVELOPE) - gridData = self.runGridDataTest(req) - if len(gridData) == 0: - raise unittest.SkipTest("No data available") - lons, lats = gridData[0].getLatLonCoords() - lons = lons.reshape(-1) - lats = lats.reshape(-1) - - # Ensure all points are within one degree of the original box - # to allow slight margin of error for reprojection distortion. 
- testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1, - params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1 ) - - for i in range(len(lons)): - self.assertTrue(testEnv.contains(Point(lons[i], lats[i]))) - - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.addIdentifier('info.datasetId', self.model) - req.addIdentifier('info.level.masterLevel.name', 'FHAG') - req.addIdentifier('info.level.leveltwovalue', 3000.0) - req.setParameters('T') - return self.runGridDataTest(req) - - def testGetDataWithEqualsString(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', '2000.0') - for record in gridData: - self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithEqualsUnicode(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', u'2000.0') - for record in gridData: - self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithEqualsInt(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000) - for record in gridData: - self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000) - - def testGetDataWithEqualsLong(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000L) - for record in gridData: - self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000) - - def testGetDataWithEqualsFloat(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000.0) - for record in gridData: - self.assertEqual(round(record.getAttribute('info.level.levelonevalue'), 1), 2000.0) - - def testGetDataWithEqualsNone(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '=', None) - for record in gridData: - self.assertIsNone(record.getAttribute('info.level.levelonevalue')) - - def testGetDataWithNotEquals(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '!=', 2000.0) - for record in gridData: - self.assertNotEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithNotEqualsNone(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '!=', None) - for record in gridData: - self.assertIsNotNone(record.getAttribute('info.level.levelonevalue')) - - def testGetDataWithGreaterThan(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '>', 2000.0) - for record in gridData: - self.assertGreater(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithLessThan(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '<', 2000.0) - for record in gridData: - self.assertLess(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithGreaterThanEquals(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '>=', 2000.0) - for record in gridData: - self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithLessThanEquals(self): - gridData = self._runConstraintTest('info.level.levelonevalue', '<=', 2000.0) - for record in gridData: - self.assertLessEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - - def testGetDataWithInList(self): - collection = [2000.0, 1000.0] - gridData = self._runConstraintTest('info.level.levelonevalue', 'in', collection) - for record in gridData: - 
self.assertIn(record.getAttribute('info.level.levelonevalue'), collection) - - def testGetDataWithNotInList(self): - collection = [2000.0, 1000.0] - gridData = self._runConstraintTest('info.level.levelonevalue', 'not in', collection) - for record in gridData: - self.assertNotIn(record.getAttribute('info.level.levelonevalue'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('info.level.levelonevalue', 'junk', '2000.0') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('info.level.levelonevalue', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('info.level.levelonevalue', 'in', []) - - def testGetDataWithLevelOneAndLevelTwoConstraints(self): - req = DAL.newDataRequest(self.datatype) - levelOneConstraint = RequestConstraint.new('>=', 2000.0) - req.addIdentifier('info.level.levelonevalue', levelOneConstraint) - levelTwoConstraint = RequestConstraint.new('in', (4000.0, 5000.0)) - req.addIdentifier('info.level.leveltwovalue', levelTwoConstraint) - req.addIdentifier('info.datasetId', self.model) - req.addIdentifier('info.level.masterLevel.name', 'FHAG') - req.setParameters('T') - gridData = self.runGridDataTest(req) - for record in gridData: - self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) - self.assertIn(record.getAttribute('info.level.leveltwovalue'), (4000.0, 5000.0)) - - def testGetDataWithMasterLevelNameInConstraint(self): - req = DAL.newDataRequest(self.datatype) - masterLevelConstraint = RequestConstraint.new('in', ('FHAG', 'K')) - req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) - req.addIdentifier('info.level.levelonevalue', 2000.0) - req.addIdentifier('info.level.leveltwovalue', 3000.0) - req.addIdentifier('info.datasetId', 'GFS160') - req.setParameters('T') - gridData = self.runGridDataTest(req) - for record in gridData: - self.assertIn(record.getAttribute('info.level.masterLevel.name'), ('FHAG', 'K')) - - def testGetDataWithDatasetIdInConstraint(self): - req = DAL.newDataRequest(self.datatype) - # gfs160 is alias for GFS160 in this namespace - req.addIdentifier('namespace', 'gfeParamInfo') - datasetIdConstraint = RequestConstraint.new('in', ('gfs160', 'HRRR')) - req.addIdentifier('info.datasetId', datasetIdConstraint) - req.addIdentifier('info.level.masterLevel.name', 'FHAG') - req.addIdentifier('info.level.levelonevalue', 2000.0) - req.addIdentifier('info.level.leveltwovalue', 3000.0) - req.setParameters('T') - gridData = self.runGridDataTest(req, testSameShape=False) - for record in gridData: - self.assertIn(record.getAttribute('info.datasetId'), ('gfs160', 'HRRR')) - - def testGetDataWithMasterLevelNameLessThanEqualsConstraint(self): - req = DAL.newDataRequest(self.datatype) - masterLevelConstraint = RequestConstraint.new('<=', 'K') - req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) - req.addIdentifier('info.level.levelonevalue', 2000.0) - req.addIdentifier('info.level.leveltwovalue', 3000.0) - req.addIdentifier('info.datasetId', 'GFS160') - req.setParameters('T') - gridData = self.runGridDataTest(req) - for record in gridData: - self.assertLessEqual(record.getAttribute('info.level.masterLevel.name'), 'K') - - def testGetDataWithComplexConstraintAndNamespaceThrowsException(self): - req = DAL.newDataRequest(self.datatype) - 
req.addIdentifier('namespace', 'grib') - masterLevelConstraint = RequestConstraint.new('<=', 'K') - req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) - req.addIdentifier('info.datasetId', 'GFS160') - req.setParameters('T') - with self.assertRaises(ThriftRequestException) as cm: - self.runGridDataTest(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - self.assertIn('info.level.masterLevel.name', str(cm.exception)) diff --git a/pythonPackages/ufpy/test/dafTests/testHydro.py b/pythonPackages/ufpy/test/dafTests/testHydro.py deleted file mode 100644 index 8f1accef88..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testHydro.py +++ /dev/null @@ -1,268 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -import datetime -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange -import baseDafTestCase -import unittest - -# -# Test DAF support for hydro data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/21/16 5596 tgurney Add tests to verify #5596 -# 04/26/16 5587 tgurney Add identifier values tests -# 06/09/16 5574 tgurney Add advanced query tests -# 06/13/16 5574 tgurney Fix checks for None -# 06/21/16 5548 tgurney Skip tests that cause errors -# 06/30/16 5725 tgurney Add test for NOT IN -# 10/06/16 5926 dgilling Add additional location tests. 
-# -# - - -class HydroTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for hydro data""" - - datatype = 'hydro' - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - self.runParametersTest(req) - - def testGetAvailableParametersFullyQualifiedTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'public.height') - self.runParametersTest(req) - - def testGetAvailableParamsNoTableThrowsInvalidIdentifiersException(self): - req = DAL.newDataRequest(self.datatype) - with self.assertRaises(ThriftRequestException) as cm: - self.runParametersTest(req) - self.assertIn('InvalidIdentifiersException', str(cm.exception)) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - self.runLocationsTest(req) - - def testGetAvailableLocationsWithConstraint(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - req.addIdentifier('value', RequestConstraint.new('>', 5.0)) - self.runLocationsTest(req) - - def testGetAvailableLocationsWithInvalidTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'city') - with self.assertRaises(ThriftRequestException) as cm: - DAL.getAvailableLocationNames(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - req.setParameters('lid', 'quality_code') - self.runTimesTest(req) - - def testGetGeometryDataWithoutLocationSpecified(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - req.setParameters('lid', 'quality_code') - self.runGeometryDataTest(req) - - def testGetGeometryDataWithLocationSpecified(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'fcstheight') - locs = DAL.getAvailableLocationNames(req) - if locs: - req.setLocationNames(locs[0]) - req.setParameters('probability', 'value') - data = self.runGeometryDataTest(req) - self.assertNotEqual(len(data), 0) - - def testGetTableIdentifierValues(self): - self.runGetIdValuesTest(['table']) - - def testGetColumnIdValuesWithTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - idValues = DAL.getIdentifierValues(req, 'lid') - self.assertTrue(hasattr(idValues, '__iter__')) - - @unittest.skip('avoid EDEX error') - def testGetColumnIdValuesWithNonexistentTableThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'nonexistentjunk') - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'lid') - - def testGetColumnIdValuesWithoutTableThrowsException(self): - req = DAL.newDataRequest(self.datatype) - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'lid') - - @unittest.skip('avoid EDEX error') - def testGetNonexistentColumnIdValuesThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'nonexistentjunk') - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - 
constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.addIdentifier('table', 'height') - req.addIdentifier('ts', 'RG') - req.setParameters('value', 'lid', 'quality_code') - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('value', '=', '3') - for record in geometryData: - self.assertEqual(record.getNumber('value'), 3) - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('value', '=', u'3') - for record in geometryData: - self.assertEqual(record.getNumber('value'), 3) - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('value', '=', 3) - for record in geometryData: - self.assertEqual(record.getNumber('value'), 3) - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('value', '=', 3L) - for record in geometryData: - self.assertEqual(record.getNumber('value'), 3L) - - def testGetDataWithEqualsFloat(self): - geometryData = self._runConstraintTest('value', '=', 3.0) - for record in geometryData: - self.assertEqual(round(record.getNumber('value'), 1), 3.0) - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('value', '=', None) - self.assertEqual(len(geometryData), 0) - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('value', '!=', 3) - for record in geometryData: - self.assertNotEqual(record.getNumber('value'), '3') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('value', '!=', None) - self.assertNotEqual(len(geometryData), 0) - for record in geometryData: - self.assertNotEqual(record.getType('value'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('value', '>', 3) - for record in geometryData: - self.assertGreater(record.getNumber('value'), 3) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('value', '<', 3) - for record in geometryData: - self.assertLess(record.getNumber('value'), 3) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('value', '>=', 3) - for record in geometryData: - self.assertGreaterEqual(record.getNumber('value'), 3) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('value', '<=', 3) - for record in geometryData: - self.assertLessEqual(record.getNumber('value'), 3) - - def testGetDataWithInTuple(self): - collection = (3, 4) - geometryData = self._runConstraintTest('value', 'in', collection) - for record in geometryData: - self.assertIn(record.getNumber('value'), collection) - - def testGetDataWithInList(self): - collection = [3, 4] - geometryData = self._runConstraintTest('value', 'in', collection) - for record in geometryData: - self.assertIn(record.getNumber('value'), collection) - - def testGetDataWithInGenerator(self): - collection = (3, 4) - generator = (item for item in collection) - geometryData = self._runConstraintTest('value', 'in', generator) - for record in geometryData: - self.assertIn(record.getNumber('value'), collection) - - def testGetDataWithNotInList(self): - collection = [3, 4] - geometryData = self._runConstraintTest('value', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getNumber('value'), collection) - - def testGetDataWithTimeRange(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'height') - req.addIdentifier('ts', 'RG') - req.setParameters('value', 
'lid', 'quality_code') - times = DAL.getAvailableTimes(req) - limitTimes = times[-self.numTimesToLimit:] - startTime = datetime.datetime.utcfromtimestamp(limitTimes[0].getRefTime().getTime()/1000) - endTime = datetime.datetime.utcnow() - tr = TimeRange(startTime, endTime) - self.runGeometryDataTestWithTimeRange(req, tr) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('value', 'junk', 3) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('value', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('value', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('3', '4', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('value', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testMaps.py b/pythonPackages/ufpy/test/dafTests/testMaps.py deleted file mode 100644 index afe1554a01..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testMaps.py +++ /dev/null @@ -1,219 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import unittest - -# -# Test DAF support for maps data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. 
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/26/16 5587 tgurney Add identifier values tests -# 06/13/16 5574 mapeters Add advanced query tests -# 06/21/16 5548 tgurney Skip tests that cause errors -# 06/30/16 5725 tgurney Add test for NOT IN -# 01/06/17 5981 tgurney Do not check data times -# -# - - -class MapsTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for maps data""" - - datatype = 'maps' - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.county') - req.addIdentifier('geomField', 'the_geom') - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.county') - req.addIdentifier('geomField', 'the_geom') - req.addIdentifier('locationField', 'cwa') - self.runLocationsTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.county') - req.addIdentifier('geomField', 'the_geom') - req.addIdentifier('inLocation', 'true') - req.addIdentifier('locationField', 'cwa') - req.setLocationNames('OAX') - req.addIdentifier('cwa', 'OAX') - req.setParameters('countyname', 'state', 'fips') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testRequestingTimesThrowsTimeAgnosticDataException(self): - req = DAL.newDataRequest(self.datatype) - self.runTimeAgnosticTest(req) - - def testGetTableIdentifierValues(self): - self.runGetIdValuesTest(['table']) - - def testGetGeomFieldIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.county') - idValues = DAL.getIdentifierValues(req, 'geomField') - for idValue in idValues: - self.assertTrue(idValue.startswith('the_geom')) - - def testGetGeomFieldIdValuesWithoutTableThrowsException(self): - with self.assertRaises(ThriftRequestException): - self.runGetIdValuesTest(['geomField']) - - def testGetColumnIdValuesWithTable(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.county') - req.addIdentifier('geomField', 'the_geom') - idValues = DAL.getIdentifierValues(req, 'state') - self.assertIn('NE', idValues) - - def testGetColumnIdValuesWithoutTableThrowsException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('geomField', 'the_geom') - with self.assertRaises(ThriftRequestException): - idValues = DAL.getIdentifierValues(req, 'state') - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier('table', 'mapdata.ffmp_basins') - req.addIdentifier('geomField', 'the_geom') - req.addIdentifier('cwa', 'OAX') - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('state', 'reservoir', 'area_sq_mi') - return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('state', '=', 'NE') - for record in geometryData: - self.assertEqual(record.getString('state'), 'NE') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('state', '=', u'NE') - for record in geometryData: - self.assertEqual(record.getString('state'), 'NE') - - def testGetDataWithEqualsInt(self): - geometryData = 
self._runConstraintTest('reservoir', '=', 1) - for record in geometryData: - self.assertEqual(record.getNumber('reservoir'), 1) - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('reservoir', '=', 1L) - for record in geometryData: - self.assertEqual(record.getNumber('reservoir'), 1) - - def testGetDataWithEqualsFloat(self): - geometryData = self._runConstraintTest('area_sq_mi', '=', 5.00) - for record in geometryData: - self.assertEqual(round(record.getNumber('area_sq_mi'), 2), 5.00) - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('state', '=', None) - for record in geometryData: - self.assertEqual(record.getType('state'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('state', '!=', 'NE') - for record in geometryData: - self.assertNotEqual(record.getString('state'), 'NE') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('state', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('state'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('area_sq_mi', '>', 5) - for record in geometryData: - self.assertGreater(record.getNumber('area_sq_mi'), 5) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('area_sq_mi', '<', 5) - for record in geometryData: - self.assertLess(record.getNumber('area_sq_mi'), 5) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('area_sq_mi', '>=', 5) - for record in geometryData: - self.assertGreaterEqual(record.getNumber('area_sq_mi'), 5) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('area_sq_mi', '<=', 5) - for record in geometryData: - self.assertLessEqual(record.getNumber('area_sq_mi'), 5) - - def testGetDataWithInTuple(self): - collection = ('NE', 'TX') - geometryData = self._runConstraintTest('state', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithInList(self): - collection = ['NE', 'TX'] - geometryData = self._runConstraintTest('state', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithInGenerator(self): - collection = ('NE', 'TX') - generator = (item for item in collection) - geometryData = self._runConstraintTest('state', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('state'), collection) - - def testGetDataWithNotInList(self): - collection = ['IA', 'TX'] - geometryData = self._runConstraintTest('state', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('state'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('state', 'junk', 'NE') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('state', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('state', 'in', []) diff --git a/pythonPackages/ufpy/test/dafTests/testModelSounding.py b/pythonPackages/ufpy/test/dafTests/testModelSounding.py deleted file mode 100644 index c021fa510d..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testModelSounding.py +++ /dev/null @@ -1,217 +0,0 @@ -## -# This software was developed and / or 
modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for modelsounding data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 bsteffen Add getIdentifierValues tests -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 11/10/16 5985 tgurney Mark expected failures prior -# to 17.3.1 -# 12/07/16 5981 tgurney Parameterize -# 12/19/16 5981 tgurney Remove pre-17.3 expected fails -# 12/20/16 5981 tgurney Add envelope test -# -# - - -class ModelSoundingTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for modelsounding data""" - - datatype = "modelsounding" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("reportType", "ETA") - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("reportType", "ETA") - req.setLocationNames(params.OBS_STATION) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("reportType", "ETA") - req.setLocationNames(params.OBS_STATION) - req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2") - print("Testing getGeometryData()") - geomData = DAL.getGeometryData(req) - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - print("level=" + record.getLevel(), end="") - # One dimensional parameters are reported on the 0.0UNKNOWN level. - # 2D parameters are reported on MB levels from pressure. 
- if record.getLevel() == "0.0UNKNOWN": - print(" sfcPress=" + record.getString("sfcPress") + - record.getUnit("sfcPress"), end="") - print(" temp2=" + record.getString("temp2") + - record.getUnit("temp2"), end="") - print(" q2=" + record.getString("q2") + - record.getUnit("q2"), end="") - else: - print(" pressure=" + record.getString("pressure") + - record.getUnit("pressure"), end="") - print(" temperature=" + record.getString("temperature") + - record.getUnit("temperature"), end="") - print(" specHum=" + record.getString("specHum") + - record.getUnit("specHum"), end="") - print(" geometry=" + str(record.getGeometry())) - print("getGeometryData() complete\n\n") - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("reportType", "ETA") - req.setEnvelope(params.ENVELOPE) - req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2") - print("Testing getGeometryData()") - data = DAL.getGeometryData(req) - for item in data: - self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - self.runGetIdValuesTest(optionalIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.setParameters('dataURI') - req.setLocationNames(params.OBS_STATION, 'KORD', 'KOFK', 'KLNK') - req.addIdentifier(key, constraint) - return self.runGeometryDataTest(req) - - # We can filter on reportType but it is not possible to retrieve the value - # of reportType directly. We can look inside the dataURI instead. - # - # For cases like '<=' and '>' the best we can do is send the request and - # see if it throws back an exception. - # - # Can also eyeball the number of returned records. - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('reportType', '=', 'ETA') - for record in geometryData: - self.assertIn('/ETA/', record.getString('dataURI')) - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('reportType', '=', u'ETA') - for record in geometryData: - self.assertIn('/ETA/', record.getString('dataURI')) - - # No numeric tests since no numeric identifiers are available. 
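[Editor's note] The comment in the deleted test above captures the key API constraint here: reportType can drive a modelsounding query but is not itself a retrievable parameter, so the tests parse it back out of dataURI. A minimal sketch of that pattern, assuming an EDEX host is already set and using 'KOMA' as a stand-in for params.OBS_STATION:

    from ufpy.dataaccess import DataAccessLayer as DAL
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    req = DAL.newDataRequest('modelsounding')
    req.addIdentifier('reportType', RequestConstraint.new('=', 'ETA'))
    req.setLocationNames('KOMA')   # example station id
    req.setParameters('dataURI')   # reportType itself cannot be requested as a parameter

    times = DAL.getAvailableTimes(req)
    if times:
        for rec in DAL.getGeometryData(req, times[-1:]):
            # the report type is embedded in the URI, e.g. '.../ETA/...'
            assert '/ETA/' in rec.getString('dataURI')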
- - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '=', None) - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('reportType', '!=', 'ETA') - for record in geometryData: - self.assertNotIn('/ETA/', record.getString('dataURI')) - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '!=', None) - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('reportType', '>', 'ETA') - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('reportType', '<', 'ETA') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('reportType', '>=', 'ETA') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('reportType', '<=', 'ETA') - - def testGetDataWithInTuple(self): - collection = ('ETA', 'GFS') - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - dataURI = record.getString('dataURI') - self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) - - def testGetDataWithInList(self): - collection = ['ETA', 'GFS'] - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - dataURI = record.getString('dataURI') - self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) - - def testGetDataWithInGenerator(self): - collection = ('ETA', 'GFS') - generator = (item for item in collection) - geometryData = self._runConstraintTest('reportType', 'in', generator) - for record in geometryData: - dataURI = record.getString('dataURI') - self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) - - def testGetDataWithNotInList(self): - collection = ['ETA', 'GFS'] - geometryData = self._runConstraintTest('reportType', 'not in', collection) - for record in geometryData: - dataURI = record.getString('dataURI') - self.assertTrue('/ETA/' not in dataURI and '/GFS/' not in dataURI) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'junk', 'ETA') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('ETA', 'GFS', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testObs.py b/pythonPackages/ufpy/test/dafTests/testObs.py deleted file mode 100644 index 4fafa1c0cd..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testObs.py +++ /dev/null @@ -1,186 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for obs data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 bsteffen Add getIdentifierValues tests -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Add envelope test -# -# - - -class ObsTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for obs data""" - - datatype = "obs" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.OBS_STATION) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.OBS_STATION) - req.setParameters("temperature", "seaLevelPress", "dewpoint") - data = self.runGeometryDataTest(req) - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setParameters("temperature", "seaLevelPress", "dewpoint") - data = self.runGeometryDataTest(req) - for item in data: - self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - self.runGetIdValuesTest(optionalIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.setParameters("temperature", "reportType") - req.setLocationNames(params.OBS_STATION) - req.addIdentifier(key, constraint) - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('reportType', '=', 'METAR') - for record in geometryData: - self.assertEqual(record.getString('reportType'), 'METAR') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('reportType', '=', u'METAR') - for record in geometryData: - self.assertEqual(record.getString('reportType'), 'METAR') - - # No numeric tests since no numeric identifiers are available. 
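# The envelope test above depends on params.ENVELOPE from the test harness; an
# envelope is simply a shapely geometry. A standalone sketch of the same
# request with made-up box coordinates (any lon/lat box works; a reachable
# EDEX server is assumed):
from shapely.geometry import box
from ufpy.dataaccess import DataAccessLayer as DAL

envelope = box(-90.0, 40.0, -88.0, 42.0)   # minx, miny, maxx, maxy
req = DAL.newDataRequest('obs')
req.setEnvelope(envelope)
req.setParameters('temperature', 'seaLevelPress', 'dewpoint')
for record in DAL.getGeometryData(req):
    assert envelope.contains(record.getGeometry())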
- - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '=', None) - for record in geometryData: - self.assertEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('reportType', '!=', 'METAR') - for record in geometryData: - self.assertNotEqual(record.getString('reportType'), 'METAR') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('reportType', '>', 'METAR') - for record in geometryData: - self.assertGreater(record.getString('reportType'), 'METAR') - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('reportType', '<', 'METAR') - for record in geometryData: - self.assertLess(record.getString('reportType'), 'METAR') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('reportType', '>=', 'METAR') - for record in geometryData: - self.assertGreaterEqual(record.getString('reportType'), 'METAR') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('reportType', '<=', 'METAR') - for record in geometryData: - self.assertLessEqual(record.getString('reportType'), 'METAR') - - def testGetDataWithInTuple(self): - collection = ('METAR', 'SPECI') - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInList(self): - collection = ['METAR', 'SPECI'] - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInGenerator(self): - collection = ('METAR', 'SPECI') - generator = (item for item in collection) - geometryData = self._runConstraintTest('reportType', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithNotInList(self): - collection = ['METAR', 'SPECI'] - geometryData = self._runConstraintTest('reportType', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('reportType'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'junk', 'METAR') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('METAR', 'SPECI', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testPirep.py b/pythonPackages/ufpy/test/dafTests/testPirep.py deleted file mode 100644 index d2077c4187..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testPirep.py +++ /dev/null @@ -1,91 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for pirep data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 12/07/16 5981 tgurney Parameterize -# 12/20/16 5981 tgurney Add envelope test -# -# - - -class PirepTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for pirep data""" - - datatype = "pirep" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.AIRPORT) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames(params.AIRPORT) - req.setParameters("temperature", "windSpeed", "hazardType", "turbType") - print("Testing getGeometryData()") - geomData = DAL.getGeometryData(req) - self.assertIsNotNone(geomData) - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - print("level=", record.getLevel(), end="") - # One dimensional parameters are reported on the 0.0UNKNOWN level. - # 2D parameters are reported on MB levels from pressure. - if record.getLevel() == "0.0UNKNOWN": - print(" temperature=" + record.getString("temperature") + record.getUnit("temperature"), end="") - print(" windSpeed=" + record.getString("windSpeed") + record.getUnit("windSpeed"), end="") - else: - print(" hazardType=" + record.getString("hazardType"), end="") - print(" turbType=" + record.getString("turbType"), end="") - print(" geometry=", record.getGeometry()) - print("getGeometryData() complete\n") - - def testGetGeometryDataWithEnvelope(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("temperature", "windSpeed", "hazardType", "turbType") - req.setEnvelope(params.ENVELOPE) - print("Testing getGeometryData()") - data = DAL.getGeometryData(req) - for item in data: - self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) diff --git a/pythonPackages/ufpy/test/dafTests/testPracticeWarning.py b/pythonPackages/ufpy/test/dafTests/testPracticeWarning.py deleted file mode 100644 index 2ee820cccf..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testPracticeWarning.py +++ /dev/null @@ -1,49 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. 
Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseDafTestCase -import testWarning - -import unittest - -# -# Test DAF support for practicewarning data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/10/16 5548 tgurney Inherit all tests from -# warning -# - - -class PracticeWarningTestCase(testWarning.WarningTestCase): - """Test DAF support for practicewarning data""" - - datatype = "practicewarning" - - # All tests taken from testWarning diff --git a/pythonPackages/ufpy/test/dafTests/testProfiler.py b/pythonPackages/ufpy/test/dafTests/testProfiler.py deleted file mode 100644 index b249836748..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testProfiler.py +++ /dev/null @@ -1,80 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseDafTestCase -import unittest - -# -# Test DAF support for profiler data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# -# - - -class ProfilerTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for profiler data""" - - datatype = "profiler" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("temperature", "pressure", "uComponent", "vComponent") - - print("Testing getGeometryData()") - - geomData = DAL.getGeometryData(req) - self.assertIsNotNone(geomData) - print("Number of geometry records: " + str(len(geomData))) - print("Sample geometry data:") - for record in geomData[:self.sampleDataLimit]: - print("level:", record.getLevel(), end="") - # One dimensional parameters are reported on the 0.0UNKNOWN level. - # 2D parameters are reported on MB levels from pressure. 
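            # (For example, a record on level "0.0UNKNOWN" might carry the 1-D
            # temperature/pressure values, while a record on a pressure level
            # such as "850.0MB" might carry uComponent/vComponent; the exact
            # level strings depend on the data, hence the branch below.)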
- if record.getLevel() == "0.0UNKNOWN": - print(" temperature=" + record.getString("temperature") + record.getUnit("temperature"), end="") - print(" pressure=" + record.getString("pressure") + record.getUnit("pressure"), end="") - else: - print(" uComponent=" + record.getString("uComponent") + record.getUnit("uComponent"), end="") - print(" vComponent=" + record.getString("vComponent") + record.getUnit("vComponent"), end="") - print(" geometry:", record.getGeometry()) - - print("getGeometryData() complete\n\n") diff --git a/pythonPackages/ufpy/test/dafTests/testRadarGraphics.py b/pythonPackages/ufpy/test/dafTests/testRadarGraphics.py deleted file mode 100644 index 0cb903d916..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testRadarGraphics.py +++ /dev/null @@ -1,95 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import unittest - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -from ufpy.dataaccess import DataAccessLayer as DAL - -import baseRadarTestCase -import params - - -# -# Test DAF support for radar graphics data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/25/16 2671 tgurney Initial creation. -# 08/31/16 2671 tgurney Add mesocyclone -# 09/08/16 2671 tgurney Add storm track -# 09/27/16 2671 tgurney Add hail index -# 09/30/16 2671 tgurney Add TVS -# 12/07/16 5981 tgurney Parameterize -# 12/19/16 5981 tgurney Do not check data times on -# returned data -# -# -class RadarGraphicsTestCase(baseRadarTestCase.BaseRadarTestCase): - """Test DAF support for radar data""" - - datatype = 'radar' - - def runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('166') - # TODO: Cannot check datatimes on the result because the times returned - # by getAvailableTimes have level = -1.0, while the time on the actual - # data has the correct level set (>= 0.0). 
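        # (In other words, two DataTimes for the same instant still compare
        # unequal here because one carries level -1.0 and the other a real
        # level >= 0.0, which is why checkDataTimes=False is passed below.)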
- return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataMeltingLayer(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters('166') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataMesocyclone(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters('141') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataStormTrack(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters('58') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataHailIndex(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters('59') - self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetGeometryDataTVS(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters('61') - self.runGeometryDataTest(req, checkDataTimes=False) diff --git a/pythonPackages/ufpy/test/dafTests/testRadarGrid.py b/pythonPackages/ufpy/test/dafTests/testRadarGrid.py deleted file mode 100644 index ed82f849ab..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testRadarGrid.py +++ /dev/null @@ -1,61 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from ufpy.dataaccess import DataAccessLayer as DAL -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -import baseRadarTestCase -import params -import unittest - -# -# Test DAF support for radar grid data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/25/16 2671 tgurney Initial creation -# -# - - -class RadarTestCase(baseRadarTestCase.BaseRadarTestCase): - """Test DAF support for radar data""" - - datatype = 'radar' - - parameterList = ['94'] - - def runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters(*self.parameterList) - # Don't test shapes since they may differ. - return self.runGridDataTest(req, testSameShape=False) - - def testGetGridData(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - req.setLocationNames(self.radarLoc) - req.setParameters(*self.parameterList) - # Don't test shapes since they may differ. 
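        # (Shapes can still be inspected per record when needed, e.g.:
        #     for rec in DAL.getGridData(req):
        #         print(rec.getRawData().shape)
        # since radar grid dimensions can vary from product to product.)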
- self.runGridDataTest(req, testSameShape=False) diff --git a/pythonPackages/ufpy/test/dafTests/testRadarSpatial.py b/pythonPackages/ufpy/test/dafTests/testRadarSpatial.py deleted file mode 100644 index 354eb4b580..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testRadarSpatial.py +++ /dev/null @@ -1,180 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from shapely.geometry import box -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import params -import unittest - -# -# Test DAF support for radar_spatial data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 05/26/16 5587 njensen Added testGetIdentifierValues() -# 06/01/16 5587 tgurney Move testIdentifiers() to -# superclass -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 12/07/16 5981 tgurney Parameterize -# 01/06/17 5981 tgurney Do not check data times -# -# - - -class RadarSpatialTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for radar_spatial data""" - - datatype = "radar_spatial" - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - req.setEnvelope(params.ENVELOPE) - self.runLocationsTest(req) - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetIdentifierValues(self): - self.runGetIdValuesTest(['wfo_id']) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames("TORD", "TMDW") - req.setParameters("wfo_id", "name", "elevmeter") - self.runGeometryDataTest(req, checkDataTimes=False) - - def testRequestingTimesThrowsTimeAgnosticDataException(self): - req = DAL.newDataRequest(self.datatype) - self.runTimeAgnosticTest(req) - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters('elevmeter', 'eqp_elv', 'wfo_id', 'immutablex') - return self.runGeometryDataTest(req, checkDataTimes=False) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('wfo_id', '=', params.SITE_ID) - for record in geometryData: - self.assertEqual(record.getString('wfo_id'), params.SITE_ID) - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('wfo_id', '=', unicode(params.SITE_ID)) - for record in geometryData: - self.assertEqual(record.getString('wfo_id'), params.SITE_ID) - - def testGetDataWithEqualsInt(self): - geometryData = 
self._runConstraintTest('immutablex', '=', 57) - for record in geometryData: - self.assertEqual(record.getNumber('immutablex'), 57) - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('immutablex', '=', 57L) - for record in geometryData: - self.assertEqual(record.getNumber('immutablex'), 57) - - def testGetDataWithEqualsFloat(self): - geometryData = self._runConstraintTest('immutablex', '=', 57.0) - for record in geometryData: - self.assertEqual(round(record.getNumber('immutablex'), 1), 57.0) - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('wfo_id', '=', None) - for record in geometryData: - self.assertEqual(record.getType('wfo_id'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('wfo_id', '!=', params.SITE_ID) - for record in geometryData: - self.assertNotEquals(record.getString('wfo_id'), params.SITE_ID) - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('wfo_id', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('wfo_id'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('elevmeter', '>', 1000) - for record in geometryData: - self.assertGreater(record.getNumber('elevmeter'), 1000) - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('elevmeter', '<', 1000) - for record in geometryData: - self.assertLess(record.getNumber('elevmeter'), 1000) - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('eqp_elv', '>=', 1295) - for record in geometryData: - self.assertGreaterEqual(record.getNumber('eqp_elv'), 1295) - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('eqp_elv', '<=', 138) - for record in geometryData: - self.assertLessEqual(record.getNumber('eqp_elv'), 138) - - def testGetDataWithInTuple(self): - collection = (params.SITE_ID, 'GID') - geometryData = self._runConstraintTest('wfo_id', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('wfo_id'), collection) - - def testGetDataWithInList(self): - collection = [params.SITE_ID, 'GID'] - geometryData = self._runConstraintTest('wfo_id', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('wfo_id'), collection) - - def testGetDataWithInGenerator(self): - collection = (params.SITE_ID, 'GID') - generator = (item for item in collection) - geometryData = self._runConstraintTest('wfo_id', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('wfo_id'), collection) - - def testGetDataWithNotInList(self): - collection = [params.SITE_ID, 'GID'] - geometryData = self._runConstraintTest('wfo_id', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('wfo_id'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('wfo_id', 'junk', params.SITE_ID) - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('wfo_id', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('wfo_id', 'in', []) diff --git a/pythonPackages/ufpy/test/dafTests/testRequestConstraint.py b/pythonPackages/ufpy/test/dafTests/testRequestConstraint.py deleted file mode 100644 index 472c0d3ffb..0000000000 --- 
a/pythonPackages/ufpy/test/dafTests/testRequestConstraint.py +++ /dev/null @@ -1,245 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -import unittest - -# -# Unit tests for Python implementation of RequestConstraint -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/22/16 2416 tgurney Initial creation -# -# - - -class RequestConstraintTestCase(unittest.TestCase): - - def _newRequestConstraint(self, constraintType, constraintValue): - constraint = RequestConstraint() - constraint.constraintType = constraintType - constraint.constraintValue = constraintValue - return constraint - - def testEvaluateEquals(self): - new = RequestConstraint.new - self.assertTrue(new('=', 3).evaluate(3)) - self.assertTrue(new('=', 3).evaluate('3')) - self.assertTrue(new('=', '3').evaluate(3)) - self.assertTrue(new('=', 12345).evaluate(12345L)) - self.assertTrue(new('=', 'a').evaluate('a')) - self.assertTrue(new('=', 'a').evaluate(u'a')) - self.assertTrue(new('=', 1.0001).evaluate(2.0 - 0.999999)) - self.assertTrue(new('=', 1.00001).evaluate(1)) - self.assertFalse(new('=', 'a').evaluate(['a'])) - self.assertFalse(new('=', 'a').evaluate(['b'])) - self.assertFalse(new('=', 3).evaluate(4)) - self.assertFalse(new('=', 4).evaluate(3)) - self.assertFalse(new('=', 'a').evaluate('z')) - - def testEvaluateNotEquals(self): - new = RequestConstraint.new - self.assertTrue(new('!=', 'a').evaluate(['a'])) - self.assertTrue(new('!=', 'a').evaluate(['b'])) - self.assertTrue(new('!=', 3).evaluate(4)) - self.assertTrue(new('!=', 4).evaluate(3)) - self.assertTrue(new('!=', 'a').evaluate('z')) - self.assertFalse(new('!=', 3).evaluate('3')) - self.assertFalse(new('!=', '3').evaluate(3)) - self.assertFalse(new('!=', 3).evaluate(3)) - self.assertFalse(new('!=', 12345).evaluate(12345L)) - self.assertFalse(new('!=', 'a').evaluate('a')) - self.assertFalse(new('!=', 'a').evaluate(u'a')) - self.assertFalse(new('!=', 1.0001).evaluate(2.0 - 0.9999)) - - def testEvaluateGreaterThan(self): - new = RequestConstraint.new - self.assertTrue(new('>', 1.0001).evaluate(1.0002)) - self.assertTrue(new('>', 'a').evaluate('b')) - self.assertTrue(new('>', 3).evaluate(4)) - self.assertFalse(new('>', 20).evaluate(3)) - self.assertFalse(new('>', 12345).evaluate(12345L)) - self.assertFalse(new('>', 'a').evaluate('a')) - self.assertFalse(new('>', 'z').evaluate('a')) - self.assertFalse(new('>', 4).evaluate(3)) - - def testEvaluateGreaterThanEquals(self): - new = RequestConstraint.new - self.assertTrue(new('>=', 3).evaluate(3)) - self.assertTrue(new('>=', 12345).evaluate(12345L)) - self.assertTrue(new('>=', 'a').evaluate('a')) - self.assertTrue(new('>=', 1.0001).evaluate(1.0002)) - self.assertTrue(new('>=', 'a').evaluate('b')) - 
self.assertTrue(new('>=', 3).evaluate(20)) - self.assertFalse(new('>=', 1.0001).evaluate(1.0)) - self.assertFalse(new('>=', 'z').evaluate('a')) - self.assertFalse(new('>=', 40).evaluate(3)) - - def testEvaluateLessThan(self): - new = RequestConstraint.new - self.assertTrue(new('<', 'z').evaluate('a')) - self.assertTrue(new('<', 30).evaluate(4)) - self.assertFalse(new('<', 3).evaluate(3)) - self.assertFalse(new('<', 12345).evaluate(12345L)) - self.assertFalse(new('<', 'a').evaluate('a')) - self.assertFalse(new('<', 1.0001).evaluate(1.0002)) - self.assertFalse(new('<', 'a').evaluate('b')) - self.assertFalse(new('<', 3).evaluate(40)) - - def testEvaluateLessThanEquals(self): - new = RequestConstraint.new - self.assertTrue(new('<=', 'z').evaluate('a')) - self.assertTrue(new('<=', 20).evaluate(3)) - self.assertTrue(new('<=', 3).evaluate(3)) - self.assertTrue(new('<=', 12345).evaluate(12345L)) - self.assertTrue(new('<=', 'a').evaluate('a')) - self.assertFalse(new('<=', 1.0001).evaluate(1.0002)) - self.assertFalse(new('<=', 'a').evaluate('b')) - self.assertFalse(new('<=', 4).evaluate(30)) - - def testEvaluateIsNull(self): - new = RequestConstraint.new - self.assertTrue(new('=', None).evaluate(None)) - self.assertTrue(new('=', None).evaluate('null')) - self.assertFalse(new('=', None).evaluate(())) - self.assertFalse(new('=', None).evaluate(0)) - self.assertFalse(new('=', None).evaluate(False)) - - def testEvaluateIsNotNull(self): - new = RequestConstraint.new - self.assertTrue(new('!=', None).evaluate(())) - self.assertTrue(new('!=', None).evaluate(0)) - self.assertTrue(new('!=', None).evaluate(False)) - self.assertFalse(new('!=', None).evaluate(None)) - self.assertFalse(new('!=', None).evaluate('null')) - - def testEvaluateIn(self): - new = RequestConstraint.new - self.assertTrue(new('in', [3]).evaluate(3)) - self.assertTrue(new('in', ['a', 'b', 3]).evaluate(3)) - self.assertTrue(new('in', 'a').evaluate('a')) - self.assertTrue(new('in', [3, 4, 5]).evaluate('5')) - self.assertTrue(new('in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999)) - self.assertFalse(new('in', ['a', 'b', 'c']).evaluate('d')) - self.assertFalse(new('in', 'a').evaluate('b')) - - def testEvaluateNotIn(self): - new = RequestConstraint.new - self.assertTrue(new('not in', ['a', 'b', 'c']).evaluate('d')) - self.assertTrue(new('not in', [3, 4, 5]).evaluate(6)) - self.assertTrue(new('not in', 'a').evaluate('b')) - self.assertFalse(new('not in', [3]).evaluate(3)) - self.assertFalse(new('not in', ['a', 'b', 3]).evaluate(3)) - self.assertFalse(new('not in', 'a').evaluate('a')) - self.assertFalse(new('not in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999)) - - def testEvaluateLike(self): - # cannot make "like" with RequestConstraint.new() - new = self._newRequestConstraint - self.assertTrue(new('LIKE', 'a').evaluate('a')) - self.assertTrue(new('LIKE', 'a%').evaluate('a')) - self.assertTrue(new('LIKE', 'a%').evaluate('abcd')) - self.assertTrue(new('LIKE', '%a').evaluate('a')) - self.assertTrue(new('LIKE', '%a').evaluate('bcda')) - self.assertTrue(new('LIKE', '%').evaluate('')) - self.assertTrue(new('LIKE', '%').evaluate('anything')) - self.assertTrue(new('LIKE', 'a%d').evaluate('ad')) - self.assertTrue(new('LIKE', 'a%d').evaluate('abcd')) - self.assertTrue(new('LIKE', 'aa.()!{[]^%$').evaluate('aa.()!{[]^zzz$')) - self.assertTrue(new('LIKE', 'a__d%').evaluate('abcdefg')) - self.assertFalse(new('LIKE', 'a%').evaluate('b')) - self.assertFalse(new('LIKE', 'a%').evaluate('ba')) - self.assertFalse(new('LIKE', '%a').evaluate('b')) - 
self.assertFalse(new('LIKE', '%a').evaluate('ab')) - self.assertFalse(new('LIKE', 'a%').evaluate('A')) - self.assertFalse(new('LIKE', 'A%').evaluate('a')) - self.assertFalse(new('LIKE', 'a%d').evaluate('da')) - self.assertFalse(new('LIKE', 'a__d%').evaluate('abccdefg')) - self.assertFalse(new('LIKE', '....').evaluate('aaaa')) - self.assertFalse(new('LIKE', '.*').evaluate('anything')) - - def testEvaluateILike(self): - # cannot make "ilike" with RequestConstraint.new() - new = self._newRequestConstraint - self.assertTrue(new('ILIKE', 'a').evaluate('a')) - self.assertTrue(new('ILIKE', 'a%').evaluate('a')) - self.assertTrue(new('ILIKE', 'a%').evaluate('abcd')) - self.assertTrue(new('ILIKE', '%a').evaluate('a')) - self.assertTrue(new('ILIKE', '%a').evaluate('bcda')) - self.assertTrue(new('ILIKE', '%').evaluate('')) - self.assertTrue(new('ILIKE', '%').evaluate('anything')) - self.assertTrue(new('ILIKE', 'a%d').evaluate('ad')) - self.assertTrue(new('ILIKE', 'a%d').evaluate('abcd')) - self.assertTrue(new('ILIKE', 'a').evaluate('A')) - self.assertTrue(new('ILIKE', 'a%').evaluate('A')) - self.assertTrue(new('ILIKE', 'a%').evaluate('ABCD')) - self.assertTrue(new('ILIKE', '%a').evaluate('A')) - self.assertTrue(new('ILIKE', '%a').evaluate('BCDA')) - self.assertTrue(new('ILIKE', '%').evaluate('')) - self.assertTrue(new('ILIKE', '%').evaluate('anything')) - self.assertTrue(new('ILIKE', 'a%d').evaluate('AD')) - self.assertTrue(new('ILIKE', 'a%d').evaluate('ABCD')) - self.assertTrue(new('ILIKE', 'A').evaluate('a')) - self.assertTrue(new('ILIKE', 'A%').evaluate('a')) - self.assertTrue(new('ILIKE', 'A%').evaluate('abcd')) - self.assertTrue(new('ILIKE', '%A').evaluate('a')) - self.assertTrue(new('ILIKE', '%A').evaluate('bcda')) - self.assertTrue(new('ILIKE', '%').evaluate('')) - self.assertTrue(new('ILIKE', '%').evaluate('anything')) - self.assertTrue(new('ILIKE', 'A%D').evaluate('ad')) - self.assertTrue(new('ILIKE', 'A%D').evaluate('abcd')) - self.assertTrue(new('ILIKE', 'aa.()!{[]^%$').evaluate('AA.()!{[]^zzz$')) - self.assertTrue(new('ILIKE', 'a__d%').evaluate('abcdefg')) - self.assertTrue(new('ILIKE', 'a__d%').evaluate('ABCDEFG')) - self.assertFalse(new('ILIKE', 'a%').evaluate('b')) - self.assertFalse(new('ILIKE', 'a%').evaluate('ba')) - self.assertFalse(new('ILIKE', '%a').evaluate('b')) - self.assertFalse(new('ILIKE', '%a').evaluate('ab')) - self.assertFalse(new('ILIKE', 'a%d').evaluate('da')) - self.assertFalse(new('ILIKE', 'a__d%').evaluate('abccdefg')) - self.assertFalse(new('ILIKE', '....').evaluate('aaaa')) - self.assertFalse(new('ILIKE', '.*').evaluate('anything')) - - def testEvaluateBetween(self): - # cannot make "between" with RequestConstraint.new() - new = self._newRequestConstraint - self.assertTrue(new('BETWEEN', '1--1').evaluate(1)) - self.assertTrue(new('BETWEEN', '1--10').evaluate(1)) - self.assertTrue(new('BETWEEN', '1--10').evaluate(5)) - self.assertTrue(new('BETWEEN', '1--10').evaluate(10)) - self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.0)) - self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.05)) - self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.1)) - self.assertTrue(new('BETWEEN', 'a--x').evaluate('a')) - self.assertTrue(new('BETWEEN', 'a--x').evaluate('j')) - self.assertTrue(new('BETWEEN', 'a--x').evaluate('x')) - self.assertFalse(new('BETWEEN', '1--1').evaluate(2)) - self.assertFalse(new('BETWEEN', '1--2').evaluate(10)) - self.assertFalse(new('BETWEEN', '1--10').evaluate(0)) - self.assertFalse(new('BETWEEN', '1--10').evaluate(11)) - 
self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(0.99)) - self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(1.11)) - self.assertFalse(new('BETWEEN', 'a--x').evaluate(' ')) - self.assertFalse(new('BETWEEN', 'a--x').evaluate('z')) - diff --git a/pythonPackages/ufpy/test/dafTests/testSatellite.py b/pythonPackages/ufpy/test/dafTests/testSatellite.py deleted file mode 100644 index 3e42b1d2aa..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testSatellite.py +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint - -import baseDafTestCase -import unittest - -# -# Test DAF support for satellite data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/26/16 5587 tgurney Move identifier values tests -# out of base class -# 06/01/16 5587 tgurney Update testGetIdentifierValues -# 06/07/16 5574 tgurney Add advanced query tests -# 06/13/16 5574 tgurney Typo -# 06/30/16 5725 tgurney Add test for NOT IN -# -# - - -class SatelliteTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for satellite data""" - - datatype = "satellite" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames("West CONUS") - self.runTimesTest(req) - - def testGetGridData(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("Imager 11 micron IR") - req.setLocationNames("West CONUS") - self.runGridDataTest(req) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters("Imager 11 micron IR") - req.setLocationNames("West CONUS") - return self.runGridDataTest(req) - - def testGetDataWithEqualsString(self): - gridData = self._runConstraintTest('creatingEntity', '=', 'Composite') - for record in gridData: - 
self.assertEqual(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithEqualsUnicode(self): - gridData = self._runConstraintTest('creatingEntity', '=', u'Composite') - for record in gridData: - self.assertEqual(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithEqualsInt(self): - gridData = self._runConstraintTest('creatingEntity', '=', 1000) - for record in gridData: - self.assertEqual(record.getAttribute('creatingEntity'), 1000) - - def testGetDataWithEqualsLong(self): - gridData = self._runConstraintTest('creatingEntity', '=', 1000L) - for record in gridData: - self.assertEqual(record.getAttribute('creatingEntity'), 1000) - - def testGetDataWithEqualsFloat(self): - gridData = self._runConstraintTest('creatingEntity', '=', 1.0) - for record in gridData: - self.assertEqual(round(record.getAttribute('creatingEntity'), 1), 1.0) - - def testGetDataWithEqualsNone(self): - gridData = self._runConstraintTest('creatingEntity', '=', None) - for record in gridData: - self.assertIsNone(record.getAttribute('creatingEntity')) - - def testGetDataWithNotEquals(self): - gridData = self._runConstraintTest('creatingEntity', '!=', 'Composite') - for record in gridData: - self.assertNotEqual(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithNotEqualsNone(self): - gridData = self._runConstraintTest('creatingEntity', '!=', None) - for record in gridData: - self.assertIsNotNone(record.getAttribute('creatingEntity')) - - def testGetDataWithGreaterThan(self): - gridData = self._runConstraintTest('creatingEntity', '>', 'Composite') - for record in gridData: - self.assertGreater(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithLessThan(self): - gridData = self._runConstraintTest('creatingEntity', '<', 'Composite') - for record in gridData: - self.assertLess(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithGreaterThanEquals(self): - gridData = self._runConstraintTest('creatingEntity', '>=', 'Composite') - for record in gridData: - self.assertGreaterEqual(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithLessThanEquals(self): - gridData = self._runConstraintTest('creatingEntity', '<=', 'Composite') - for record in gridData: - self.assertLessEqual(record.getAttribute('creatingEntity'), 'Composite') - - def testGetDataWithInTuple(self): - collection = ('Composite', 'Miscellaneous') - gridData = self._runConstraintTest('creatingEntity', 'in', collection) - for record in gridData: - self.assertIn(record.getAttribute('creatingEntity'), collection) - - def testGetDataWithInList(self): - collection = ('Composite', 'Miscellaneous') - gridData = self._runConstraintTest('creatingEntity', 'in', collection) - for record in gridData: - self.assertIn(record.getAttribute('creatingEntity'), collection) - - def testGetDataWithInGenerator(self): - collection = ('Composite', 'Miscellaneous') - generator = (item for item in collection) - gridData = self._runConstraintTest('creatingEntity', 'in', generator) - for record in gridData: - self.assertIn(record.getAttribute('creatingEntity'), collection) - - def testGetDataWithNotInList(self): - collection = ('Composite', 'Miscellaneous') - gridData = self._runConstraintTest('creatingEntity', 'not in', collection) - for record in gridData: - self.assertNotIn(record.getAttribute('creatingEntity'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - 
self._runConstraintTest('creatingEntity', 'junk', 'Composite') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('creatingEntity', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('creatingEntity', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('Composite', 'Miscellaneous', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('creatingEntity', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testSfcObs.py b/pythonPackages/ufpy/test/dafTests/testSfcObs.py deleted file mode 100644 index 4387eadee7..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testSfcObs.py +++ /dev/null @@ -1,192 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import unittest - -# -# Test DAF support for sfcobs data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. 
-# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 06/09/16 5587 bsteffen Add getIdentifierValues tests -# 06/13/16 5574 tgurney Add advanced query tests -# 06/30/16 5725 tgurney Add test for NOT IN -# 01/20/17 6095 tgurney Add null identifiers test -# -# - - -class SfcObsTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for sfcobs data""" - - datatype = "sfcobs" - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames("14547") - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames("14547") - req.setParameters("temperature", "seaLevelPress", "dewpoint") - self.runGeometryDataTest(req) - - def testGetGeometryDataNullIdentifiers(self): - req = DAL.newDataRequest(self.datatype) - req.setLocationNames("14547") - req.setParameters("temperature", "seaLevelPress", "dewpoint") - req.identifiers = None - self.runGeometryDataTest(req) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - self.runGetIdValuesTest(optionalIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters("temperature", "reportType") - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('reportType', '=', '1004') - for record in geometryData: - self.assertEqual(record.getString('reportType'), '1004') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('reportType', '=', u'1004') - for record in geometryData: - self.assertEqual(record.getString('reportType'), '1004') - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('reportType', '=', 1004) - for record in geometryData: - self.assertEqual(record.getString('reportType'), '1004') - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('reportType', '=', 1004L) - for record in geometryData: - self.assertEqual(record.getString('reportType'), '1004') - - # No float test because no float identifiers are available - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '=', None) - for record in geometryData: - self.assertEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('reportType', '!=', 1004) - for record in geometryData: - self.assertNotEqual(record.getString('reportType'), '1004') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('reportType', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('reportType'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('reportType', '>', 1004) - for record in geometryData: - self.assertGreater(record.getString('reportType'), '1004') - - def testGetDataWithLessThan(self): - 
geometryData = self._runConstraintTest('reportType', '<', 1004) - for record in geometryData: - self.assertLess(record.getString('reportType'), '1004') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('reportType', '>=', 1004) - for record in geometryData: - self.assertGreaterEqual(record.getString('reportType'), '1004') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('reportType', '<=', 1004) - for record in geometryData: - self.assertLessEqual(record.getString('reportType'), '1004') - - def testGetDataWithInTuple(self): - collection = ('1004', '1005') - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInList(self): - collection = ['1004', '1005'] - geometryData = self._runConstraintTest('reportType', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithInGenerator(self): - collection = ('1004', '1005') - generator = (item for item in collection) - geometryData = self._runConstraintTest('reportType', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('reportType'), collection) - - def testGetDataWithNotInList(self): - collection = ['1004', '1005'] - geometryData = self._runConstraintTest('reportType', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('reportType'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'junk', '1004') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('reportType', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('1004', '1005', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('reportType', 'in', collection) diff --git a/pythonPackages/ufpy/test/dafTests/testTopo.py b/pythonPackages/ufpy/test/dafTests/testTopo.py deleted file mode 100644 index d0dcd36865..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testTopo.py +++ /dev/null @@ -1,96 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL -from ufpy.ThriftClient import ThriftRequestException - -import baseDafTestCase -import shapely.geometry -import unittest - -# -# Test DAF support for topo data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 05/26/16 5587 tgurney Add test for -# getIdentifierValues() -# 06/01/16 5587 tgurney Update testGetIdentifierValues -# 07/18/17 6253 randerso Removed referenced to GMTED -# - - -class TopoTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for topo data""" - - datatype = "topo" - - def testGetGridData(self): - print("defaultTopo") - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("group", "/") - req.addIdentifier("dataset", "full") - poly = shapely.geometry.LinearRing(((-70, 40), (-71, 40), (-71, 42), (-70, 42))) - req.setEnvelope(poly) - gridData = DAL.getGridData(req) - self.assertIsNotNone(gridData) - print("Number of grid records: " + str(len(gridData))) - print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") - print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") - - for topoFile in ["gtopo30"]: - print("\n" + topoFile) - req.addIdentifier("topoFile", topoFile) - gridData = DAL.getGridData(req) - self.assertIsNotNone(gridData) - print("Number of grid records: " + str(len(gridData))) - print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") - print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") - - - def testRequestingTooMuchDataThrowsResponseTooLargeException(self): - req = DAL.newDataRequest(self.datatype) - req.addIdentifier("group", "/") - req.addIdentifier("dataset", "full") - points = ((-180, 90), (180, 90), (180, -90), (-180, -90)) - poly = shapely.geometry.LinearRing(points) - req.setEnvelope(poly) - - with self.assertRaises(ThriftRequestException) as cm: - DAL.getGridData(req) - self.assertIn('ResponseTooLargeException', str(cm.exception)) - - def testGetIdentifierValues(self): - req = DAL.newDataRequest(self.datatype) - optionalIds = set(DAL.getOptionalIdentifiers(req)) - requiredIds = set(DAL.getRequiredIdentifiers(req)) - self.runGetIdValuesTest(optionalIds | requiredIds) - - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() diff --git a/pythonPackages/ufpy/test/dafTests/testWarning.py b/pythonPackages/ufpy/test/dafTests/testWarning.py deleted file mode 100644 index d1ece0c115..0000000000 --- a/pythonPackages/ufpy/test/dafTests/testWarning.py +++ /dev/null @@ -1,233 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -from __future__ import print_function -from ufpy.dataaccess import DataAccessLayer as DAL - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint -import baseDafTestCase -import unittest - -# -# Test DAF support for warning data -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/19/16 4795 mapeters Initial Creation. -# 04/11/16 5548 tgurney Cleanup -# 04/18/16 5548 tgurney More cleanup -# 04/26/16 5587 tgurney Add identifier values tests -# 06/08/16 5574 tgurney Add advanced query tests -# 06/10/16 5548 tgurney Clean up references to name -# of data type -# 06/13/16 5574 tgurney Fix checks for None -# 06/21/16 5548 tgurney Skip tests that cause errors -# 06/30/16 5725 tgurney Add test for NOT IN -# 12/12/16 5981 tgurney Improve test performance -# -# - - -class WarningTestCase(baseDafTestCase.DafTestCase): - """Test DAF support for warning data""" - - datatype = "warning" - - def _getLocationNames(self): - req = DAL.newDataRequest() - req.setDatatype(self.datatype) - return DAL.getAvailableLocationNames(req) - - def _getAllRecords(self): - req = DAL.newDataRequest() - req.setDatatype(self.datatype) - req.setParameters('id') - return DAL.getGeometryData(req) - - def testGetAvailableParameters(self): - req = DAL.newDataRequest(self.datatype) - self.runParametersTest(req) - - def testGetAvailableLocations(self): - req = DAL.newDataRequest(self.datatype) - self.runLocationsTest(req) - - def testGetAvailableTimes(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("etn", "wmoid") - self.runTimesTest(req) - - def testGetGeometryData(self): - req = DAL.newDataRequest(self.datatype) - req.setParameters("etn", "wmoid") - self.runGeometryDataTest(req) - - def testFilterOnLocationName(self): - allLocationNames = self._getLocationNames() - if len(allLocationNames) == 0: - errmsg = "No {0} data exists on {1}. Try again with {0} data." 
- raise unittest.SkipTest(errmsg.format(self.datatype, DAL.THRIFT_HOST)) - testCount = 3 # number of different location names to test - for locationName in allLocationNames[:testCount]: - req = DAL.newDataRequest() - req.setDatatype(self.datatype) - req.setParameters('id') - req.setLocationNames(locationName) - geomData = DAL.getGeometryData(req) - for geom in geomData: - self.assertEqual(geom.getLocationName(), locationName) - - def testFilterOnNonexistentLocationReturnsEmpty(self): - req = DAL.newDataRequest() - req.setDatatype(self.datatype) - req.setParameters('id') - req.setLocationNames('ZZZZ') - self.assertEqual(len(DAL.getGeometryData(req)), 0) - - def testFilterOnInvalidLocationThrowsIncompatibleRequestException(self): - req = DAL.newDataRequest() - req.setDatatype(self.datatype) - req.setParameters('id') - req.setLocationNames(') and 0=1') - with self.assertRaises(Exception) as cm: - DAL.getGeometryData(req) - self.assertIn('IncompatibleRequestException', str(cm.exception)) - - def testGetColumnIdentifierValues(self): - self.runGetIdValuesTest(['act']) - - @unittest.skip('avoid EDEX error') - def testGetInvalidIdentifierValuesThrowsException(self): - self.runInvalidIdValuesTest() - - @unittest.skip('avoid EDEX error') - def testGetNonexistentIdentifierValuesThrowsException(self): - self.runNonexistentIdValuesTest() - - def _runConstraintTest(self, key, operator, value): - req = DAL.newDataRequest(self.datatype) - constraint = RequestConstraint.new(operator, value) - req.addIdentifier(key, constraint) - req.setParameters("etn", "wmoid", "sig") - return self.runGeometryDataTest(req) - - def testGetDataWithEqualsString(self): - geometryData = self._runConstraintTest('sig', '=', 'Y') - for record in geometryData: - self.assertEqual(record.getString('sig'), 'Y') - - def testGetDataWithEqualsUnicode(self): - geometryData = self._runConstraintTest('sig', '=', u'Y') - for record in geometryData: - self.assertEqual(record.getString('sig'), 'Y') - - def testGetDataWithEqualsInt(self): - geometryData = self._runConstraintTest('etn', '=', 1000) - for record in geometryData: - self.assertEqual(record.getString('etn'), '1000') - - def testGetDataWithEqualsLong(self): - geometryData = self._runConstraintTest('etn', '=', 1000L) - for record in geometryData: - self.assertEqual(record.getString('etn'), '1000') - - def testGetDataWithEqualsFloat(self): - geometryData = self._runConstraintTest('etn', '=', 1.0) - for record in geometryData: - self.assertEqual(round(float(record.getString('etn')), 1), 1.0) - - def testGetDataWithEqualsNone(self): - geometryData = self._runConstraintTest('sig', '=', None) - for record in geometryData: - self.assertEqual(record.getType('sig'), 'NULL') - - def testGetDataWithNotEquals(self): - geometryData = self._runConstraintTest('sig', '!=', 'Y') - for record in geometryData: - self.assertNotEqual(record.getString('sig'), 'Y') - - def testGetDataWithNotEqualsNone(self): - geometryData = self._runConstraintTest('sig', '!=', None) - for record in geometryData: - self.assertNotEqual(record.getType('sig'), 'NULL') - - def testGetDataWithGreaterThan(self): - geometryData = self._runConstraintTest('sig', '>', 'Y') - for record in geometryData: - self.assertGreater(record.getString('sig'), 'Y') - - def testGetDataWithLessThan(self): - geometryData = self._runConstraintTest('sig', '<', 'Y') - for record in geometryData: - self.assertLess(record.getString('sig'), 'Y') - - def testGetDataWithGreaterThanEquals(self): - geometryData = self._runConstraintTest('sig', '>=', 'Y') - 
for record in geometryData: - self.assertGreaterEqual(record.getString('sig'), 'Y') - - def testGetDataWithLessThanEquals(self): - geometryData = self._runConstraintTest('sig', '<=', 'Y') - for record in geometryData: - self.assertLessEqual(record.getString('sig'), 'Y') - - def testGetDataWithInTuple(self): - collection = ('Y', 'A') - geometryData = self._runConstraintTest('sig', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('sig'), collection) - - def testGetDataWithInList(self): - collection = ['Y', 'A'] - geometryData = self._runConstraintTest('sig', 'in', collection) - for record in geometryData: - self.assertIn(record.getString('sig'), collection) - - def testGetDataWithInGenerator(self): - collection = ('Y', 'A') - generator = (item for item in collection) - geometryData = self._runConstraintTest('sig', 'in', generator) - for record in geometryData: - self.assertIn(record.getString('sig'), collection) - - def testGetDataWithNotInList(self): - collection = ['Y', 'W'] - geometryData = self._runConstraintTest('sig', 'not in', collection) - for record in geometryData: - self.assertNotIn(record.getString('sig'), collection) - - def testGetDataWithInvalidConstraintTypeThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('sig', 'junk', 'Y') - - def testGetDataWithInvalidConstraintValueThrowsException(self): - with self.assertRaises(TypeError): - self._runConstraintTest('sig', '=', {}) - - def testGetDataWithEmptyInConstraintThrowsException(self): - with self.assertRaises(ValueError): - self._runConstraintTest('sig', 'in', []) - - def testGetDataWithNestedInConstraintThrowsException(self): - collection = ('Y', 'A', ()) - with self.assertRaises(TypeError): - self._runConstraintTest('sig', 'in', collection) diff --git a/pythonPackages/ufpy/test/localization/__init__.py b/pythonPackages/ufpy/test/localization/__init__.py deleted file mode 100644 index a178a511f5..0000000000 --- a/pythonPackages/ufpy/test/localization/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - - -# -# __init__.py for ufpy.test.localization package -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# --------- -------- --------- -------------------------- -# 08/07/17 5731 bsteffen Initial Creation. - -__all__ = [] diff --git a/pythonPackages/ufpy/test/localization/testLocalizationFileManager.py b/pythonPackages/ufpy/test/localization/testLocalizationFileManager.py deleted file mode 100644 index e034364b0f..0000000000 --- a/pythonPackages/ufpy/test/localization/testLocalizationFileManager.py +++ /dev/null @@ -1,172 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Tests for the LocalizationFileManager -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# --------- -------- --------- -------------------------- -# 08/09/17 5731 bsteffen Initial Creation. - -import unittest - -from ufpy.localization.LocalizationFileManager import (LocalizationFileManager, - LocalizationFileVersionConflictException, - LocalizationContext, - LocalizationFileIsNotDirectoryException, - LocalizationFileDoesNotExistException) - -testFile = "purge/defaultPurgeRules.xml" -testContent = "05-05:05:05" -testDir = "purge/" -testNewFile = "purge/testPurgeRules.xml" - -class ContextTestCase(unittest.TestCase): - def test_eq(self): - c1 = LocalizationContext() - c2 = LocalizationContext() - self.assertEqual(c1,c2) - c3 = LocalizationContext("site", "test") - c4 = LocalizationContext("site", "test") - self.assertEqual(c3,c4) - self.assertNotEqual(c1,c3) - - def test_hash(self): - c1 = LocalizationContext() - c2 = LocalizationContext() - self.assertEqual(hash(c1),hash(c2)) - c3 = LocalizationContext("site", "test") - c4 = LocalizationContext("site", "test") - self.assertEqual(hash(c3),hash(c4)) - -class LFMTestCase(unittest.TestCase): - def setUp(self): - self.manager = LocalizationFileManager() - userFile = self.manager.getSpecific("user", testFile) - if userFile.exists(): - userFile.delete() - newFile = self.manager.getSpecific("user", testNewFile) - if newFile.exists(): - newFile.delete() - def test_gets(self): - startingIncremental = self.manager.getIncremental(testFile) - baseFile = self.manager.getSpecific("base", testFile) - self.assertEqual(baseFile, startingIncremental[0]) - self.assertTrue(baseFile.exists()) - self.assertFalse(baseFile.isDirectory()) - userFile = self.manager.getSpecific("user", testFile) - self.assertFalse(userFile.exists()) - with userFile.open("w") as stream: - stream.write(testContent) - userFile = self.manager.getSpecific("user", testFile) - self.assertTrue(userFile.exists()) - with userFile.open('r') as stream: - self.assertEqual(stream.read(), testContent) - absFile = self.manager.getAbsolute(testFile) - self.assertEqual(absFile, userFile) - endingIncremental = self.manager.getIncremental(testFile) - self.assertEqual(len(startingIncremental) + 1, len(endingIncremental)) - self.assertEqual(userFile, endingIncremental[-1]) - self.assertEqual(baseFile, endingIncremental[0]) - - - userFile.delete() - userFile = self.manager.getSpecific("user", testFile) - self.assertFalse(userFile.exists()) - - def test_concurrent_edit(self): - userFile1 = self.manager.getSpecific("user", testFile) - userFile2 = self.manager.getSpecific("user", testFile) - self.assertFalse(userFile1.exists()) - self.assertFalse(userFile2.exists()) - with self.assertRaises(LocalizationFileVersionConflictException): - with userFile1.open("w") as stream1: - stream1.write(testContent) - with userFile2.open("w") as stream2: - stream2.write(testContent) - - userFile = self.manager.getSpecific("user", testFile) - userFile.delete() - - def 
test_dir(self): - dir = self.manager.getAbsolute(testDir) - self.assertTrue(dir.isDirectory()) - with self.assertRaises(Exception): - dir.delete() - - def test_list(self): - abs1 = self.manager.listAbsolute(testDir) - inc1 = self.manager.listIncremental(testDir) - self.assertEqual(len(abs1), len(inc1)) - for i in range(len(abs1)): - self.assertEquals(abs1[i], inc1[i][-1]) - - userFile = self.manager.getSpecific("user", testNewFile) - self.assertNotIn(userFile, abs1) - - with userFile.open("w") as stream: - stream.write(testContent) - userFile = self.manager.getSpecific("user", testNewFile) - - - abs2 = self.manager.listAbsolute(testDir) - inc2 = self.manager.listIncremental(testDir) - self.assertEqual(len(abs2), len(inc2)) - for i in range(len(abs2)): - self.assertEquals(abs2[i], inc2[i][-1]) - - self.assertEquals(len(abs1) + 1, len(abs2)) - self.assertIn(userFile, abs2) - - userFile.delete() - - def test_list_file(self): - with self.assertRaises(LocalizationFileIsNotDirectoryException): - self.manager.listIncremental(testFile) - - def test_list_nonexistant(self): - with self.assertRaises(LocalizationFileDoesNotExistException): - self.manager.listIncremental('dontNameYourDirectoryThis') - - def test_root_variants(self): - list1 = self.manager.listAbsolute(".") - list2 = self.manager.listAbsolute("") - list3 = self.manager.listAbsolute("/") - self.assertEquals(list1,list2) - self.assertEquals(list2,list3) - - def test_slashiness(self): - raw = testDir - if raw[0] == '/': - raw = raw[1:] - if raw[-1] == '/': - raw = raw[:-1] - list1 = self.manager.listAbsolute(raw) - list2 = self.manager.listAbsolute(raw + "/") - list3 = self.manager.listAbsolute("/" + raw) - self.assertEquals(list1,list2) - self.assertEquals(list2,list3) - - - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/pythonPackages/ufpy/test/localization/testLocalizationRest.py b/pythonPackages/ufpy/test/localization/testLocalizationRest.py deleted file mode 100644 index c1e414426f..0000000000 --- a/pythonPackages/ufpy/test/localization/testLocalizationRest.py +++ /dev/null @@ -1,359 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import unittest -import urllib2 - -from HTMLParser import HTMLParser -from xml.etree.ElementTree import parse as parseXml -from json import load as loadjson -from urlparse import urljoin -from base64 import b64encode - -# -# Test the localizaiton REST service. -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# --------- -------- --------- -------------------------- -# 08/07/17 5731 bsteffen Initial Creation. 
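The LocalizationFileManager tests removed above outline the client-side localization API: getSpecific/getAbsolute/getIncremental lookups plus version-checked reads, writes, and deletes. A minimal usage sketch under the same assumptions those tests made (ufpy importable, a reachable EDEX localization service, and write access at the user level):

    from ufpy.localization.LocalizationFileManager import LocalizationFileManager

    manager = LocalizationFileManager()
    path = "purge/defaultPurgeRules.xml"

    # Write a user-level override; open() raises a version-conflict
    # exception if another writer changed the file in the meantime
    userFile = manager.getSpecific("user", path)
    with userFile.open("w") as stream:
        stream.write("05-05:05:05")

    # Incremental results run base -> ... -> user, so the new override is
    # now the absolute (winning) version of the file
    levels = manager.getIncremental(path)
    assert manager.getAbsolute(path) == levels[-1]

    manager.getSpecific("user", path).delete()  # clean up the override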
- -baseURL = "http://localhost:9581/services/localization/" -testSite = "OAX" -testDir = "menus" -testFile = "test.xml" -username = "test" -password = username - -base64string = b64encode('%s:%s' % (username, password)) -authString = "Basic %s" % base64string - -class ValidHTMLParser(HTMLParser): - """Simple HTML parser that performs very minimal validation. - - This ensures that all start and end tags match, and also that there are - some tags. It also accumulates the text of all links in the html file - in the link_texts attribute, which can be used for further validation. - """ - - def __init__(self, testcase): - HTMLParser.__init__(self) - self._testcase = testcase - self._tags = [] - self._any = False - self.link_texts = [] - - def handle_starttag(self, tag, attrs): - self._tags.append(tag) - self._any = True - - def handle_endtag(self, tag): - self._testcase.assertNotEquals([], self._tags, "Unstarted end tag " + tag) - self._testcase.assertEquals(tag, self._tags.pop()) - - def handle_data(self, data): - if self._tags[-1] == "a": - self.link_texts.append(data) - - def close(self): - HTMLParser.close(self) - self._testcase.assertTrue(self._any) - self._testcase.assertEquals([], self._tags) - - - -class AbstractListingTestCase(): - """Base test case for testing listings, retrieves data as html, xml, and json. - - Sub classes should implement assertValidHtml, assertValidXml, and - assertValidJson to ensure that the content returned matches what was - expected. - """ - - def assertRequestGetsHtml(self, request): - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "text/html") - body = response.read() - parser = ValidHTMLParser(self) - parser.feed(body) - parser.close() - self.assertValidHtml(parser) - - def assertValidHtml(self, parser): - """Intended to be overriden by subclasses to validate HTML content. - - The argument is a populated instance of ValidHTMLParser. - """ - pass - - def test_default(self): - request = urllib2.Request(self.url) - self.assertRequestGetsHtml(request) - - def test_last_slash(self): - if self.url.endswith("/"): - request = urllib2.Request(self.url[:-1]) - else: - request = urllib2.Request(self.url + "/") - self.assertRequestGetsHtml(request) - - def test_wild_mime(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "*/*") - self.assertRequestGetsHtml(request) - request.add_header("Accept", "text/*") - self.assertRequestGetsHtml(request) - - def test_html(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "text/html") - self.assertRequestGetsHtml(request) - - def test_json(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "application/json") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/json") - jsonData = loadjson(response) - self.assertValidJson(jsonData) - - - def assertValidJson(self, jsonData): - """Intended to be overriden by subclasses to validate JSON content. - - The argument is a python object as returned from json.load - """ - pass - - def test_xml(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "application/xml") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/xml") - xmlData = parseXml(response) - self.assertValidXml(xmlData) - - def assertValidXml(self, xmlData): - """Intended to be overriden by subclasses to validate XML content. 
- - The argument is an ElementTree - """ - pass - - def test_delete(self): - request = urllib2.Request(self.url) - request.get_method = lambda: "DELETE" - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(405, cm.exception.code) - - def test_put(self): - request = urllib2.Request(self.url) - request.get_method = lambda: "PUT" - request.add_data("Test Data") - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(405, cm.exception.code) - - def test_unacceptable(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "application/fakemimetype") - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(406, cm.exception.code) - request.add_header("Accept", "fakemimetype/*") - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(406, cm.exception.code) - - def test_accept_quality_factor(self): - request = urllib2.Request(self.url) - request.add_header("Accept", "application/xml; q=0.8, application/json; q=0.2") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/xml") - xmlData = parseXml(response) - self.assertValidXml(xmlData) - - request.add_header("Accept", "application/xml; q=0.2, application/json; q=0.8") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/json") - jsonData = loadjson(response) - self.assertValidJson(jsonData) - - request.add_header("Accept", "application/xml, application/json; q=0.8") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/xml") - xmlData = parseXml(response) - self.assertValidXml(xmlData) - - request.add_header("Accept", "application/fakemimetype, application/json; q=0.8") - response = urllib2.urlopen(request) - self.assertEquals(response.headers["Content-Type"], "application/json") - jsonData = loadjson(response) - self.assertValidJson(jsonData) - -class RootTestCase(AbstractListingTestCase, unittest.TestCase): - """Test that the root of the localization service returns listing of localization types.""" - def setUp(self): - self.url = baseURL - def assertValidHtml(self, parser): - self.assertIn("common_static/", parser.link_texts) - def assertValidJson(self, jsonData): - self.assertIn("common_static/", jsonData) - def assertValidXml(self, xmlData): - root = xmlData.getroot() - self.assertEquals(root.tag, "entries") - names = [e.text for e in root.findall("entry")] - self.assertIn("common_static/", names) - -class TypeTestCase(AbstractListingTestCase, unittest.TestCase): - """Test that common_static will list context levels.""" - def setUp(self): - self.url = urljoin(baseURL, "common_static/") - def assertValidHtml(self, parser): - self.assertIn("base/", parser.link_texts) - self.assertIn("site/", parser.link_texts) - def assertValidJson(self, jsonData): - self.assertIn("base/", jsonData) - self.assertIn("site/", jsonData) - def assertValidXml(self, xmlData): - root = xmlData.getroot() - self.assertEquals(root.tag, "entries") - names = [e.text for e in root.findall("entry")] - self.assertIn("base/", names) - self.assertIn("site/", names) - -class LevelTestCase(AbstractListingTestCase, unittest.TestCase): - """Test that common_static/site will list sites.""" - def setUp(self): - self.url = urljoin(baseURL, "common_static/site/") - def assertValidHtml(self, parser): - 
self.assertIn(testSite +"/", parser.link_texts) - def assertValidJson(self, jsonData): - self.assertIn(testSite +"/", jsonData) - def assertValidXml(self, xmlData): - root = xmlData.getroot() - self.assertEquals(root.tag, "entries") - names = [e.text for e in root.findall("entry")] - self.assertIn(testSite +"/", names) - -class AbstractFileListingTestCase(AbstractListingTestCase): - """Base test case for a file listing""" - - def assertValidHtml(self, parser): - self.assertIn(testDir +"/", parser.link_texts) - self.assertEquals(parser.link_texts, sorted(parser.link_texts)) - def assertValidJson(self, jsonData): - self.assertIn(testDir +"/", jsonData) - def assertValidXml(self, xmlData): - root = xmlData.getroot() - self.assertEquals(root.tag, "files") - names = [e.get("name") for e in root.findall("file")] - self.assertIn(testDir +"/", names) - self.assertEquals(names, sorted(names)) - -class BaseFileListingTestCase(AbstractFileListingTestCase, unittest.TestCase): - """Test that common_static/base lists files""" - def setUp(self): - self.url = urljoin(baseURL, "common_static/base/") - -class SiteFileListingTestCase(AbstractFileListingTestCase, unittest.TestCase): - """Test that common_static/site// lists files""" - def setUp(self): - self.url = urljoin(baseURL, "common_static/site/" + testSite + "/") - -class FileTestCase(unittest.TestCase): - """Test retrieval, modification and deletion of an individual.""" - def setUp(self): - self.url = urljoin(baseURL, "common_static/user/" + username + "/" + testFile) - # The file should not exist before the test, but if it does then delete it - # This is some of the same functionality we are testing so if setup fails - # then the test would probably fail anyway - try: - request = urllib2.Request(self.url) - response = urllib2.urlopen(request) - request = urllib2.Request(self.url) - request.get_method = lambda: "DELETE" - request.add_header("Authorization", authString) - request.add_header("If-Match", response.headers["Content-MD5"]) - response = urllib2.urlopen(request) - except urllib2.HTTPError as e: - if e.code != 404: - raise e - def test_file_operations(self): - """Run through a typical set of file interactions and verify everything works correctly.""" - request = urllib2.Request(self.url) - request.get_method = lambda: "PUT" - request.add_data("Test Data") - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(401, cm.exception.code) - - request.add_header("Authorization", authString) - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(409, cm.exception.code) - - request.add_header("If-Match", "NON_EXISTENT_CHECKSUM") - response = urllib2.urlopen(request) - - - request = urllib2.Request(self.url) - response = urllib2.urlopen(request) - self.assertEquals(response.read(), "Test Data") - - request = urllib2.Request(self.url + "/") - response = urllib2.urlopen(request) - self.assertEquals(response.read(), "Test Data") - - request = urllib2.Request(self.url) - request.get_method = lambda: "PUT" - request.add_data("Test Data2") - request.add_header("If-Match", response.headers["Content-MD5"]) - request.add_header("Authorization", authString) - response = urllib2.urlopen(request) - checksum = response.headers["Content-MD5"] - - request = urllib2.Request(self.url) - response = urllib2.urlopen(request) - self.assertEquals(response.read(), "Test Data2") - - request = urllib2.Request(self.url) - request.get_method = lambda: "DELETE" - with 
self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(401, cm.exception.code) - - request.add_header("Authorization", authString) - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(409, cm.exception.code) - - request.add_header("If-Match", checksum) - response = urllib2.urlopen(request) - - request = urllib2.Request(self.url) - with self.assertRaises(urllib2.HTTPError) as cm: - response = urllib2.urlopen(request) - self.assertEqual(404, cm.exception.code) - -if __name__ == '__main__': - unittest.main() diff --git a/pythonPackages/ufpy/test/testQpidTimeToLive.py b/pythonPackages/ufpy/test/testQpidTimeToLive.py deleted file mode 100644 index ce3f074703..0000000000 --- a/pythonPackages/ufpy/test/testQpidTimeToLive.py +++ /dev/null @@ -1,104 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 03/09/11 njensen Initial Creation. -# 08/15/13 2169 bkowal Decompress data read from the queue -# -# -# - -import time, sys -import threading - -import dynamicserialize - -TIME_TO_SLEEP = 300 - -class ListenThread(threading.Thread): - - def __init__(self, hostname, portNumber, topicName): - self.hostname = hostname - self.portNumber = portNumber - self.topicName = topicName - self.nMessagesReceived = 0 - self.waitSecond = 0 - self.stopped = False - threading.Thread.__init__(self) - - def run(self): - from ufpy import QpidSubscriber - self.qs = QpidSubscriber.QpidSubscriber(self.hostname, self.portNumber, True) - self.qs.topicSubscribe(self.topicName, self.receivedMessage) - - def receivedMessage(self, msg): - print "Received message" - self.nMessagesReceived += 1 - if self.waitSecond == 0: - fmsg = open('/tmp/rawMessage', 'w') - fmsg.write(msg) - fmsg.close() - - while self.waitSecond < TIME_TO_SLEEP and not self.stopped: - if self.waitSecond % 60 == 0: - print time.strftime('%H:%M:%S'), "Sleeping and stuck in not so infinite while loop" - self.waitSecond += 1 - time.sleep(1) - - print time.strftime('%H:%M:%S'), "Received", self.nMessagesReceived, "messages" - - def stop(self): - print "Stopping" - self.stopped = True - self.qs.close() - - - -def main(): - print "Starting up at", time.strftime('%H:%M:%S') - - topic = 'edex.alerts' - host = 'localhost' - port = 5672 - - thread = ListenThread(host, port, topic) - try: - thread.start() - while True: - time.sleep(3) - except KeyboardInterrupt: - pass - finally: - thread.stop() - - -if __name__ == '__main__': - main() - - - diff --git a/rpms/awips2.cave/Installer.cave/component.spec b/rpms/awips2.cave/Installer.cave/component.spec index e6ccdfcd9a..e3283d5f58 100644 --- a/rpms/awips2.cave/Installer.cave/component.spec +++ b/rpms/awips2.cave/Installer.cave/component.spec @@ -115,6 
+115,10 @@ fi %build %install + +rm -rf %{_baseline_workspace}/build/cave/tmp +rm -rf %{_baseline_workspace}/build/cave/p2 + mkdir -p ${RPM_BUILD_ROOT}/awips2 if [ $? -ne 0 ]; then exit 1 diff --git a/rpms/awips2.core/Installer.awips/component.spec b/rpms/awips2.core/Installer.awips/component.spec index db26b0709a..1e993274ec 100644 --- a/rpms/awips2.core/Installer.awips/component.spec +++ b/rpms/awips2.core/Installer.awips/component.spec @@ -46,9 +46,6 @@ if [ $? -ne 0 ]; then exit 1 fi -%post -/usr/bin/edex setup - %clean rm -rf ${RPM_BUILD_ROOT} diff --git a/rpms/awips2.core/Installer.localization/component.spec b/rpms/awips2.core/Installer.localization/component.spec index 08b7c8ad50..3333a90e06 100644 --- a/rpms/awips2.core/Installer.localization/component.spec +++ b/rpms/awips2.core/Installer.localization/component.spec @@ -46,11 +46,10 @@ fi # Build all WFO site localization Map Scales (Regional.xml and WFO.xml) BUILD_DIR=%{_baseline_workspace}/rpms/awips2.core/Installer.localization/ -UTIL=%{_static_files}/localization -#file=$BUILD_DIR/wfo.dat +UTIL=${AWIPSII_STATIC_FILES}/localization +COMMON_DIR=$BUILD_DIR/common_static file=$BUILD_DIR/coords.dat regional=$BUILD_DIR/coords_regional.dat -# for site in $(cat $file |cut -c -3) do @@ -68,11 +67,9 @@ do maxy=$(cat $file |grep $site | cut -d"," -f11 | tr -d '[[:space:]]') # CAVE - CAVE_DIR=$UTIL/cave_static/site/$site + CAVE_DIR=$BUILD_DIR/utility/cave_static/site/$site/ mkdir -p $CAVE_DIR - cp -R $BUILD_DIR/utility/cave_static/* $CAVE_DIR - mkdir -p ~/awips2-builds/localization/localization/utility/cave_static/site/$site - cp -R $BUILD_DIR/utility/cave_static/* ~/awips2-builds/localization/localization/utility/cave_static/site/$site + cp -R $UTIL/cave_static/* $CAVE_DIR grep -rl 'LOWX' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/LOWX/'$lowx'/g' grep -rl 'HIGHX' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/HIGHX/'$highx'/g' grep -rl 'LOWY' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/LOWY/'$lowy'/g' @@ -82,8 +79,6 @@ do grep -rl 'MINY' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/MINY/'$miny'/g' grep -rl 'MAXY' $CAVE_DIR/bundles/scales/WFO.xml | xargs sed -i 's/MAXY/'$maxy'/g' - #cp $CAVE_DIR/bundles/scales/WFO.xml ~/awips2-core/viz/com.raytheon.uf.viz.core.maps/localization/bundles/scales/WFO/$site.xml - lowx=$(cat $regional |grep $site | cut -d"," -f4 | tr -d '[[:space:]]') highx=$(cat $regional |grep $site | cut -d"," -f5 | tr -d '[[:space:]]') lowy=$(cat $regional |grep $site | cut -d"," -f6 | tr -d '[[:space:]]') @@ -105,22 +100,16 @@ do grep -rl 'XXX' $CAVE_DIR | xargs sed -i 's/XXX/'$site'/g' grep -rl 'LATITUDE' $CAVE_DIR | xargs sed -i 's/LATITUDE/'$lat'/g' grep -rl 'LONGITUDE' $CAVE_DIR | xargs sed -i 's/LONGITUDE/'$lon'/g' + # EDEX - EDEX_DIR=$UTIL/common_static/site/$site - mkdir -p $EDEX_DIR - cp -R $BUILD_DIR/utility/siteconfig/* $EDEX_DIR/ - grep -rl 'XXX' $EDEX_DIR | xargs sed -i 's/XXX/'$site'/g' + SITE_DIR=$COMMON_DIR/site/$site + mkdir -p $SITE_DIR + cp -R $UTIL/siteconfig/* $SITE_DIR/ + grep -rl 'XXX' $SITE_DIR | xargs sed -i 's/XXX/'$site'/g' done -# Copy existing (default) OAX and TBW map scales -#cp -R %{_baseline_workspace}/localization.OAX/utility/cave_static/site/* %{_baseline_workspace}/localization/utility/cave_static/site/ -#cp -R %{_baseline_workspace}/localization.TBW/utility/cave_static/site/* %{_baseline_workspace}/localization/utility/cave_static/site/ - # COMMON -COMMON_DIR=$UTIL/common_static -mkdir -p $COMMON_DIR -cp -R $BUILD_DIR/utility/common_static/* $COMMON_DIR/ - +cp 
-R $UTIL/common_static/* $COMMON_DIR/ %install if [ ! -d %{_baseline_workspace}/%{_localization_directory} ]; then diff --git a/rpms/awips2.edex/Installer.edex/component.spec b/rpms/awips2.edex/Installer.edex/component.spec index 29d74719b1..9d220388ad 100644 --- a/rpms/awips2.edex/Installer.edex/component.spec +++ b/rpms/awips2.edex/Installer.edex/component.spec @@ -15,11 +15,8 @@ Packager: %{_build_site} provides: awips2-edex provides: awips2-base-component provides: awips2-base -requires: awips2-python -requires: awips2-java -requires: awips2-psql -requires: awips2-yajsw -requires: awips2-qpid-java-broker +Requires: net-tools +Requires: awips2-java, awips2-python, awips2-psql, awips2-yajsw, awips2-qpid-java-broker Obsoletes: awips2-edex-grib < 16.1.6 Obsoletes: awips2-edex-configuration @@ -124,7 +121,6 @@ fi %pre %post - echo "#generated on $(date)" > /etc/init.d/edexServiceList echo "export SERVICES=('ingest' 'ingestGrib' 'request')" >> /etc/init.d/edexServiceList diff --git a/rpms/awips2.edex/Installer.edex/scripts/edex_camel b/rpms/awips2.edex/Installer.edex/scripts/edex_camel index 1eed5518c3..98892d1ace 100755 --- a/rpms/awips2.edex/Installer.edex/scripts/edex_camel +++ b/rpms/awips2.edex/Installer.edex/scripts/edex_camel @@ -52,9 +52,6 @@ export TODAY=`/bin/date +%Y%m%d` # Use rpm to find the paths that we need. export JAVA_INSTALL="/awips2/java" export PYTHON_INSTALL="/awips2/python" -# if the EDEX_INSTALL export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. export EDEX_INSTALL="/awips2/edex" export PSQL_INSTALL="/awips2/psql" @@ -64,17 +61,11 @@ export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:/usr/local/sbin:/usr/local export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib export LD_PRELOAD=${PYTHON_INSTALL}/lib/libpython2.7.so export AMQP_SPEC="" -# if the DATA_ARCHIVE_ROOT export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. -export DATA_ARCHIVE_ROOT=/tmp/sbn +export DATA_ARCHIVE_ROOT=/awips2/data_store # what to do to get pids of an EDEX instance # $1 == instance token getCamelAndWrapperPids() { - # if the _camel_pid line is updated. Updates to: - # com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java - # may be required. _camel_pid=`pgrep -f -u $EDEXUSER "java -Dedex.run.mode=${1} "` if [ "$_camel_pid" != "" ]; then # occasionally will get more than one running, grab parent for first one only diff --git a/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_decode b/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_decode index 24bcf70a50..66821d7a59 100644 --- a/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_decode +++ b/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_decode @@ -36,9 +36,6 @@ export TODAY=`/bin/date +%Y%m%d` # Use rpm to find the paths that we need. export JAVA_INSTALL="/awips2/java" export PYTHON_INSTALL="/awips2/python" -# if the EDEX_INSTALL export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. 
export EDEX_INSTALL="/awips2/edex" export PSQL_INSTALL="/awips2/psql" @@ -48,17 +45,11 @@ export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:/usr/local/sbin:/usr/local export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib export LD_PRELOAD=${PYTHON_INSTALL}/lib/libpython2.7.so export AMQP_SPEC="" -# if the DATA_ARCHIVE_ROOT export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. -export DATA_ARCHIVE_ROOT=/tmp/sbn +export DATA_ARCHIVE_ROOT=/awips2/data_store # what to do to get pids of an EDEX instance # $1 == instance token getCamelAndWrapperPids() { - # if the _camel_pid line is updated. Updates to: - # com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java - # may be required. _camel_pid=`pgrep -f -u $EDEXUSER "java -Dedex.run.mode=${1} "` if [ "$_camel_pid" != "" ]; then # occasionally will get more than one running, grab parent for first one only diff --git a/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_request b/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_request index 4ad8223239..b748b9f7ad 100644 --- a/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_request +++ b/rpms/awips2.edex/Installer.edex/scripts/init.d/edex_request @@ -36,9 +36,6 @@ export TODAY=`/bin/date +%Y%m%d` # Use rpm to find the paths that we need. export JAVA_INSTALL="/awips2/java" export PYTHON_INSTALL="/awips2/python" -# if the EDEX_INSTALL export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. export EDEX_INSTALL="/awips2/edex" export PSQL_INSTALL="/awips2/psql" @@ -48,17 +45,11 @@ export PATH=${JAVA_INSTALL}/bin:${PYTHON_INSTALL}/bin:/usr/local/sbin:/usr/local export LD_LIBRARY_PATH=${JAVA_INSTALL}/lib:${PYTHON_INSTALL}/lib export LD_PRELOAD=${PYTHON_INSTALL}/lib/libpython2.7.so export AMQP_SPEC="" -# if the DATA_ARCHIVE_ROOT export line is updated. Updates to: -# com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java -# may be required. -export DATA_ARCHIVE_ROOT=/tmp/sbn +export DATA_ARCHIVE_ROOT=/awips2/data_store # what to do to get pids of an EDEX instance # $1 == instance token getCamelAndWrapperPids() { - # if the _camel_pid line is updated. Updates to: - # com.raytheon.wes2bridge.manager/src/com/raytheon/wes2bridge/manager/Wes2BridgeManager.java - # may be required. 
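These init scripts identify a running EDEX instance by matching the JVM's -Dedex.run.mode flag with pgrep, as in the retained getCamelAndWrapperPids body below. A hypothetical Python equivalent of that lookup; the helper name and defaults are illustrative only, not part of the AWIPS scripts:

    import subprocess

    def edex_pid(run_mode, user="awips"):
        """Return the first pid whose command line matches the run mode."""
        result = subprocess.run(
            ["pgrep", "-f", "-u", user,
             "java -Dedex.run.mode=%s " % run_mode],
            capture_output=True, text=True)
        pids = result.stdout.split()
        return int(pids[0]) if pids else None

    print(edex_pid("request"))  # e.g. 12345, or None when not running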
    _camel_pid=`pgrep -f -u $EDEXUSER "java -Dedex.run.mode=${1} "`
    if [ "$_camel_pid" != "" ]; then
        # occasionally will get more than one running, grab parent for first one only
diff --git a/rpms/awips2.upc/Installer.awips/programs/edex b/rpms/awips2.upc/Installer.awips/programs/edex
index b5e17f02d4..bed4a9621d 100755
--- a/rpms/awips2.upc/Installer.awips/programs/edex
+++ b/rpms/awips2.upc/Installer.awips/programs/edex
@@ -23,15 +23,19 @@
 #                                                                       #
 #-----------------------------------------------------------------------#
 # ChangeLog                                                             #
-# 07/2011   M.James/Unidata   Created                                   #
-# 10/2015   M.James/Unidata   Log files fixed; purge check added        #
-# 11/2015   M.James/Unidata   Added CAVE user count as "edex users"     #
-# 10/2016   M.James/Unidata   No longer need to edit ldmd.conf          #
-# 06/2017   M.James/Unidata   Restart                                   #
-# 09/2017   M.James/Unidata   Rudimentary remote db password control,   #
-#                             remove pg_hba.conf edits                  #
-# 01/2018   M.James/Unidata   Added qpid-stat wrapper as edex qpid      #
-# 08/2018   M.James/Unidata   Registry logging                          #
+# 07/2011   mjames   Created                                            #
+# 10/2015   mjames   Tailing log files; purge check added               #
+# 11/2015   mjames   Display user count as "edex users"                 #
+# 10/2016   mjames   No longer editing ldmd.conf                        #
+# 06/2017   mjames   Added restart command                              #
+# 09/2017   mjames   Rudimentary remote db password control             #
+#                    remove pg_hba.conf edits                           #
+# 01/2018   mjames   Added qpid-stat wrapper as edex qpid               #
+# 08/2018   mjames   Registry logging                                   #
+# 09/2018   mjames   Cleanup setup/editing                              #
+# 10/2018   mjames   Check for edex_ldm on LDM start/stop               #
+# 12/2018   mjames   Find a log file to tail when running modes         #
+#                    other than ingest                                  #
 #-----------------------------------------------------------------------#
 . /etc/profile.d/awips2.sh
 # directories definitions
@@ -50,26 +54,25 @@ IP=`/sbin/ifconfig $usedev | grep -v "inet6" | grep "inet" | awk '{ print $2 }'
 # truncate
 IP_CIDR="${IP%.*}"
-editFiles=($EDEX_ENV_FILE)
-boolFiles=(true)
-editFuncs=(edit_setup)
 YMD=`date '+%Y%m%d'`
 args=("$@")
-# functions
-edex_status() { # report back edex server on/off status
+#
+# Report back edex server on/off status
+#
+edex_status() {
     echo ''
     echo '[edex status]'
     if [ -d "${DATA_PATH}" ]; then
         # CHECK POSTGRES
-        postgres_prc=`ps aux | grep postgresql | grep -v grep | grep -v java |awk '{ print $11 }'`
+        postgres_prc=`ps aux | grep postgresql | grep -v grep | grep -v java |awk '{ print $11 }' | head -1`
         if [ -z $postgres_prc ]; then
             echo ' postgres :: not running'
         else
-            postgresPid=`ps aux | grep postgresql\/bin\/postmaster | grep -v grep | awk '{ print $2 }'`
+            postgresPid=`ps aux | grep postgresql\/bin\/postmaster | grep -v grep | awk '{ print $2 }' | head -1`
             echo ' postgres :: running :: pid '$postgresPid''
         fi
     fi
@@ -116,12 +119,15 @@ edex_status() { # report back edex server on/off status
         edex_request_pid=`ps aux | grep edex.run.mode=request | awk '{ print $2 }'`
         echo ' EDEXrequest :: running :: pid '$edex_request_pid''
     fi
-    edex_reg_ps=`ps aux | grep edex.run.mode=centralRegistry | awk '{ print $15 }'`
-    if [ -z $edex_reg_ps ]; then
-        echo ' EDEXregistry :: not running'
-    else
-        edex_reg_pid=`ps aux | grep edex.run.mode=centralRegistry | awk '{ print $2 }'`
-        echo ' EDEXregistry :: running :: pid '$edex_reg_pid''
+
+    if [ -f "/awips2/edex/conf/resources/com.raytheon.uf.edex.datadelivery.harvester.properties" ]; then
+        edex_reg_ps=`ps aux | grep edex.run.mode=centralRegistry | awk '{ print $15 }'`
+        if [ -z $edex_reg_ps ]; then
+            echo ' EDEXregistry :: not running'
+        else
+            edex_reg_pid=`ps aux | grep edex.run.mode=centralRegistry | awk '{ print $2 }'`
+            echo ' EDEXregistry :: running :: pid '$edex_reg_pid''
+        fi
     fi

     if [ -d "/awips2/ldm" ]; then
@@ -137,6 +143,9 @@ edex_status() { # report back edex server on/off status
     echo ''
 }
+#
+# Tail an EDEX log file
+#
 tail_log() {
     if [ -e $LOG_FILE ]; then
         echo ' :: Viewing '${LOG_FILE}'. Press CTRL+C to exit'
@@ -149,7 +158,10 @@ tail_log() {
     fi
 }
-edex_log() { # display todays log, default to ingest
+#
+# Display today's log, default to ingest
+#
+edex_log() {
     echo '[edex] EDEX Log Viewer'
     echo ''
     # LDM log
@@ -212,7 +224,12 @@ edex_log() { # display todays log, default to ingest
         exit;
     fi
     if [ -z ${args[1]} ]; then
-        echo ' :: No log specified - Defaulting to ingest log'
+        if [ -f ${LOG_PATH}/edex-ingest-${YMD}.log ]; then
+            LOG_FILE=${LOG_PATH}/edex-ingest-${YMD}.log
+        else
+            LOG_FILE=$(ls -tr $LOG_PATH/edex-ingest*${YMD}.log | grep -Ev 'wrapper|performance|hibernate|start' | tail -1)
+        fi
+        echo ' :: No log specified, but I found '$LOG_FILE
         tail_log
         exit;
     else
@@ -222,87 +239,74 @@ edex_log() { # display todays log, default to ingest
     fi
 }
+#
+# Update placeholder "external.fqdn" with $(hostname) # See /awips2/edex/bin/setup.env
+#
 edit_setup() {
-    sed -i.setup_$YMD 's/external.fqdn/'$HOSTNAME'/g' $EDEX_ENV_FILE
-    echo '[edit] '$HOSTNAME' added to '$EDEX_ENV_FILE
-    echo '       File backed up to '$EDEX_ENV_FILE'.setup_'$YMD
+    if grep -q external.fqdn "$EDEX_ENV_FILE"; then
+        echo '[edex] EDEX IP and Hostname Setup'
+        sed -i.setup_$YMD 's/external.fqdn/'$(hostname)'/g' $EDEX_ENV_FILE
+        echo '[edit] '$(hostname)' added to '$EDEX_ENV_FILE
+        echo '       File backed up to '$EDEX_ENV_FILE'.setup_'$YMD
+    fi
     # registry/data delivery fqdn
-    sed -i 's/external.fqdn/'$HOSTNAME'/g' /awips2/edex/conf/resources/*.properties
-}
-
-edex_edits() {
-    for index in ${!editFiles[*]}; do
-        if ${boolFiles[$index]}; then
-            ${editFuncs[$index]}
-        fi
+    shopt -s nullglob
+    for f in /awips2/edex/conf/resources/*.properties; do
+        sed -i 's/external.fqdn/'$(hostname)'/g' $f
     done
-    echo '[done]'
-    exit;
 }
-
-edex_ipexit() { # abandon ip editing, post msg to guide manual edits
-    for index in ${!editFiles[*]}; do
-        if ${boolFiles[$index]}; then
-            editCom+='\t'${editFiles[$index]}'\n'
-        fi
-    done
-    echo -e '[edex] Exiting EDEX IP Setup'
-    echo -e ''
-    echo -e ' You may need to MANUALLY EDIT the following files'
-    echo -e '\n'$editCom
-    echo -e ' for EDEX to work properly. \n'
-}
-
-edex_setup() { # setup IP subnet and domains for EDEX, prompt user for confirm
+#
+# Initial EDEX setup, run as "edex setup", to set init run levels and hostname definitions
+# This is executed after both awips2 and awips2-ldm RPMs are installed/updated
+#
+edex_setup() {
    echo ''
-    # run services on system startup
    if [ -f "/etc/init.d/edex_postgres" ]; then
        chkconfig edex_postgres --add
        chkconfig edex_postgres on --level 35
    fi
-
    if [ -f "/etc/init.d/httpd-pypies" ]; then
        chkconfig httpd-pypies --add
        chkconfig httpd-pypies on --level 35
    fi
-
    chkconfig qpidd --add
    chkconfig qpidd on --level 35
-
    chkconfig edex_camel --add
    chkconfig edex_camel on --level 35
-
-    echo '[edex] EDEX IP and Hostname Setup'
    # check files exist
-    continue=true
-    for index in ${!editFiles[*]}; do
-        if [[ ! -f ${editFiles[$index]} ]]; then
-            echo '[Error] ** '${editFiles[$index]}' not found.'
-            continue=false
-        fi
-    done
-    if ! $continue; then
+    if [ ! -f ${EDEX_ENV_FILE} ]; then
+        echo '[Error] ** '${EDEX_ENV_FILE}' not found.'
        echo 'Exiting'
        exit;
    fi
-    continue=false
    # ldm regutil
-    if [ -d "/awips2/ldm" ]; then
-        #echo '[edit] ldm regutil...'
+ if [ -d "/awips2/ldm/etc" ]; then su - awips -c 'regutil -s '$HOSTNAME' /hostname' fi - echo '' - edex_edits - if [ $continue=true ]; then - echo ' EDEX correctly configured' - fi - echo '' + edit_setup } -edex_start() { # start all edex services +ldm_start() { + if [ -f /etc/init.d/edex_ldm ]; then + su -c "service edex_ldm start" + fi +} +ldm_stop() { + if [ -f /etc/init.d/edex_ldm ]; then + su -c "service edex_ldm stop" + fi +} + +# +# Start all EDEX services +# +edex_start() { edex_cleanup + edit_setup for dir in /awips2/tmp /awips2/data_store ; do if [ ! -d $dir ]; then mkdir -p $dir @@ -322,20 +326,23 @@ edex_start() { # start all edex services elif [ "${args[1]}" == 'ingest' ]; then printf "#!/bin/bash\nexport SERVICES=( 'ingest' 'ingestGrib' )\n" > /etc/init.d/edexServiceList su -c "service edex_camel start" - su -c "service edex_ldm start" + ldm_start elif [ "${args[1]}" == 'database' ]; then printf "#!/bin/bash\nexport SERVICES=( 'request' )\n" > /etc/init.d/edexServiceList su -c "service edex_camel start" elif [ "${args[1]}" != 'dev' ]; then su -c "service edex_camel start" - su -c "service edex_ldm start" + ldm_start fi } -edex_stop() { # stop all edex services +# +# Stop all EDEX services +# +edex_stop() { if [ "${args[1]}" != 'dev' ]; then su -c "service edex_camel stop" - su -c "service edex_ldm stop" + ldm_stop fi su -c "service qpidd stop" if [ -f "/etc/init.d/httpd-pypies" ]; then @@ -347,6 +354,9 @@ edex_stop() { # stop all edex services edex_status; } +# +# Restart all EDEX Services +# edex_restart() { su -c "service edex_camel restart" edex_status; @@ -361,6 +371,9 @@ edex_password() { fi } +# +# Check and reset EDEX purgejobs +# edex_purge() { if [ "${args[1]}" == 'reset' ]; then edex_purge_reset @@ -377,36 +390,53 @@ edex_purge() { edex_purge_reset() { su - awips -c 'psql metadata -c "update purgejobs set failedcount = 0;"' >& /dev/null } + +# +# Show Qpid data ingest queues +# edex_qpid() { - /awips2/python/bin/qpid-stat -q -S msgIn + /awips2/python/bin/qpid-stat -q -S msgIn } + +# +# Qpid cleanup +# edex_cleanup() { - rm -rf /awips2/qpid/edexMessageStore/edex/ + rm -rf /awips2/qpid/edexMessageStore/edex/ } + +# +# Print User Info +# edex_users(){ if [ "${args[1]}" != '' ]; then YMD=${args[1]} fi - userList=$(cat ${LOG_PATH}/edex-request-thriftSrv-${YMD}.log |grep ":CAVE:"|cut -d "[" -f 3| cut -d ":" -f 1|grep -v pluginName|sort | uniq) + userList=$(cat ${LOG_PATH}/edex-request-thriftSrv-${YMD}.log |grep ":CAVE:"|cut -d "[" -f 3| cut -d ":" -f 1|grep -v pluginName| grep -v ThriftSrvRequestLogger |sort | uniq) echo "" echo " -- EDEX Users ${YMD} --" echo "$userList" echo "" } -edex_options() { # print out options for this programs +# +# Echo available options +# +edex_options() { echo '' echo ' edex (status|start|stop|setup|log|purge|qpid|users)' echo '' } - edex_invalid() { echo '' echo " Invalid option: '"${args[0]}"' not understood" edex_options } -check_input() { # check input against accepted options +# +# Check input against accepted options +# +check_input() { found=false for i in "${options[@]}" do @@ -429,7 +459,7 @@ check_input() { # check input against accepted options fi } -# check input - first/only program run +# +# Check input: it all starts here. 
# check_input -exit; diff --git a/rpms/awips2.upc/Installer.ldm/component.spec b/rpms/awips2.upc/Installer.ldm/component.spec index c865525709..89d855d04e 100644 --- a/rpms/awips2.upc/Installer.ldm/component.spec +++ b/rpms/awips2.upc/Installer.ldm/component.spec @@ -23,7 +23,7 @@ AutoReq: no Requires: awips2 Requires: awips2-qpid-lib Requires: awips2-python -Requires: pax, gcc, libtool +Requires: pax, gcc, libtool, make Requires: libxml2-devel, libpng-devel, boost-program-options Provides: awips2-ldm BuildRequires: awips2-python @@ -272,6 +272,7 @@ if [ -d /tmp/ldm/ ]; then cp -rp /tmp/ldm/ldmd.* /awips2/ldm/etc/ fi + %preun %postun /sbin/ldconfig > /dev/null 2>&1 diff --git a/rpms/awips2.upc/Installer.ldm/patch/etc/pqact.conf b/rpms/awips2.upc/Installer.ldm/patch/etc/pqact.conf index b7ee02d0db..8bd6690143 100644 --- a/rpms/awips2.upc/Installer.ldm/patch/etc/pqact.conf +++ b/rpms/awips2.upc/Installer.ldm/patch/etc/pqact.conf @@ -749,12 +749,6 @@ HDS ^LDIZ48 KWNS (..)(..)(..) # FILE -edex -close # /awips2/data_store/grid/UKMET-\1-GRID\2/UKMET-\1-GRID\2_\3\4_\5_\6-(seq).grib1 # -# Ocean Sea Surface Temperature (SST) Grids #61-64 -# -HDS ^H.[T-W] - FILE -edex -close - /awips2/data_store/grid/SST/%Y%m%d%H%M.sst.grib -# # HPCGuide # NGRID ^([LM][ABCDFGH]U...) (KWBN) (..)(..)(..)[^!]*!(grib|grib2)/[^/]*/([^/]*)/#([^/]*)/([0-9]{8})([0-9]{4})(F[0-9]{3})/([^/]*) diff --git a/rpms/build/common/lookupRPM.sh b/rpms/build/common/lookupRPM.sh index a80a2750d1..0847a48488 100644 --- a/rpms/build/common/lookupRPM.sh +++ b/rpms/build/common/lookupRPM.sh @@ -223,7 +223,7 @@ function lookupRPM() return 0 fi if [ "${1}" = "awips2-python-awips" ]; then - export RPM_SPECIFICATION="/awips2/repo/python-awips" + export RPM_SPECIFICATION="/awips2/repo/python-awips/rpm" return 0 fi if [ "${1}" = "awips2-python-cython" ]; then
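For reference alongside the urllib2-based testLocalizationRest.py removed earlier in this patch, the same create/read/delete round-trip against the localization REST service in Python 3. The host, port, and test/test credentials are taken from that deleted test and assume a local EDEX; they are not live defaults.

    import urllib.request
    from base64 import b64encode

    url = ("http://localhost:9581/services/localization/"
           "common_static/user/test/test.xml")
    auth = "Basic " + b64encode(b"test:test").decode()

    # Create: the service requires an If-Match header even for a file that
    # does not exist yet; the deleted test used a placeholder checksum
    put = urllib.request.Request(url, data=b"Test Data", method="PUT")
    put.add_header("Authorization", auth)
    put.add_header("If-Match", "NON_EXISTENT_CHECKSUM")
    urllib.request.urlopen(put)

    # Read it back; Content-MD5 is the version tag for the next change
    got = urllib.request.urlopen(url)
    assert got.read() == b"Test Data"
    checksum = got.headers["Content-MD5"]

    # Delete requires authorization plus the current checksum
    delete = urllib.request.Request(url, method="DELETE")
    delete.add_header("Authorization", auth)
    delete.add_header("If-Match", checksum)
    urllib.request.urlopen(delete)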