From 5bcc84e9b4f57295318c6c60bec7d1c77a10936c Mon Sep 17 00:00:00 2001 From: Michael James Date: Thu, 15 Feb 2018 12:27:30 -0700 Subject: [PATCH] Deployed fce4cd8b38 with MkDocs version: 0.16.3 --- 404.html | 2 +- appendix/appendix-acronyms/index.html | 2 +- appendix/appendix-cots/index.html | 2 +- appendix/appendix-grid-parameters/index.html | 2 +- appendix/appendix-wsr88d/index.html | 2 +- cave/bundles-and-procedures/index.html | 2 +- cave/cave-keyboard-shortcuts/index.html | 2 +- cave/cave-localization/index.html | 2 +- cave/d2d-edit-menus/index.html | 2 +- cave/d2d-gis-shapefiles/index.html | 2 +- cave/d2d-gridded-models/index.html | 2 +- cave/d2d-grids/index.html | 2 +- cave/d2d-hydro/index.html | 2 +- cave/d2d-map-resources/index.html | 2 +- cave/d2d-perspective/index.html | 2 +- cave/d2d-pointdata-surface-obs/index.html | 4 +- cave/d2d-radar-tools/index.html | 2 +- cave/d2d-radar/index.html | 4 +- cave/d2d-satellite/index.html | 2 +- cave/d2d-tools/index.html | 2 +- cave/d2d-uair/index.html | 2 +- cave/goes-16-satellite/index.html | 6 +- cave/import-export/index.html | 2 +- cave/localization-perspective/index.html | 2 +- cave/maps-views-projections/index.html | 2 +- cave/ncp-perspective/index.html | 2 +- cave/nsharp/index.html | 2 +- cave/warngen/index.html | 2 +- dev/awips-development-environment/index.html | 2 +- dev/build-nsharp-macos/index.html | 4 +- edex/case-studies/index.html | 4 +- edex/data-distribution-files/index.html | 2 +- edex/data-plugins/index.html | 2 +- edex/data-purge/index.html | 2 +- edex/distributed-computing/index.html | 6 +- edex/edex-users/index.html | 2 +- edex/ldm/index.html | 2 +- edex/new-grid/index.html | 4 +- index.html | 22 ++-- install/install-cave/index.html | 31 +++--- install/install-edex/index.html | 14 +-- install/start-edex/index.html | 2 +- mkdocs/search_index.json | 38 +++---- python/awips-grids-and-cartopy/index.html | 2 +- .../map-resources-and-topography/index.html | 2 +- python/maps-database/index.html | 2 +- python/model-sounding-data/index.html | 2 +- python/nexrad-level-3-radar/index.html | 2 +- python/python-awips-data-access/index.html | 2 +- python/satellite-imagery/index.html | 2 +- python/surface-obs-plot-metpy/index.html | 2 +- python/upper-air-bufr-soundings/index.html | 2 +- sitemap.xml | 100 +++++++++--------- 53 files changed, 160 insertions(+), 159 deletions(-) diff --git a/404.html b/404.html index ac40418ced..c52f4b1430 100644 --- a/404.html +++ b/404.html @@ -977,7 +977,7 @@ - + diff --git a/appendix/appendix-acronyms/index.html b/appendix/appendix-acronyms/index.html index ec4fb63616..b8a7af708f 100644 --- a/appendix/appendix-acronyms/index.html +++ b/appendix/appendix-acronyms/index.html @@ -1701,7 +1701,7 @@ - + diff --git a/appendix/appendix-cots/index.html b/appendix/appendix-cots/index.html index f5b2297c95..a3978ef944 100644 --- a/appendix/appendix-cots/index.html +++ b/appendix/appendix-cots/index.html @@ -1574,7 +1574,7 @@ - + diff --git a/appendix/appendix-grid-parameters/index.html b/appendix/appendix-grid-parameters/index.html index b4b7d5f480..3d7851f44c 100644 --- a/appendix/appendix-grid-parameters/index.html +++ b/appendix/appendix-grid-parameters/index.html @@ -8298,7 +8298,7 @@ - + diff --git a/appendix/appendix-wsr88d/index.html b/appendix/appendix-wsr88d/index.html index 532c796520..ad4b57a7b9 100644 --- a/appendix/appendix-wsr88d/index.html +++ b/appendix/appendix-wsr88d/index.html @@ -1874,7 +1874,7 @@ - + diff --git a/cave/bundles-and-procedures/index.html b/cave/bundles-and-procedures/index.html 
index a595cc587b..5b25319dcb 100644 --- a/cave/bundles-and-procedures/index.html +++ b/cave/bundles-and-procedures/index.html @@ -1276,7 +1276,7 @@ - + diff --git a/cave/cave-keyboard-shortcuts/index.html b/cave/cave-keyboard-shortcuts/index.html index 8ae5426bcd..93424417a0 100644 --- a/cave/cave-keyboard-shortcuts/index.html +++ b/cave/cave-keyboard-shortcuts/index.html @@ -1100,7 +1100,7 @@ - + diff --git a/cave/cave-localization/index.html b/cave/cave-localization/index.html index 99d6ec6241..87ebe47d65 100644 --- a/cave/cave-localization/index.html +++ b/cave/cave-localization/index.html @@ -1098,7 +1098,7 @@ - + diff --git a/cave/d2d-edit-menus/index.html b/cave/d2d-edit-menus/index.html index 4f751efab2..c630a4ac0b 100644 --- a/cave/d2d-edit-menus/index.html +++ b/cave/d2d-edit-menus/index.html @@ -1162,7 +1162,7 @@ - + diff --git a/cave/d2d-gis-shapefiles/index.html b/cave/d2d-gis-shapefiles/index.html index 4e35c5c762..f808008fc9 100644 --- a/cave/d2d-gis-shapefiles/index.html +++ b/cave/d2d-gis-shapefiles/index.html @@ -1421,7 +1421,7 @@ move all the items listed in the Displayed window to the Available window.

- + diff --git a/cave/d2d-gridded-models/index.html b/cave/d2d-gridded-models/index.html index ec6ad01a09..e57bc87ca1 100644 --- a/cave/d2d-gridded-models/index.html +++ b/cave/d2d-gridded-models/index.html @@ -1398,7 +1398,7 @@ groups to facilitate their use.

- + diff --git a/cave/d2d-grids/index.html b/cave/d2d-grids/index.html index d02fb0f1a7..b1f8a961e9 100644 --- a/cave/d2d-grids/index.html +++ b/cave/d2d-grids/index.html @@ -1088,7 +1088,7 @@ - + diff --git a/cave/d2d-hydro/index.html b/cave/d2d-hydro/index.html index afa370f039..0ed9f82e1f 100644 --- a/cave/d2d-hydro/index.html +++ b/cave/d2d-hydro/index.html @@ -1110,7 +1110,7 @@ probabilities, and pressure and frontal analysis. - + diff --git a/cave/d2d-map-resources/index.html b/cave/d2d-map-resources/index.html index 4efe82776a..78c117b037 100644 --- a/cave/d2d-map-resources/index.html +++ b/cave/d2d-map-resources/index.html @@ -1059,7 +1059,7 @@ - + diff --git a/cave/d2d-perspective/index.html b/cave/d2d-perspective/index.html index 8afa259fc2..e61dab5e40 100644 --- a/cave/d2d-perspective/index.html +++ b/cave/d2d-perspective/index.html @@ -1419,7 +1419,7 @@ want. Inventory loads into the currently displayed frame. - + diff --git a/cave/d2d-pointdata-surface-obs/index.html b/cave/d2d-pointdata-surface-obs/index.html index 33f84bc4c7..3a631e02e7 100644 --- a/cave/d2d-pointdata-surface-obs/index.html +++ b/cave/d2d-pointdata-surface-obs/index.html @@ -1029,7 +1029,7 @@ that were entered into the LSR text database and decoded into the correct point

Lightning

This menu item provides options for displaying lightning flash plots over 1 minute, 5 minute, 15 minute, and 1 hour intervals.


-

+

Software Components

  • EDEX
  • @@ -1281,9 +1281,9 @@ -

    CAVE

    +

    CAVE

    LDM

    -

    http://www.unidata.ucar.edu/software/ldm/

    +

    https://www.unidata.ucar.edu/software/ldm/

The LDM (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component of the Unidata Internet Data Distribution (IDD) system. In AWIPS, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery, and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands edex start and edex stop, which run the commands service edex_ldm start and service edex_ldm stop.
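For illustration, the feeds themselves come from REQUEST entries in /awips2/ldm/etc/ldmd.conf; two examples taken from the sample configuration shown later in this patch:

    REQUEST FNEXRAD|IDS|DDPLUS|UNIWISC ".*" idd.unidata.ucar.edu
    REQUEST NGRID ".*" idd.unidata.ucar.edu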

    edexBridge

edexBridge, invoked in the LDM configuration file /awips2/ldm/etc/ldmd.conf, is used by the LDM to post "data available" messages to Qpid, alerting the EDEX Ingest server that a file is ready for processing.
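The hookup is a single EXEC line in ldmd.conf (the same line that appears in the EDEX setup notes later in this patch):

    EXEC "edexBridge -s localhost"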

    @@ -1355,7 +1355,7 @@ - + diff --git a/install/install-cave/index.html b/install/install-cave/index.html index dd121958b5..ff5e2dbc73 100644 --- a/install/install-cave/index.html +++ b/install/install-cave/index.html @@ -1088,31 +1088,32 @@ Linux x86_64 -installCAVE.sh +install.sh macOS -Download and install both
    awips2-cave-17.1.1.dmg
    awips-python.pkg +Download and install both
    awips2-cave-17.1.1.dmg
    awips-python.pkg 32-bit Windows -awips-cave.msi +awips-cave.msi 64-bit Windows -awips-cave.amd64.msi +awips-cave.amd64.msi

    Linux

    For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData.

    -

    Install as root (all package dependencies should be resolved by yum)

    -
    chmod 755 ./installCAVE.sh
    -./installCAVE.sh
    +

    Install CAVE for Linux

    +
    wget https://www.unidata.ucar.edu/software/awips2/install.sh
    +chmod 755 install.sh
    +sudo ./install.sh --cave
     

    Run CAVE from the menu Applications > Internet > AWIPS CAVE, or from the command line as simply cave.
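If CAVE fails to start cleanly, the troubleshooting note at the bottom of this page suggests clearing the local cache and relaunching; remote localization files re-sync on the next connection to an EDEX server:

    rm -rf ~/caveData   # local cache only; bundles, colormaps, etc. are restored from EDEX
    cave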

    @@ -1128,12 +1129,12 @@

    macOS

    -

    CAVE for macOS requires the awips-python.pkg package be installed in order for derived parameters to work.

    +

    CAVE for macOS requires the awips-python.pkg package be installed in order for derived parameters to work.

    -

awips-python.pkg is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, jep must be installed in some way (it is assumed to be in /Library/Python/2.7/site-packages/jep/). You are free to install jep yourself, but be aware that installing it from source or via pip requires the Oracle JDK as well as Xcode and its Command Line Tools. The awips-python.pkg package is provided as a workaround for this.

    +

awips-python.pkg is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, jep must be installed in some way (it is assumed to be in /Library/Python/2.7/site-packages/jep/). You are free to install jep yourself, but be aware that installing it from source or via pip requires the Oracle JDK as well as Xcode and its Command Line Tools. The awips-python.pkg package is provided as a workaround for this.
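For those who prefer the manual route, a rough sketch (assuming the Oracle JDK and the Xcode Command Line Tools are already installed, and that pip targets the system Python 2.7):

    export JAVA_HOME=$(/usr/libexec/java_home)   # jep builds against the installed JDK
    pip install jep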

    CAVE for macOS writes and syncs files to ~/Library/caveData

    @@ -1141,8 +1142,8 @@

    Windows

Windows clients are still based on the CAVE 16.2.2 code base and are provided because no 17.1.1 Windows client is available.

    Writes files to caveData in the user's home directory.

    @@ -1229,7 +1230,7 @@ - + diff --git a/install/install-edex/index.html b/install/install-edex/index.html index 5cf77472ee..07f74573d8 100644 --- a/install/install-edex/index.html +++ b/install/install-edex/index.html @@ -1253,12 +1253,12 @@
    groupadd fxalpha && usermod -G fxalpha awips
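A quick sanity check after this step (not part of the original instructions) is to confirm the account and group exist:

    id awips   # should list fxalpha among the groups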
     

    2. Install EDEX

    -

    Download and run installEDEX.sh

    -
    wget http://www.unidata.ucar.edu/software/awips2/installEDEX.sh
    -chmod 755 ./installEDEX.sh
    -./installEDEX.sh
    +

    Download and run install.sh --edex

    +
    wget https://www.unidata.ucar.edu/software/awips2/install.sh
    +chmod 755 install.sh
    +sudo ./install.sh --edex
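Once the installer finishes, the install can be confirmed with the same query listed under "What Version is my EDEX?" at the end of this page:

    rpm -qa | grep awips2-edex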
     
    -

    installEDEX.sh will perform the following steps (it's always a good idea to review downloaded shell scripts):

    +

    install.sh --edex will perform the following steps (it's always a good idea to review downloaded shell scripts):

    1. Saves the appropriate Yum repo file to /etc/yum.repos.d/awips2.repo
2. Increases process and file limits for the awips account in /etc/security/limits.conf
    3. @@ -1387,7 +1387,7 @@ EXEC "edexBridge -s localhost"
  • -

    /etc/security/limits.conf defines the number of user processes and files (this step is automatically performed by installEDEX.sh). Without these definitions, Qpid is known to crash during periods of high ingest.

    +

    /etc/security/limits.conf defines the number of user processes and files (this step is automatically performed by install.sh --edex). Without these definitions, Qpid is known to crash during periods of high ingest.

    awips soft nproc 65536
     awips soft nofile 65536
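One way to verify the raised limits are active for the awips account (a simple check, not from the original instructions):

    su - awips -c "ulimit -u; ulimit -n"   # both should report 65536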
     
    @@ -1553,7 +1553,7 @@ service edex_camel start - + diff --git a/install/start-edex/index.html b/install/start-edex/index.html index 5ce2de7561..d28433ef96 100644 --- a/install/start-edex/index.html +++ b/install/start-edex/index.html @@ -1341,7 +1341,7 @@ export MAX_MEM=4096 # in Meg - + diff --git a/mkdocs/search_index.json b/mkdocs/search_index.json index f0211062cd..056931e213 100644 --- a/mkdocs/search_index.json +++ b/mkdocs/search_index.json @@ -2,7 +2,7 @@ "docs": [ { "location": "/", - "text": "Unidata AWIPS User Manual\n\uf0c1\n\n\n\n\nUnidata AWIPS\n is a meteorological display and analysis package originally developed by the \nNational Weather Service\n and \nRaytheon\n, repackaged by Unidata to support non-operational use in research and education by \nUCAR member institutions\n.\n\n\nAWIPS takes a unified approach to data ingest, and most data types follow a path through the system starting with an \nLDM\n client requesting data from the \nUnidata IDD\n. These data files are then decoded and stored as HDF5 and Postgres metadata by \nEDEX\n. \n\n\nUnidata supports two visualization frameworks for rendering data: \nCAVE\n, and the Python Data Access Framework (\npython-awips\n).\n\n\n\n\nInstall CAVE 17.1.1\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstallCAVE.sh \n\n\n\n\n\n\nmacOS\n\n\nDownload and install both\nawips2-cave-17.1.1.dmg \nawips-python.pkg \n\n\n\n\n\n\n32-bit Windows\n\n\nawips-cave.msi \n\n\n\n\n\n\n64-bit Windows\n\n\nawips-cave.amd64.msi \n\n\n\n\n\n\n\n\n\n\nEDEX Data Server 17.1.1\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstallEDEX.sh \n\n\n\n\n\n\n\n\nRead full EDEX install instructions...\n\n\n\n\nLicense\n\uf0c1\n\n\nUnidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). Unidata AWIPS contains no proprietery content and is therefore not subject to export controls as stated in the \nMaster Rights\n licensing file. \n\n\n\n\nDistributed Computing\n\uf0c1\n\n\nAWIPS makes use of service-oriented architecture to request, process, and serve real-time meteorological data. While originally developed for use on internal NWS forecast office networks, where operational installations of AWIPS can consist of a dozen servers or more, because the AWIPS source code was hard-coded with the NWS network configuration, the early Unidata releases were stripped of operation-specific configurations and plugins, and released specifically for standalone installation. This made sense given that a single EDEX instance with a Solid State Drive could handle most of the entire NOAAport data volume. However, with GOES-R(16) coming online, and more gridded forecast models being created at finer temporal and spatial resolutions, there was now a need to distribute EDEX data decoding in order to handle this firehose of data.\n\n\n\n\nRead More: \nDistributed EDEX\n\n\n\n\n\n\n\n\nSoftware Components\n\uf0c1\n\n\n\n\nEDEX\n\n\nCAVE\n\n\nLDM\n\n\nedexBridge\n\n\nQpid\n\n\nPostgreSQL\n\n\nHDF5\n\n\nPyPIES\n\n\n\n\nEDEX\n\uf0c1\n\n\nThe main server for AWIPS. Qpid sends alerts to EDEX when data stored by the LDM is ready for processing. These Qpid messages include file header information which allows EDEX to determine the appropriate data decoder to use. 
The default ingest server (simply named ingest) handles all data ingest other than grib messages, which are processed by a separate ingestGrib server. After decoding, EDEX writes metadata to the database via Postgres and saves the processed data in HDF5 via PyPIES. A third EDEX server, request, feeds requested data to CAVE clients. EDEX ingest and request servers are started and stopped with the commands \nedex start\n and \nedex stop\n, which runs the system script \n/etc/rc.d/init.d/edex_camel\n\n\n\n\nRead More: How to Install EDEX\n\n\n\n\nCAVE\n\uf0c1\n\n\nCommon AWIPS Visualization Environment. The data rendering and visualization tool for AWIPS. CAVE contains of a number of different data display configurations called perspectives. Perspectives used in operational forecasting environments include \nD2D\n (Display Two-Dimensional), \nGFE\n (Graphical Forecast Editor), and \nNCP\n (National Centers Perspective). CAVE is started with the command \n/awips2/cave/cave.sh\n or \ncave.sh\n\n\n\n\nRead More: How to Install CAVE\n\n\n\n\n\n\nLDM\n\uf0c1\n\n\nhttp://www.unidata.ucar.edu/software/ldm/\n\n\nThe \nLDM\n (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component comprising the Unidata Internet Data Distribution (IDD) system. In AWIPS, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands \nedex start\n and \nedex stop\n, which runs the commands \nservice edex_ldm start\n and \nservice edex_ldm stop\n\n\nedexBridge\n\uf0c1\n\n\nedexBridge, invoked in the LDM configuration file \n/awips2/ldm/etc/ldmd.conf\n, is used by the LDM to post \"data available\" messaged to Qpid, which alerts the EDEX Ingest server that a file is ready for processing.\n\n\nQpid\n\uf0c1\n\n\nhttp://qpid.apache.org\n\n\nApache Qpid\n, the Queue Processor Interface Daemon, is the messaging system used by AWIPS to facilitate communication between services. When the LDM receives a data file to be processed, it employs \nedexBridge\n to send EDEX ingest servers a message via Qpid. When EDEX has finished decoding the file, it sends CAVE a message via Qpid that data are available for display or further processing. Qpid is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/qpidd\n\n\nPostgreSQL\n\uf0c1\n\n\nhttp://www.postgresql.org\n\n\nPostgreSQL\n, known simply as Postgres, is a relational database management system (DBMS) which handles the storage and retrieval of metadata, database tables and some decoded data. The storage and reading of EDEX metadata is handled by the Postgres DBMS. Users may query the metadata tables by using the termainal-based front-end for Postgres called \npsql\n. Postgres is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/edex_postgres\n\n\nHDF5\n\uf0c1\n\n\nhttp://www.hdfgroup.org/HDF5/\n\n\nHierarchical Data Format (v.5)\n is the primary data storage format used by AWIPS for processed grids, satellite and radar imagery and other products. Similar to netCDF, developed and supported by Unidata, HDF5 supports multiple types of data within a single file. 
For example, a single HDF5 file of radar data may contain multiple volume scans of base reflectivity and base velocity as well as derived products such as composite reflectivity. The file may also contain data from multiple radars. HDF5 is stored in \n/awips2/edex/data/hdf5/\n\n\nPyPIES (httpd-pypies)\n\uf0c1\n\n\nPyPIES\n, Python Process Isolated Enhanced Storage, was created for AWIPS to isolate the management of HDF5 Processed Data Storage from the EDEX processes. PyPIES manages access, i.e., reads and writes, of data in the HDF5 files. In a sense, PyPIES provides functionality similar to a DBMS (i.e PostgreSQL for metadata); all data being written to an HDF5 file is sent to PyPIES, and requests for data stored in HDF5 are processed by PyPIES.\n\n\nPyPIES is implemented in two parts: 1. The PyPIES manager is a Python application that runs as part of an Apache HTTP server, and handles requests to store and retrieve data. 2. The PyPIES logger is a Python process that coordinates logging. PyPIES is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/httpd-pypies", + "text": "Unidata AWIPS User Manual\n\uf0c1\n\n\n\n\nUnidata AWIPS\n is a meteorological display and analysis package originally developed by the \nNational Weather Service\n and \nRaytheon\n, repackaged by Unidata to support non-operational use in research and education by \nUCAR member institutions\n.\n\n\nAWIPS takes a unified approach to data ingest, and most data types follow a path through the system starting with an \nLDM\n client requesting data from the \nUnidata IDD\n. These data files are then decoded and stored as HDF5 and Postgres metadata by \nEDEX\n. \n\n\nUnidata supports two visualization frameworks for rendering data: \nCAVE\n, and the Python Data Access Framework (\npython-awips\n).\n\n\n\n\nInstall CAVE 17.1.1\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstall.sh \n\n\n\n\n\n\nmacOS\n\n\nDownload and install both\nawips2-cave-17.1.1.dmg \nawips-python.pkg \n\n\n\n\n\n\n32-bit Windows\n\n\nawips-cave.msi \n\n\n\n\n\n\n64-bit Windows\n\n\nawips-cave.amd64.msi \n\n\n\n\n\n\n\n\n\n\nEDEX Data Server 17.1.1\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstall.sh \n\n\n\n\n\n\n\n\nRead full EDEX install instructions...\n\n\n\n\nLicense\n\uf0c1\n\n\nUnidata AWIPS source code and binaries (RPMs) are considered to be in the public domain, meaning there are no restrictions on any download, modification, or distribution in any form (original or modified). Unidata AWIPS contains no proprietery content and is therefore not subject to export controls as stated in the \nMaster Rights\n licensing file. \n\n\n\n\nDistributed Computing\n\uf0c1\n\n\nAWIPS makes use of service-oriented architecture to request, process, and serve real-time meteorological data. While originally developed for use on internal NWS forecast office networks, where operational installations of AWIPS can consist of a dozen servers or more, because the AWIPS source code was hard-coded with the NWS network configuration, the early Unidata releases were stripped of operation-specific configurations and plugins, and released specifically for standalone installation. This made sense given that a single EDEX instance with a Solid State Drive could handle most of the entire NOAAport data volume. 
However, with GOES-R(16) coming online, and more gridded forecast models being created at finer temporal and spatial resolutions, there was now a need to distribute EDEX data decoding in order to handle this firehose of data.\n\n\n\n\nRead More: \nDistributed EDEX\n\n\n\n\n\n\n\n\nSoftware Components\n\uf0c1\n\n\n\n\nEDEX\n\n\nCAVE\n\n\nLDM\n\n\nedexBridge\n\n\nQpid\n\n\nPostgreSQL\n\n\nHDF5\n\n\nPyPIES\n\n\n\n\nEDEX\n\uf0c1\n\n\nThe main server for AWIPS. Qpid sends alerts to EDEX when data stored by the LDM is ready for processing. These Qpid messages include file header information which allows EDEX to determine the appropriate data decoder to use. The default ingest server (simply named ingest) handles all data ingest other than grib messages, which are processed by a separate ingestGrib server. After decoding, EDEX writes metadata to the database via Postgres and saves the processed data in HDF5 via PyPIES. A third EDEX server, request, feeds requested data to CAVE clients. EDEX ingest and request servers are started and stopped with the commands \nedex start\n and \nedex stop\n, which runs the system script \n/etc/rc.d/init.d/edex_camel\n\n\n\n\nRead More: How to Install EDEX\n\n\n\n\nCAVE\n\uf0c1\n\n\nCommon AWIPS Visualization Environment. The data rendering and visualization tool for AWIPS. CAVE contains of a number of different data display configurations called perspectives. Perspectives used in operational forecasting environments include \nD2D\n (Display Two-Dimensional), \nGFE\n (Graphical Forecast Editor), and \nNCP\n (National Centers Perspective). CAVE is started with the command \n/awips2/cave/cave.sh\n or \ncave.sh\n\n\n\n\nRead More: How to Install CAVE\n\n\n\n\n\n\nLDM\n\uf0c1\n\n\nhttps://www.unidata.ucar.edu/software/ldm/\n\n\nThe \nLDM\n (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component comprising the Unidata Internet Data Distribution (IDD) system. In AWIPS, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands \nedex start\n and \nedex stop\n, which runs the commands \nservice edex_ldm start\n and \nservice edex_ldm stop\n\n\nedexBridge\n\uf0c1\n\n\nedexBridge, invoked in the LDM configuration file \n/awips2/ldm/etc/ldmd.conf\n, is used by the LDM to post \"data available\" messaged to Qpid, which alerts the EDEX Ingest server that a file is ready for processing.\n\n\nQpid\n\uf0c1\n\n\nhttp://qpid.apache.org\n\n\nApache Qpid\n, the Queue Processor Interface Daemon, is the messaging system used by AWIPS to facilitate communication between services. When the LDM receives a data file to be processed, it employs \nedexBridge\n to send EDEX ingest servers a message via Qpid. When EDEX has finished decoding the file, it sends CAVE a message via Qpid that data are available for display or further processing. Qpid is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/qpidd\n\n\nPostgreSQL\n\uf0c1\n\n\nhttp://www.postgresql.org\n\n\nPostgreSQL\n, known simply as Postgres, is a relational database management system (DBMS) which handles the storage and retrieval of metadata, database tables and some decoded data. 
The storage and reading of EDEX metadata is handled by the Postgres DBMS. Users may query the metadata tables by using the termainal-based front-end for Postgres called \npsql\n. Postgres is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/edex_postgres\n\n\nHDF5\n\uf0c1\n\n\nhttp://www.hdfgroup.org/HDF5/\n\n\nHierarchical Data Format (v.5)\n is the primary data storage format used by AWIPS for processed grids, satellite and radar imagery and other products. Similar to netCDF, developed and supported by Unidata, HDF5 supports multiple types of data within a single file. For example, a single HDF5 file of radar data may contain multiple volume scans of base reflectivity and base velocity as well as derived products such as composite reflectivity. The file may also contain data from multiple radars. HDF5 is stored in \n/awips2/edex/data/hdf5/\n\n\nPyPIES (httpd-pypies)\n\uf0c1\n\n\nPyPIES\n, Python Process Isolated Enhanced Storage, was created for AWIPS to isolate the management of HDF5 Processed Data Storage from the EDEX processes. PyPIES manages access, i.e., reads and writes, of data in the HDF5 files. In a sense, PyPIES provides functionality similar to a DBMS (i.e PostgreSQL for metadata); all data being written to an HDF5 file is sent to PyPIES, and requests for data stored in HDF5 are processed by PyPIES.\n\n\nPyPIES is implemented in two parts: 1. The PyPIES manager is a Python application that runs as part of an Apache HTTP server, and handles requests to store and retrieve data. 2. The PyPIES logger is a Python process that coordinates logging. PyPIES is started and stopped by \nedex start\n and \nedex stop\n, and is controlled by the system script \n/etc/rc.d/init.d/httpd-pypies", "title": "Home" }, { @@ -12,12 +12,12 @@ }, { "location": "/#install-cave-1711", - "text": "Linux x86_64 installCAVE.sh macOS Download and install both awips2-cave-17.1.1.dmg awips-python.pkg 32-bit Windows awips-cave.msi 64-bit Windows awips-cave.amd64.msi", + "text": "Linux x86_64 install.sh macOS Download and install both awips2-cave-17.1.1.dmg awips-python.pkg 32-bit Windows awips-cave.msi 64-bit Windows awips-cave.amd64.msi", "title": "Install CAVE 17.1.1" }, { "location": "/#edex-data-server-1711", - "text": "Linux x86_64 installEDEX.sh Read full EDEX install instructions...", + "text": "Linux x86_64 install.sh Read full EDEX install instructions...", "title": "EDEX Data Server 17.1.1" }, { @@ -47,7 +47,7 @@ }, { "location": "/#ldm", - "text": "http://www.unidata.ucar.edu/software/ldm/ The LDM (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component comprising the Unidata Internet Data Distribution (IDD) system. In AWIPS, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands edex start and edex stop , which runs the commands service edex_ldm start and service edex_ldm stop", + "text": "https://www.unidata.ucar.edu/software/ldm/ The LDM (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component comprising the Unidata Internet Data Distribution (IDD) system. 
In AWIPS, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands edex start and edex stop , which runs the commands service edex_ldm start and service edex_ldm stop", "title": "LDM" }, { @@ -77,17 +77,17 @@ }, { "location": "/install/install-cave/", - "text": "Download and Install CAVE\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ncurrent release 17.1.1\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstallCAVE.sh \n\n\n\n\n\n\nmacOS\n\n\nDownload and install both\nawips2-cave-17.1.1.dmg \nawips-python.pkg \n\n\n\n\n\n\n32-bit Windows\n\n\nawips-cave.msi \n\n\n\n\n\n\n64-bit Windows\n\n\nawips-cave.amd64.msi \n\n\n\n\n\n\n\n\n\n\n Linux\n\uf0c1\n\n\n\n\ninstallCAVE.sh \n\n\n\n\nFor CentOS/Red Hat 6 and 7. Installs to \n/awips2/cave\n and writes files to \n~/caveData\n.\n\n\nInstall as root (all package dependencies should be resolved by yum)\n\n\nchmod 755 ./installCAVE.sh\n./installCAVE.sh\n\n\n\nRun CAVE from the menu \nApplications\n \n \nInternet\n \n \nAWIPS CAVE\n, or from the command line as simply \ncave\n.\n\n\n\n\n\n\nSystem Requirements\n\n\n\n\nx86_64 CentOS/RHEL 6 or 7\n\n\nOpenGL 2.0\n\n\n4GB RAM\n\n\nLatest NVIDIA driver\n for your graphics card\n\n\n2GB disk space for caching datasets in \n~/caveData\n\n\n\n\n\n\n\n\n macOS\n\uf0c1\n\n\nCAVE for macOS requires the \nawips-python.pkg\n package be installed in order for \nderived parameters\n to work.\n\n\n\n\nawips2-cave-17.1.1.dmg \n\n\nawips-python.pkg \n\n\n\n\nawips-python.pkg\n is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, \njep\n must be installed in some way (it is assumed in \n/Library/Python/2.7/site-packages/jep/\n). You are free to install jep yourself but should know that simply to install via source code or pip requires the Oracle JDK and Xcode and its Command Line Tools. The \nawips-python.pkg\n package is provided as a workaround for this.\n\n\n\n\nCAVE for macOS writes and syncs files to ~/Library/caveData\n\n\n\n\n\n\n Windows\n\uf0c1\n\n\nWindows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.\n\n\n\n\nawips-cave.msi \n\n\nawips-cave.amd64.msi \n\n\n\n\nWrites files to \ncaveData\n in the user's home directory.\n\n\n\n\nBeta status\n\n\nClient-side Python scripts (including Derived Parameters) do not work on Windows\n\n\n\n\n\n\nAWIPS Data in the Cloud\n\uf0c1\n\n\nUnidata and XSEDE Jetstream have partnered to offer a EDEX data server in the cloud, open to the Unidata university community. Select the server in the Connectivity Preferences dialog, or enter \nedex-cloud.unidata.ucar.edu\n (without \nhttp://\n before, or \n:9581/services\n after).\n\n\n\n\n\n\nTroubleshooting\n\uf0c1\n\n\nLocalization Preferences Error\n\n\nYou can reset CAVE by removing the \n~/caveData\n directory (on macOS \n~/Library/caveData\n) and then run \ncave\n again to connect to an EDEX server. 
Your local files have been removed, but if you are re-connecting to an EDEX server you have used before, the remote files will sync again to your local \n~/caveData\n (bundles, colormaps, etc.).\n\n\nNo Images Displayed\n\n\nIf you are able to load wire-frame contours but not images, \nupdate your video driver\n.", + "text": "Download and Install CAVE\n\uf0c1\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ncurrent release 17.1.1\n\n\n\n\n\n\n\n\n\n\nLinux x86_64\n\n\ninstall.sh \n\n\n\n\n\n\nmacOS\n\n\nDownload and install both\nawips2-cave-17.1.1.dmg \nawips-python.pkg \n\n\n\n\n\n\n32-bit Windows\n\n\nawips-cave.msi \n\n\n\n\n\n\n64-bit Windows\n\n\nawips-cave.amd64.msi \n\n\n\n\n\n\n\n\n\n\n Linux\n\uf0c1\n\n\n\n\ninstall.sh \n\n\n\n\nFor CentOS/Red Hat 6 and 7. Installs to \n/awips2/cave\n and writes files to \n~/caveData\n.\n\n\nInstall CAVE for Linux\n\n\nwget https://www.unidata.ucar.edu/software/awips2/install.sh\nchmod 755 install.sh\nsudo ./install.sh --cave\n\n\n\nRun CAVE from the menu \nApplications\n \n \nInternet\n \n \nAWIPS CAVE\n, or from the command line as simply \ncave\n.\n\n\n\n\n\n\nSystem Requirements\n\n\n\n\nx86_64 CentOS/RHEL 6 or 7\n\n\nOpenGL 2.0\n\n\n4GB RAM\n\n\nLatest NVIDIA driver\n for your graphics card\n\n\n2GB disk space for caching datasets in \n~/caveData\n\n\n\n\n\n\n\n\n macOS\n\uf0c1\n\n\nCAVE for macOS requires the \nawips-python.pkg\n package be installed in order for \nderived parameters\n to work.\n\n\n\n\nawips2-cave-17.1.1.dmg \n\n\nawips-python.pkg \n\n\n\n\nawips-python.pkg\n is not a prerequisite, and CAVE will still run and display data without it, but to use any derived parameter functions such as wind barbs/arrows and grid parameters on various vertical coordinates, \njep\n must be installed in some way (it is assumed in \n/Library/Python/2.7/site-packages/jep/\n). You are free to install jep yourself but should know that simply to install via source code or pip requires the Oracle JDK and Xcode and its Command Line Tools. The \nawips-python.pkg\n package is provided as a workaround for this.\n\n\n\n\nCAVE for macOS writes and syncs files to ~/Library/caveData\n\n\n\n\n\n\n Windows\n\uf0c1\n\n\nWindows clients are still based on the CAVE 16.2.2 code base and provided in lieu of no 17.1.1 client.\n\n\n\n\nawips-cave.msi \n\n\nawips-cave.amd64.msi \n\n\n\n\nWrites files to \ncaveData\n in the user's home directory.\n\n\n\n\nBeta status\n\n\nClient-side Python scripts (including Derived Parameters) do not work on Windows\n\n\n\n\n\n\nAWIPS Data in the Cloud\n\uf0c1\n\n\nUnidata and XSEDE Jetstream have partnered to offer a EDEX data server in the cloud, open to the Unidata university community. Select the server in the Connectivity Preferences dialog, or enter \nedex-cloud.unidata.ucar.edu\n (without \nhttp://\n before, or \n:9581/services\n after).\n\n\n\n\n\n\nTroubleshooting\n\uf0c1\n\n\nLocalization Preferences Error\n\n\nYou can reset CAVE by removing the \n~/caveData\n directory (on macOS \n~/Library/caveData\n) and then run \ncave\n again to connect to an EDEX server. 
Your local files have been removed, but if you are re-connecting to an EDEX server you have used before, the remote files will sync again to your local \n~/caveData\n (bundles, colormaps, etc.).\n\n\nNo Images Displayed\n\n\nIf you are able to load wire-frame contours but not images, \nupdate your video driver\n.", "title": "Install CAVE" }, { "location": "/install/install-cave/#download-and-install-cave", - "text": "current release 17.1.1 Linux x86_64 installCAVE.sh macOS Download and install both awips2-cave-17.1.1.dmg awips-python.pkg 32-bit Windows awips-cave.msi 64-bit Windows awips-cave.amd64.msi", + "text": "current release 17.1.1 Linux x86_64 install.sh macOS Download and install both awips2-cave-17.1.1.dmg awips-python.pkg 32-bit Windows awips-cave.msi 64-bit Windows awips-cave.amd64.msi", "title": "Download and Install CAVE" }, { "location": "/install/install-cave/#linux", - "text": "installCAVE.sh For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData . Install as root (all package dependencies should be resolved by yum) chmod 755 ./installCAVE.sh\n./installCAVE.sh Run CAVE from the menu Applications Internet AWIPS CAVE , or from the command line as simply cave . System Requirements x86_64 CentOS/RHEL 6 or 7 OpenGL 2.0 4GB RAM Latest NVIDIA driver for your graphics card 2GB disk space for caching datasets in ~/caveData", + "text": "install.sh For CentOS/Red Hat 6 and 7. Installs to /awips2/cave and writes files to ~/caveData . Install CAVE for Linux wget https://www.unidata.ucar.edu/software/awips2/install.sh\nchmod 755 install.sh\nsudo ./install.sh --cave Run CAVE from the menu Applications Internet AWIPS CAVE , or from the command line as simply cave . System Requirements x86_64 CentOS/RHEL 6 or 7 OpenGL 2.0 4GB RAM Latest NVIDIA driver for your graphics card 2GB disk space for caching datasets in ~/caveData", "title": " Linux" }, { @@ -112,7 +112,7 @@ }, { "location": "/install/install-edex/", - "text": "EDEX for Linux\n\uf0c1\n\n\n\n\nSystem Requirements\n\n\n\n\n64-bit CentOS/RHEL 6 or 7\n\n\n16+ CPU\n cores (each CPU core is one more decoder which can run in parallel) \n\n\n24GB+\n RAM\n\n\n500GB+\n disk space\n\n\nA \nSolid State Drive (SSD)\n is highly recommended\n\n\n\n\n\n\nAn \nSSD\n should be mounted either to \n/awips2\n (to contain the entire EDEX system) or to \n/awips2/edex/data/hdf5\n (to contain the large files in the decoded data store). EDEX can scale to any system by adjusting the incoming LDM data feeds or adjusting the resources (CPU threads) allocated to each data type.\n\n\n64-bit CentOS/RHEL 6 and 7\n are the only supported operating systems for EDEX. You may have luck with Fedora Core 12 to 14 and Scientific Linux. \n\n\nEDEX is not supported on Debian, Ubuntu, SUSE, Solaris, OS X, or Windows.\n\n\n\n\nRead More: Distributed EDEX, Installing Across Multiple Machines\n\uf0c1\n\n\n\n\n\n\nLinux One-Time Setup\n\uf0c1\n\n\nAll of these command should be run as \nroot\n\n\n1. Create AWIPS User\n\uf0c1\n\n\nCreate user awips and group fxalpha\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\n\n\n\nor if the awips account already exists:\n\n\ngroupadd fxalpha \n usermod -G fxalpha awips\n\n\n\n2. 
Install EDEX\n\uf0c1\n\n\nDownload and run \ninstallEDEX.sh \n\n\nwget http://www.unidata.ucar.edu/software/awips2/installEDEX.sh\nchmod 755 ./installEDEX.sh\n./installEDEX.sh\n\n\n\ninstallEDEX.sh\n will perform the following steps (it's always a good idea to review downloaded shell scripts):\n\n\n\n\nSaves the appropriate Yum repo file to \n/etc/yum.repos.d/awips2.repo\n\n\nIncreases process and file limits for the the \nawips\n account in \n/etc/security/limits.conf\n\n\nCreates \n/awips2/data_store\n if it does not exist already\n\n\nRuns \nyum groupinstall awips2-server\n\n\nAttempts to configure the EDEX hostname defined in \n/awips2/edex/bin/setup.env\n\n\nAlerts the user if the \nawips\n account does not exist (the RPMs will still install)\n\n\n\n\n3. Check \n/etc/hosts\n against \n/awips2/edex/bin/setup.env\n\uf0c1\n\n\nEDEX Server Administrators should check that the addresses and names defined in \n/awips2/edex/bin/setup.env\n are resolvable from both inside and outside the server, and make appropriate edits to \n/etc/hosts\n \n\n\nFor example, in the XSEDE Jetstream cloud, the fully-qualified domain name defined in \n/awips2/edex/bin/setup.env\n\n\nexport EXT_ADDR=js-196-132.jetstream-cloud.org\nexport DB_ADDR=localhost\nexport DB_PORT=5432\nexport BROKER_ADDR=localhost\nexport PYPIES_SERVER=http://${EXT_ADDR}:9582\n\n\n\nis directed within to localhost in \n/etc/hosts\n\n\n127.0.0.1 localhost localhost.localdomain js-196-132.jetstream-cloud.org\n\n\n\n4. Configure iptables\n\uf0c1\n\n\nConfigure iptables to allow TCP connections on ports 9581 and 9582 if you want to serve data to CAVE clients and the Python API.\n\n\n\n\n\n\nTo open ports to all connections\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT ACCEPT [0:0]\n:FORWARD ACCEPT [0:0]\n:OUTPUT ACCEPT [0:0]\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -p icmp -j ACCEPT\n-A INPUT -i lo -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 22 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9581 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9582 -j ACCEPT\n-A INPUT -j REJECT --reject-with icmp-host-prohibited\n-A FORWARD -j REJECT --reject-with icmp-host-prohibited\nCOMMIT\n\n\n\n\n\n\n\nTo open ports to specific IP addresses\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT DROP [0:0]\n:FORWARD DROP [0:0]\n:OUTPUT ACCEPT [0:0]\n:EXTERNAL - [0:0]\n:EDEX - [0:0]\n-A INPUT -i lo -j ACCEPT\n-A INPUT -p icmp --icmp-type any -j ACCEPT\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -s 128.117.140.0/24 -j EDEX\n-A INPUT -s 128.117.156.0/24 -j EDEX\n-A INPUT -j EXTERNAL\n-A EXTERNAL -j REJECT\n-A EDEX -m state --state NEW -p tcp --dport 22 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 9581 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 9582 -j ACCEPT\n-A EDEX -j REJECT\nCOMMIT\n\n\n\n\n\n\n\n\n\nIn this example, the IP range \n128.117.140.0/24\n will match all 128.117.140.\n addresses, while \n128.117.156.0/24\n will match 128.117.156.\n.\n\n\n\n\nRestart iptables\n\n\nservice iptables restart\n\n\n\nFor CentOS 7 error \nRedirecting to /bin/systemctl restart iptables.service\nFailed to restart iptables.service: Unit iptables.service failed to load: No such file or directory.\n\n\nThe solution is:\n\n\nyum install iptables-services\nsystemctl enable iptables\nservice iptables restart\n\n\n\n5. 
Ensure SELinux is Disabled\n\uf0c1\n\n\nvi /etc/sysconfig/selinux\n\n# This file controls the state of SELinux on the system.\n# SELINUX= can take one of these three values:\n# enforcing - SELinux security policy is enforced.\n# permissive - SELinux prints warnings instead of enforcing.\n# disabled - No SELinux policy is loaded.\nSELINUX=disabled\n# SELINUXTYPE= can take one of these two values:\n# targeted - Targeted processes are protected,\n# mls - Multi Level Security protection.\nSELINUXTYPE=targeted\n\n\n\n\n\nRead more about selinux at \nredhat.com\n\n\n\n\nreboot if necessary\n, required if iptables was updated.\n\n\n\n\nAdditional Steps\n\uf0c1\n\n\nSSD Mount\n\uf0c1\n\n\nThough a Solid State Drive is not required, it is \nstrongly encouraged\n in order to handle the amount of disk IO for real-time IDD feeds. \n\n\nThe simplest configuration would be to mount an 500GB+ SSD to \n/awips2\n to contain both the installed software (approx. 20GB) and the real-time data (approx. 150GB per day).\n\n\nThe default \npurge rules\n are configured such that \n/awips2\n does not exceed 450GB. \n/awips2/data_store\n is scoured every hour and should not exceed 50GB. \n\n\nIf you want to increase EDEX data retention you should mount a large disk to \n/awips2/edex/data/hdf5\n since this will be where the archived processed data exists, and any case studies created.\n\n\n Filesystem Size Used Avail Use% Mounted on\n /dev/sda1 30G 2.5G 26G 9% /\n tmpfs 28G 0 28G 0% /dev/shm\n /dev/sdc1 788G 81G 667G 11% /awips2\n /dev/sdb1 788G 41G 708G 10% /awips2/edex/data/hdf5\n\n\n\n\n\nEDEX Setup\n\uf0c1\n\n\nThe command \nedex setup\n attempts to add the domain name of your server. \n\n\n\n\n\n\n/awips2/edex/bin/setup.env\n should contain the fully-qualified domain name, externally resolved, localhost will not work. \n\n\nexport AW_SITE_IDENTIFIER=OAX\nexport EDEX_SERVER=edex-cloud.unidata.ucar.edu\n\n\n\n\n\n\n\n/awips2/ldm/etc/ldmd.conf\n contains the upstream server (default \nidd.unidata.ucar.edu\n, which requires you connect form a .edu domain). This file also contains the \nedexBridge\n hostname (default \nlocalhost\n). \n\n\nEXEC \"pqact -e\"\nEXEC \"edexBridge -s localhost\"\n\n\n\n\n\n\n\n/etc/security/limits.conf\n defines the number of user processes and files (this step is automatically performed by \ninstallEDEX.sh\n). Without these definitions, Qpid is known to crash during periods of high ingest.\n\n\nawips soft nproc 65536\nawips soft nofile 65536\n\n\n\n\n\n\n\n\n\nLDM\n\uf0c1\n\n\nEDEX installs its own version of the LDM to the directory \n/awips2/ldm\n. As with a the default LDM configuration, two files are used to control what IDD feeds are ingested:\n\n\n\n\n\n\n/awips2/ldm/etc/ldmd.conf\n - specifies an upstream LDM server to request data from, and what feeds to request:\n\n\nREQUEST NEXRAD3 \"./p(DHR|DPR|DSP|DTA|DAA|DVL|EET|HHC|N0Q|N0S|N0U|OHA|NVW|NTV|NST).\" idd.unidata.ucar.edu\nREQUEST FNEXRAD|IDS|DDPLUS|UNIWISC \".*\" idd.unidata.ucar.edu\nREQUEST NGRID \".*\" idd.unidata.ucar.edu\nREQUEST NOTHER \"^TIP... KNES.*\" idd.unidata.ucar.edu\n\n\n\n\n\nread more about ldmd.conf in the LDM User Manual\n\n\n\n\n\n\n\n\n/awips2/ldm/etc/pqact.conf\n - specifies the WMO headers and file pattern actions to request:\n\n\n# Redbook graphics\nANY ^([PQ][A-Z0-9]{3,5}) (....) (..)(..)(..) 
!redbook [^/]*/([^/]*)/([^/]*)/([^/]*)/([0-9]{8})\n FILE -overwrite -close -edex /awips2/data_store/redbook/\\8/\\4\\5Z_\\8_\\7_\\6-\\1_\\2_(seq).rb.%Y%m%d%H\n# NOAAPORT GINI images\nNIMAGE ^(sat[^/]*)/ch[0-9]/([^/]*)/([^/]*)/([^ ]*) ([^/]*)/([^/]*)/([^/]*)/ (T[^ ]*) ([^ ]*) (..)(..)(..)\n FILE -overwrite -close -edex /awips2/data_store/sat/\\(11)\\(12)Z_\\3_\\7_\\6-\\8_\\9_(seq).satz.%Y%m%d%H\n\n\n\n\n\nread more about pqact.conf in the LDM User Manual\n\n\n\n\n\n\nsee available AWIPS LDM feeds\n\n\n\n\n\n\n\n\n\n\nStart and Stop\n\uf0c1\n\n\nto start all EDEX services, including the LDM:\n\n\nedex start\n\nStarting EDEX PostgreSQL: [ OK ]\nStarting httpd: [ OK ]\nStarting QPID [ OK ]\nStarting EDEX Camel (request): \nStarting EDEX Camel (ingest): \nStarting EDEX Camel (ingestGrib): \nStarting AWIPS LDM:The product-queue is OK.\n\n\n\nto stop:\n\n\nedex stop\n\nStopping EDEX Camel (request): \nStopping EDEX Camel (ingest): \nStopping EDEX Camel (ingestGrib): \nStopping QPID [ OK ]\nStopping httpd: [ OK ]\nStopping EDEX PostgreSQL: [ OK ]\nStopping AWIPS LDM:Stopping the LDM server...\n\n\n\nTo manually start, stop, and restart:\n\n\nservice edex_postgres start\nservice httpd-pypies start\nservice qpidd start\nservice edex_camel start\n\n\n\nThe fifth service, \nedex_ldm\n, does \nnot run at boot\n to prevent filling up disk space if EDEX is not running. \n\n\nldmadmin start\n\n\n\nTo start \nall services except the LDM\n (good for troubleshooting):\n\n\nedex start base\n\n\n\nTo restart EDEX\n\n\nedex restart\n\n\n\n\n\nDirectories to know\n\uf0c1\n\n\n\n\n/awips2\n - Contains all of the installed AWIPS software. \n\n\n/awips2/edex/logs\n - EDEX logs.\n\n\n/awips2/httpd_pypies/var/log/httpd\n - httpd-pypies logs.\n\n\n/awips2/data/pg_log\n - PostgreSQL logs.\n\n\n/awips2/qpid/log\n - Qpid logs.\n\n\n/awips2/edex/data/hdf5\n - HDF5 data store. \n\n\n/awips2/edex/data/utility\n - Localization store and configuration files. \n\n\n/awips2/ldm/etc\n - Location of \nldmd.conf\n and \npqact.conf\n\n\n/awips2/ldm/logs\n - LDM logs.\n\n\n/awips2/data_store\n - Raw data store.\n\n\n/awips2/data_store/ingest\n - Manual data ingest endpoint.\n\n\n\n\n\n\nWhat Version is my EDEX?\n\uf0c1\n\n\nrpm -qa | grep awips2-edex", + "text": "EDEX for Linux\n\uf0c1\n\n\n\n\nSystem Requirements\n\n\n\n\n64-bit CentOS/RHEL 6 or 7\n\n\n16+ CPU\n cores (each CPU core is one more decoder which can run in parallel) \n\n\n24GB+\n RAM\n\n\n500GB+\n disk space\n\n\nA \nSolid State Drive (SSD)\n is highly recommended\n\n\n\n\n\n\nAn \nSSD\n should be mounted either to \n/awips2\n (to contain the entire EDEX system) or to \n/awips2/edex/data/hdf5\n (to contain the large files in the decoded data store). EDEX can scale to any system by adjusting the incoming LDM data feeds or adjusting the resources (CPU threads) allocated to each data type.\n\n\n64-bit CentOS/RHEL 6 and 7\n are the only supported operating systems for EDEX. You may have luck with Fedora Core 12 to 14 and Scientific Linux. \n\n\nEDEX is not supported on Debian, Ubuntu, SUSE, Solaris, OS X, or Windows.\n\n\n\n\nRead More: Distributed EDEX, Installing Across Multiple Machines\n\uf0c1\n\n\n\n\n\n\nLinux One-Time Setup\n\uf0c1\n\n\nAll of these command should be run as \nroot\n\n\n1. Create AWIPS User\n\uf0c1\n\n\nCreate user awips and group fxalpha\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\n\n\n\nor if the awips account already exists:\n\n\ngroupadd fxalpha \n usermod -G fxalpha awips\n\n\n\n2. 
Install EDEX\n\uf0c1\n\n\nDownload and run \ninstall.sh --edex \n\n\nwget https://www.unidata.ucar.edu/software/awips2/install.sh\nchmod 755 install.sh\nsudo ./install.sh --edex\n\n\n\ninstall.sh --edex\n will perform the following steps (it's always a good idea to review downloaded shell scripts):\n\n\n\n\nSaves the appropriate Yum repo file to \n/etc/yum.repos.d/awips2.repo\n\n\nIncreases process and file limits for the the \nawips\n account in \n/etc/security/limits.conf\n\n\nCreates \n/awips2/data_store\n if it does not exist already\n\n\nRuns \nyum groupinstall awips2-server\n\n\nAttempts to configure the EDEX hostname defined in \n/awips2/edex/bin/setup.env\n\n\nAlerts the user if the \nawips\n account does not exist (the RPMs will still install)\n\n\n\n\n3. Check \n/etc/hosts\n against \n/awips2/edex/bin/setup.env\n\uf0c1\n\n\nEDEX Server Administrators should check that the addresses and names defined in \n/awips2/edex/bin/setup.env\n are resolvable from both inside and outside the server, and make appropriate edits to \n/etc/hosts\n \n\n\nFor example, in the XSEDE Jetstream cloud, the fully-qualified domain name defined in \n/awips2/edex/bin/setup.env\n\n\nexport EXT_ADDR=js-196-132.jetstream-cloud.org\nexport DB_ADDR=localhost\nexport DB_PORT=5432\nexport BROKER_ADDR=localhost\nexport PYPIES_SERVER=http://${EXT_ADDR}:9582\n\n\n\nis directed within to localhost in \n/etc/hosts\n\n\n127.0.0.1 localhost localhost.localdomain js-196-132.jetstream-cloud.org\n\n\n\n4. Configure iptables\n\uf0c1\n\n\nConfigure iptables to allow TCP connections on ports 9581 and 9582 if you want to serve data to CAVE clients and the Python API.\n\n\n\n\n\n\nTo open ports to all connections\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT ACCEPT [0:0]\n:FORWARD ACCEPT [0:0]\n:OUTPUT ACCEPT [0:0]\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -p icmp -j ACCEPT\n-A INPUT -i lo -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 22 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9581 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9582 -j ACCEPT\n-A INPUT -j REJECT --reject-with icmp-host-prohibited\n-A FORWARD -j REJECT --reject-with icmp-host-prohibited\nCOMMIT\n\n\n\n\n\n\n\nTo open ports to specific IP addresses\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT DROP [0:0]\n:FORWARD DROP [0:0]\n:OUTPUT ACCEPT [0:0]\n:EXTERNAL - [0:0]\n:EDEX - [0:0]\n-A INPUT -i lo -j ACCEPT\n-A INPUT -p icmp --icmp-type any -j ACCEPT\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -s 128.117.140.0/24 -j EDEX\n-A INPUT -s 128.117.156.0/24 -j EDEX\n-A INPUT -j EXTERNAL\n-A EXTERNAL -j REJECT\n-A EDEX -m state --state NEW -p tcp --dport 22 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 9581 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 9582 -j ACCEPT\n-A EDEX -j REJECT\nCOMMIT\n\n\n\n\n\n\n\n\n\nIn this example, the IP range \n128.117.140.0/24\n will match all 128.117.140.\n addresses, while \n128.117.156.0/24\n will match 128.117.156.\n.\n\n\n\n\nRestart iptables\n\n\nservice iptables restart\n\n\n\nFor CentOS 7 error \nRedirecting to /bin/systemctl restart iptables.service\nFailed to restart iptables.service: Unit iptables.service failed to load: No such file or directory.\n\n\nThe solution is:\n\n\nyum install iptables-services\nsystemctl enable iptables\nservice iptables restart\n\n\n\n5. 
Ensure SELinux is Disabled\n\uf0c1\n\n\nvi /etc/sysconfig/selinux\n\n# This file controls the state of SELinux on the system.\n# SELINUX= can take one of these three values:\n# enforcing - SELinux security policy is enforced.\n# permissive - SELinux prints warnings instead of enforcing.\n# disabled - No SELinux policy is loaded.\nSELINUX=disabled\n# SELINUXTYPE= can take one of these two values:\n# targeted - Targeted processes are protected,\n# mls - Multi Level Security protection.\nSELINUXTYPE=targeted\n\n\n\n\n\nRead more about selinux at \nredhat.com\n\n\n\n\nreboot if necessary\n, required if iptables was updated.\n\n\n\n\nAdditional Steps\n\uf0c1\n\n\nSSD Mount\n\uf0c1\n\n\nThough a Solid State Drive is not required, it is \nstrongly encouraged\n in order to handle the amount of disk IO for real-time IDD feeds. \n\n\nThe simplest configuration would be to mount an 500GB+ SSD to \n/awips2\n to contain both the installed software (approx. 20GB) and the real-time data (approx. 150GB per day).\n\n\nThe default \npurge rules\n are configured such that \n/awips2\n does not exceed 450GB. \n/awips2/data_store\n is scoured every hour and should not exceed 50GB. \n\n\nIf you want to increase EDEX data retention you should mount a large disk to \n/awips2/edex/data/hdf5\n since this will be where the archived processed data exists, and any case studies created.\n\n\n Filesystem Size Used Avail Use% Mounted on\n /dev/sda1 30G 2.5G 26G 9% /\n tmpfs 28G 0 28G 0% /dev/shm\n /dev/sdc1 788G 81G 667G 11% /awips2\n /dev/sdb1 788G 41G 708G 10% /awips2/edex/data/hdf5\n\n\n\n\n\nEDEX Setup\n\uf0c1\n\n\nThe command \nedex setup\n attempts to add the domain name of your server. \n\n\n\n\n\n\n/awips2/edex/bin/setup.env\n should contain the fully-qualified domain name, externally resolved, localhost will not work. \n\n\nexport AW_SITE_IDENTIFIER=OAX\nexport EDEX_SERVER=edex-cloud.unidata.ucar.edu\n\n\n\n\n\n\n\n/awips2/ldm/etc/ldmd.conf\n contains the upstream server (default \nidd.unidata.ucar.edu\n, which requires you connect form a .edu domain). This file also contains the \nedexBridge\n hostname (default \nlocalhost\n). \n\n\nEXEC \"pqact -e\"\nEXEC \"edexBridge -s localhost\"\n\n\n\n\n\n\n\n/etc/security/limits.conf\n defines the number of user processes and files (this step is automatically performed by \ninstall.sh --edex\n). Without these definitions, Qpid is known to crash during periods of high ingest.\n\n\nawips soft nproc 65536\nawips soft nofile 65536\n\n\n\n\n\n\n\n\n\nLDM\n\uf0c1\n\n\nEDEX installs its own version of the LDM to the directory \n/awips2/ldm\n. As with a the default LDM configuration, two files are used to control what IDD feeds are ingested:\n\n\n\n\n\n\n/awips2/ldm/etc/ldmd.conf\n - specifies an upstream LDM server to request data from, and what feeds to request:\n\n\nREQUEST NEXRAD3 \"./p(DHR|DPR|DSP|DTA|DAA|DVL|EET|HHC|N0Q|N0S|N0U|OHA|NVW|NTV|NST).\" idd.unidata.ucar.edu\nREQUEST FNEXRAD|IDS|DDPLUS|UNIWISC \".*\" idd.unidata.ucar.edu\nREQUEST NGRID \".*\" idd.unidata.ucar.edu\nREQUEST NOTHER \"^TIP... KNES.*\" idd.unidata.ucar.edu\n\n\n\n\n\nread more about ldmd.conf in the LDM User Manual\n\n\n\n\n\n\n\n\n/awips2/ldm/etc/pqact.conf\n - specifies the WMO headers and file pattern actions to request:\n\n\n# Redbook graphics\nANY ^([PQ][A-Z0-9]{3,5}) (....) (..)(..)(..) 
!redbook [^/]*/([^/]*)/([^/]*)/([^/]*)/([0-9]{8})\n FILE -overwrite -close -edex /awips2/data_store/redbook/\\8/\\4\\5Z_\\8_\\7_\\6-\\1_\\2_(seq).rb.%Y%m%d%H\n# NOAAPORT GINI images\nNIMAGE ^(sat[^/]*)/ch[0-9]/([^/]*)/([^/]*)/([^ ]*) ([^/]*)/([^/]*)/([^/]*)/ (T[^ ]*) ([^ ]*) (..)(..)(..)\n FILE -overwrite -close -edex /awips2/data_store/sat/\\(11)\\(12)Z_\\3_\\7_\\6-\\8_\\9_(seq).satz.%Y%m%d%H\n\n\n\n\n\nread more about pqact.conf in the LDM User Manual\n\n\n\n\n\n\nsee available AWIPS LDM feeds\n\n\n\n\n\n\n\n\n\n\nStart and Stop\n\uf0c1\n\n\nto start all EDEX services, including the LDM:\n\n\nedex start\n\nStarting EDEX PostgreSQL: [ OK ]\nStarting httpd: [ OK ]\nStarting QPID [ OK ]\nStarting EDEX Camel (request): \nStarting EDEX Camel (ingest): \nStarting EDEX Camel (ingestGrib): \nStarting AWIPS LDM:The product-queue is OK.\n\n\n\nto stop:\n\n\nedex stop\n\nStopping EDEX Camel (request): \nStopping EDEX Camel (ingest): \nStopping EDEX Camel (ingestGrib): \nStopping QPID [ OK ]\nStopping httpd: [ OK ]\nStopping EDEX PostgreSQL: [ OK ]\nStopping AWIPS LDM:Stopping the LDM server...\n\n\n\nTo manually start, stop, and restart:\n\n\nservice edex_postgres start\nservice httpd-pypies start\nservice qpidd start\nservice edex_camel start\n\n\n\nThe fifth service, \nedex_ldm\n, does \nnot run at boot\n to prevent filling up disk space if EDEX is not running. \n\n\nldmadmin start\n\n\n\nTo start \nall services except the LDM\n (good for troubleshooting):\n\n\nedex start base\n\n\n\nTo restart EDEX\n\n\nedex restart\n\n\n\n\n\nDirectories to know\n\uf0c1\n\n\n\n\n/awips2\n - Contains all of the installed AWIPS software. \n\n\n/awips2/edex/logs\n - EDEX logs.\n\n\n/awips2/httpd_pypies/var/log/httpd\n - httpd-pypies logs.\n\n\n/awips2/data/pg_log\n - PostgreSQL logs.\n\n\n/awips2/qpid/log\n - Qpid logs.\n\n\n/awips2/edex/data/hdf5\n - HDF5 data store. \n\n\n/awips2/edex/data/utility\n - Localization store and configuration files. 
\n\n\n/awips2/ldm/etc\n - Location of \nldmd.conf\n and \npqact.conf\n\n\n/awips2/ldm/logs\n - LDM logs.\n\n\n/awips2/data_store\n - Raw data store.\n\n\n/awips2/data_store/ingest\n - Manual data ingest endpoint.\n\n\n\n\n\n\nWhat Version is my EDEX?\n\uf0c1\n\n\nrpm -qa | grep awips2-edex", "title": "Install EDEX" }, { @@ -137,7 +137,7 @@ }, { "location": "/install/install-edex/#2-install-edex", - "text": "Download and run installEDEX.sh wget http://www.unidata.ucar.edu/software/awips2/installEDEX.sh\nchmod 755 ./installEDEX.sh\n./installEDEX.sh installEDEX.sh will perform the following steps (it's always a good idea to review downloaded shell scripts): Saves the appropriate Yum repo file to /etc/yum.repos.d/awips2.repo Increases process and file limits for the the awips account in /etc/security/limits.conf Creates /awips2/data_store if it does not exist already Runs yum groupinstall awips2-server Attempts to configure the EDEX hostname defined in /awips2/edex/bin/setup.env Alerts the user if the awips account does not exist (the RPMs will still install)", + "text": "Download and run install.sh --edex wget https://www.unidata.ucar.edu/software/awips2/install.sh\nchmod 755 install.sh\nsudo ./install.sh --edex install.sh --edex will perform the following steps (it's always a good idea to review downloaded shell scripts): Saves the appropriate Yum repo file to /etc/yum.repos.d/awips2.repo Increases process and file limits for the the awips account in /etc/security/limits.conf Creates /awips2/data_store if it does not exist already Runs yum groupinstall awips2-server Attempts to configure the EDEX hostname defined in /awips2/edex/bin/setup.env Alerts the user if the awips account does not exist (the RPMs will still install)", "title": "2. Install EDEX" }, { @@ -167,7 +167,7 @@ }, { "location": "/install/install-edex/#edex-setup", - "text": "The command edex setup attempts to add the domain name of your server. /awips2/edex/bin/setup.env should contain the fully-qualified domain name, externally resolved, localhost will not work. export AW_SITE_IDENTIFIER=OAX\nexport EDEX_SERVER=edex-cloud.unidata.ucar.edu /awips2/ldm/etc/ldmd.conf contains the upstream server (default idd.unidata.ucar.edu , which requires you connect form a .edu domain). This file also contains the edexBridge hostname (default localhost ). EXEC \"pqact -e\"\nEXEC \"edexBridge -s localhost\" /etc/security/limits.conf defines the number of user processes and files (this step is automatically performed by installEDEX.sh ). Without these definitions, Qpid is known to crash during periods of high ingest. awips soft nproc 65536\nawips soft nofile 65536", + "text": "The command edex setup attempts to add the domain name of your server. /awips2/edex/bin/setup.env should contain the fully-qualified domain name, externally resolved, localhost will not work. export AW_SITE_IDENTIFIER=OAX\nexport EDEX_SERVER=edex-cloud.unidata.ucar.edu /awips2/ldm/etc/ldmd.conf contains the upstream server (default idd.unidata.ucar.edu , which requires you connect form a .edu domain). This file also contains the edexBridge hostname (default localhost ). EXEC \"pqact -e\"\nEXEC \"edexBridge -s localhost\" /etc/security/limits.conf defines the number of user processes and files (this step is automatically performed by install.sh --edex ). Without these definitions, Qpid is known to crash during periods of high ingest. 
awips soft nproc 65536\nawips soft nofile 65536", "title": "EDEX Setup" }, { @@ -572,7 +572,7 @@ }, { "location": "/cave/goes-16-satellite/", - "text": "GOES-16 is now operational as GOES East, and products are available on Unidata's \nedex-cloud.unidata.ucar.edu\n server, including 16 channels of ABI imagery and various derived products.\n\n\nThe NASA Short-term Prediction Research and Transition (\nSPoRT\n) center has developed decoding and visualization plugins for global Geostationary Lightning Mapper (GLM) observations and Derived Wind Motion vectors, available in the \nlatest Unidata CAVE release\n for Linux and macOS.\n\n\nGOES East products are accessible in the \nSatellite\n menu:\n\n\n\n\n\n\nRGB Composites\n\uf0c1\n\n\nMulti-Channel Icing Composite\n\n\nChannels 5,3,2 (1.61u, 0.87u, 0.64u)\n\n\n\n\nMulti-Channel Daytime 1 Composite \n\n\nChannels 2,5,14 (0.64u, 1.61u, 11.20u)\n\n\n\n\nMulti-Channel Daytime 2 Composite\n\n\nChannels 2,3,2 (0.64u, 0.87u, 0.64u)\n\n\n\n\n\n\nGOES-16 Composite Imagery NOT SUPPORTED on macOS\n\n\nOpenGL Shading Language limitations prevent multi-channel imagery from displaying correctly on Mac.\n\n\n\n\n\n\nDerived Products\n\uf0c1\n\n\nGLM\n\uf0c1\n\n\nDerived Motion Winds\n\uf0c1\n\n\nLDM Pattern Action\n\uf0c1\n\n\nNOTHER ^(TI[RS]...) (KNES) (......) (...)\n FILE -close -edex\n /awips2/data_store/satellite/goes16/\\1_\\2_\\3_\\4_(seq).gini\n\n\n\nPuerto Rico sector (PRREGI)\n\uf0c1\n\n\n\n\nCONUS 1km\n\uf0c1\n\n\n\n\nFull Disk 6km\n\uf0c1\n\n\n\n\nMesoscale Sectors (TMESO-1, TMESO-2)\n\uf0c1\n\n\nTwo floating mesoscale sectors (will vary from image shown)\n\n\n\n\nTo display multi-channel composites requires CAVE for Linux or Windows.\n\n\nHDF5 Data Store\n\uf0c1\n\n\nDecoded GOES-R satellite images are stored in \n/awips2/edex/data/hdf5/satellite/\n under sector subdirectories:\n\n\ndrwxr-xr-x 18 awips fxalpha PRREGI\ndrwxr-xr-x 18 awips fxalpha TCONUS\ndrwxr-xr-x 18 awips fxalpha TFD\ndrwxr-xr-x 18 awips fxalpha TMESO-1\ndrwxr-xr-x 18 awips fxalpha TMESO-2\n\n\n\n\n\nLevel 2+ Products\n\uf0c1\n\n\nLevel 2+ products are described as derived environmental variables which will be created and disseminated when GOES-16 is used operationally (compared to \nLevel 0\n, described as unprocessed instrument data at full resolution, and \nLevel 1b\n products, described as radiometric and geometric correction applied to produce parameters in physical units). \n\n\nUnidata does not currently have access to these products, but EDEX 17.1.1 can support their ingest if made available. 
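As a rough sanity check, the decoder pattern quoted just below under "Level 2+ Decoder Regular Expression" can be tried against a candidate filename on the command line. This is only a sketch: the filename is illustrative, GNU grep with extended regular expressions is assumed, and the Java-style \w and \d classes from goesr.xml are rewritten as POSIX classes.

```bash
# Hypothetical check of an ABI Level 2+ filename against the goesr.xml pattern
# shown below; \w and \d are expanded to POSIX character classes for grep -E.
pattern='^OR_ABI-L2-[A-Za-z0-9]{3,5}(C|F|M1|M2)-M[34]_G[0-9]{2}_s[0-9]{14}_e[0-9]{14}_c[0-9]{14}\.nc$'
echo 'OR_ABI-L2-CMIPC-M3_G16_s20172531802168_e20172531804541_c20172531805015.nc' \
  | grep -E "$pattern" && echo "filename would be routed to the GOES-R decoder"
```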
\n\n\nRead more about GOES-R data levels...\n\n\nLevel 2+ Decoder Regular Expression\n\uf0c1\n\n\nFrom \n/awips2/edex/data/utility/common_static/base/distribution/goesr.xml\n\n\n^OR_ABI-L2-\\w{3,5}(C|F|M1|M2)-M[34]_G\\d\\d_s\\d{14}_e\\d{14}_c\\d{14}.nc$\n\n\n\n\n\nGeostationary Lightning Mapper (GLM)\n\uf0c1\n\n\nNASA's SPoRT MSFC Earth Science Office has contributed plugins to decode GLM level2 products, displayed as point data in CAVE.\n\n\nWhile Unidata is not currently distributing GLM products, you can \ndownload a sample netCDF file\n and copy it to \n/awips2/data_store/ingest/\n in order to test the decoding and display of GOES-R lightning data.\n\n\nIngest Sample Data\n\uf0c1\n\n\nRun the single command from your EDEX server to ingest a single-time sample data set:\n\n\nwget http://www.unidata.ucar.edu/software/awips2/OR_GLM-L2-LCFA_G16_s20170402339144.nc -P /awips2/data_store/ingest/\n\n\n\nDisplay GLM Lightning Data\n\uf0c1\n\n\nLoad GLM data from the menu \nSatellite\n \n \nGOES-16 Provisional\n \n \nGLM-Lightning\n. Data are displayable as \nFlash\n, \nEvent\n, and \nGroup\n and 1min, 5min, 15min, and 1hr intervals.\n\n\n\n\n\n\nDerived Motion Winds\n\uf0c1\n\n\nAt the bottom of the menu \nSatellite\n \n \nGOES-16 Provisional Products\n are sub-menus for GOES-16 Derived Motion Wind (DMW) products. Select the sub-menu \nGOES-Test\n and then region (CONUS, Full Disk, Mesoscale), and then select the wind product by level:\n\n\n\n\nBy Pressure\n\n\nBy Mandatory Levels\n\n\nBy Channel", + "text": "GOES-16 is now operational as GOES East, and products are available on Unidata's \nedex-cloud.unidata.ucar.edu\n server, including 16 channels of ABI imagery and various derived products.\n\n\nThe NASA Short-term Prediction Research and Transition (\nSPoRT\n) center has developed decoding and visualization plugins for global Geostationary Lightning Mapper (GLM) observations and Derived Wind Motion vectors, available in the \nlatest Unidata CAVE release\n for Linux and macOS.\n\n\nGOES East products are accessible in the \nSatellite\n menu:\n\n\n\n\n\n\nRGB Composites\n\uf0c1\n\n\nMulti-Channel Icing Composite\n\n\nChannels 5,3,2 (1.61u, 0.87u, 0.64u)\n\n\n\n\nMulti-Channel Daytime 1 Composite \n\n\nChannels 2,5,14 (0.64u, 1.61u, 11.20u)\n\n\n\n\nMulti-Channel Daytime 2 Composite\n\n\nChannels 2,3,2 (0.64u, 0.87u, 0.64u)\n\n\n\n\n\n\nGOES-16 Composite Imagery NOT SUPPORTED on macOS\n\n\nOpenGL Shading Language limitations prevent multi-channel imagery from displaying correctly on Mac.\n\n\n\n\n\n\nDerived Products\n\uf0c1\n\n\nGLM\n\uf0c1\n\n\nDerived Motion Winds\n\uf0c1\n\n\nLDM Pattern Action\n\uf0c1\n\n\nNOTHER ^(TI[RS]...) (KNES) (......) 
(...)\n FILE -close -edex\n /awips2/data_store/satellite/goes16/\\1_\\2_\\3_\\4_(seq).gini\n\n\n\nPuerto Rico sector (PRREGI)\n\uf0c1\n\n\n\n\nCONUS 1km\n\uf0c1\n\n\n\n\nFull Disk 6km\n\uf0c1\n\n\n\n\nMesoscale Sectors (TMESO-1, TMESO-2)\n\uf0c1\n\n\nTwo floating mesoscale sectors (will vary from image shown)\n\n\n\n\nTo display multi-channel composites requires CAVE for Linux or Windows.\n\n\nHDF5 Data Store\n\uf0c1\n\n\nDecoded GOES-R satellite images are stored in \n/awips2/edex/data/hdf5/satellite/\n under sector subdirectories:\n\n\ndrwxr-xr-x 18 awips fxalpha PRREGI\ndrwxr-xr-x 18 awips fxalpha TCONUS\ndrwxr-xr-x 18 awips fxalpha TFD\ndrwxr-xr-x 18 awips fxalpha TMESO-1\ndrwxr-xr-x 18 awips fxalpha TMESO-2\n\n\n\n\n\nLevel 2+ Products\n\uf0c1\n\n\nLevel 2+ products are described as derived environmental variables which will be created and disseminated when GOES-16 is used operationally (compared to \nLevel 0\n, described as unprocessed instrument data at full resolution, and \nLevel 1b\n products, described as radiometric and geometric correction applied to produce parameters in physical units). \n\n\nUnidata does not currently have access to these products, but EDEX 17.1.1 can support their ingest if made available. \n\n\nRead more about GOES-R data levels...\n\n\nLevel 2+ Decoder Regular Expression\n\uf0c1\n\n\nFrom \n/awips2/edex/data/utility/common_static/base/distribution/goesr.xml\n\n\n^OR_ABI-L2-\\w{3,5}(C|F|M1|M2)-M[34]_G\\d\\d_s\\d{14}_e\\d{14}_c\\d{14}.nc$\n\n\n\n\n\nGeostationary Lightning Mapper (GLM)\n\uf0c1\n\n\nNASA's SPoRT MSFC Earth Science Office has contributed plugins to decode GLM level2 products, displayed as point data in CAVE.\n\n\nWhile Unidata is not currently distributing GLM products, you can \ndownload a sample netCDF file\n and copy it to \n/awips2/data_store/ingest/\n in order to test the decoding and display of GOES-R lightning data.\n\n\nIngest Sample Data\n\uf0c1\n\n\nRun the single command from your EDEX server to ingest a single-time sample data set:\n\n\nwget https://www.unidata.ucar.edu/software/awips2/OR_GLM-L2-LCFA_G16_s20170402339144.nc -P /awips2/data_store/ingest/\n\n\n\nDisplay GLM Lightning Data\n\uf0c1\n\n\nLoad GLM data from the menu \nSatellite\n \n \nGOES-16 Provisional\n \n \nGLM-Lightning\n. Data are displayable as \nFlash\n, \nEvent\n, and \nGroup\n and 1min, 5min, 15min, and 1hr intervals.\n\n\n\n\n\n\nDerived Motion Winds\n\uf0c1\n\n\nAt the bottom of the menu \nSatellite\n \n \nGOES-16 Provisional Products\n are sub-menus for GOES-16 Derived Motion Wind (DMW) products. Select the sub-menu \nGOES-Test\n and then region (CONUS, Full Disk, Mesoscale), and then select the wind product by level:\n\n\n\n\nBy Pressure\n\n\nBy Mandatory Levels\n\n\nBy Channel", "title": "GOES-16 (GOES-R)" }, { @@ -642,7 +642,7 @@ }, { "location": "/cave/goes-16-satellite/#ingest-sample-data", - "text": "Run the single command from your EDEX server to ingest a single-time sample data set: wget http://www.unidata.ucar.edu/software/awips2/OR_GLM-L2-LCFA_G16_s20170402339144.nc -P /awips2/data_store/ingest/", + "text": "Run the single command from your EDEX server to ingest a single-time sample data set: wget https://www.unidata.ucar.edu/software/awips2/OR_GLM-L2-LCFA_G16_s20170402339144.nc -P /awips2/data_store/ingest/", "title": "Ingest Sample Data" }, { @@ -967,7 +967,7 @@ }, { "location": "/edex/distributed-computing/", - "text": "AWIPS makes use of service-oriented architecture to request, process, and serve real-time meteorological data. 
While originally developed for use on internal NWS forecast office networks, where operational installations of AWIPS could consist of a dozen servers or more, the early Unidata releases were stripped of operations-specific configurations and plugins, and released as a standalone server. This worked, since (at the time) a single EDEX instance with an attached SSD could handle most of NOAAport. However, with GOES-R(16) coming online in 2017, and more gridded forecast models being created at finer temporal and spatial resolutions, there is now a need to distribute the data decoding across multiple machine to handle this firehose of data.\n\n\n\n\nThis walkthrough will install different EDEX components on two machines in the XSEDE Jetstream Cloud, the first is used to \ningest and decode\n while the second is used to \nstore and serve\n data.\n\n\n\n\n\n\nDatabase/Request Server\n\uf0c1\n\n\n\n\nSpecs\n\n\n\n\nIP address \n10.0.0.9\n\n\nCentOS 6.9\n\n\nm1.medium (CPU: 6, Mem: 16 GB)\n\n\n1000GB attached storage for \n/awips2/edex/data/hdf5\n\n\n\n\n\n\n1. Install\n\uf0c1\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nmkdir /awips2\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-database\n\n\n\n2. IPtables Config\n\uf0c1\n\n\nIt is required that ports 5432 and 5672 be open for the specific IP addresses of outside EDEX ingest servers. It is \nnot recommended\n that you leave port 5432 open to all connections (since the default awips database password is known, and is not meant as a security measure). Further, it \nis recommended\n that you change the default postgres awips user password (which then requires a reconfiguration of every remote EDEX ingest server in order to connect to this database/request server).\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT DROP [0:0]\n:FORWARD DROP [0:0]\n:OUTPUT ACCEPT [0:0]\n:EXTERNAL - [0:0]\n:EDEX - [0:0]\n-A INPUT -i lo -j ACCEPT\n-A INPUT -p icmp --icmp-type any -j ACCEPT\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 22 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9581 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9582 -j ACCEPT\n-A INPUT -s 10.0.0.7 -j EDEX\n-A INPUT -j EXTERNAL\n-A EXTERNAL -j REJECT\n-A EDEX -m state --state NEW -p tcp --dport 5432 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 5672 -j ACCEPT\n-A EDEX -j REJECT\nCOMMIT\n\n\n\nNote the line \n-A INPUT -s 10.0.0.7 -j EDEX\n as well as the following \n-A EDEX ...\n rules for ports 5432 (PostgreSQL) and 5672 (PyPIES/HDF5). \n\n\n\n\nThe two ports left open to all connections (9581,9582) in addition to default port 22 are for outside CAVE client connections\n\n\n\n\n3. 
Database Config\n\uf0c1\n\n\nIn the file \n/awips2/data/pg_hba.conf\n you define remote connections for all postgres tables with as \nIP address\n/32\n, after the block of IPv4 local connections:\n\n\nvi /awips2/data/pg_hba.conf\n\n# IPv4 local connections:\nhost fxatext all 127.0.0.1/32 trust\nhost hd_ob92oax all 127.0.0.1/32 trust\nhost dc_ob7oax all 127.0.0.1/32 trust\nhost hmdb all 127.0.0.1/32 trust\nhost metadata all 127.0.0.1/32 md5\nhost maps all 127.0.0.1/32 md5\nhost postgres all 127.0.0.1/32 md5\nhost ncep all 127.0.0.1/32 md5\nhost ebxml all 127.0.0.1/32 trust\nhost replication replication 127.0.0.1/32 md5\n# Remote connections\nhost fxatext all 10.0.0.7/32 md5\nhost hd_ob92oax all 10.0.0.7/32 md5\nhost dc_ob7oax all 10.0.0.7/32 md5\nhost hmdb all 10.0.0.7/32 md5\nhost metadata all 10.0.0.7/32 md5\nhost maps all 10.0.0.7/32 md5\nhost postgres all 10.0.0.7/32 md5\nhost ncep all 10.0.0.7/32 md5\nhost ebxml all 10.0.0.7/32 md5\nhost replication replication 10.0.0.7/32 md5\n# IPv6 local connections:\nhost all all ::1/128 md5\nhost replication replication ::1/128 md5\n\n\n\n4. Start EDEX\n\uf0c1\n\n\nedex start database\n\n\n\nThis will start PostgreSQL, httpd-pypies, Qpid, and the EDEX Request JVM (and will not start the LDM or the EDEX Ingest and IngestGrib JVMs)\n\n\n5. Monitor Services\n\uf0c1\n\n\nThe command \nedex\n will show which services are running, and for a Database/Request server, will not include the LDM, EDEXingest, or EDEXgrib:\n\n\nedex\n\n[edex status]\npostgres :: running :: pid 571\npypies :: running :: pid 639\nqpid :: running :: pid 674\nEDEXingest :: not running\nEDEXgrib :: not running\nEDEXrequest :: running :: pid 987 1029 23792\n\n\n\nSince this Database/Request server is not running the main \nedexIngest\n JVM, we won't see anything from \nedex log\n, instead watch the Request Server with the command\n\n\nedex log reqeust\n\n\n\n\n\nConfirm that EDEX Request connects to PostgreSQL!\n\n\nWith the above \nedex log request\n, ensure that the log progresses \npast this point\n:\n\n\nSpring-enabled Plugins:\n-----------------------\nacars-common, acars-common-dataaccess, acarssounding-common, activetable-common,\nactivetable-request, airep-common, airep-common-dataaccess, airmet-common, \natcf-common, atcf-request, auth-request, awipstools-request, aww-common...\n\nJAXB context for PersistencePathKeySet inited in: 5ms\nINFO 20:21:09,134 5584 [EDEXMain] Reflections: Reflections took 436 ms to scan 258 urls, producing 31 keys and 3637 values\nFound 499 db classes in 720 ms\n\n\n\nIf the log stops at the \nFound db classes...\n line, that means EDEX is not connecting to PostgreSQL - double-check \nDB_ADDR\n in \n/awips2/edex/bin/setup.env\n\n\n\n\n\n\nIngest/Decode Server\n\uf0c1\n\n\n\n\nSpecs\n\n\n\n\nIP address \n10.0.0.9\n\n\nCentOS 6.9\n\n\nm1.xxlarge (CPU: 44, Mem: 120 GB)\n\n\n\n\n\n\n1. Install\n\uf0c1\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-ingest\n\n\n\n2. 
EDEX Config\n\uf0c1\n\n\nvi /awips2/edex/bin/setup.env\n\n\nHere you should redefine \nDB_ADDR\n and \nPYPIES_SERVER\n to point to the \nDatabase/Request\n server (10.0.0.9)\n\n\nexport EDEX_SERVER=10.0.0.7\n\n# postgres connection\nexport DB_ADDR=10.0.0.9\nexport DB_PORT=5432\n\n# pypies hdf5 connection\nexport PYPIES_SERVER=http://10.0.0.9:9582\n\n# qpid connection\nexport BROKER_ADDR=${EDEX_SERVER}\n\n\n\nNotice that \nEDEX_SERVER\n and \nBROKER_ADDR\n (qpid) should remain defined as the \nlocalhost\n IP address (10.0.0.7)\n\n\n3. Start EDEX\n\uf0c1\n\n\nedex start ingest\n\n\n\nThis will start Qpid and the EDEX Ingest and IngestGrib JVMs (and not start PostgreSQL, httpd-pypies, or the EDEX Request JVM)\n\n\n4. Monitor Services\n\uf0c1\n\n\nWatch the edex JVM log with the command\n\n\nedex log\n\n\n\n\n\nConfirm that EDEX connects to PostgreSQL!\n\n\nWith the above \nedex log\n, ensure that the log progresses \npast this point\n:\n\n\nSpring-enabled Plugins:\n-----------------------\nacars-common, acars-common-dataaccess, acarssounding-common, activetable-common,\nactivetable-ingest, airep-common, airep-common-dataaccess, airmet-common, \natcf-common, atcf-ingest, aww-common...\n\nJAXB context for PersistencePathKeySet inited in: 5ms\nINFO 20:21:09,134 5584 [EDEXMain] Reflections: Reflections took 436 ms to scan 258 urls, producing 31 keys and 3637 values\nFound 499 db classes in 720 ms\n\n\n\nIf the log stops at the \nFound db classes...\n line, that means EDEX is not connecting to the \nremote PostgreSQL instance\n - double-check \nDB_ADDR\n in \n/awips2/edex/bin/setup.env\n\n\nYou can \nmanually check remote PostgreSQL connectivity\n on any EDEX Ingest server from the command line:\n\n\nsu - awips\npsql -U awips -h \nremote IP address\n -p 5432 metadata\n\n\n\nWhere the default passwd is \nawips\n and is defined in files in \n/awips2/edex/conf/db/hibernateConfig/\n\n\n\n\n\n\nAdditional Notes\n\uf0c1\n\n\n\n\nBe mindful of what IP address and hostnames are used in \n/awips2/edex/bin/setup.env\n and \n/awips2/data/pg_hba.conf\n, and that they are resolvable from the command line. Consult or edit \n/etc/hosts\n as needed.\n\n\n\n\nYou can install multiple \nawips2-ingest\n servers, each decoding a different dataset or feed, all pointing to the same Database/Request server (\nDB_ADDR\n and \nPYPIES_SERVER\n in \n/awips2/edex/bin/setup.env\n):\n\n\n\n\n\n\nEvery EDEX Ingest IP address must be allowed in both \niptables\n and \npg_hba.conf\n as \nshown above\n.", + "text": "AWIPS makes use of service-oriented architecture to request, process, and serve real-time meteorological data. While originally developed for use on internal NWS forecast office networks, where operational installations of AWIPS could consist of a dozen servers or more, the early Unidata releases were stripped of operations-specific configurations and plugins, and released as a standalone server. This worked, since (at the time) a single EDEX instance with an attached SSD could handle most of NOAAport. 
However, with GOES-R(16) coming online in 2017, and more gridded forecast models being created at finer temporal and spatial resolutions, there is now a need to distribute the data decoding across multiple machine to handle this firehose of data.\n\n\n\n\nThis walkthrough will install different EDEX components on two machines in the XSEDE Jetstream Cloud, the first is used to \ningest and decode\n while the second is used to \nstore and serve\n data.\n\n\n\n\n\n\nDatabase/Request Server\n\uf0c1\n\n\n\n\nSpecs\n\n\n\n\nIP address \n10.0.0.9\n\n\nCentOS 6.9\n\n\nm1.medium (CPU: 6, Mem: 16 GB)\n\n\n1000GB attached storage for \n/awips2/edex/data/hdf5\n\n\n\n\n\n\n1. Install\n\uf0c1\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nmkdir /awips2\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-database\n\n\n\n2. IPtables Config\n\uf0c1\n\n\nIt is required that ports 5432 and 5672 be open for the specific IP addresses of outside EDEX ingest servers. It is \nnot recommended\n that you leave port 5432 open to all connections (since the default awips database password is known, and is not meant as a security measure). Further, it \nis recommended\n that you change the default postgres awips user password (which then requires a reconfiguration of every remote EDEX ingest server in order to connect to this database/request server).\n\n\nvi /etc/sysconfig/iptables\n\n*filter\n:INPUT DROP [0:0]\n:FORWARD DROP [0:0]\n:OUTPUT ACCEPT [0:0]\n:EXTERNAL - [0:0]\n:EDEX - [0:0]\n-A INPUT -i lo -j ACCEPT\n-A INPUT -p icmp --icmp-type any -j ACCEPT\n-A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 22 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9581 -j ACCEPT\n-A INPUT -m state --state NEW -m tcp -p tcp --dport 9582 -j ACCEPT\n-A INPUT -s 10.0.0.7 -j EDEX\n-A INPUT -j EXTERNAL\n-A EXTERNAL -j REJECT\n-A EDEX -m state --state NEW -p tcp --dport 5432 -j ACCEPT\n-A EDEX -m state --state NEW -p tcp --dport 5672 -j ACCEPT\n-A EDEX -j REJECT\nCOMMIT\n\n\n\nNote the line \n-A INPUT -s 10.0.0.7 -j EDEX\n as well as the following \n-A EDEX ...\n rules for ports 5432 (PostgreSQL) and 5672 (PyPIES/HDF5). \n\n\n\n\nThe two ports left open to all connections (9581,9582) in addition to default port 22 are for outside CAVE client connections\n\n\n\n\n3. Database Config\n\uf0c1\n\n\nIn the file \n/awips2/data/pg_hba.conf\n you define remote connections for all postgres tables with as \nIP address\n/32\n, after the block of IPv4 local connections:\n\n\nvi /awips2/data/pg_hba.conf\n\n# IPv4 local connections:\nhost fxatext all 127.0.0.1/32 trust\nhost hd_ob92oax all 127.0.0.1/32 trust\nhost dc_ob7oax all 127.0.0.1/32 trust\nhost hmdb all 127.0.0.1/32 trust\nhost metadata all 127.0.0.1/32 md5\nhost maps all 127.0.0.1/32 md5\nhost postgres all 127.0.0.1/32 md5\nhost ncep all 127.0.0.1/32 md5\nhost ebxml all 127.0.0.1/32 trust\nhost replication replication 127.0.0.1/32 md5\n# Remote connections\nhost fxatext all 10.0.0.7/32 md5\nhost hd_ob92oax all 10.0.0.7/32 md5\nhost dc_ob7oax all 10.0.0.7/32 md5\nhost hmdb all 10.0.0.7/32 md5\nhost metadata all 10.0.0.7/32 md5\nhost maps all 10.0.0.7/32 md5\nhost postgres all 10.0.0.7/32 md5\nhost ncep all 10.0.0.7/32 md5\nhost ebxml all 10.0.0.7/32 md5\nhost replication replication 10.0.0.7/32 md5\n# IPv6 local connections:\nhost all all ::1/128 md5\nhost replication replication ::1/128 md5\n\n\n\n4. 
Start EDEX\n\uf0c1\n\n\nedex start database\n\n\n\nThis will start PostgreSQL, httpd-pypies, Qpid, and the EDEX Request JVM (and will not start the LDM or the EDEX Ingest and IngestGrib JVMs)\n\n\n5. Monitor Services\n\uf0c1\n\n\nThe command \nedex\n will show which services are running, and for a Database/Request server, will not include the LDM, EDEXingest, or EDEXgrib:\n\n\nedex\n\n[edex status]\npostgres :: running :: pid 571\npypies :: running :: pid 639\nqpid :: running :: pid 674\nEDEXingest :: not running\nEDEXgrib :: not running\nEDEXrequest :: running :: pid 987 1029 23792\n\n\n\nSince this Database/Request server is not running the main \nedexIngest\n JVM, we won't see anything from \nedex log\n, instead watch the Request Server with the command\n\n\nedex log reqeust\n\n\n\n\n\nConfirm that EDEX Request connects to PostgreSQL!\n\n\nWith the above \nedex log request\n, ensure that the log progresses \npast this point\n:\n\n\nSpring-enabled Plugins:\n-----------------------\nacars-common, acars-common-dataaccess, acarssounding-common, activetable-common,\nactivetable-request, airep-common, airep-common-dataaccess, airmet-common, \natcf-common, atcf-request, auth-request, awipstools-request, aww-common...\n\nJAXB context for PersistencePathKeySet inited in: 5ms\nINFO 20:21:09,134 5584 [EDEXMain] Reflections: Reflections took 436 ms to scan 258 urls, producing 31 keys and 3637 values\nFound 499 db classes in 720 ms\n\n\n\nIf the log stops at the \nFound db classes...\n line, that means EDEX is not connecting to PostgreSQL - double-check \nDB_ADDR\n in \n/awips2/edex/bin/setup.env\n\n\n\n\n\n\nIngest/Decode Server\n\uf0c1\n\n\n\n\nSpecs\n\n\n\n\nIP address \n10.0.0.9\n\n\nCentOS 6.9\n\n\nm1.xxlarge (CPU: 44, Mem: 120 GB)\n\n\n\n\n\n\n1. Install\n\uf0c1\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-ingest\n\n\n\n2. EDEX Config\n\uf0c1\n\n\nvi /awips2/edex/bin/setup.env\n\n\nHere you should redefine \nDB_ADDR\n and \nPYPIES_SERVER\n to point to the \nDatabase/Request\n server (10.0.0.9)\n\n\nexport EDEX_SERVER=10.0.0.7\n\n# postgres connection\nexport DB_ADDR=10.0.0.9\nexport DB_PORT=5432\n\n# pypies hdf5 connection\nexport PYPIES_SERVER=http://10.0.0.9:9582\n\n# qpid connection\nexport BROKER_ADDR=${EDEX_SERVER}\n\n\n\nNotice that \nEDEX_SERVER\n and \nBROKER_ADDR\n (qpid) should remain defined as the \nlocalhost\n IP address (10.0.0.7)\n\n\n3. Start EDEX\n\uf0c1\n\n\nedex start ingest\n\n\n\nThis will start Qpid and the EDEX Ingest and IngestGrib JVMs (and not start PostgreSQL, httpd-pypies, or the EDEX Request JVM)\n\n\n4. 
Monitor Services\n\uf0c1\n\n\nWatch the edex JVM log with the command\n\n\nedex log\n\n\n\n\n\nConfirm that EDEX connects to PostgreSQL!\n\n\nWith the above \nedex log\n, ensure that the log progresses \npast this point\n:\n\n\nSpring-enabled Plugins:\n-----------------------\nacars-common, acars-common-dataaccess, acarssounding-common, activetable-common,\nactivetable-ingest, airep-common, airep-common-dataaccess, airmet-common, \natcf-common, atcf-ingest, aww-common...\n\nJAXB context for PersistencePathKeySet inited in: 5ms\nINFO 20:21:09,134 5584 [EDEXMain] Reflections: Reflections took 436 ms to scan 258 urls, producing 31 keys and 3637 values\nFound 499 db classes in 720 ms\n\n\n\nIf the log stops at the \nFound db classes...\n line, that means EDEX is not connecting to the \nremote PostgreSQL instance\n - double-check \nDB_ADDR\n in \n/awips2/edex/bin/setup.env\n\n\nYou can \nmanually check remote PostgreSQL connectivity\n on any EDEX Ingest server from the command line:\n\n\nsu - awips\npsql -U awips -h \nremote IP address\n -p 5432 metadata\n\n\n\nWhere the default passwd is \nawips\n and is defined in files in \n/awips2/edex/conf/db/hibernateConfig/\n\n\n\n\n\n\nAdditional Notes\n\uf0c1\n\n\n\n\nBe mindful of what IP address and hostnames are used in \n/awips2/edex/bin/setup.env\n and \n/awips2/data/pg_hba.conf\n, and that they are resolvable from the command line. Consult or edit \n/etc/hosts\n as needed.\n\n\n\n\nYou can install multiple \nawips2-ingest\n servers, each decoding a different dataset or feed, all pointing to the same Database/Request server (\nDB_ADDR\n and \nPYPIES_SERVER\n in \n/awips2/edex/bin/setup.env\n):\n\n\n\n\n\n\nEvery EDEX Ingest IP address must be allowed in both \niptables\n and \npg_hba.conf\n as \nshown above\n.", "title": "Distributed EDEX" }, { @@ -977,7 +977,7 @@ }, { "location": "/edex/distributed-computing/#1-install", - "text": "groupadd fxalpha useradd -G fxalpha awips\nmkdir /awips2\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-database", + "text": "groupadd fxalpha useradd -G fxalpha awips\nmkdir /awips2\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-database", "title": "1. Install" }, { @@ -1007,7 +1007,7 @@ }, { "location": "/edex/distributed-computing/#1-install_1", - "text": "groupadd fxalpha useradd -G fxalpha awips\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-ingest", + "text": "groupadd fxalpha useradd -G fxalpha awips\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-ingest", "title": "1. 
Install" }, { @@ -1147,12 +1147,12 @@ }, { "location": "/edex/new-grid/", - "text": "Unrecognized grids can be decoded by EDEX simply by dropping \n*.grib\n or \n*.grib2\n files into \n/awips2/data_store/ingest/\n\n\nTo add support for a new grid, two edits must be made:\n\n\n\n\nGeospatial projection\n must be defined in a \ngrid navigation file\n\n\nGrid name\n, \ncenter\n, \nsubcenter\n, and \nprocess ID\n must be defined in a \nmodel definition file\n.\n\n\n\n\nIngest an Unsupported Grid\n\uf0c1\n\n\n\n\n\n\nDownload an example grib1 file and rename to a \n*.grib\n extension, then copy to the manual ingest point \n/awips2/data_store/ingest/\n \n\n\nwget http://www.unidata.ucar.edu/staff/mjames/14102318_nmm_d01.GrbF00600 -O wrf.grib\n\ncp wrf.grib /awips2/data_store/ingest/\n\n\n\nRemember that the data distribution file (\n/awips2/edex/data/utility/edex_static/base/distribution/grib.xml\n) will match filename which have the \n*.grib\n extension.\n\n\n\n\n\n\nConfirm that the grib file decodes in the grib log file:\n\n\nedex log grib\n\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1200 (sec) Latency: 21.8080 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1180 (sec) Latency: 21.8140 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.4230 (sec) Latency: 21.8360 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.2240 (sec) Latency: 21.9140 (sec)\n\n...\n\n\n\n\n\n\n\nCheck that the hdf5 data directory exists for our unnamed grid\n\n\nls -latr /awips2/edex/data/hdf5/grid/GribModel:7:0:89\n\n\n\nThough the grib file has been decoded, it has been given a generic name with its center, subcenter, and process IDs (7, 0, 89, respectively). \n\n\n\n\n\n\nDetermine Grid Projection\n\uf0c1\n\n\nWhen the grid was ingested a record was added to the \ngrid_coverage\n table with its navigation information:\n\n\npsql metadata\n\nmetadata=# select nx,ny,dx,dy,majoraxis,minoraxis,la1,lo1,lov,latin1,latin2 from gridcoverage where id=(select distinct(location_id) from grid_info where datasetid='GribModel:7:0:89');\n nx | ny | dx | dy | majoraxis | minoraxis | la1 | lo1 | lov | latin1 | latin2 \n-----+-----+------------------+------------------+-----------+-----------+------------------+-------------------+-------------------+------------------+------------------\n 201 | 155 | 4.29699993133545 | 4.29699993133545 | 6378160 | 6356775 | 42.2830009460449 | -72.3610000610352 | -67.0770034790039 | 45.3680000305176 | 45.3680000305176\n(1 row)\n\n\n\nCompare with the projection info returned by wgrib on the original file:\n\n\nwgrib -V wrf.grib\n\nrec 799:27785754:date 2014102318 ALBDO kpds5=84 kpds6=1 kpds7=0 levels=(0,0) grid=255 sfc 6hr fcst: bitmap: 736 undef\n ALBDO=Albedo [%]\n timerange 0 P1 6 P2 0 TimeU 1 nx 201 ny 155 GDS grid 3 num_in_ave 0 missing 0\n center 7 subcenter 0 process 89 Table 2 scan: WE:SN winds(grid) \n Lambert Conf: Lat1 42.283000 Lon1 -72.361000 Lov -67.077000\n Latin1 45.368000 Latin2 45.368000 LatSP 0.000000 LonSP 0.000000\n North Pole (201 x 155) Dx 4.297000 Dy 4.297000 scan 64 mode 8\n min/max data 5 21.9 num bits 8 BDS_Ref 50 DecScale 1 BinScale 0\n\n\n\nNotice that our grib1 file is a \nLambert Conformal\n projection. We will need these values for the next step. 
Note that \nthere is a tolerance of +/- 0.1 degrees\n to keep in mind when defining your coverage area.\n\n\nCreate Grid Projection File\n\uf0c1\n\n\nGrid projection files are stored in \n/awips2/edex/data/utility/edex_static/base/grib/grids/\n and there are four grid coverage types available:\n\n\n\n\n\n\nlambertConformalGridCoverage\n example\n\n\nlambertConformalGridCoverage\n\n \nname\n305\n/name\n\n \ndescription\nRegional - CONUS (Lambert Conformal)\n/description\n\n \nla1\n16.322\n/la1\n\n \nlo1\n-125.955\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n151\n/nx\n\n \nny\n113\n/ny\n\n \ndx\n40.63525\n/dx\n\n \ndy\n40.63525\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6356775.0\n/minorAxis\n\n \nmajorAxis\n6378160.0\n/majorAxis\n\n \nlov\n-95.0\n/lov\n\n \nlatin1\n25.0\n/latin1\n\n \nlatin2\n25.0\n/latin2\n\n\n/lambertConformalGridCoverage\n\n\n\n\n\n\n\n\npolarStereoGridCoverage\n example\n\n\npolarStereoGridCoverage\n\n \nname\n405\n/name\n\n \ndescription\nSea Ice south 690X710 13km grid\n/description\n\n \nla1\n-36.866\n/la1\n\n \nlo1\n139.806\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n690\n/nx\n\n \nny\n710\n/ny\n\n \ndx\n12.7\n/dx\n\n \ndy\n12.7\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6371229.0\n/minorAxis\n\n \nmajorAxis\n6371229.0\n/majorAxis\n\n \nlov\n100.0\n/lov\n\n\n/polarStereoGridCoverage\n\n\n\n\n\n\n\n\nlatLonGridCoverage\n example\n\n\nlatLonGridCoverage\n\n \nname\n864162002\n/name\n\n \ndescription\nUKMet HiRes combined - Southern Hemisphere\n Longitude range 71.25E - 70.416E \n/description\n\n \nla1\n-89.721\n/la1\n\n \nlo1\n71.25\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n864\n/nx\n\n \nny\n162\n/ny\n\n \ndx\n0.833\n/dx\n\n \ndy\n0.556\n/dy\n\n \nspacingUnit\ndegree\n/spacingUnit\n\n \nla2\n-0.278\n/la2\n\n \nlo2\n70.416\n/lo2\n\n\n/latLonGridCoverage\n\n\n\n\n\n\n\n\nmercatorGridCoverage\n example\n\n\nmercatorGridCoverage\n\n \nname\nNBM_PR\n/name\n\n \ndescription\n National Blend Grid over Puerto Rico - (1.25 km)\n/description\n\n \nla1\n16.9775\n/la1\n\n \nlo1\n-68.0278\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n339\n/nx\n\n \nny\n225\n/ny\n\n \ndx\n1.25\n/dx\n\n \ndy\n1.25\n/dy\n\n \nla2\n19.3750032477232\n/la2\n\n \nlo2\n-63.984399999999994\n/lo2\n\n \nlatin\n20\n/latin\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6371200\n/minorAxis\n\n \nmajorAxis\n6371200\n/majorAxis\n\n\n/mercatorGridCoverage\n\n\n\n\n\n\n\n\nCopy an existing file file with the same grid projection type (in this case \nlambertConformalGridCoverage\n) to a new file \nwrf.xml\n\n\ncd /awips2/edex/data/utility/edex_static/base/grib/grids/\ncp RUCIcing.xml wrf.xml\n\n\n\nAnd edit the new \nwrf.xml\n to define the projection values (example provided):\n\n\nvi wrf.xml\n\n\nlambertConformalGridCoverage\n\n \nname\n201155\n/name\n\n \ndescription\nRegional - CONUS (Lambert Conformal)\n/description\n\n \nla1\n42.2830009460449\n/la1\n\n \nlo1\n-72.3610000610352\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n201\n/nx\n\n \nny\n155\n/ny\n\n \ndx\n4.29699993133545\n/dx\n\n \ndy\n4.29699993133545\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6356775.0\n/minorAxis\n\n \nmajorAxis\n6378160.0\n/majorAxis\n\n \nlov\n-67.0770034790039\n/lov\n\n \nlatin1\n45.3680000305176\n/latin1\n\n \nlatin2\n45.3680000305176\n/latin2\n\n\n/lambertConformalGridCoverage\n\n\n\n\n\n\nNotice \nname\n201155\n/name\n 
defined from the number of grid points (201 x 155). This value will be matched against an entry in our models file (below) to set the name of the model (e.g. WRF).\n\n\n\n\nCreate Model Definition\n\uf0c1\n\n\nModel definition XML files are found in \n/awips2/edex/data/utility/edex_static/base/grid/models/\n. Since our grib1 file has a center ID of 7 (NCEP) we will edit the \ngribModels_NCEP-7.xml\n file.\n\n\ncd /awips2/edex/data/utility/edex_static/base/grib/models/\n\nvi gribModels_NCEP-7.xml\n\n\n\nin \ngribModelSet\n add an entry\n\n\n \nmodel\n\n \nname\nWRF\n/name\n\n \ncenter\n7\n/center\n\n \nsubcenter\n0\n/subcenter\n\n \ngrid\n201155\n/grid\n\n \nprocess\n\n \nid\n89\n/id\n\n \n/process\n\n \n/model\n\n\n\n\nsave the file and restart EDEX for the changes to take effect.\n\n\nsudo service edex_camel restart\n\n\n\nNow copy the \nwrf.grib\n file \nagain\n to \n/awips2/data_store/ingest/\n. If everything is correct we will not see any persistence errors since the grid is now named \nWRF\n and not \nGribModel:7:0:89\n.\n\n\ncp wrd.grib /awips2/data_store/ingest/\n\nedex log grib\n\n\n\nAfter you have confirmed that the grid was ingested with the given name, you can \nedit the D2D product menus to display the new grid\n.\n\n\nTroubleshooting Grib Ingest\n\uf0c1\n\n\nIf you ingest a piece of data and the parameter appears as unknown in the metadata database, ensure that the correct parameter tables are in place for the center/subcenter.\n\n\nMake sure the latitude and longitude entries in your coverage specification file match those of your ingested raw grib file. There is a tolerance of +/- 0.1 degree to keep in mind when defining your coverage area.\n\n\nIf some of the information is unknown, using a grib utility application such as \nwgrib\n and \nwgrib2\n (not delivered) can be useful in determining the information that must be added to correctly process a new grib file.", + "text": "Unrecognized grids can be decoded by EDEX simply by dropping \n*.grib\n or \n*.grib2\n files into \n/awips2/data_store/ingest/\n\n\nTo add support for a new grid, two edits must be made:\n\n\n\n\nGeospatial projection\n must be defined in a \ngrid navigation file\n\n\nGrid name\n, \ncenter\n, \nsubcenter\n, and \nprocess ID\n must be defined in a \nmodel definition file\n.\n\n\n\n\nIngest an Unsupported Grid\n\uf0c1\n\n\n\n\n\n\nDownload an example grib1 file and rename to a \n*.grib\n extension, then copy to the manual ingest point \n/awips2/data_store/ingest/\n \n\n\nwget https://www.unidata.ucar.edu/staff/mjames/14102318_nmm_d01.GrbF00600 -O wrf.grib\n\ncp wrf.grib /awips2/data_store/ingest/\n\n\n\nRemember that the data distribution file (\n/awips2/edex/data/utility/edex_static/base/distribution/grib.xml\n) will match filename which have the \n*.grib\n extension.\n\n\n\n\n\n\nConfirm that the grib file decodes in the grib log file:\n\n\nedex log grib\n\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1200 (sec) Latency: 21.8080 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1180 (sec) Latency: 21.8140 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.4230 (sec) Latency: 21.8360 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.2240 (sec) Latency: 21.9140 (sec)\n\n...\n\n\n\n\n\n\n\nCheck that the hdf5 data directory exists for our unnamed grid\n\n\nls -latr 
/awips2/edex/data/hdf5/grid/GribModel:7:0:89\n\n\n\nThough the grib file has been decoded, it has been given a generic name with its center, subcenter, and process IDs (7, 0, 89, respectively). \n\n\n\n\n\n\nDetermine Grid Projection\n\uf0c1\n\n\nWhen the grid was ingested a record was added to the \ngrid_coverage\n table with its navigation information:\n\n\npsql metadata\n\nmetadata=# select nx,ny,dx,dy,majoraxis,minoraxis,la1,lo1,lov,latin1,latin2 from gridcoverage where id=(select distinct(location_id) from grid_info where datasetid='GribModel:7:0:89');\n nx | ny | dx | dy | majoraxis | minoraxis | la1 | lo1 | lov | latin1 | latin2 \n-----+-----+------------------+------------------+-----------+-----------+------------------+-------------------+-------------------+------------------+------------------\n 201 | 155 | 4.29699993133545 | 4.29699993133545 | 6378160 | 6356775 | 42.2830009460449 | -72.3610000610352 | -67.0770034790039 | 45.3680000305176 | 45.3680000305176\n(1 row)\n\n\n\nCompare with the projection info returned by wgrib on the original file:\n\n\nwgrib -V wrf.grib\n\nrec 799:27785754:date 2014102318 ALBDO kpds5=84 kpds6=1 kpds7=0 levels=(0,0) grid=255 sfc 6hr fcst: bitmap: 736 undef\n ALBDO=Albedo [%]\n timerange 0 P1 6 P2 0 TimeU 1 nx 201 ny 155 GDS grid 3 num_in_ave 0 missing 0\n center 7 subcenter 0 process 89 Table 2 scan: WE:SN winds(grid) \n Lambert Conf: Lat1 42.283000 Lon1 -72.361000 Lov -67.077000\n Latin1 45.368000 Latin2 45.368000 LatSP 0.000000 LonSP 0.000000\n North Pole (201 x 155) Dx 4.297000 Dy 4.297000 scan 64 mode 8\n min/max data 5 21.9 num bits 8 BDS_Ref 50 DecScale 1 BinScale 0\n\n\n\nNotice that our grib1 file is a \nLambert Conformal\n projection. We will need these values for the next step. Note that \nthere is a tolerance of +/- 0.1 degrees\n to keep in mind when defining your coverage area.\n\n\nCreate Grid Projection File\n\uf0c1\n\n\nGrid projection files are stored in \n/awips2/edex/data/utility/edex_static/base/grib/grids/\n and there are four grid coverage types available:\n\n\n\n\n\n\nlambertConformalGridCoverage\n example\n\n\nlambertConformalGridCoverage\n\n \nname\n305\n/name\n\n \ndescription\nRegional - CONUS (Lambert Conformal)\n/description\n\n \nla1\n16.322\n/la1\n\n \nlo1\n-125.955\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n151\n/nx\n\n \nny\n113\n/ny\n\n \ndx\n40.63525\n/dx\n\n \ndy\n40.63525\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6356775.0\n/minorAxis\n\n \nmajorAxis\n6378160.0\n/majorAxis\n\n \nlov\n-95.0\n/lov\n\n \nlatin1\n25.0\n/latin1\n\n \nlatin2\n25.0\n/latin2\n\n\n/lambertConformalGridCoverage\n\n\n\n\n\n\n\n\npolarStereoGridCoverage\n example\n\n\npolarStereoGridCoverage\n\n \nname\n405\n/name\n\n \ndescription\nSea Ice south 690X710 13km grid\n/description\n\n \nla1\n-36.866\n/la1\n\n \nlo1\n139.806\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n690\n/nx\n\n \nny\n710\n/ny\n\n \ndx\n12.7\n/dx\n\n \ndy\n12.7\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6371229.0\n/minorAxis\n\n \nmajorAxis\n6371229.0\n/majorAxis\n\n \nlov\n100.0\n/lov\n\n\n/polarStereoGridCoverage\n\n\n\n\n\n\n\n\nlatLonGridCoverage\n example\n\n\nlatLonGridCoverage\n\n \nname\n864162002\n/name\n\n \ndescription\nUKMet HiRes combined - Southern Hemisphere\n Longitude range 71.25E - 70.416E \n/description\n\n \nla1\n-89.721\n/la1\n\n \nlo1\n71.25\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n864\n/nx\n\n \nny\n162\n/ny\n\n 
\ndx\n0.833\n/dx\n\n \ndy\n0.556\n/dy\n\n \nspacingUnit\ndegree\n/spacingUnit\n\n \nla2\n-0.278\n/la2\n\n \nlo2\n70.416\n/lo2\n\n\n/latLonGridCoverage\n\n\n\n\n\n\n\n\nmercatorGridCoverage\n example\n\n\nmercatorGridCoverage\n\n \nname\nNBM_PR\n/name\n\n \ndescription\n National Blend Grid over Puerto Rico - (1.25 km)\n/description\n\n \nla1\n16.9775\n/la1\n\n \nlo1\n-68.0278\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n339\n/nx\n\n \nny\n225\n/ny\n\n \ndx\n1.25\n/dx\n\n \ndy\n1.25\n/dy\n\n \nla2\n19.3750032477232\n/la2\n\n \nlo2\n-63.984399999999994\n/lo2\n\n \nlatin\n20\n/latin\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6371200\n/minorAxis\n\n \nmajorAxis\n6371200\n/majorAxis\n\n\n/mercatorGridCoverage\n\n\n\n\n\n\n\n\nCopy an existing file file with the same grid projection type (in this case \nlambertConformalGridCoverage\n) to a new file \nwrf.xml\n\n\ncd /awips2/edex/data/utility/edex_static/base/grib/grids/\ncp RUCIcing.xml wrf.xml\n\n\n\nAnd edit the new \nwrf.xml\n to define the projection values (example provided):\n\n\nvi wrf.xml\n\n\nlambertConformalGridCoverage\n\n \nname\n201155\n/name\n\n \ndescription\nRegional - CONUS (Lambert Conformal)\n/description\n\n \nla1\n42.2830009460449\n/la1\n\n \nlo1\n-72.3610000610352\n/lo1\n\n \nfirstGridPointCorner\nLowerLeft\n/firstGridPointCorner\n\n \nnx\n201\n/nx\n\n \nny\n155\n/ny\n\n \ndx\n4.29699993133545\n/dx\n\n \ndy\n4.29699993133545\n/dy\n\n \nspacingUnit\nkm\n/spacingUnit\n\n \nminorAxis\n6356775.0\n/minorAxis\n\n \nmajorAxis\n6378160.0\n/majorAxis\n\n \nlov\n-67.0770034790039\n/lov\n\n \nlatin1\n45.3680000305176\n/latin1\n\n \nlatin2\n45.3680000305176\n/latin2\n\n\n/lambertConformalGridCoverage\n\n\n\n\n\n\nNotice \nname\n201155\n/name\n defined from the number of grid points (201 x 155). This value will be matched against an entry in our models file (below) to set the name of the model (e.g. WRF).\n\n\n\n\nCreate Model Definition\n\uf0c1\n\n\nModel definition XML files are found in \n/awips2/edex/data/utility/edex_static/base/grid/models/\n. Since our grib1 file has a center ID of 7 (NCEP) we will edit the \ngribModels_NCEP-7.xml\n file.\n\n\ncd /awips2/edex/data/utility/edex_static/base/grib/models/\n\nvi gribModels_NCEP-7.xml\n\n\n\nin \ngribModelSet\n add an entry\n\n\n \nmodel\n\n \nname\nWRF\n/name\n\n \ncenter\n7\n/center\n\n \nsubcenter\n0\n/subcenter\n\n \ngrid\n201155\n/grid\n\n \nprocess\n\n \nid\n89\n/id\n\n \n/process\n\n \n/model\n\n\n\n\nsave the file and restart EDEX for the changes to take effect.\n\n\nsudo service edex_camel restart\n\n\n\nNow copy the \nwrf.grib\n file \nagain\n to \n/awips2/data_store/ingest/\n. If everything is correct we will not see any persistence errors since the grid is now named \nWRF\n and not \nGribModel:7:0:89\n.\n\n\ncp wrd.grib /awips2/data_store/ingest/\n\nedex log grib\n\n\n\nAfter you have confirmed that the grid was ingested with the given name, you can \nedit the D2D product menus to display the new grid\n.\n\n\nTroubleshooting Grib Ingest\n\uf0c1\n\n\nIf you ingest a piece of data and the parameter appears as unknown in the metadata database, ensure that the correct parameter tables are in place for the center/subcenter.\n\n\nMake sure the latitude and longitude entries in your coverage specification file match those of your ingested raw grib file. 
There is a tolerance of +/- 0.1 degree to keep in mind when defining your coverage area.\n\n\nIf some of the information is unknown, using a grib utility application such as \nwgrib\n and \nwgrib2\n (not delivered) can be useful in determining the information that must be added to correctly process a new grib file.", "title": "Ingest a New Grid" }, { "location": "/edex/new-grid/#ingest-an-unsupported-grid", - "text": "Download an example grib1 file and rename to a *.grib extension, then copy to the manual ingest point /awips2/data_store/ingest/ wget http://www.unidata.ucar.edu/staff/mjames/14102318_nmm_d01.GrbF00600 -O wrf.grib\n\ncp wrf.grib /awips2/data_store/ingest/ Remember that the data distribution file ( /awips2/edex/data/utility/edex_static/base/distribution/grib.xml ) will match filename which have the *.grib extension. Confirm that the grib file decodes in the grib log file: edex log grib\n\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1200 (sec) Latency: 21.8080 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1180 (sec) Latency: 21.8140 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.4230 (sec) Latency: 21.8360 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.2240 (sec) Latency: 21.9140 (sec)\n\n... Check that the hdf5 data directory exists for our unnamed grid ls -latr /awips2/edex/data/hdf5/grid/GribModel:7:0:89 Though the grib file has been decoded, it has been given a generic name with its center, subcenter, and process IDs (7, 0, 89, respectively).", + "text": "Download an example grib1 file and rename to a *.grib extension, then copy to the manual ingest point /awips2/data_store/ingest/ wget https://www.unidata.ucar.edu/staff/mjames/14102318_nmm_d01.GrbF00600 -O wrf.grib\n\ncp wrf.grib /awips2/data_store/ingest/ Remember that the data distribution file ( /awips2/edex/data/utility/edex_static/base/distribution/grib.xml ) will match filename which have the *.grib extension. Confirm that the grib file decodes in the grib log file: edex log grib\n\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1200 (sec) Latency: 21.8080 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.1180 (sec) Latency: 21.8140 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.4230 (sec) Latency: 21.8360 (sec)\nINFO [Ingest.GribDecode] /awips2/data_store/ingest/grib/20141026/14/wrf.grib processed in: 0.2240 (sec) Latency: 21.9140 (sec)\n\n... 
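If the wrapper output is hard to follow, the underlying ingestGrib log can be watched directly. The path follows the /awips2/edex/logs convention noted earlier, but the dated filename pattern here is an assumption and may differ between releases.

```bash
# Watch the ingestGrib JVM log and filter for decode messages.
# The dated filename is assumed; adjust it to whatever exists in /awips2/edex/logs.
tail -f /awips2/edex/logs/edex-ingestGrib-$(date +%Y%m%d).log | grep GribDecode
```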
Check that the hdf5 data directory exists for our unnamed grid ls -latr /awips2/edex/data/hdf5/grid/GribModel:7:0:89 Though the grib file has been decoded, it has been given a generic name with its center, subcenter, and process IDs (7, 0, 89, respectively).", "title": "Ingest an Unsupported Grid" }, { @@ -1227,7 +1227,7 @@ }, { "location": "/edex/case-studies/", - "text": "Case Study Server Configuration\n\uf0c1\n\n\nThis document covers what is necessary to install and run AWIPS EDEX as an archive and case study server (no purging of data).\n\n\nQuick Install\n\uf0c1\n\n\nFollow the \nEDEX Install Instructions\n including iptables config and an optional SSD mount (for large data volumes)\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nmkdir -p /awips2/data_store\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-server -y\n\n\n\nDisable Data Purging\n\uf0c1\n\n\nThe easiest way to disable data purging is to add an \nexclude\npurge.*\n/exclude\n entry in \n/awips2/edex/conf/modes/ingest-modes.xml\n so that the purge plugin is not loaded when the EDEX ingest JVM is started:\n\n\nvi /awips2/edex/conf/modes/ingest-modes.xml\n\n\n?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?\n\n\nedexModes\n\n \nmode name=\"ingest\"\n\n \nexclude\n.*request.*\n/exclude\n\n \nexclude\nedex-security.xml\n/exclude\n\n \nexclude\n(taf|shef).*\n/exclude\n\n \nexclude\npurge.*\n/exclude\n\n \n/mode\n\n\n/edexModes\n\n\n\n\nStart EDEX\n\uf0c1\n\n\nwithout the LDM\n\n\nedex start base\n\n\n\nmonitor services\n\n\nedex\n\n[edex status]\n postgres :: running :: pid 43644\n pypies :: running :: pid 3557\n qpid :: running :: pid 43742\n EDEXingest :: running :: pid 6564 44301 44597\n EDEXgrib :: running :: pid 6565 44302 44598\n EDEXrequest :: running :: pid 6566 44303 44599\n ldmadmin :: not running\n\n\n\nIngest Case Study Data\n\uf0c1\n\n\nRaw data files of any type can be copied or moved into \n/awips2/data_store/ingest/\n to be picked up and decoded by EDEX. Most data types are recognized by regular expression matching of the WMO Header or filename. \n\n\nIndividual files can be ingested on the command line with the regex header/pattern supplied as the last argument:\n\n\nqpidNotify.py /full/path/to/data.file \nregex match\n\n\n\n\nfor example\n\n\nqpidNotify.py /home/awips/uniwisc_U5_132GOES-15_IMG10.7um_4km_20171024_1830.area.png uniwisc\n\nqpidNotify.py /awips2/data_store/grid/NAM12/conduit/NAM_CONUS_12km_conduit_20171025_1200Z_F084_TMPK-7.000007.grib2 grib\n\nqpidNotify.py /awips2/data_store/radar/FTG_N0Q_20171015_1815 Level3\n\n\n\nViewing Archive Data in CAVE\n\uf0c1\n\n\nBecause we are installing and configuring a standalone EDEX archive server without real-time LDM data ingest (and with purge disabled), any case study data that is ingested will be the \"latest available\" to CAVE, and you will see CAVE product menu time fill in with the latest of all data ingested.\n\n\nHowever, to display specific time-based data (in case you ingest more than one case study), there are two options:\n\n\n1. Load Mode \n Inventory\n\uf0c1\n\n\nIn the top-left toolbar change \nValid time seq\n to \nInventory\n.\n\n\n\n\nNow any data product selected from the menus or the Product Browser will prompt you to select the exact time.\n\n\n\n\n2. 
Set Data Display Time in CAVE\n\uf0c1\n\n\nAt the bottom of the CAVE application, double-click the \nTime:\n entry to bring up a dialog window where you can set CAVE to a previous time, and choose the option of freezing CAVE at that time or allowing CAVE to \"move forward in time\" from that position as if it were real-time.", + "text": "Case Study Server Configuration\n\uf0c1\n\n\nThis document covers what is necessary to install and run AWIPS EDEX as an archive and case study server (no purging of data).\n\n\nQuick Install\n\uf0c1\n\n\nFollow the \nEDEX Install Instructions\n including iptables config and an optional SSD mount (for large data volumes)\n\n\ngroupadd fxalpha \n useradd -G fxalpha awips\nmkdir -p /awips2/data_store\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-server -y\n\n\n\nDisable Data Purging\n\uf0c1\n\n\nThe easiest way to disable data purging is to add an \nexclude\npurge.*\n/exclude\n entry in \n/awips2/edex/conf/modes/ingest-modes.xml\n so that the purge plugin is not loaded when the EDEX ingest JVM is started:\n\n\nvi /awips2/edex/conf/modes/ingest-modes.xml\n\n\n?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?\n\n\nedexModes\n\n \nmode name=\"ingest\"\n\n \nexclude\n.*request.*\n/exclude\n\n \nexclude\nedex-security.xml\n/exclude\n\n \nexclude\n(taf|shef).*\n/exclude\n\n \nexclude\npurge.*\n/exclude\n\n \n/mode\n\n\n/edexModes\n\n\n\n\nStart EDEX\n\uf0c1\n\n\nwithout the LDM\n\n\nedex start base\n\n\n\nmonitor services\n\n\nedex\n\n[edex status]\n postgres :: running :: pid 43644\n pypies :: running :: pid 3557\n qpid :: running :: pid 43742\n EDEXingest :: running :: pid 6564 44301 44597\n EDEXgrib :: running :: pid 6565 44302 44598\n EDEXrequest :: running :: pid 6566 44303 44599\n ldmadmin :: not running\n\n\n\nIngest Case Study Data\n\uf0c1\n\n\nRaw data files of any type can be copied or moved into \n/awips2/data_store/ingest/\n to be picked up and decoded by EDEX. Most data types are recognized by regular expression matching of the WMO Header or filename. \n\n\nIndividual files can be ingested on the command line with the regex header/pattern supplied as the last argument:\n\n\nqpidNotify.py /full/path/to/data.file \nregex match\n\n\n\n\nfor example\n\n\nqpidNotify.py /home/awips/uniwisc_U5_132GOES-15_IMG10.7um_4km_20171024_1830.area.png uniwisc\n\nqpidNotify.py /awips2/data_store/grid/NAM12/conduit/NAM_CONUS_12km_conduit_20171025_1200Z_F084_TMPK-7.000007.grib2 grib\n\nqpidNotify.py /awips2/data_store/radar/FTG_N0Q_20171015_1815 Level3\n\n\n\nViewing Archive Data in CAVE\n\uf0c1\n\n\nBecause we are installing and configuring a standalone EDEX archive server without real-time LDM data ingest (and with purge disabled), any case study data that is ingested will be the \"latest available\" to CAVE, and you will see CAVE product menu time fill in with the latest of all data ingested.\n\n\nHowever, to display specific time-based data (in case you ingest more than one case study), there are two options:\n\n\n1. Load Mode \n Inventory\n\uf0c1\n\n\nIn the top-left toolbar change \nValid time seq\n to \nInventory\n.\n\n\n\n\nNow any data product selected from the menus or the Product Browser will prompt you to select the exact time.\n\n\n\n\n2. 
Set Data Display Time in CAVE\n\uf0c1\n\n\nAt the bottom of the CAVE application, double-click the \nTime:\n entry to bring up a dialog window where you can set CAVE to a previous time, and choose the option of freezing CAVE at that time or allowing CAVE to \"move forward in time\" from that position as if it were real-time.", "title": "Archive Case Studies" }, { @@ -1237,7 +1237,7 @@ }, { "location": "/edex/case-studies/#quick-install", - "text": "Follow the EDEX Install Instructions including iptables config and an optional SSD mount (for large data volumes) groupadd fxalpha useradd -G fxalpha awips\nmkdir -p /awips2/data_store\nwget -O /etc/yum.repos.d/awips2.repo http://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-server -y", + "text": "Follow the EDEX Install Instructions including iptables config and an optional SSD mount (for large data volumes) groupadd fxalpha useradd -G fxalpha awips\nmkdir -p /awips2/data_store\nwget -O /etc/yum.repos.d/awips2.repo https://www.unidata.ucar.edu/software/awips2/doc/awips2.repo\nyum clean all\nyum groupinstall awips2-server -y", "title": "Quick Install" }, { diff --git a/python/awips-grids-and-cartopy/index.html b/python/awips-grids-and-cartopy/index.html index 78251c100c..68428e0140 100644 --- a/python/awips-grids-and-cartopy/index.html +++ b/python/awips-grids-and-cartopy/index.html @@ -1165,7 +1165,7 @@ cbar2.set_label(str(grid.getLocationName()) +" " \ - + diff --git a/python/map-resources-and-topography/index.html b/python/map-resources-and-topography/index.html index 25b30fbcb7..88c093cea9 100644 --- a/python/map-resources-and-topography/index.html +++ b/python/map-resources-and-topography/index.html @@ -1495,7 +1495,7 @@ fig - + diff --git a/python/maps-database/index.html b/python/maps-database/index.html index f343a2ab69..0e2943967d 100644 --- a/python/maps-database/index.html +++ b/python/maps-database/index.html @@ -3361,7 +3361,7 @@ - + diff --git a/python/model-sounding-data/index.html b/python/model-sounding-data/index.html index 5ceb6872e9..03d6a9b157 100644 --- a/python/model-sounding-data/index.html +++ b/python/model-sounding-data/index.html @@ -1370,7 +1370,7 @@ plt.show() - + diff --git a/python/nexrad-level-3-radar/index.html b/python/nexrad-level-3-radar/index.html index c8ca3379ab..0cc0bd2334 100644 --- a/python/nexrad-level-3-radar/index.html +++ b/python/nexrad-level-3-radar/index.html @@ -1298,7 +1298,7 @@ for rec,ax in zip(grids, axes): - + diff --git a/python/python-awips-data-access/index.html b/python/python-awips-data-access/index.html index 7ab924da72..05de503c46 100644 --- a/python/python-awips-data-access/index.html +++ b/python/python-awips-data-access/index.html @@ -1745,7 +1745,7 @@ print(data.min(), data.max()) - + diff --git a/python/satellite-imagery/index.html b/python/satellite-imagery/index.html index 6d12c868b3..d618c8e154 100644 --- a/python/satellite-imagery/index.html +++ b/python/satellite-imagery/index.html @@ -1374,7 +1374,7 @@ plt.tight_layout() - + diff --git a/python/surface-obs-plot-metpy/index.html b/python/surface-obs-plot-metpy/index.html index 064f8213ea..523aa92355 100644 --- a/python/surface-obs-plot-metpy/index.html +++ b/python/surface-obs-plot-metpy/index.html @@ -1238,7 +1238,7 @@ plt.title("Most Recent Observations for State Capitals") - + diff --git a/python/upper-air-bufr-soundings/index.html b/python/upper-air-bufr-soundings/index.html index e5fea4c02e..04510b1828 100644 --- a/python/upper-air-bufr-soundings/index.html +++ 
b/python/upper-air-bufr-soundings/index.html @@ -1181,7 +1181,7 @@ plt.show() - + diff --git a/sitemap.xml b/sitemap.xml index 3f0a7e404c..a1c19f4748 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -4,7 +4,7 @@ http://unidata.github.io/awips2/ - 2018-01-26 + 2018-02-15 daily @@ -12,7 +12,7 @@ http://unidata.github.io/awips2/install/install-cave/ - 2018-01-26 + 2018-02-15 daily @@ -20,7 +20,7 @@ http://unidata.github.io/awips2/install/install-edex/ - 2018-01-26 + 2018-02-15 daily @@ -29,91 +29,91 @@ http://unidata.github.io/awips2/cave/d2d-perspective/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/maps-views-projections/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/bundles-and-procedures/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/import-export/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/localization-perspective/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/ncp-perspective/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/nsharp/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/warngen/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/goes-16-satellite/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-gis-shapefiles/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-gridded-models/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-tools/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-radar-tools/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-edit-menus/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/cave-localization/ - 2018-01-26 + 2018-02-15 daily @@ -123,55 +123,55 @@ http://unidata.github.io/awips2/edex/distributed-computing/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/install/start-edex/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/ldm/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/data-distribution-files/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/new-grid/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/data-purge/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/edex-users/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/data-plugins/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/edex/case-studies/ - 2018-01-26 + 2018-02-15 daily @@ -181,43 +181,43 @@ http://unidata.github.io/awips2/cave/d2d-grids/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-pointdata-surface-obs/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-satellite/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-radar/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-uair/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-hydro/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/d2d-map-resources/ - 2018-01-26 + 2018-02-15 daily @@ -227,55 +227,55 @@ http://unidata.github.io/awips2/python/python-awips-data-access/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/maps-database/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/awips-grids-and-cartopy/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/satellite-imagery/ - 2018-01-26 + 
2018-02-15 daily http://unidata.github.io/awips2/python/nexrad-level-3-radar/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/upper-air-bufr-soundings/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/model-sounding-data/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/map-resources-and-topography/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/python/surface-obs-plot-metpy/ - 2018-01-26 + 2018-02-15 daily @@ -285,13 +285,13 @@ http://unidata.github.io/awips2/dev/awips-development-environment/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/dev/build-nsharp-macos/ - 2018-01-26 + 2018-02-15 daily @@ -301,31 +301,31 @@ http://unidata.github.io/awips2/appendix/appendix-grid-parameters/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/appendix/appendix-acronyms/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/appendix/appendix-cots/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/appendix/appendix-wsr88d/ - 2018-01-26 + 2018-02-15 daily http://unidata.github.io/awips2/cave/cave-keyboard-shortcuts/ - 2018-01-26 + 2018-02-15 daily
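
For reference, a minimal shell sketch of the case-study ingest workflow described in the edex/case-studies text recorded above, assuming purge.* has already been excluded in /awips2/edex/conf/modes/ingest-modes.xml and EDEX was started with "edex start base"; the CASE_DIR path and the *.grib2 glob are hypothetical, while qpidNotify.py, the "grib" pattern, and the /awips2/data_store/ingest/ endpoint are taken from the documented examples:

    #!/bin/bash
    # Hypothetical local directory holding a downloaded case study
    CASE_DIR=/home/awips/case-2017-10-25

    # Hand each file to EDEX with the decoder pattern as the last argument,
    # as in the qpidNotify.py examples (uniwisc, grib, Level3, ...)
    for f in "$CASE_DIR"/*.grib2; do
        qpidNotify.py "$f" grib
    done

    # Alternatively, copy the raw files into the monitored ingest endpoint
    # and let EDEX match them by WMO header or filename:
    # cp "$CASE_DIR"/* /awips2/data_store/ingest/

Either path assumes a standalone archive server with the LDM stopped, so whatever is ingested becomes the "latest available" data in CAVE until an older display time is selected via Inventory load mode or the Time: dialog.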