docs ready

This commit is contained in:
mjames-upc 2016-03-15 20:27:25 -05:00
parent e514822704
commit 30276d80a1
50 changed files with 5911 additions and 1687 deletions

.gitignore vendored Normal file

@@ -0,0 +1,2 @@
.ipynb_checkpoints
build

docs/Makefile Normal file

@@ -0,0 +1,217 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
NBCONVERT = ipython nbconvert
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -vb html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-awips.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-awips.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/python-awips"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-awips"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

docs/make.bat Normal file

@@ -0,0 +1,263 @@
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
echo. coverage to run coverage check of the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\python-awips.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\python-awips.qhc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "coverage" (
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
if errorlevel 1 exit /b 1
echo.
echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end

docs/source/about.rst Normal file

@@ -0,0 +1,210 @@
==============
About AWIPS II
==============
.. raw:: html
AWIPS II is a weather forecasting display and analysis package being
developed by the National Weather Service and Raytheon. AWIPS II is a
Java application consisting of a data-rendering client (CAVE, which runs
on Red Hat/CentOS Linux and Mac OS X) and a backend data server (EDEX,
which runs only on Linux).
AWIPS II takes a unified approach to data ingest, and most data types
follow a standard path through the system. At a high level, data flow
describes the path taken by a piece of data from its source to its
display by a client system. This path starts with data requested and
stored by an `LDM <#ldm>`_ client and includes the decoding of the data
and storing of decoded data in a form readable and displayable by the
end user.
The AWIPS II ingest and request processes form a highly distributed
system, and the messaging broker `Qpid <#qpid>`_ is used for
inter-process communication.
.. figure:: http://www.unidata.ucar.edu/software/awips2/images/awips2_coms.png
:align: center
:alt: image
image
The primary AWIPS II application for data ingest, processing, and
storage is the Environmental Data EXchange (**EDEX**) server; the
primary AWIPS II application for visualization/data manipulation is the
Common AWIPS Visualization Environment (**CAVE**) client, which is
typically installed on a workstation separate from other AWIPS II
components.
In addition to programs developed specifically for AWIPS, AWIPS II uses
several commercial off-the-shelf (COTS) and Free or Open Source software
(FOSS) products to assist in its operation. The following components,
working together and communicating, compose the entire AWIPS II system.
AWIPS II Python Stack
---------------------
A number of Python packages are bundled with the AWIPS II EDEX and CAVE
installations, on top of base Python 2.7.9.
====================== ============== ==============================
Package Version RPM Name
====================== ============== ==============================
Python 2.7.9 awips2-python
pyparsing 2.1.0 awips2-python-pyparsing
scientific 2.8 awips2-python-scientific
pupynere 1.0.13 awips2-python-pupynere
tpg 3.1.2 awips2-python-tpg
numpy 1.10.4 awips2-python-numpy
jimporter 15.1.2 awips2-python-jimporter
basemap 1.0.7 awips2-python-basemap
cherrypy 3.1.2 awips2-python-cherrypy
werkzeug 3.1.2 awips2-python-werkzeug
pycairo 1.2.2 awips2-python-pycairo
six 1.10.0 awips2-python-six
dateutil 2.5.0 awips2-python-dateutil
scipy 0.17.0 awips2-python-scipy
metpy 0.3.0 awips2-python-metpy
pygtk 2.8.6 awips2-python-pygtk
**awips** **0.9.2** **awips2-python-awips**
shapely 1.5.9 awips2-python-shapely
matplotlib 1.5.1 awips2-python-matplotlib
cython 0.23.4 awips2-python-cython
pil 1.1.6 awips2-python-pil
thrift 20080411p1 awips2-python-thrift
cartopy 0.13.0 awips2-python-cartopy
nose 0.11.1 awips2-python-nose
pmw 1.3.2 awips2-python-pmw
h5py 1.3.0 awips2-python-h5py
tables 2.1.2 awips2-python-tables
dynamicserialize 15.1.2 awips2-python-dynamicserialize
qpid 0.32 awips2-python-qpid
====================== ============== ==============================
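As a quick sanity check, a few of the bundled packages can be imported with
the AWIPS II Python interpreter (assumed here to live at
``/awips2/python/bin/python``); this is a minimal sketch, and the printed
versions are simply whatever the RPMs above provide.

.. code:: python

    # Run with the AWIPS II Python to confirm the bundled stack imports.
    import numpy
    import shapely
    import h5py

    print numpy.__version__      # e.g. 1.10.4 per the table above
    print shapely.__version__    # e.g. 1.5.9
    print h5py.version.version   # e.g. 1.3.0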
EDEX
-------------------
The main server for AWIPS II. Qpid sends alerts to EDEX when data stored
by the LDM is ready for processing. These Qpid messages include file
header information which allows EDEX to determine the appropriate data
decoder to use. The default ingest server (simply named ingest) handles
all data ingest other than grib messages, which are processed by a
separate ingestGrib server. After decoding, EDEX writes metadata to the
database via Postgres and saves the processed data in HDF5 via PyPIES. A
third EDEX server, request, feeds requested data to CAVE clients. EDEX
ingest and request servers are started and stopped with the commands
``edex start`` and ``edex stop``, which run the system script
``/etc/rc.d/init.d/edex_camel``.
CAVE
-------------------
Common AWIPS Visualization Environment. The data rendering and
visualization tool for AWIPS II. CAVE consists of a number of different
data display configurations called perspectives. Perspectives used in
operational forecasting environments include **D2D** (Display
Two-Dimensional), **GFE** (Graphical Forecast Editor), and **NCP**
(National Centers Perspective). CAVE is started with the command
``/awips2/cave/cave.sh`` or ``cave.sh``.
.. figure:: http://www.unidata.ucar.edu/software/awips2/images/Unidata_AWIPS2_CAVE.png
:align: center
:alt: CAVE
CAVE
AlertViz
-------------------
**Alertviz** is a modernized version of an AWIPS I application, designed
to present various notifications, error messages, and alarms to the user
(forecaster). AlertViz can be executed either independently or from CAVE
itself. In the Unidata CAVE client, AlertViz runs within CAVE and is
not required to be run separately. The toolbar is also **hidden from
view** and is accessed by right-clicking the desktop taskbar icon.
LDM
-------------------
`http://www.unidata.ucar.edu/software/ldm/ <http://www.unidata.ucar.edu/software/ldm/>`_
The **LDM** (Local Data Manager), developed and supported by Unidata, is
a suite of client and server programs designed for data distribution,
and is the fundamental component comprising the Unidata Internet Data
Distribution (IDD) system. In AWIPS II, the LDM provides data feeds for
grids, surface observations, upper-air profiles, satellite and radar
imagery and various other meteorological datasets. The LDM writes data
directly to file and alerts EDEX via Qpid when a file is available for
processing. The LDM is started and stopped with the commands
``edex start`` and ``edex stop``, which run the commands
``service edex_ldm start`` and ``service edex_ldm stop``.
edexBridge
-------------------
edexBridge, invoked in the LDM configuration file
``/awips2/ldm/etc/ldmd.conf``, is used by the LDM to post "data
available" messaged to Qpid, which alerts the EDEX Ingest server that a
file is ready for processing.
Qpid
-------------------
`http://qpid.apache.org <http://qpid.apache.org>`_
**Apache Qpid**, the Queue Processor Interface Daemon, is the messaging
system used by AWIPS II to facilitate communication between services.
When the LDM receives a data file to be processed, it employs
**edexBridge** to send EDEX ingest servers a message via Qpid. When EDEX
has finished decoding the file, it sends CAVE a message via Qpid that
data are available for display or further processing. Qpid is started
and stopped by ``edex start`` and ``edex stop``, and is controlled by
the system script ``/etc/rc.d/init.d/qpidd``.
PostgreSQL
-------------------
`http://www.postgresql.org <http://www.postgresql.org>`_
**PostgreSQL**, known simply as Postgres, is a relational database
management system (DBMS) which handles the storage and retrieval of
metadata, database tables and some decoded data. The storage and reading
of EDEX metadata is handled by the Postgres DBMS. Users may query the
metadata tables by using the terminal-based front-end for Postgres
called **psql**. Postgres is started and stopped by ``edex start`` and
``edex stop``, and is controlled by the system script
``/etc/rc.d/init.d/edex_postgres``.
HDF5
-------------------
`http://www.hdfgroup.org/HDF5/ <http://www.hdfgroup.org/HDF5/>`_
**Hierarchical Data Format (v.5)** is
the primary data storage format used by AWIPS II for processed grids,
satellite and radar imagery and other products. Similar to netCDF,
developed and supported by Unidata, HDF5 supports multiple types of data
within a single file. For example, a single HDF5 file of radar data may
contain multiple volume scans of base reflectivity and base velocity as
well as derived products such as composite reflectivity. The file may
also contain data from multiple radars. HDF5 data is stored under
``/awips2/edex/data/hdf5/``.
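As an illustration only, the bundled ``h5py`` package can open one of these
files and list its contents; the file name below is hypothetical, since
actual names depend on the plugin and on what has been ingested.

.. code:: python

    import h5py

    # Hypothetical file path under /awips2/edex/data/hdf5/ -- actual file
    # names vary by data type and ingest time.
    f = h5py.File('/awips2/edex/data/hdf5/radar/koax/example.h5', 'r')
    print f.keys()   # top-level groups/datasets stored in this file
    f.close()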
PyPIES (httpd-pypies)
---------------------
**PyPIES**, Python Process Isolated Enhanced Storage, was created for
AWIPS II to isolate the management of HDF5 Processed Data Storage from
the EDEX processes. PyPIES manages access, i.e., reads and writes, of
data in the HDF5 files. In a sense, PyPIES provides functionality
similar to a DBMS (i.e., PostgreSQL for metadata); all data being written
to an HDF5 file is sent to PyPIES, and requests for data stored in HDF5
are processed by PyPIES.
PyPIES is implemented in two parts: the PyPIES manager, a Python
application that runs as part of an Apache HTTP server and handles
requests to store and retrieve data; and the PyPIES logger, a Python
process that coordinates logging. PyPIES is started and stopped by
``edex start`` and ``edex stop``, and is controlled by the system script
``/etc/rc.d/init.d/httpd-pypies``.

docs/source/conf.py Normal file

@@ -0,0 +1,303 @@
# -*- coding: utf-8 -*-
#
# python-awips documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 15 15:59:23 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../..'))
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'notebook_gen_sphinxext'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-awips'
copyright = u'2016, Unidata'
author = u'Unidata'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.9.2'
# The full version, including alpha/beta/rc tags.
release = u'0.9.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-awipsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'python-awips.tex', u'python-awips Documentation',
u'Unidata', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'python-awips', u'python-awips Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'python-awips', u'python-awips Documentation',
author, 'python-awips', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Set up mapping for other projects' docs
intersphinx_mapping = {
'matplotlib': ('http://matplotlib.org/', None),
'metpy': ('http://docs.scipy.org/doc/metpy/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
'pint': ('http://pint.readthedocs.org/en/stable/', None),
'python': ('http://docs.python.org', None)
}

docs/source/dev.rst Normal file

@@ -0,0 +1,480 @@
Development Background
----------------------
In support of Hazard Services, Raytheon Technical Services has built a
generic data access framework that can be called via Java or Python. The
data access framework code can be found within the AWIPS Baseline in
::
com.raytheon.uf.common.dataaccess
As of 2016, plugins have been written for grid, radar, satellite, Hydro
(SHEF), point data (METAR, SYNOP, Profiler, ACARS, AIREP, PIREP), and maps
data. The Factories for each can be found in the
following packages (you may need to look at the development baseline to
see these):
::
com.raytheon.uf.common.dataplugin.grid.dataaccess
com.raytheon.uf.common.dataplugin.radar.dataaccess
com.raytheon.uf.common.dataplugin.satellite.dataaccess
com.raytheon.uf.common.dataplugin.binlightning.dataaccess
com.raytheon.uf.common.dataplugin.sfc.dataaccess
com.raytheon.uf.common.dataplugin.sfcobs.dataaccess
com.raytheon.uf.common.dataplugin.acars.dataaccess
com.raytheon.uf.common.dataplugin.ffmp.dataaccess
com.raytheon.uf.common.dataplugin.bufrua.dataaccess
com.raytheon.uf.common.dataplugin.profiler.dataaccess
com.raytheon.uf.common.dataplugin.moddelsounding.dataaccess
com.raytheon.uf.common.dataplugin.ldadmesonet.dataaccess
com.raytheon.uf.common.dataplugin.binlightning.dataaccess
com.raytheon.uf.common.dataplugin.gfe.dataaccess
com.raytheon.uf.common.hydro.dataaccess
com.raytheon.uf.common.pointdata.dataaccess
com.raytheon.uf.common.dataplugin.maps.dataaccess
Additional data types may be added in the future. To determine what
datatypes are supported display the "type hierarchy" associated with the
classes
**AbstractGridDataPluginFactory**,
**AbstractGeometryDatabaseFactory**, and
**AbstractGeometryTimeAgnosticDatabaseFactory**.
The following content was taken from the design review document and has
been modified slightly.
Design/Implementation
~~~~~~~~~~~~~~~~~~~~~
The Data Access Framework is designed to provide a consistent interface
for requesting and using geospatial data within CAVE or EDEX. Examples
of geospatial data are grids, satellite, radar, metars, maps, river gage
heights, FFMP basin data, airmets, etc. To allow for convenient use of
geospatial data, the framework will support two types of requests: grids
and geometries (points, polygons, etc). The framework will also hide
implementation details of specific data types from users, making it
easier to use data without worrying about how the data objects are
structured or retrieved.
A suggested mapping of some current data types to one of the two
supported data requests is listed below. This list is not definitive and
can be expanded. If a developer can dream up an interpretation of the
data in the other supported request type, that support can be added.
Grids
- Grib
- Satellite
- Radar
- GFE
Geometries
- Map (states, counties, zones, etc)
- Hydro DB (IHFS)
- Obs (metar)
- FFMP
- Hazard
- Warning
- CCFP
- Airmet
The framework is designed around the concept of each data type plugin
contributing the necessary code for the framework to support its data.
For example, the satellite plugin provides a factory class for
interacting with the framework and registers itself as being compatible
with the Data Access Framework. This concept is similar to how EDEX in
AWIPS II expects a plugin developer to provide a decoder class and
record class and register them, but then automatically manages the rest
of the ingest process including routing, storing, and alerting on new
data. This style of plugin architecture effectively enables the
framework to expand its capabilities to more data types without having
to alter the framework code itself. This will enable software developers
to incrementally add support for more data types as time allows, and
allow the framework to expand to new data types as they become
available.
The Data Access Framework will not break any existing functionality or
APIs, and there are no plans to retrofit existing code to use the new
API at this time. Ideally code will be retrofitted in the future to
improve ease of maintainability. The plugin-specific code that hooks into
the framework will make use of existing APIs such as **IDataStore** and
**IServerRequest** to complete the requests.
The Data Access Framework can be understood as three parts:
- How users of the framework retrieve and use the data
- How plugin developers contribute support for new data types
- How the framework works when it receives a request
How users of the framework retrieve and use the data
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When a user of the framework wishes to request data, they must
instantiate a request object and set some of the values on that request.
Two request interfaces will be supported; for detailed methods, see the
section "Detailed Code" below.
**IDataRequest**
**IGridRequest** extends **IDataRequest**
**IGeometryRequest** extends **IDataRequest**
For the request interfaces, default implementations of
**DefaultGridRequest** and **DefaultGeometryRequest** will be provided
to handle most cases. However, the use of interfaces allows for custom
special cases in the future. If necessary, the developer of a plugin can
write their own custom request implementation to handle a special case.
After the request object has been prepared, the user will pass it to the
Data Access Layer to receive a data object in return. See the "Detailed
Code" section below for detailed methods of the Data Access Layer. The
Data Access Layer will return one of two data interfaces.
**IData**
**IGridData** extends **IData**
**IGeometryData** extends **IData**
For the data interfaces, the use of interfaces effectively hides the
implementation details of specific data types from the user of the
framework. For example, the user receives an **IGridData** and knows the
data time, grid geometry, parameter, and level, but does not know that
the data is actually a **GFEGridData** vs **D2DGridData** vs
**SatelliteGridData**. This enables users of the framework to write
generic code that can support multiple data types.
For python users of the framework, the interfaces will be very similar
with a few key distinctions. Geometries will be represented by python
geometries from the open source Shapely project. For grids, the python
**IGridData** will have a method for requesting the raw data as a numpy
array, and the Data Access Layer will have methods for requesting the
latitude coordinates and the longitude coordinates of grids as numpy
arrays. The python requests and data objects will be pure python and not
JEP PyJObjects that wrap Java objects. A future goal of the Data Access
Framework is to provide support to python local apps and therefore
enable requests of data outside of CAVE and EDEX to go through the same
familiar interfaces. This goal is out of scope for this project but by
making the request and returned data objects pure python it will not be
a huge undertaking to add this support in the future.
How plugin developers contribute support for new datatypes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When a developer wishes to add support for another data type to the
framework, they must implement one or both of the factory interfaces
within a common plugin. Two factory interfaces will be supported; for
detailed methods, see below.
**IDataFactory**
**IGridFactory** extends **IDataFactory**
**IGeometryFactory** extends **IDataFactory**
For some data types, it may be desired to add support for both types of
requests. For example, the developer of grid data may want to provide
support for both grid requests and geometry requests. In this case the
developer would write two separate classes where one implements
**IGridFactory** and the other implements **IGeometryFactory**.
Furthermore, factories could be stacked on top of one another by having
factory implementations call into the Data Access Layer.
For example, a custom factory keyed to "derived" could be written for
derived parameters, and the implementation of that factory may then call
into the Data Access Layer to retrieve “grid” data. In this example the
raw data would be retrieved through the **GridDataFactory** while the
derived factory then applies the calculations before returning the data.
Implementations do not need to support all methods on the interfaces or
all values on the request objects. For example, a developer writing the
**MapGeometryFactory** does not need to support **getAvailableTimes()**
because map data such as US counties is time agnostic. In this case the
method should throw **UnsupportedOperationException** and the javadoc
will indicate this.
Another example: the developer writing **ObsGeometryFactory** can
ignore the Level field of the **IDataRequest**, since there are no
different levels of METAR data; it is all at the surface. It is up to
the factory writer to determine which methods and fields to support and
which to ignore, but the factory writer should always code the factory
with the user requesting data in mind. If a user of the framework could
reasonably expect certain behavior from the framework based on the
request, the factory writer should implement support for that behavior.
Abstract factories will be provided and can be extended to reduce the
amount of code a factory developer has to write to complete some common
actions that will be used by multiple factories. The factory should be
capable of working within either CAVE or EDEX, therefore all of its
server specific actions (e.g. database queries) should go through the
Request/Handler API by using **IServerRequests**. CAVE can then send the
**IServerRequests** to EDEX with **ThriftClient** while EDEX can use the
**ServerRequestRouter** to process the **IServerRequests**, making the
code compatible regardless of which JVM it is running inside.
Once the factory code is written, it must be registered with the
framework as an available factory. This will be done through spring xml
in a common plugin, with the xml file inside the res/spring folder of
the plugin. Registering the factory will identify the datatype name that
must match what users would use as the datatype on the **IDataRequest**,
e.g. the word "satellite". Registering the factory also indicates to the
framework what request types are supported, i.e. grid vs geometry or
both.
An example of the spring xml for a satellite factory is provided below:
::
<bean id="satelliteFactory"
class="com.raytheon.uf.common.dataplugin.satellite.SatelliteFactory" />
<bean id="satelliteFactoryRegistered" factory-bean="dataFactoryRegistry" factory-method="register">
<constructor-arg value="satellite" />
<constructor-arg value="com.raytheon.uf.common.dataaccess.grid.IGridRequest" />
<constructor-arg value="satelliteFactory" />
</bean>
How the framework works when it receives a request
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
**IDataRequest** requires a datatype to be set on every request. The
framework will have a registry of existing factories for each data type
(grid and geometry). When the Data Access Layer methods are called, it
will first lookup in the registry for the factory that corresponds to
the datatype on the **IDataRequest**. If no corresponding factory is
found, it will throw an exception with a useful error message that
indicates there is no current support for that datatype request. If a
factory is found, it will delegate the processing of the request to the
factory. The factory will receive the request and process it, returning
the result back to the Data Access Layer which then returns it to the
caller.
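The lookup-and-delegate flow described above can be pictured with a small,
purely hypothetical Python sketch; the real registry is written in Java and
populated through Spring XML as shown earlier, so the names below are
illustrative only.

.. code:: python

    # Hypothetical illustration of the Data Access Layer dispatch.
    factories = {}  # datatype name -> registered factory

    def register(datatype, factory):
        factories[datatype] = factory

    def get_data(request, *times):
        factory = factories.get(request.getDatatype())
        if factory is None:
            # No factory registered for this datatype: fail with a useful message.
            raise ValueError("No data access support registered for datatype '%s'"
                             % request.getDatatype())
        # Delegate the request to the factory and hand its result back to the caller.
        return factory.getData(request, *times)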
By going through the Data Access Layer, the user is able to retrieve the
data and use it without understanding which factory was used, how the
factory retrieved the data, or what implementation of data was returned.
This effectively frees the framework and users of the framework from any
dependencies on any particular data types. Since these dependencies are
avoided, the specific **IDataFactory** and **IData** implementations can
be altered in the future if necessary and the code making use of the
framework will not need to be changed as long as the interfaces continue
to be met.
Essentially, the Data Access Framework is a service that provides data
in a consistent way, with the service capabilities being expanded by
plugin developers who write support for more data types. Note that the
framework itself is useless without plugins contributing and registering
**IDataFactories**. Once the framework is coded, developers will need to
be tasked to add the factories necessary to support the needed data
types.
Request interfaces
~~~~~~~~~~~~~~~~~~
Requests and returned data interfaces will exist in both Java and
Python. The Java interfaces are listed below and the Python interfaces
will match the Java interfaces except where noted. Factories will only
be written in Java.
**IDataRequest**
- **void setDatatype(String datatype)** - the datatype name, which is
  also the key that determines which factory will be used. Frequently the
  pluginName, such as radar, satellite, gfe, ffmp, etc.
- **void addIdentifier(String key, Object value)** - an identifier the
factory can use to determine which data to return, e.g. for grib data
key "modelName" and value “GFS40”
- **void setParameters(String... params)**
- **void setLevels(Level... levels)**
- **String getDatatype()**
- **Map getIdentifiers()**
- **String[] getParameters()**
- **Level[] getLevels()**
- Python Differences
- **Levels** will be represented as **Strings**
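For reference, building such a request from Python looks like the sketch
below, using the same ``DataAccessLayer`` entry point as the examples later
in these docs; the identifier key/value mirrors the grib example above and
is illustrative.

.. code:: python

    from awips.dataaccess import DataAccessLayer

    request = DataAccessLayer.newDataRequest()
    request.setDatatype("grid")                  # selects the grid factory
    request.addIdentifier("modelName", "GFS40")  # illustrative identifier
    request.setParameters("RH")
    request.setLevels("850MB")                   # levels are strings in Python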
**IGridRequest extends IDataRequest**
- **void setStorageRequest(Request request)** - a datastorage request
that allows for slab, line, and point requests for faster performance
and less data retrieval
- **Request getStorageRequest()**
- Python Differences
- No support for storage requests
**IGeometryRequest extends IDataRequest**
- **void setEnvelope(Envelope env)** - a bounding box envelope to limit
the data that is searched through and returned. Not all factories may
support this.
- **setLocationNames(String... locationNames)** - a convenience for
  requesting data by names such as ICAOs, airports, stationIDs, etc.
- **Envelope getEnvelope()**
- **String[] getLocationNames()**
- Python Differences
- Envelope methods will use a **shapely.geometry.Polygon** instead of
**Envelopes** (shapely has no concept of envelopes and considers them
as rectangular polygons)
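A geometry request with a bounding box might be sketched in Python as
below; the polygon coordinates are arbitrary, and the envelope is given as
a rectangular ``shapely`` polygon per the note above.

.. code:: python

    from shapely.geometry import Polygon
    from awips.dataaccess import DataAccessLayer

    request = DataAccessLayer.newDataRequest()
    request.setDatatype("obs")
    # shapely has no Envelope type, so the bounding box is a rectangular
    # Polygon of lon/lat corners (values here are arbitrary).
    request.setEnvelope(Polygon([(-105, 35), (-95, 35), (-95, 45), (-105, 45)]))
    request.setLocationNames("KOKC", "KDFW")  # optional convenience filter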
**Data Interfaces**
~~~~~~~~~~~~~~~~~~~
**IData**
- **Object getAttribute(String key)** - **getAttribute** provides a way
to get at attributes of the data that the interface does not provide,
allowing the user to get more info about the data without adding
dependencies on the specific data type plugin
- **DataTime getDataTime()** - some data may return null (e.g. maps)
- **Level getLevel()** - some data may return null
- Python Differences
- **Levels** will be represented by **Strings**
**IGridData extends IData**
- **String getParameter()**
- **GridGeometry2D getGridGeometry()**
- **Unit getUnit()** - some data may return null
- **DataDestination populateData(DataDestination destination)** - How
the user gets the raw data by passing in a **DataDestination** such
as **FloatArrayWrapper** or **ByteBufferWrapper**. This allows the
user to specify the way the raw data of the grid should be structured
in memory.
- **DataDestination populateData(DataDestination destination, Unit
unit)** - Same as the above method but also attempts to convert the
raw data to the specified unit when populating the
**DataDestination**.
- Python Differences
- **Units** will be represented by **Strings**
- **populateData()** methods will not exist, instead there will be
a **getRawData()** method that returns a numpy array in the native
type of the data
**IGeometryData extends IData**
- **Geometry getGeometry()**
- **Set getParameters()** - Gets the list of parameters included in
this data
- **String getString(String param)** - Gets the value of the parameter
as a String
- **Number getNumber(String param)** - Gets the value of the parameter
as a Number
- **Unit getUnit(String param)** - Gets the unit of the parameter,
may be null
- **Type getType(String param)** - Returns an enum of the raw type of
the parameter, such as Float, Int, or String
- **String getLocationName()** - Returns the location name of the piece
of data, typically to correlate if the request was made with
locationNames. May be null.
- Python Differences
- **Geometry** will be **shapely.geometry.Geometry**
- **getNumber()** will return the python native number of the data
- **Units** will be represented by **Strings**
- **getType()** will return the python type object
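In Python these accessors are used roughly as in the sketch below, which
mirrors the surface observation example later in these docs (the server
host and station shown are the ones used there).

.. code:: python

    from awips.dataaccess import DataAccessLayer

    DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
    request = DataAccessLayer.newDataRequest()
    request.setDatatype("obs")
    request.setLocationNames("KOKC")
    request.setParameters("temperature", "longitude", "latitude")

    times = DataAccessLayer.getAvailableTimes(request)
    response = DataAccessLayer.getGeometryData(request, times=times[-1].validPeriod)
    for ob in response:
        print ob.getParameters()           # parameter names on this record
        print ob.getString("temperature")  # value as a string
        print ob.getNumber("temperature")  # same value as a Python number
        print ob.getGeometry()             # shapely point for the station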
**DataAccessLayer** (in implementation, these methods delegate
processing to factories)
- **DataTime[] getAvailableTimes(IDataRequest request)**
- **DataTime[] getAvailableTimes(IDataRequest request, BinOffset
binOffset)**
- **IData[] getData(IDataRequest request, DataTime... times)**
- **IData[] getData(IDataRequest request, TimeRange timeRange)**
- **GridGeometry2D getGridGeometry(IGridRequest request)**
- **String[] getAvailableLocationNames(IGeometryRequest request)**
- Python Differences
- No support for **BinOffset**
- **getGridGeometry(IGridRequest)** will be replaced by
**getLatCoords(IGridRequest)** and **getLonCoords(IGridRequest)**
that will return numpy arrays of the lat or lon of every grid
cell
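Putting the Python entry points together, a grid retrieval can be sketched
as below, following the same pattern as the Gridded Data example elsewhere
in these docs (the EDEX host shown is the Unidata demonstration server).

.. code:: python

    from awips.dataaccess import DataAccessLayer

    DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
    request = DataAccessLayer.newDataRequest()
    request.setDatatype("grid")
    request.setLocationNames("RAP40")
    request.setParameters("RH")
    request.setLevels("850MB")

    times = DataAccessLayer.getAvailableTimes(request)
    grids = DataAccessLayer.getGridData(request, [times[-1]])
    grid = grids[0]
    lons, lats = grid.getLatLonCoords()  # numpy arrays of grid coordinates
    print grid.getRawData().shape        # raw grid values as a numpy array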
Factory Interfaces (Java only)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- **IDataFactory**
- **DataTime[] getAvailableTimes(R request)** - queries the
database and returns the times that match the request. Some factories
may not support this (e.g. maps).
- **DataTime[] getAvailableTimes(R request, BinOffset binOffset)** -
queries the database with a bin offset and returns the times that
match the request. Some factories may not support this.
- **D[] getData(R request, DataTime... times)** - Gets the data that
matches the request at the specified times.
- **D[] getData(R request, TimeRange timeRange)** - Gets the data that
matches the request and is within the time range.
**IGridDataFactory extends IDataFactory**
- **GridGeometry2D** **getGeometry(IGridRequest request)** - Returns
the grid geometry of the data that matches the request BEFORE making
the request. Useful for then making slab or line requests for subsets
of the data. Does not support moving grids, but moving grids don't
make subset requests either.
**IGeometryDataFactory extends IDataFactory**
- **getAvailableLocationNames(IGeometryRequest request)** - Convenience
method to retrieve available location names that match a request. Not
all factories may support this.


@@ -0,0 +1,395 @@
============
Gridded Data
============
`Notebook <http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/examples/notebooks/Gridded_Data.ipynb>`_
EDEX Grid Inventory
-------------------
.. code:: python
from awips.dataaccess import DataAccessLayer
# Set host
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
# Init data request
request = DataAccessLayer.newDataRequest()
# Set datatype
request.setDatatype("grid")
# Get a list of all available models
available_grids = DataAccessLayer.getAvailableLocationNames(request)
# Sort
available_grids.sort()
for grid in available_grids:
print grid
.. parsed-literal::
AVN211
AVN225
DGEX
ECMF-Global
ECMF1
ECMF10
ECMF11
ECMF12
ECMF2
ECMF3
ECMF4
ECMF5
ECMF6
ECMF7
ECMF8
ECMF9
ETA
GFS
GFS40
GFSGuide
GFSLAMP5
HPCGuide
HPCqpfNDFD
HRRR
LAMP2p5
MRMS_1000
NAM12
NAM40
NCWF
NOHRSC-SNOW
NamDNG
NamDNG5
QPE-MSR
RAP13
RAP40
RTMA
RTMA5
URMA25
estofsPR
estofsUS
**LocationNames** differs by plugin: for radar it is the ICAO identifier,
for satellite it is the sector.
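For example, running the same inventory request with a different datatype
returns that plugin's notion of a location; a short sketch (output depends
on the server):

.. code:: python

    # Same pattern as above, but for radar: location names are ICAOs.
    request = DataAccessLayer.newDataRequest()
    request.setDatatype("radar")
    radars = DataAccessLayer.getAvailableLocationNames(request)
    radars.sort()
    print radars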
Requesting a Grid
-----------------
.. code:: python
# Grid request
request.setLocationNames('RAP40')
request.setParameters("RH")
request.setLevels("850MB")
# Get available times
t = DataAccessLayer.getAvailableTimes(request)
# Select last available time [-1]
response = DataAccessLayer.getGridData(request, [t[-1]])
data = response[0]
lon,lat = data.getLatLonCoords()
# Print info
print 'Time :', t[-1]
print 'Model:', data.getLocationName()
print 'Unit :', data.getUnit()
print 'Parm :', data.getParameter()
# Print data array
print data.getRawData().shape
print data.getRawData()
print "lat array =", lat
print "lon array =", lon
.. parsed-literal::
Time : 2016-02-23 15:00:00 (12)
Model: RAP40
Unit : %
Parm : RH
(151, 113)
[[ 93.05456543 93.05456543 87.05456543 ..., 73.05456543 72.05456543
71.05456543]
[ 70.05456543 70.05456543 67.05456543 ..., 69.05456543 46.05456924
37.05456924]
[ 40.05456924 56.05456924 68.05456543 ..., 51.05456924 73.05456543
74.05456543]
...,
[ 65.05456543 62.05456924 63.05456924 ..., 67.05456543 65.05456543
46.05456924]
[ 48.05456924 59.05456924 62.05456924 ..., 4.05456877 5.05456877
5.05456877]
[ 7.05456877 8.05456829 10.05456829 ..., 91.05456543 95.05456543
95.05456543]]
lat array = [[ 54.24940109 54.35071945 54.45080566 ..., 57.9545517 57.91926193
57.88272858]
[ 57.84495163 57.80593109 57.76566696 ..., 58.07667542 58.08861542
58.09931183]
[ 58.10876846 58.11697769 58.12394714 ..., 56.40270996 56.46187973
56.51980972]
...,
[ 19.93209648 19.89832115 19.86351395 ..., 20.054636 20.06362152
20.07156372]
[ 20.0784626 20.08431816 20.08912849 ..., 18.58354759 18.63155174
18.67854691]
[ 18.72453308 18.76950836 18.81346893 ..., 17.49624634 17.42861557
17.36001205]]
lon array = [[-139.83120728 -139.32348633 -138.81448364 ..., -79.26060486
-78.70166016 -78.14326477]
[ -77.58544922 -77.02822876 -76.47161865 ..., -100.70157623
-100.13801575 -99.57427216]
[ -99.01037598 -98.44634247 -97.88218689 ..., -121.69165039
-121.15060425 -120.60871887]
...,
[ -82.65139008 -82.26644897 -81.88170624 ..., -98.52494049
-98.13802338 -97.75105286]
[ -97.36403656 -96.97698212 -96.58989716 ..., -113.07767487
-112.69831085 -112.31866455]
[-111.93874359 -111.5585556 -111.17810822 ..., -69.85433197
-69.48160553 -69.10926819]]
Plotting a Grid with Basemap
----------------------------
Using **matplotlib**, **numpy**, and **basemap**:
.. code:: python
import matplotlib.tri as mtri
import matplotlib.pyplot as plt
from matplotlib.transforms import offset_copy
from mpl_toolkits.basemap import Basemap, cm
import numpy as np
from numpy import linspace, transpose
from numpy import meshgrid
plt.figure(figsize=(12, 12), dpi=100)
lons,lats = data.getLatLonCoords()
map = Basemap(projection='cyl',
resolution = 'c',
llcrnrlon = lons.min(), llcrnrlat = lats.min(),
urcrnrlon =lons.max(), urcrnrlat = lats.max()
)
map.drawcoastlines()
map.drawstates()
map.drawcountries()
#
# We have to reproject our grid, see https://stackoverflow.com/questions/31822553/m
#
x = linspace(0, map.urcrnrx, data.getRawData().shape[1])
y = linspace(0, map.urcrnry, data.getRawData().shape[0])
xx, yy = meshgrid(x, y)
ngrid = len(x)
rlons = np.repeat(np.linspace(np.min(lons), np.max(lons), ngrid),
ngrid).reshape(ngrid, ngrid)
rlats = np.repeat(np.linspace(np.min(lats), np.max(lats), ngrid),
ngrid).reshape(ngrid, ngrid).T
tli = mtri.LinearTriInterpolator(mtri.Triangulation(lons.flatten(),
lats.flatten()), data.getRawData().flatten())
rdata = tli(rlons, rlats)
cs = map.contourf(rlons, rlats, rdata, latlon=True, vmin=0, vmax=100, cmap='YlGn')
# add colorbar.
cbar = map.colorbar(cs,location='bottom',pad="5%")
cbar.set_label(data.getParameter() + data.getUnit() )
# Show plot
plt.show()
.. image:: Gridded_Data_files/Gridded_Data_5_0.png
or use **pcolormesh** rather than **contourf**
.. code:: python
plt.figure(figsize=(12, 12), dpi=100)
map = Basemap(projection='cyl',
resolution = 'c',
llcrnrlon = lons.min(), llcrnrlat = lats.min(),
urcrnrlon =lons.max(), urcrnrlat = lats.max()
)
map.drawcoastlines()
map.drawstates()
map.drawcountries()
cs = map.pcolormesh(rlons, rlats, rdata, latlon=True, vmin=0, vmax=100, cmap='YlGn')
.. image:: Gridded_Data_files/Gridded_Data_7_0.png
Plotting a Grid with Cartopy
----------------------------
.. code:: python
import os
import matplotlib.pyplot as plt
import numpy as np
import iris
import cartopy.crs as ccrs
from cartopy import config
lon,lat = data.getLatLonCoords()
plt.figure(figsize=(12, 12), dpi=100)
ax = plt.axes(projection=ccrs.PlateCarree())
cs = plt.contourf(rlons, rlats, rdata, 60, transform=ccrs.PlateCarree(), vmin=0, vmax=100, cmap='YlGn')
ax.coastlines()
ax.gridlines()
# add colorbar
cbar = plt.colorbar(orientation='horizontal')
cbar.set_label(data.getParameter() + data.getUnit() )
plt.show()
.. image:: Gridded_Data_files/Gridded_Data_9_0.png
.. code:: python
import matplotlib.pyplot as plt
import numpy as np
from metpy.calc import get_wind_components
from metpy.cbook import get_test_data
from metpy.plots import StationPlot, StationPlotLayout, simple_layout
from metpy.units import units
# Initialize
data,latitude,longitude,stationName,temperature,dewpoint,seaLevelPress,windDir,windSpeed = [],[],[],[],[],[],[],[],[]
request = DataAccessLayer.newDataRequest()
request.setDatatype("obs")
#
# We need to set one station in order to query the latest time. This is a workaround:
# when you DON'T set a location name you tend to get a single observation that came
# in moments ago, so the "latest data for all stations" array may consist of a single
# remote station, and time-matching is currently unreliable.
#
# So here we take a known US station (KOKC) and assume that many other stations
# are also reporting at that time (ideally a 00/20/40 minute ob).
#
request.setLocationNames("KOKC")
datatimes = DataAccessLayer.getAvailableTimes(request)
# Get most recent time for location
time = datatimes[-1].validPeriod
# "presWeather","skyCover","skyLayerBase"
# are multi-dimensional(??) and returned separately (not sure why yet)... deal with those later
request.setParameters("presWeather","skyCover", "skyLayerBase","stationName","temperature","dewpoint","windDir","windSpeed",
"seaLevelPress","longitude","latitude")
request.setLocationNames()
response = DataAccessLayer.getGeometryData(request,times=time)
print time
PRES_PARAMS = set(["presWeather"])
SKY_PARAMS = set(["skyCover", "skyLayerBase"])
# Build ordered arrays
wx,cvr,bas=[],[],[]
for ob in response:
#print ob.getParameters()
if set(ob.getParameters()) & PRES_PARAMS :
wx.append(ob.getString("presWeather"))
continue
if set(ob.getParameters()) & SKY_PARAMS :
cvr.append(ob.getString("skyCover"))
bas.append(ob.getNumber("skyLayerBase"))
continue
latitude.append(float(ob.getString("latitude")))
longitude.append(float(ob.getString("longitude")))
#stationName.append(ob.getString("stationName"))
temperature.append(float(ob.getString("temperature")))
dewpoint.append(float(ob.getString("dewpoint")))
seaLevelPress.append(float(ob.getString("seaLevelPress")))
windDir.append(float(ob.getString("windDir")))
windSpeed.append(float(ob.getString("windSpeed")))
print len(wx)
print len(temperature)
# Convert
data = dict()
data['latitude'] = np.array(latitude)
data['longitude'] = np.array(longitude)
data['air_temperature'] = np.array(temperature)* units.degC
data['dew_point_temperature'] = np.array(dewpoint)* units.degC
#data['air_pressure_at_sea_level'] = np.array(seaLevelPress)* units('mbar')
u, v = get_wind_components(np.array(windSpeed) * units('knots'),
np.array(windDir) * units.degree)
data['eastward_wind'], data['northward_wind'] = u, v
# Convert the fraction value into a code of 0-8, which can be used to pull out
# the appropriate symbol
#data['cloud_coverage'] = (8 * data_arr['cloud_fraction']).astype(int)
# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
#wx_text = make_string_list(data_arr['weather'])
#wx_codes = {'':0, 'HZ':5, 'BR':10, '-DZ':51, 'DZ':53, '+DZ':55,
# '-RA':61, 'RA':63, '+RA':65, '-SN':71, 'SN':73, '+SN':75}
#data['present_weather'] = [wx_codes[s.split()[0] if ' ' in s else s] for s in wx]
# Set up the map projection
import cartopy.crs as ccrs
import cartopy.feature as feat
from matplotlib import rcParams
rcParams['savefig.dpi'] = 255
proj = ccrs.LambertConformal(central_longitude=-95, central_latitude=35,
standard_parallels=[35])
state_boundaries = feat.NaturalEarthFeature(category='cultural',
name='admin_1_states_provinces_lines',
scale='110m', facecolor='none')
# Create the figure
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(1, 1, 1, projection=proj)
# Add map elements
ax.add_feature(feat.LAND, zorder=-1)
ax.add_feature(feat.OCEAN, zorder=-1)
ax.add_feature(feat.LAKES, zorder=-1)
ax.coastlines(resolution='110m', zorder=2, color='black')
ax.add_feature(state_boundaries)
ax.add_feature(feat.BORDERS, linewidth=2, edgecolor='black')
ax.set_extent((-118, -73, 23, 50))
# Start the station plot by specifying the axes to draw on, as well as the
# lon/lat of the stations (with transform). We also set the fontsize to 12 pt.
stationplot = StationPlot(ax, data['longitude'], data['latitude'],
transform=ccrs.PlateCarree(), fontsize=12)
# The layout knows where everything should go, and things are standardized using
# the names of variables. So the layout pulls arrays out of `data` and plots them
# using `stationplot`.
simple_layout.plot(stationplot, data)
.. parsed-literal::
(Mar 15 16 22:52:00 , Mar 15 16 22:52:00 )
430
86
.. image:: Gridded_Data_files/Gridded_Data_10_1.png

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
View file

@ -0,0 +1,112 @@
===================================
NEXRAD Level 3 Plot with Matplotlib
===================================
`Notebook <http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/examples/notebooks/NEXRAD_Level_3_Plot_with_Matplotlib.ipynb>`_
.. code:: python
from awips.dataaccess import DataAccessLayer
from awips import ThriftClient, RadarCommon
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.radar.request import GetRadarDataRecordRequest
from datetime import datetime
from datetime import timedelta
import matplotlib.pyplot as plt
import numpy as np
from numpy import ma
# use metpy for color table
from metpy.plots import ctables
# Set EDEX server and radar site
edex = "edex-cloud.unidata.ucar.edu"
site = "kftg"
DataAccessLayer.changeEDEXHost(edex)
request = DataAccessLayer.newDataRequest()
request.setDatatype("radar")
request.setLocationNames(site)
datatimes = DataAccessLayer.getAvailableTimes(request)
# Get last available time
timerange = datatimes[-1].validPeriod
dateTimeStr = str(datatimes[-1])
# Buffer length in seconds
buffer = 60
dateTime = datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M:%S")
beginRange = dateTime - timedelta(0, buffer)
endRange = dateTime + timedelta(0, buffer)
timerange = TimeRange(beginRange, endRange)
print "using time",dateTimeStr
print "buffer by",buffer
print "using range",timerange
client = ThriftClient.ThriftClient(edex)
request = GetRadarDataRecordRequest()
request.setRadarId(site)
request.setPrimaryElevationAngle("0.5")
request.setTimeRange(timerange)
fig, axes = plt.subplots(1, 2, figsize=(15, 8))
for v, ctable, ax in zip((94, 99), ('NWSReflectivity', 'NWSVelocity'), axes):
request.setProductCode(v)
response = client.sendRequest(request)
if response.getData():
for record in response.getData():
idra = record.getHdf5Data()
rdat,azdat,depVals,threshVals = RadarCommon.get_hdf5_data(idra)
dim = rdat.getDimension()
yLen,xLen = rdat.getSizes()
array = rdat.getByteData()
# get data for azimuth angles if we have them.
if azdat :
azVals = azdat.getFloatData()
az = np.array(RadarCommon.encode_radial(azVals))
dattyp = RadarCommon.get_data_type(azdat)
az = np.append(az,az[-1])
print "found",v,record.getDataTime()
header = RadarCommon.get_header(record, format, xLen, yLen, azdat, "description")
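# Convert polar (azimuth, range-gate) coordinates to Cartesian x/y gate coordinates for plotting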
rng = np.linspace(0, xLen, xLen + 1)
xlocs = rng * np.sin(np.deg2rad(az[:, np.newaxis]))
ylocs = rng * np.cos(np.deg2rad(az[:, np.newaxis]))
multiArray = np.reshape(array, (-1, xLen))
data = ma.array(multiArray)
data[data==0] = ma.masked
# Plot the data
norm, cmap = ctables.registry.get_with_steps(ctable, 16, 16)
ax.pcolormesh(xlocs, ylocs, data, norm=norm, cmap=cmap)
ax.set_aspect('equal', 'datalim')
multp = 100*(2*xLen/460)
ax.set_xlim(-multp,multp)
ax.set_ylim(-multp,multp)
# This is setting x/ylim on gate/pixel and not km
plt.show()
.. parsed-literal::
using time 2016-03-15 23:07:04
buffer by 60
using range (Mar 15 16 23:06:04 , Mar 15 16 23:08:04 )
found 94 2016-03-15 23:07:04
found 99 2016-03-15 23:07:04
.. image:: NEXRAD_Level_3_Plot_with_Matplotlib_files/NEXRAD_Level_3_Plot_with_Matplotlib_0_1.png

View file

@ -0,0 +1,104 @@
==============================
Plotting a Sounding with MetPy
==============================
`Notebook <http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/examples/notebooks/Plotting_a_Sounding_with_MetPy.ipynb>`_
.. code:: python
from awips.dataaccess import DataAccessLayer
import matplotlib.tri as mtri
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy as np
from metpy.calc import get_wind_components, lcl, dry_lapse, parcel_profile
from metpy.plots import SkewT, Hodograph
from metpy.units import units, concatenate
plt.rcParams['figure.figsize'] = (12, 14)
# Set EDEX host
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
# Data type bufrua
request.setDatatype("bufrua")
# Parameters
request.setParameters("tpMan","tdMan","prMan","htMan","wdMan","wsMan")
# Station ID (name doesn't work yet)
request.setLocationNames("72469")
datatimes = DataAccessLayer.getAvailableTimes(request)
# Get most recent record
response = DataAccessLayer.getGeometryData(request,times=datatimes[-1].validPeriod)
# Initialize data arrays
tpMan,tdMan,prMan,htMan,wdMan,wsMan = [],[],[],[],[],[]
# Build ordered arrays
for ob in response:
tpMan.append(float(ob.getString("tpMan")))
tdMan.append(float(ob.getString("tdMan")))
prMan.append(float(ob.getString("prMan")))
htMan.append(float(ob.getString("htMan")))
wdMan.append(float(ob.getString("wdMan")))
wsMan.append(float(ob.getString("wsMan")))
# Convert
# we can use units.* here?
T = np.array(tpMan)-273.15
Td = np.array(tdMan)-273.15
p = np.array(prMan)/100
height = np.array(htMan)
direc = np.array(wdMan)
spd = np.array(wsMan)
u, v = get_wind_components(spd, np.deg2rad(direc))
p = p * units.mbar
T = T * units.degC
Td = Td * units.degC
spd = spd * units.knot
direc = direc * units.deg
# Create a skewT plot
skew = SkewT()
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)
skew.ax.set_xlim(-40, 60)
# Calculate LCL height and plot as black dot
l = lcl(p[0], T[0], Td[0])
lcl_temp = dry_lapse(concatenate((p[0], l)), T[0])[-1].to('degC')
skew.plot(l, lcl_temp, 'ko', markerfacecolor='black')
# Calculate full parcel profile and add to plot as black line
prof = parcel_profile(p, T[0], Td[0]).to('degC')
skew.plot(p, prof, 'k', linewidth=2)
# Example of coloring area between profiles
skew.ax.fill_betweenx(p, T, prof, where=T>=prof, facecolor='blue', alpha=0.4)
skew.ax.fill_betweenx(p, T, prof, where=T<prof, facecolor='red', alpha=0.4)
# An example of a slanted line at constant T -- in this case the 0 isotherm
l = skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)
# Draw hodograph
ax_hod = inset_axes(skew.ax, '40%', '40%', loc=3)
h = Hodograph(ax_hod, component_range=80.)
h.add_grid(increment=20)
h.plot_colormapped(u, v, spd)
# Show the plot
plt.show()
.. image:: Plotting_a_Sounding_with_MetPy_files/Plotting_a_Sounding_with_MetPy_0_0.png

Binary file not shown.
View file

@ -0,0 +1,388 @@
===========
Surface Obs
===========
`Notebook <http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/examples/notebooks/Surface_Obs.ipynb>`_
.. code:: python
from awips.dataaccess import DataAccessLayer
# Set host
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
# Init data request
request = DataAccessLayer.newDataRequest()
request.setDatatype("obs")
request.setLocationNames("KBJC")
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
# "presWeather","skyCover","skyLayerBase"
# are multi-dimensional... deal with these later
request.setParameters(
"stationName",
"timeObs",
"wmoId",
"autoStationType",
"elevation",
"reportType",
"temperature",
"tempFromTenths",
"dewpoint",
"dpFromTenths",
"windDir",
"windSpeed",
"windGust",
"visibility",
"altimeter",
"seaLevelPress",
"pressChange3Hour",
"pressChangeChar",
"maxTemp24Hour",
"minTemp24Hour",
"precip1Hour",
"precip3Hour",
"precip6Hour",
"precip24Hour"
)
response = DataAccessLayer.getGeometryData(request,times=time)
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
#getParameters
print ob.getString("stationName"), "from", ob.getDataTime().getRefTime()
print "stationName is",ob.getString("stationName")
print "timeObs is",ob.getString("timeObs")
print "wmoId is",ob.getString("wmoId")
print "autoStationType is",ob.getString("autoStationType")
print "elevation is",ob.getString("elevation")
print "reportType is",ob.getString("reportType")
print "temperature is",ob.getString("temperature")
print "tempFromTenths is",ob.getString("tempFromTenths")
print "dewpoint is",ob.getString("dewpoint")
print "dpFromTenths is",ob.getString("dpFromTenths")
print "windDir is",ob.getString("windDir")
print "windSpeed is",ob.getString("windSpeed")
print "windGust is",ob.getString("windGust")
print "visibility is",ob.getString("visibility")
print "altimeter is",ob.getString("altimeter")
print "seaLevelPress is",ob.getString("seaLevelPress")
print "pressChange3Hour is",ob.getString("pressChange3Hour")
print "pressChangeChar is",ob.getString("pressChangeChar")
print "maxTemp24Hour is",ob.getString("maxTemp24Hour")
print "minTemp24Hour is",ob.getString("minTemp24Hour")
print "precip1Hour is",ob.getString("precip1Hour")
print "precip3Hour is",ob.getString("precip3Hour")
print "precip6Hour is",ob.getString("precip6Hour")
print "precip24Hour is",ob.getString("precip24Hour")
.. parsed-literal::
getParameters is ['precip1Hour', 'tempFromTenths', 'precip24Hour', 'seaLevelPress', 'pressChange3Hour', 'temperature', 'dpFromTenths', 'reportType', 'pressChangeChar', 'elevation', 'precip3Hour', 'dewpoint', 'visibility', 'timeObs', 'maxTemp24Hour', 'stationName', 'altimeter', 'autoStationType', 'wmoId', 'windDir', 'windSpeed', 'minTemp24Hour', 'windGust', 'precip6Hour']
24
KBJC from Mar 15 16 22:46:00 GMT
stationName is KBJC
timeObs is 1458081960000
wmoId is -9999
autoStationType is
elevation is 1729.0
reportType is METAR
temperature is 7.0
tempFromTenths is -9999.0
dewpoint is -12.0
dpFromTenths is -9999.0
windDir is 230.0
windSpeed is 15.0
windGust is 25.0
visibility is 60.0
altimeter is 29.9599990845
seaLevelPress is -9999.0
pressChange3Hour is -9999.0
pressChangeChar is
maxTemp24Hour is -9999.0
minTemp24Hour is -9999.0
precip1Hour is -9999.0
precip3Hour is -9999.0
precip6Hour is -9999.0
precip24Hour is -9999.0
.. code:: python
# multi-dimensional present WX
request = DataAccessLayer.newDataRequest()
request.setDatatype("obs")
request.setLocationNames("KBJC")
request.setParameters("presWeather")
response = DataAccessLayer.getGeometryData(request,times=time)
for ob in response:
print "getParameters is",ob.getParameters()
print ob.getString("presWeather")
# multi-dimensional Sky Condition
request.setParameters("skyCover", "skyLayerBase")
response = DataAccessLayer.getGeometryData(request,times=time)
for ob in response:
print ob.getString("skyCover")
print ob.getString("skyLayerBase")
.. parsed-literal::
getParameters is ['presWeather']
VCSH
getParameters is ['presWeather']
getParameters is ['presWeather']
getParameters is ['presWeather']
getParameters is ['presWeather']
FEW
8000.0
SCT
12000.0
BKN
20000.0
-9999.0
-9999.0
-9999.0
Synop/Marine
------------
.. code:: python
from awips.dataaccess import DataAccessLayer
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
request.setDatatype("sfcobs")
request.setLocationNames("72421") # Covington, Kentucky (KCVG)
request.setParameters("stationId","timeObs","elevation","reportType",
"wx_present","visibility","seaLevelPress","stationPress",
"pressChange3Hour","pressChangeChar","temperature",
"dewpoint","seaSurfaceTemp","wetBulb","windDir",
"windSpeed","equivWindSpeed10m","windGust","precip1Hour",
"precip6Hour","precip24Hour" )
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
response = DataAccessLayer.getGeometryData(request,times=time)
print response
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
.. parsed-literal::
[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b404f3310>]
getParameters is ['windDir', 'pressChange3Hour', 'elevation', 'temperature', 'wetBulb', 'wx_present', 'stationPress', 'visibility', 'dewpoint', 'stationId', 'precip1Hour', 'equivWindSpeed10m', 'windSpeed', 'pressChangeChar', 'windGust', 'timeObs', 'reportType', 'precip6Hour', 'precip24Hour', 'seaSurfaceTemp', 'seaLevelPress']
21
Profiler
--------
.. code:: python
MULTI_DIM_PARAMS = set(['vComponent', 'uComponent', 'peakPower',
'levelMode', 'uvQualityCode', 'consensusNum',
'HorizSpStdDev', 'wComponent', 'height',
'VertSpStdDev'])
request = DataAccessLayer.newDataRequest("profiler")
request.setParameters('numProfLvls', 'elevation', 'windDirSfc', 'validTime',
'windSpeedSfc', 'pressure', 'submode', 'relHumidity',
'profilerId', 'rainRate', 'temperature')
request.getParameters().extend(MULTI_DIM_PARAMS)
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
response = DataAccessLayer.getGeometryData(request,times=time)
print response
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
.. parsed-literal::
[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b4481b390>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b40510ad0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279852d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985310>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279853d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985410>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985450>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985490>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279854d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985510>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985550>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985590>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279855d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985610>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985650>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985690>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279856d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985710>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985750>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985790>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279857d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985810>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985850>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985890>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279858d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985910>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985950>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985990>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279859d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a10>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a90>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b3018ab50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b4053b6d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985ad0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b10>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b90>]
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['numProfLvls', 'elevation', 'windDirSfc', 'validTime', 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', 'profilerId', 'rainRate', 'temperature']
11
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']
10
getParameters is ['numProfLvls', 'elevation', 'windDirSfc', 'validTime', 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', 'profilerId', 'rainRate', 'temperature']
11
ACARS
-----
.. code:: python
request = DataAccessLayer.newDataRequest("acars")
request.setParameters("tailNumber", "receiver", "pressure", "flightPhase",
"rollAngleQuality", "temp", "windDirection", "windSpeed",
"humidity", "mixingRatio", "icing")
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
response = DataAccessLayer.getGeometryData(request,times=time)
print response
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
.. parsed-literal::
[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b30196dd0>]
getParameters is ['flightPhase', 'icing', 'temp', 'humidity', 'pressure', 'windSpeed', 'receiver', 'mixingRatio', 'windDirection', 'rollAngleQuality', 'tailNumber']
11
AIREP
-----
.. code:: python
request = DataAccessLayer.newDataRequest("airep")
request.setParameters("id", "flightLevel", "temp", "windDirection", "windSpeed",
"flightWeather", "flightHazard", "flightConditions")
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
response = DataAccessLayer.getGeometryData(request,times=time)
print response
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
.. parsed-literal::
[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b3044f6d0>]
getParameters is ['flightWeather', 'flightHazard', 'flightConditions', 'windSpeed', 'flightLevel', 'id']
6
PIREP
-----
.. code:: python
MULTI_DIM_PARAMS = set(["hazardType",
"turbType", "turbBaseHeight", "turbTopHeight",
"iceType", "iceBaseHeight", "iceTopHeight",
"skyCover1", "skyCover2", "skyBaseHeight", "skyTopHeight"
])
request = DataAccessLayer.newDataRequest("pirep")
request.setParameters('id', 'flightLevel', 'temp', 'windDirection', 'windSpeed',
'horzVisibility', 'aircraftType', 'weatherGroup')
request.getParameters().extend(MULTI_DIM_PARAMS)
datatimes = DataAccessLayer.getAvailableTimes(request)
time = datatimes[-1].validPeriod
response = DataAccessLayer.getGeometryData(request,times=time)
print response
for ob in response:
print "getParameters is",ob.getParameters()
print len(ob.getParameters())
.. parsed-literal::
[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b301a2210>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b301a2510>]
getParameters is ['skyTopHeight', 'skyBaseHeight', 'turbTopHeight', 'iceBaseHeight', 'skyCover1', 'turbBaseHeight', 'iceType', 'iceTopHeight', 'turbType', 'hazardType', 'skyCover2']
11
getParameters is ['horzVisibility', 'weatherGroup', 'windSpeed', 'aircraftType', 'flightLevel', 'id']
6

View file

@ -0,0 +1,139 @@
===========================
Surface Obs Plot with MetPy
===========================
`Notebook <http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/examples/notebooks/Surface_Obs_Plot_with_MetPy.ipynb>`_
Based on the MetPy example `"Station Plot with
Layout" <http://metpy.readthedocs.org/en/latest/examples/generated/Station_Plot_with_Layout.html>`_
.. code:: python
import matplotlib.pyplot as plt
import numpy as np
from metpy.calc import get_wind_components
from metpy.cbook import get_test_data
from metpy.plots import StationPlot, StationPlotLayout, simple_layout
from metpy.units import units
# Initialize
data,latitude,longitude,stationName,temperature,dewpoint,seaLevelPress,windDir,windSpeed = [],[],[],[],[],[],[],[],[]
request = DataAccessLayer.newDataRequest()
request.setDatatype("obs")
#
# We need to set one station in order to query the latest time. This is a workaround:
# when you DON'T set a location name you tend to get a single observation that came
# in moments ago, so the "latest data for all stations" array may consist of a single
# remote station, and time-matching is currently unreliable.
#
# So here we take a known US station (KOKC) and assume that many other stations
# are also reporting at that time (ideally a 00/20/40 minute ob).
#
request.setLocationNames("KOKC")
datatimes = DataAccessLayer.getAvailableTimes(request)
# Get most recent time for location
time = datatimes[-1].validPeriod
# "presWeather","skyCover","skyLayerBase"
# are multi-dimensional(??) and returned separately (not sure why yet)... deal with those later
request.setParameters("presWeather","skyCover", "skyLayerBase","stationName","temperature","dewpoint","windDir","windSpeed",
"seaLevelPress","longitude","latitude")
request.setLocationNames()
response = DataAccessLayer.getGeometryData(request,times=time)
print time
PRES_PARAMS = set(["presWeather"])
SKY_PARAMS = set(["skyCover", "skyLayerBase"])
# Build ordered arrays
wx,cvr,bas=[],[],[]
for ob in response:
#print ob.getParameters()
if set(ob.getParameters()) & PRES_PARAMS :
wx.append(ob.getString("presWeather"))
continue
if set(ob.getParameters()) & SKY_PARAMS :
cvr.append(ob.getString("skyCover"))
bas.append(ob.getNumber("skyLayerBase"))
continue
latitude.append(float(ob.getString("latitude")))
longitude.append(float(ob.getString("longitude")))
#stationName.append(ob.getString("stationName"))
temperature.append(float(ob.getString("temperature")))
dewpoint.append(float(ob.getString("dewpoint")))
seaLevelPress.append(float(ob.getString("seaLevelPress")))
windDir.append(float(ob.getString("windDir")))
windSpeed.append(float(ob.getString("windSpeed")))
print len(wx)
print len(temperature)
# Convert
data = dict()
data['latitude'] = np.array(latitude)
data['longitude'] = np.array(longitude)
data['air_temperature'] = np.array(temperature)* units.degC
data['dew_point_temperature'] = np.array(dewpoint)* units.degC
#data['air_pressure_at_sea_level'] = np.array(seaLevelPress)* units('mbar')
u, v = get_wind_components(np.array(windSpeed) * units('knots'),
np.array(windDir) * units.degree)
data['eastward_wind'], data['northward_wind'] = u, v
# Convert the fraction value into a code of 0-8, which can be used to pull out
# the appropriate symbol
#data['cloud_coverage'] = (8 * data_arr['cloud_fraction']).astype(int)
# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
#wx_text = make_string_list(data_arr['weather'])
#wx_codes = {'':0, 'HZ':5, 'BR':10, '-DZ':51, 'DZ':53, '+DZ':55,
# '-RA':61, 'RA':63, '+RA':65, '-SN':71, 'SN':73, '+SN':75}
#data['present_weather'] = [wx_codes[s.split()[0] if ' ' in s else s] for s in wx]
# Set up the map projection
import cartopy.crs as ccrs
import cartopy.feature as feat
from matplotlib import rcParams
rcParams['savefig.dpi'] = 255
proj = ccrs.LambertConformal(central_longitude=-95, central_latitude=35,
standard_parallels=[35])
state_boundaries = feat.NaturalEarthFeature(category='cultural',
name='admin_1_states_provinces_lines',
scale='110m', facecolor='none')
# Create the figure
fig = plt.figure(figsize=(20, 10))
ax = fig.add_subplot(1, 1, 1, projection=proj)
# Add map elements
ax.add_feature(feat.LAND, zorder=-1)
ax.add_feature(feat.OCEAN, zorder=-1)
ax.add_feature(feat.LAKES, zorder=-1)
ax.coastlines(resolution='110m', zorder=2, color='black')
ax.add_feature(state_boundaries)
ax.add_feature(feat.BORDERS, linewidth=2, edgecolor='black')
ax.set_extent((-118, -73, 23, 50))
# Start the station plot by specifying the axes to draw on, as well as the
# lon/lat of the stations (with transform). We also set the fontsize to 12 pt.
stationplot = StationPlot(ax, data['longitude'], data['latitude'],
transform=ccrs.PlateCarree(), fontsize=12)
# The layout knows where everything should go, and things are standardized using
# the names of variables. So the layout pulls arrays out of `data` and plots them
# using `stationplot`.
simple_layout.plot(stationplot, data)
.. parsed-literal::
(Mar 15 16 22:52:00 , Mar 15 16 22:52:00 )
430
86
.. image:: Surface_Obs_Plot_with_MetPy_files/Surface_Obs_Plot_with_MetPy_1_1.png

Binary file not shown.
View file

@ -0,0 +1,10 @@
.. _examples-index:
######################
Data Plotting Examples
######################
.. toctree::
:glob:
generated/*

105
docs/source/index.md Normal file
View file

@ -0,0 +1,105 @@
---
layout: default
type: about
title: Unidata AWIPS II
subtitle: About
shortname: Introducing
---
<style>
.benchmark img {
max-width: 500px;
}
.benchmark figcaption {
font-weight: bold;
margin-bottom: 16px;
}
</style>
AWIPS II is a weather forecasting display and analysis package being developed by the National Weather Service and Raytheon. AWIPS II is a Java application consisting of a data-rendering client (CAVE, which runs on Red Hat/CentOS Linux and Mac OS X) and a backend data server (EDEX, which runs only on Linux).
AWIPS II takes a unified approach to data ingest, and most data types follow a standard path through the system. At a high level, data flow describes the path taken by a piece of data from its source to its display by a client system. This path starts with data requested and stored by an [LDM](#ldm) client and includes the decoding of the data and storing of decoded data in a form readable and displayable by the end user.
The AWIPS II ingest and request processes form a highly distributed system, and the messaging broker [Qpid](#qpid) is used for inter-process communication.
![image](http://www.unidata.ucar.edu/software/awips2/images/awips2_coms.png)
[ldm]: http://www.unidata.ucar.edu/software/ldm/
[idd]: http://www.unidata.ucar.edu/projects/#idd
[gempak]: http://www.unidata.ucar.edu/software/gempak/
[awips2]: http://www.unidata.ucar.edu/software/awips2/
[ncep]: http://www.ncep.noaa.gov
[apache]: http://httpd.apache.org
[postgres]: www.postgresql.org
[hdf5]: http://www.hdfgroup.org/HDF5/
[eclipse]: http://www.eclipse.org
[camel]: http://camel.apache.org/
[spring]: http://www.springsource.org/
[hibernate]: http://www.hibernate.org/
[qpid]: http://qpid.apache.org
## Software Components
* [EDEX](#edex)
* [CAVE](#cave)
* [Alertviz](#alertviz)
* [LDM](#ldm)
* [edexBridge](#edexbridge)
* [Qpid](#qpid)
* [PostgreSQL](#postgresql)
* [HDF5](#hdf5)
* [PyPIES](#pypies)
The primary AWIPS II application for data ingest, processing, and storage is the Environmental Data EXchange (**EDEX**) server; the primary AWIPS II application for visualization/data manipulation is the Common AWIPS Visualization Environment (**CAVE**) client, which is typically installed on a workstation separate from other AWIPS II components.
In addition to programs developed specifically for AWIPS, AWIPS II uses several commercial off-the-shelf (COTS) and Free or Open Source software (FOSS) products to assist in its operation. The following components, working together and communicating, compose the entire AWIPS II system.
### EDEX
The main server for AWIPS II. Qpid sends alerts to EDEX when data stored by the LDM is ready for processing. These Qpid messages include file header information which allows EDEX to determine the appropriate data decoder to use. The default ingest server (simply named ingest) handles all data ingest other than grib messages, which are processed by a separate ingestGrib server. After decoding, EDEX writes metadata to the database via Postgres and saves the processed data in HDF5 via PyPIES. A third EDEX server, request, feeds requested data to CAVE clients. EDEX ingest and request servers are started and stopped with the commands `edex start` and `edex stop`, which run the system script `/etc/rc.d/init.d/edex_camel`.
### CAVE
Common AWIPS Visualization Environment. The data rendering and visualization tool for AWIPS II. CAVE consists of a number of different data display configurations called perspectives. Perspectives used in operational forecasting environments include **D2D** (Display Two-Dimensional), **GFE** (Graphical Forecast Editor), and **NCP** (National Centers Perspective). CAVE is started with the command `/awips2/cave/cave.sh` or `cave.sh`.
![CAVE](http://www.unidata.ucar.edu/software/awips2/images/Unidata_AWIPS2_CAVE.png)
### Alertviz
**Alertviz** is a modernized version of an AWIPS I application, designed to present various notifications, error messages, and alarms to the user (forecaster). AlertViz can be executed either independently or from CAVE itself. In the Unidata CAVE client, Alertviz is run within CAVE and is not required to be run separately. The toolbar is also **hidden from view** and is accessed by right-click on the desktop taskbar icon.
### LDM
[http://www.unidata.ucar.edu/software/ldm/](http://www.unidata.ucar.edu/software/ldm/)
The **LDM** (Local Data Manager), developed and supported by Unidata, is a suite of client and server programs designed for data distribution, and is the fundamental component of the Unidata Internet Data Distribution (IDD) system. In AWIPS II, the LDM provides data feeds for grids, surface observations, upper-air profiles, satellite and radar imagery, and various other meteorological datasets. The LDM writes data directly to file and alerts EDEX via Qpid when a file is available for processing. The LDM is started and stopped with the commands `edex start` and `edex stop`, which run the commands `service edex_ldm start` and `service edex_ldm stop`.
### edexBridge
edexBridge, invoked in the LDM configuration file `/awips2/ldm/etc/ldmd.conf`, is used by the LDM to post "data available" messages to Qpid, which alerts the EDEX ingest server that a file is ready for processing.
### Qpid
[http://qpid.apache.org](http://qpid.apache.org)
**Apache Qpid**, the Queue Processor Interface Daemon, is the messaging system used by AWIPS II to facilitate communication between services. When the LDM receives a data file to be processed, it employs **edexBridge** to send EDEX ingest servers a message via Qpid. When EDEX has finished decoding the file, it sends CAVE a message via Qpid that data are available for display or further processing. Qpid is started and stopped by `edex start` and `edex stop`, and is controlled by the system script `/etc/rc.d/init.d/qpidd`
### PostgreSQL
[http://www.postgresql.org](http://www.postgresql.org)
**PostgreSQL**, known simply as Postgres, is a relational database management system (DBMS) which handles the storage and retrieval of metadata, database tables, and some decoded data. The storage and reading of EDEX metadata is handled by the Postgres DBMS. Users may query the metadata tables by using the terminal-based front-end for Postgres called **psql**. Postgres is started and stopped by `edex start` and `edex stop`, and is controlled by the system script `/etc/rc.d/init.d/edex_postgres`.
### HDF5
[http://www.hdfgroup.org/HDF5/](http://www.hdfgroup.org/HDF5/)
[**Hierarchical Data Format (v.5)**][hdf5] is the primary data storage format used by AWIPS II for processed grids, satellite and radar imagery and other products. Similar to netCDF, developed and supported by Unidata, HDF5 supports multiple types of data within a single file. For example, a single HDF5 file of radar data may contain multiple volume scans of base reflectivity and base velocity as well as derived products such as composite reflectivity. The file may also contain data from multiple radars. HDF5 is stored in `/awips2/edex/data/hdf5/`
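As a quick, illustrative sketch (this assumes the `h5py` package is available and uses a hypothetical file path), the groups and datasets inside one of these HDF5 files can be listed like this:

```python
import h5py

# Hypothetical path to a processed data file under /awips2/edex/data/hdf5/
sample = "/awips2/edex/data/hdf5/radar/koax/radar-koax.h5"

def show(name):
    print name

with h5py.File(sample, "r") as f:
    # Print the name of every group and dataset in the file
    f.visit(show)
```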
### PyPIES (httpd-pypies)
**PyPIES**, Python Process Isolated Enhanced Storage, was created for AWIPS II to isolate the management of HDF5 Processed Data Storage from the EDEX processes. PyPIES manages access, i.e., reads and writes, of data in the HDF5 files. In a sense, PyPIES provides functionality similar to a DBMS (i.e., PostgreSQL for metadata); all data being written to an HDF5 file is sent to PyPIES, and requests for data stored in HDF5 are processed by PyPIES.
PyPIES is implemented in two parts: 1. The PyPIES manager is a Python application that runs as part of an Apache HTTP server and handles requests to store and retrieve data. 2. The PyPIES logger is a Python process that coordinates logging. PyPIES is started and stopped by `edex start` and `edex stop`, and is controlled by the system script `/etc/rc.d/init.d/httpd-pypies`.

21
docs/source/index.rst Normal file
View file

@ -0,0 +1,21 @@
==================================
Python AWIPS Data Access Framework
==================================
.. raw:: html
The `python-awips <http://www.github.com/Unidata/python-awips>`_ package provides a Data Access Framework (DAF) for requesting data from a remote AWIPS II EDEX server.
The `AWIPS II Python Stack <http://>`_ installed via RPM contains the DAF, matplotlib, numpy, scipy, basemap, pint, shapely, and other packages.
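A minimal request, sketched here against the publicly accessible Unidata EDEX server, looks like this:
.. code:: python
from awips.dataaccess import DataAccessLayer
# Point the Data Access Framework at an EDEX server and list the available grid models
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
request.setDatatype("grid")
print DataAccessLayer.getAvailableLocationNames(request)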
-------------
Documentation
-------------
.. toctree::
:maxdepth: 2
install
examples/index
about
dev

29
docs/source/install.rst Normal file
View file

@ -0,0 +1,29 @@
Installation Guide
==================
- pip install python-awips
Requirements
-------------
- Python 2.7 or later
- pip install numpy shapely
From Github
-----------
- git clone https://github.com/Unidata/python-awips.git
- cd python-awips
- python setup.py install
Install for AWIPS II Python
---------------------------
AWIPS II >=15.1.3 (March 2016) has `python-awips` installed in /awips2/python, as well as a full meteorological data stack (metpy, matplotlib, numpy, etc.).
For AWIPS II systems 15.1.2 and lower:
- wget https://bootstrap.pypa.io/ez_setup.py -O - | /awips2/python/bin/python
- /awips2/python/bin/easy_install pip
- /awips2/python/bin/pip install python-awips
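To verify the install, a quick check against the public Unidata EDEX server (assuming outbound network access) might look like:
.. code:: python
from awips.dataaccess import DataAccessLayer
# Connect to the public EDEX server and confirm the Data Access Framework responds
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
request.setDatatype("grid")
models = DataAccessLayer.getAvailableLocationNames(request)
print len(models), "grid models available"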

View file

@ -0,0 +1,77 @@
#
# Generation of RST from notebooks
#
import glob
import os
import os.path
import warnings
warnings.simplefilter('ignore')
from nbconvert.exporters import rst
def setup(app):
setup.app = app
setup.config = app.config
setup.confdir = app.confdir
app.connect('builder-inited', generate_rst)
return dict(
version='0.1',
parallel_read_safe=True,
parallel_write_safe=True
)
notebook_source_dir = '../../examples/notebooks'
generated_source_dir = 'examples/generated'
def nb_to_rst(nb_path):
"""convert notebook to restructured text"""
exporter = rst.RSTExporter()
out, resources = exporter.from_file(open(nb_path))
basename = os.path.splitext(os.path.basename(nb_path))[0]
imgdir = basename + '_files'
img_prefix = os.path.join(imgdir, basename + '_')
resources['metadata']['basename'] = basename
resources['metadata']['name'] = basename.replace('_', ' ')
resources['metadata']['imgdir'] = imgdir
base_url = ('http://nbviewer.ipython.org/github/Unidata/python-awips/blob/master/'
'examples/notebooks/')
out_lines = ['`Notebook <%s>`_' % (base_url + os.path.basename(nb_path))]
for line in out.split('\n'):
if line.startswith('.. image:: '):
line = line.replace('output_', img_prefix)
out_lines.append(line)
out = '\n'.join(out_lines)
return out, resources
def write_nb(dest, output, resources):
if not os.path.exists(dest):
os.makedirs(dest)
rst_file = os.path.join(dest,
resources['metadata']['basename'] + resources['output_extension'])
name = resources['metadata']['name']
with open(rst_file, 'w') as rst:
header = '=' * len(name)
rst.write(header + '\n')
rst.write(name + '\n')
rst.write(header + '\n')
rst.write(output)
imgdir = os.path.join(dest, resources['metadata']['imgdir'])
if not os.path.exists(imgdir):
os.makedirs(imgdir)
basename = resources['metadata']['basename']
for filename in resources['outputs']:
img_file = os.path.join(imgdir, filename.replace('output_', basename + '_'))
with open(img_file, 'wb') as img:
img.write(resources['outputs'][filename])
def generate_rst(app):
for fname in glob.glob(os.path.join(app.srcdir, notebook_source_dir, '*.ipynb')):
write_nb(os.path.join(app.srcdir, generated_source_dir), *nb_to_rst(fname))

Binary file not shown.

View file

@ -1,44 +0,0 @@
#!/awips2/python/bin/python
from awips.dataaccess import DataAccessLayer
# set everything up
DataAccessLayer.changeEDEXHost("edex.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
# set grid
request.setDatatype("grid")
available_grids = DataAccessLayer.getAvailableLocationNames(request)
for grid in available_grids:
print grid
exit()
# set uair
request.setDatatype("bufrua")
# set parameters
request.setParameters("tpMan","tdMan","prMan","htMan","wdMan","wsMan")
locations = DataAccessLayer.getAvailableLocationNames(request)
#print locations
request.setLocationNames("72230")
datatimes = DataAccessLayer.getAvailableTimes(request)
#print datatimes[0].validPeriod
response = DataAccessLayer.getGeometryData(request,times=datatimes[-1].validPeriod)
#print response
for ob in response:
print "Pres is",ob.getString("prMan")
print " KBMX observation from %s" %ob.getDataTime().getRefTime()
print " Temperature is",ob.getString("tpMan")
print " Dewpoint is",ob.getString("tdMan")
print " Height is",ob.getString("htMan")
print " Wind dir is",ob.getString("wdMan")
print " Wind speed is",ob.getString("wsMan")

View file

@ -1,108 +0,0 @@
```python
#!python
from awips.dataaccess import DataAccessLayer
import numpy as np
request = DataAccessLayer.newDataRequest()
request.setDatatype("gfe")
# For GFE our locationNames are tied to the activated sites in GFE
locationNames = DataAccessLayer.getAvailableLocationNames(request)
print locationNames
request.setLocationNames('OUN')
# For GFE data we use the addIdentifier method to add a constraint.
# Our constraint is for the modelName attribute and this determines
# which GFE database to query the data from. In this request we will
# query the Official database.
request.addIdentifier('modelName','Official')
request.setParameters('PoP')
t = DataAccessLayer.getAvailableTimes(request)
for each in t:
print each.getRefTime(),each.getValidPeriod()
response = DataAccessLayer.getGridData(request, [t[0]])
print response
data = response[0]
print 'Units are in', data.getUnit()
lon,lat = data.getLatLonCoords()
print lon
print 'Parameter we requested is',data.getParameter()
print data.getRawData()
```
```python
['OUN']
```
```python
May 03 15 18:00:00 GMT (May 03 15 18:00:00 , May 03 15 21:00:00 )
May 03 15 21:00:00 GMT (May 03 15 21:00:00 , May 04 15 00:00:00 )
May 04 15 00:00:00 GMT (May 04 15 00:00:00 , May 04 15 08:00:00 )
May 04 15 08:00:00 GMT (May 04 15 08:00:00 , May 04 15 12:00:00 )
May 04 15 12:00:00 GMT (May 04 15 12:00:00 , May 04 15 18:00:00 )
May 04 15 18:00:00 GMT (May 04 15 18:00:00 , May 05 15 00:00:00 )
May 05 15 00:00:00 GMT (May 05 15 00:00:00 , May 05 15 06:00:00 )
May 05 15 06:00:00 GMT (May 05 15 06:00:00 , May 05 15 12:00:00 )
May 05 15 12:00:00 GMT (May 05 15 12:00:00 , May 05 15 18:00:00 )
May 05 15 18:00:00 GMT (May 05 15 18:00:00 , May 06 15 00:00:00 )
May 06 15 00:00:00 GMT (May 06 15 00:00:00 , May 06 15 12:00:00 )
May 06 15 12:00:00 GMT (May 06 15 12:00:00 , May 06 15 18:00:00 )
May 06 15 18:00:00 GMT (May 06 15 18:00:00 , May 07 15 00:00:00 )
May 07 15 00:00:00 GMT (May 07 15 00:00:00 , May 07 15 12:00:00 )
May 07 15 12:00:00 GMT (May 07 15 12:00:00 , May 08 15 00:00:00 )
May 08 15 00:00:00 GMT (May 08 15 00:00:00 , May 08 15 12:00:00 )
May 08 15 12:00:00 GMT (May 08 15 12:00:00 , May 09 15 00:00:00 )
May 09 15 00:00:00 GMT (May 09 15 00:00:00 , May 09 15 12:00:00 )
May 09 15 12:00:00 GMT (May 09 15 12:00:00 , May 10 15 00:00:00 )
May 10 15 00:00:00 GMT (May 10 15 00:00:00 , May 10 15 12:00:00 )
May 10 15 12:00:00 GMT (May 10 15 12:00:00 , May 11 15 00:00:00 )
May 11 15 00:00:00 GMT (May 11 15 00:00:00 , May 11 15 12:00:00 )
May 11 15 12:00:00 GMT (May 11 15 12:00:00 , May 12 15 00:00:00 )
May 12 15 00:00:00 GMT (May 12 15 00:00:00 , May 12 15 12:00:00 )
May 12 15 12:00:00 GMT (May 12 15 12:00:00 , May 12 15 13:00:00 )
```
```python
[<awips.dataaccess.PyGridData.PyGridData object at 0x26f9690>]
```
```python
Units are in %
```
```python
[[-101.30716705 -101.27905273 -101.25093842 ..., -95.05664062
-95.02846527 -95.00028992]
[-101.3058548 -101.27774811 -101.24964142 ..., -95.056633 -95.02845764
-95.00028992]
[-101.30455017 -101.27644348 -101.24834442 ..., -95.05661774
-95.02845764 -95.00028992]
...,
[-101.02937317 -101.00249481 -100.97560883 ..., -95.05414581
-95.02721405 -95.00027466]
[-101.02817535 -101.001297 -100.97442627 ..., -95.05413055
-95.02720642 -95.00027466]
[-101.02697754 -101.00010681 -100.97324371 ..., -95.05412292
-95.02719879 -95.00027466]]
```
```python
Parameter we requested is PoP
```
```python
[[ 8. 8. 7. ..., 7. 7. 7.]
[ 8. 7. 7. ..., 7. 7. 7.]
[ 7. 7. 7. ..., 7. 7. 7.]
...,
[ 3. 3. 3. ..., 2. 2. 2.]
[ 3. 3. 3. ..., 2. 2. 2.]
[ 3. 3. 3. ..., 2. 2. 2.]]
```

View file

@ -1,48 +0,0 @@
#### Using the DAF to get all of the available grids
```python
#!python
from awips.dataaccess import DataAccessLayer
#Initiate a new DataRequest
request = DataAccessLayer.newDataRequest()
#Set the datatype to grid so it knows what plugin to route the request to
request.setDatatype("grid")
#getAvailableLocationNames method will return a list of all available models
#LocationNames mean different things to different plugins beware...radar is icao,
#satellite is sector, etc
available_grids = DataAccessLayer.getAvailableLocationNames(request)
for grid in available_grids:
print grid
```
And the output of the print grid statement would look something like this:
```python
RUC236
SREF216
ENSEMBLE
RTOFS-WestAtl
AKwave10
HiResW-NMM-AK
QPE-KRF
AK-NamDNG5
GFS160
FFG-TIR
GFS254
SPCGuide
RFCqpf
RTMA-Mosaic
QPE-RFC-RSA
UKMET40
MPE-Local-MSR
gefs
WNAwave4
GFS201
QPE-XNAV-ALR
AK-RTMA3
GFS212
...
```

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,33 +0,0 @@
### Using the DAF from Python to determine available model runs and available forecast steps for each model run
Apr 2, 2015 - Virgil Middendorf
The code below creates a Python Dictionary called ModelRunDict, where the keys are the available Model Run date/times (example key: Apr 02 15 06:00:00). Associated with each key is a list of Forecast Steps (in seconds after initialization) available for each Model Run.
```python
#!python
# Getting the Model Run Date/Times and put them into a sorted dictionary, with latest run first.
ModelRunDict = dict()
ModelRunList = []
ModelRunTimes = DataAccessLayer.getAvailableTimes(req, refTimeOnly=True)
for ModelRunTime in ModelRunTimes:
if not ModelRunDict.has_key(ModelRunTime.getRefTime()):
ModelRunDateTimeObject = datetime.strptime(str(ModelRunTime.getRefTime()), '%b %d %y %H:%M:%S %Z')
ModelRunList.append(ModelRunDateTimeObject)
ModelRunList.sort(reverse=True)
for ModelRun in ModelRunList:
ModelRunDict[ModelRun.strftime('%b %d %y %H:%M:%S %Z') + "GMT"] = []
# Get the available forecast steps for each model run.
availableTimes = DataAccessLayer.getAvailableTimes(req)
for time in availableTimes:
ModelRunDict[str(time.getRefTime())].append(time.getFcstTime())
# Printing out available model runs and forecast steps.
for ModelRun in ModelRunDict.keys():
print "Model Run = {}".format(ModelRun)
for step in ModelRunDict[ModelRun]:
print "Forecast Step: {} Hours".format(int(step)/3600)
```

View file

@ -1,78 +0,0 @@
Below is an example of requesting data only over a specific envelope (i.e., bounding box). This example uses a request from the maps database to get the envelope around the OUN CWA. This can be used to fine-tune your request to only get the data you really need and speed up requests.
You could do this same type of request using any other geometry. For example, you could query the states table and only pull out model data over a particular state. You can also create your own Shapely geometry from scratch and use it to define your envelope.
```python
#!python
#!/awips2/python/bin/python
from awips.dataaccess import DataAccessLayer
import numpy as np
# First we will request the OUN CWA from the maps database.
# We will use this to create the envelope for our grid request.
request = DataAccessLayer.newDataRequest()
request.setDatatype("maps")
request.setParameters("cwa","wfo")
request.addIdentifier("locationField","wfo")
request.addIdentifier("geomField","the_geom")
request.addIdentifier("table","mapdata.cwa")
request.setLocationNames("OUN")
response = DataAccessLayer.getGeometryData(request, None)
oungeom = response[0].getGeometry()
# First let's get the nonclipped 850MB Temp so we can compare
# the lat/lons and shape of the arrays
request = DataAccessLayer.newDataRequest()
request.setDatatype("grid")
request.setLocationNames('RUC130')
request.setParameters("T")
request.setLevels("850MB")
t = DataAccessLayer.getAvailableTimes(request)
response = DataAccessLayer.getGridData(request, times=[t[-1]])
data = response[0]
coords = data.getLatLonCoords()
print coords[0].shape,coords[1].shape
# Since the only thing we are changing is adding an Envelope
# we can reuse our previous request without having to make a
# new one.
# Now we will request only the grid points that are within the envelope (bbox)
# of the OUN cwa by using the setEnvelope method
print 'OUN Envelope is',oungeom.envelope
request.setEnvelope(oungeom)
response = DataAccessLayer.getGridData(request, times=[t[-1]])
data = response[0]
coords = data.getLatLonCoords()
print coords[0].shape,coords[1].shape
# Let's say we want to get an area a little bit larger than our CWA. For this we can use
# Shapely's buffer method to enlarge our envelope. In this example we will buffer the OUN
# geometry by half a degree.
ounbufferedgeom = oungeom.buffer(0.5)
print 'OUN Envelope with buffer is',ounbufferedgeom.envelope
request.setEnvelope(ounbufferedgeom)
response = DataAccessLayer.getGridData(request, times=[t[-1]])
data = response[0]
coords = data.getLatLonCoords()
print coords[0].shape,coords[1].shape
```
```python
(175, 175) (175, 175)
OUN Envelope is POLYGON ((-100.0485000609999702 33.3954124450000336, -95.6716995239999619 33.3954124450000336, -95.6716995239999619 37.0016136170000323, -100.0485000609999702 37.0016136170000323, -100.0485000609999702 33.3954124450000336))
(32, 33) (32, 33)
OUN Envelope with buffer is POLYGON ((-100.5485000609999702 32.8954672430463262, -95.1716995239999619 32.8954672430463262, -95.1716995239999619 37.5016136170000323, -100.5485000609999702 37.5016136170000323, -100.5485000609999702 32.8954672430463262))
(39, 41) (39, 41)
```
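And the states-table variant mentioned earlier: a minimal sketch that follows the same pattern as the CWA request above, assuming the maps database exposes a mapdata.states table with a two-letter "state" column (verify the table and column names against your EDEX server):
```python
#!python
from awips.dataaccess import DataAccessLayer
# Request the Oklahoma geometry from the maps database.
# NOTE: "mapdata.states" and the "state" column are assumptions --
# check them against your maps database before relying on this.
request = DataAccessLayer.newDataRequest()
request.setDatatype("maps")
request.setParameters("state")
request.addIdentifier("locationField", "state")
request.addIdentifier("geomField", "the_geom")
request.addIdentifier("table", "mapdata.states")
request.setLocationNames("OK")
response = DataAccessLayer.getGeometryData(request, None)
stategeom = response[0].getGeometry()
# Reuse the state geometry as the envelope for a grid request.
request = DataAccessLayer.newDataRequest()
request.setDatatype("grid")
request.setLocationNames('RUC130')
request.setParameters("T")
request.setLevels("850MB")
request.setEnvelope(stategeom)
t = DataAccessLayer.getAvailableTimes(request)
response = DataAccessLayer.getGridData(request, times=[t[-1]])
print response[0].getLatLonCoords()[0].shape
```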

View file

@@ -1,97 +0,0 @@
```python
#!python
import numpy as np
from awips.dataaccess import DataAccessLayer
#Initiate a new DataRequest
request = DataAccessLayer.newDataRequest()
#Set the datatype to grid so it knows what plugin to route the request to
request.setDatatype("grid")
#Use setLocationNames to set the model we want data from
request.setLocationNames('RUC130')
#Next we set the variable and level of data we want
request.setParameters("T")
request.setLevels("850MB")
#getAvailableTimes allows us to query what times are available based on the
#model, parameter, and levels we previously identified. These are of the type
#dynamicserialize.dstypes.com.raytheon.uf.common.time.DataTime.DataTime.
t = DataAccessLayer.getAvailableTimes(request)
#Finally, let's request some data. There are two types of data (Grid, Geometry); here we are
#requesting gridded data and therefore use the getGridData method. We pass it our DataRequest object,
#which has all of our model, parameter, and level information, along with a list of the DataTime
#objects we want data for. In this case we just ask for the last DataTime in the list above (t[-1]).
#This returns a list of awips.dataaccess.PyGridData.PyGridData objects.
response = DataAccessLayer.getGridData(request, [t[-1]])
print response
#Since we only asked for one DataTime, we only have one PyGridData object. Let's pull it out
#into a variable named data. Then we can use the functions available on awips.dataaccess.PyGridData.PyGridData
#objects (see the module for more methods and details).
data = response[0]
#Show the units of the data
print 'Units are in', data.getUnit()
#Get the lat/lon coordinates of the gridded data. Returns a tuple of (lon, lat) arrays
lon,lat = data.getLatLonCoords()
print lon
#In case we forget what we requested we can get the parameter
print 'Parameter we requested is', data.getParameter()
#And finally let's get the data itself. This returns an array of the raw data
print data.getRawData()
```
Our response object is a list of PyGridData objects
```python
[<awips.dataaccess.PyGridData.PyGridData object at 0x1d39910>]
```
Output of our print statement getting the units
```python
Units are in K
```
Output of our print lon statement showing the longitude array
```python
[[-118.09392548 -117.93661499 -117.77923584 ..., -90.46847534
-90.30672455 -90.14498901]
[-118.06690216 -117.90976715 -117.75257111 ..., -90.47387695
-90.31232452 -90.15077972]
[-118.03994751 -117.88298798 -117.72595978 ..., -90.47927094
-90.31790161 -90.15655518]
...,
[-114.21573639 -114.08406067 -113.9523468 ..., -91.24035645
-91.10612488 -90.97190094]
[-114.19696808 -114.06542206 -113.93383026 ..., -91.24407959
-91.10997772 -90.97589111]
[-114.17823792 -114.04681396 -113.91535187 ..., -91.24778748
-91.11382294 -90.97986603]]
```
Our print statement from getParameter()
```python
Parameter we requested is T
```
And finally our call to getRawData() gets a numpy array of the temperature values
```python
[[ 283.88305664 284.50805664 285.25805664 ..., 280.88305664
280.75805664 280.63305664]
[ 284.38305664 285.00805664 285.75805664 ..., 281.00805664
280.88305664 280.75805664]
[ 284.50805664 285.13305664 285.75805664 ..., 281.13305664
281.00805664 280.88305664]
...,
[ 285.38305664 285.63305664 285.88305664 ..., 286.88305664
286.88305664 287.00805664]
[ 285.25805664 285.50805664 285.75805664 ..., 287.00805664
287.00805664 287.13305664]
[ 285.13305664 285.50805664 285.75805664 ..., 287.25805664
287.25805664 287.25805664]]
```
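Since getRawData() returns a plain numpy array and getUnit() reported Kelvin, a short follow-on sketch (reusing the data object from above) shows one way to convert and summarize the field; the freezing threshold is purely illustrative:
```python
#!python
import numpy as np
tempK = data.getRawData()      # 2-D numpy array of temperatures in Kelvin
tempC = tempK - 273.15         # convert to Celsius
print 'Min/Max (C):', tempC.min(), tempC.max()
print 'Mean (C):', tempC.mean()
# Count grid points below freezing (0 C)
print 'Points below freezing:', np.sum(tempC < 0.0)
```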

File diff suppressed because one or more lines are too long

View file

@@ -1,21 +0,0 @@
#!python
from awips.dataaccess import DataAccessLayer
# Set host
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
# Init data request
request = DataAccessLayer.newDataRequest()
# Set datatype
request.setDatatype("grid")
#
# The getAvailableLocationNames method returns a list of all available models.
#
# Note that location names mean different things to different plugins:
# for radar it is the ICAO, for satellite the sector, and so on.
#
available_grids = DataAccessLayer.getAvailableLocationNames(request)
for grid in available_grids:
    print grid
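Because location names are plugin-specific, the same call works for other datatypes too. A minimal sketch (same EDEX host as above) listing radar location names, which for the radar plugin are ICAO identifiers:
```python
#!python
from awips.dataaccess import DataAccessLayer
# Same pattern, different datatype: for "radar" the location names are ICAOs.
DataAccessLayer.changeEDEXHost("edex-cloud.unidata.ucar.edu")
request = DataAccessLayer.newDataRequest()
request.setDatatype("radar")
available_radars = DataAccessLayer.getAvailableLocationNames(request)
for icao in available_radars:
    print icao
```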

View file

@@ -1049,7 +1049,7 @@
},
{
"cell_type": "code",
"execution_count": 404,
"execution_count": 403,
"metadata": {
"collapsed": false
},
@@ -1062,7 +1062,24 @@
]
}
],
"source": []
"source": [
"from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange\n",
"from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level\n",
"req = DataAccessLayer.newDataRequest(\"radar\")\n",
"req.addIdentifier(\"icao\", \"FTG\")\n",
"#req.setParameters(\"32\")\n",
"#level = Level()\n",
"#level.setLevelonevalue(\"0.0\")\n",
"#req.setLevels(level)\n",
" \n",
"# Indicate that when providing or requesting levels, the Levelonevalue\n",
"# is the primaryElevationAngle and the Leveltwovalue value is the\n",
"# trueElevationAngle\n",
"#req.addIdentifier(\"level.one.field\", \"primaryElevationAngle\")\n",
"#req.addIdentifier(\"level.two.field\", \"trueElevationAngle\")\n",
"times = DataAccessLayer.getAvailableTimes(req)\n",
"print times"
]
},
{
"cell_type": "code",

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,499 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 15,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"getParameters is ['precip1Hour', 'tempFromTenths', 'precip24Hour', 'seaLevelPress', 'pressChange3Hour', 'temperature', 'dpFromTenths', 'reportType', 'pressChangeChar', 'elevation', 'precip3Hour', 'dewpoint', 'visibility', 'timeObs', 'maxTemp24Hour', 'stationName', 'altimeter', 'autoStationType', 'wmoId', 'windDir', 'windSpeed', 'minTemp24Hour', 'windGust', 'precip6Hour']\n",
"24\n",
"KBJC from Mar 15 16 22:46:00 GMT\n",
"stationName is KBJC\n",
"timeObs is 1458081960000\n",
"wmoId is -9999\n",
"autoStationType is \n",
"elevation is 1729.0\n",
"reportType is METAR\n",
"temperature is 7.0\n",
"tempFromTenths is -9999.0\n",
"dewpoint is -12.0\n",
"dpFromTenths is -9999.0\n",
"windDir is 230.0\n",
"windSpeed is 15.0\n",
"windGust is 25.0\n",
"visibility is 60.0\n",
"altimeter is 29.9599990845\n",
"seaLevelPress is -9999.0\n",
"pressChange3Hour is -9999.0\n",
"pressChangeChar is \n",
"maxTemp24Hour is -9999.0\n",
"minTemp24Hour is -9999.0\n",
"precip1Hour is -9999.0\n",
"precip3Hour is -9999.0\n",
"precip6Hour is -9999.0\n",
"precip24Hour is -9999.0\n"
]
}
],
"source": [
"from awips.dataaccess import DataAccessLayer\n",
"\n",
"# Set host\n",
"DataAccessLayer.changeEDEXHost(\"edex-cloud.unidata.ucar.edu\")\n",
"# Init data request\n",
"request = DataAccessLayer.newDataRequest()\n",
"request.setDatatype(\"obs\")\n",
"request.setLocationNames(\"KBJC\")\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"# \"presWeather\",\"skyCover\",\"skyLayerBase\"\n",
"# are multi-dimensional... deal with these later\n",
"request.setParameters(\n",
" \"stationName\",\n",
" \"timeObs\",\n",
" \"wmoId\",\n",
" \"autoStationType\",\n",
" \"elevation\",\n",
" \"reportType\",\n",
" \"temperature\",\n",
" \"tempFromTenths\",\n",
" \"dewpoint\",\n",
" \"dpFromTenths\",\n",
" \"windDir\",\n",
" \"windSpeed\",\n",
" \"windGust\",\n",
" \"visibility\",\n",
" \"altimeter\",\n",
" \"seaLevelPress\",\n",
" \"pressChange3Hour\",\n",
" \"pressChangeChar\",\n",
" \"maxTemp24Hour\",\n",
" \"minTemp24Hour\",\n",
" \"precip1Hour\",\n",
" \"precip3Hour\",\n",
" \"precip6Hour\",\n",
" \"precip24Hour\"\n",
")\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())\n",
" #getParameters\n",
" print ob.getString(\"stationName\"), \"from\", ob.getDataTime().getRefTime()\n",
" print \"stationName is\",ob.getString(\"stationName\")\n",
" print \"timeObs is\",ob.getString(\"timeObs\")\n",
" print \"wmoId is\",ob.getString(\"wmoId\")\n",
" print \"autoStationType is\",ob.getString(\"autoStationType\")\n",
" print \"elevation is\",ob.getString(\"elevation\")\n",
" print \"reportType is\",ob.getString(\"reportType\")\n",
" print \"temperature is\",ob.getString(\"temperature\")\n",
" print \"tempFromTenths is\",ob.getString(\"tempFromTenths\")\n",
" print \"dewpoint is\",ob.getString(\"dewpoint\")\n",
" print \"dpFromTenths is\",ob.getString(\"dpFromTenths\")\n",
" print \"windDir is\",ob.getString(\"windDir\")\n",
" print \"windSpeed is\",ob.getString(\"windSpeed\")\n",
" print \"windGust is\",ob.getString(\"windGust\")\n",
" print \"visibility is\",ob.getString(\"visibility\")\n",
" print \"altimeter is\",ob.getString(\"altimeter\")\n",
" print \"seaLevelPress is\",ob.getString(\"seaLevelPress\")\n",
" print \"pressChange3Hour is\",ob.getString(\"pressChange3Hour\")\n",
" print \"pressChangeChar is\",ob.getString(\"pressChangeChar\")\n",
" print \"maxTemp24Hour is\",ob.getString(\"maxTemp24Hour\")\n",
" print \"minTemp24Hour is\",ob.getString(\"minTemp24Hour\")\n",
" print \"precip1Hour is\",ob.getString(\"precip1Hour\")\n",
" print \"precip3Hour is\",ob.getString(\"precip3Hour\")\n",
" print \"precip6Hour is\",ob.getString(\"precip6Hour\")\n",
" print \"precip24Hour is\",ob.getString(\"precip24Hour\")"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"getParameters is ['presWeather']\n",
"VCSH\n",
"getParameters is ['presWeather']\n",
"\n",
"getParameters is ['presWeather']\n",
"\n",
"getParameters is ['presWeather']\n",
"\n",
"getParameters is ['presWeather']\n",
"\n",
"FEW\n",
"8000.0\n",
"SCT\n",
"12000.0\n",
"BKN\n",
"20000.0\n",
"\n",
"-9999.0\n",
"\n",
"-9999.0\n",
"\n",
"-9999.0\n"
]
}
],
"source": [
"# multi-dimensional present WX\n",
"request = DataAccessLayer.newDataRequest()\n",
"request.setDatatype(\"obs\")\n",
"request.setLocationNames(\"KBJC\")\n",
"request.setParameters(\"presWeather\")\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print ob.getString(\"presWeather\")\n",
"\n",
"\n",
"# multi-dimensional Sky Condition\n",
"request.setParameters(\"skyCover\", \"skyLayerBase\")\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"for ob in response:\n",
" print ob.getString(\"skyCover\")\n",
" print ob.getString(\"skyLayerBase\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Synop/Marine"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b404f3310>]\n",
"getParameters is ['windDir', 'pressChange3Hour', 'elevation', 'temperature', 'wetBulb', 'wx_present', 'stationPress', 'visibility', 'dewpoint', 'stationId', 'precip1Hour', 'equivWindSpeed10m', 'windSpeed', 'pressChangeChar', 'windGust', 'timeObs', 'reportType', 'precip6Hour', 'precip24Hour', 'seaSurfaceTemp', 'seaLevelPress']\n",
"21\n"
]
}
],
"source": [
"from awips.dataaccess import DataAccessLayer\n",
"\n",
"DataAccessLayer.changeEDEXHost(\"edex-cloud.unidata.ucar.edu\")\n",
"request = DataAccessLayer.newDataRequest()\n",
"request.setDatatype(\"sfcobs\")\n",
"request.setLocationNames(\"72421\") # Covington, Kentucky (KCVG)\n",
"\n",
"request.setParameters(\"stationId\",\"timeObs\",\"elevation\",\"reportType\",\n",
" \"wx_present\",\"visibility\",\"seaLevelPress\",\"stationPress\",\n",
" \"pressChange3Hour\",\"pressChangeChar\",\"temperature\",\n",
" \"dewpoint\",\"seaSurfaceTemp\",\"wetBulb\",\"windDir\",\n",
" \"windSpeed\",\"equivWindSpeed10m\",\"windGust\",\"precip1Hour\",\n",
" \"precip6Hour\",\"precip24Hour\" )\n",
"\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"print response\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Profiler"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b4481b390>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b40510ad0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279852d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985310>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279853d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985410>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985450>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985490>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279854d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985510>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985550>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985590>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279855d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985610>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985650>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985690>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279856d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985710>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985750>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985790>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279857d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985810>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985850>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985890>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279858d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985910>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985950>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985990>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b279859d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a10>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985a90>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b3018ab50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b4053b6d0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985ad0>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b10>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b50>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b27985b90>]\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['numProfLvls', 'elevation', 'windDirSfc', 'validTime', 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', 'profilerId', 'rainRate', 'temperature']\n",
"11\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['vComponent', 'uComponent', 'peakPower', 'levelMode', 'uvQualityCode', 'consensusNum', 'HorizSpStdDev', 'wComponent', 'height', 'VertSpStdDev']\n",
"10\n",
"getParameters is ['numProfLvls', 'elevation', 'windDirSfc', 'validTime', 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', 'profilerId', 'rainRate', 'temperature']\n",
"11\n"
]
}
],
"source": [
"MULTI_DIM_PARAMS = set(['vComponent', 'uComponent', 'peakPower', \n",
" 'levelMode', 'uvQualityCode', 'consensusNum', \n",
" 'HorizSpStdDev', 'wComponent', 'height', \n",
" 'VertSpStdDev'])\n",
"\n",
"request = DataAccessLayer.newDataRequest(\"profiler\")\n",
"request.setParameters('numProfLvls', 'elevation', 'windDirSfc', 'validTime', \n",
" 'windSpeedSfc', 'pressure', 'submode', 'relHumidity', \n",
" 'profilerId', 'rainRate', 'temperature')\n",
"request.getParameters().extend(MULTI_DIM_PARAMS)\n",
"\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"print response\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## ACARS"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b30196dd0>]\n",
"getParameters is ['flightPhase', 'icing', 'temp', 'humidity', 'pressure', 'windSpeed', 'receiver', 'mixingRatio', 'windDirection', 'rollAngleQuality', 'tailNumber']\n",
"11\n"
]
}
],
"source": [
"request = DataAccessLayer.newDataRequest(\"acars\")\n",
"request.setParameters(\"tailNumber\", \"receiver\", \"pressure\", \"flightPhase\", \n",
" \"rollAngleQuality\", \"temp\", \"windDirection\", \"windSpeed\",\n",
" \"humidity\", \"mixingRatio\", \"icing\")\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"print response\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## AIREP"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b3044f6d0>]\n",
"getParameters is ['flightWeather', 'flightHazard', 'flightConditions', 'windSpeed', 'flightLevel', 'id']\n",
"6\n"
]
}
],
"source": [
"request = DataAccessLayer.newDataRequest(\"airep\")\n",
"request.setParameters(\"id\", \"flightLevel\", \"temp\", \"windDirection\", \"windSpeed\",\n",
" \"flightWeather\", \"flightHazard\", \"flightConditions\")\n",
"\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"print response\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## PIREP"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b301a2210>, <awips.dataaccess.PyGeometryData.PyGeometryData object at 0x7f1b301a2510>]\n",
"getParameters is ['skyTopHeight', 'skyBaseHeight', 'turbTopHeight', 'iceBaseHeight', 'skyCover1', 'turbBaseHeight', 'iceType', 'iceTopHeight', 'turbType', 'hazardType', 'skyCover2']\n",
"11\n",
"getParameters is ['horzVisibility', 'weatherGroup', 'windSpeed', 'aircraftType', 'flightLevel', 'id']\n",
"6\n"
]
}
],
"source": [
"MULTI_DIM_PARAMS = set([\"hazardType\", \n",
" \"turbType\", \"turbBaseHeight\", \"turbTopHeight\",\n",
" \"iceType\", \"iceBaseHeight\", \"iceTopHeight\",\n",
" \"skyCover1\", \"skyCover2\", \"skyBaseHeight\", \"skyTopHeight\"\n",
" ])\n",
" \n",
"request = DataAccessLayer.newDataRequest(\"pirep\")\n",
"request.setParameters('id', 'flightLevel', 'temp', 'windDirection', 'windSpeed',\n",
" 'horzVisibility', 'aircraftType', 'weatherGroup')\n",
"request.getParameters().extend(MULTI_DIM_PARAMS)\n",
"\n",
"datatimes = DataAccessLayer.getAvailableTimes(request)\n",
"time = datatimes[-1].validPeriod\n",
"\n",
"response = DataAccessLayer.getGeometryData(request,times=time)\n",
"print response\n",
"for ob in response:\n",
" print \"getParameters is\",ob.getParameters()\n",
" print len(ob.getParameters())"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.9"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long