mirror of
https://github.com/Unidata/python-awips.git
synced 2025-02-23 14:57:56 -05:00
add in the /awips and /dynamicserialize directories
- see if this satisfies the webpage building
This commit is contained in:
parent
6140271d45
commit
67c5c0b579
381 changed files with 19489 additions and 0 deletions
88
awips/DateTimeConverter.py
Normal file
88
awips/DateTimeConverter.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
#
|
||||
# Functions for converting between the various "Java" dynamic serialize types
|
||||
# used by EDEX to the native python time datetime.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/24/15 #4480 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
import datetime
|
||||
import time
|
||||
|
||||
from dynamicserialize.dstypes.java.util import Date
|
||||
from dynamicserialize.dstypes.java.sql import Timestamp
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
|
||||
|
||||
MAX_TIME = pow(2, 31) - 1
|
||||
MICROS_IN_SECOND = 1000000
|
||||
|
||||
|
||||
def convertToDateTime(timeArg):
    """
    Convert the given object to a python datetime.

    Supports the native python representations (datetime, struct_time,
    int/float epoch seconds) as well as the dynamicserialize types Date
    and Timestamp.

    Args:
        timeArg: a python object representing a date and time.

    Returns:
        A datetime representing the same date/time as the passed-in object.

    Raises:
        TypeError: if no conversion can be performed.
    """
    if isinstance(timeArg, datetime.datetime):
        # already the target type
        return timeArg
    if isinstance(timeArg, time.struct_time):
        return datetime.datetime(*timeArg[:6])
    if isinstance(timeArg, float):
        # seconds as float, should be avoided due to floating point errors
        wholeSecs = int(timeArg)
        fracMicros = int((timeArg - wholeSecs) * MICROS_IN_SECOND)
        return _convertSecsAndMicros(wholeSecs, fracMicros)
    if isinstance(timeArg, int):
        # plain epoch seconds
        return _convertSecsAndMicros(timeArg, 0)
    if isinstance(timeArg, (Date, Timestamp)):
        # dynamicserialize time types expose their time via getTime()
        return _convertSecsAndMicros(timeArg.getTime(), 0)
    raise TypeError("Cannot convert object of type " + str(type(timeArg)) + " to datetime.")
|
||||
|
||||
|
||||
def _convertSecsAndMicros(seconds, micros):
    """Build a UTC datetime from epoch seconds and a microsecond component.

    Values at or beyond MAX_TIME (2**31 - 1) are handled by offsetting from
    the MAX_TIME epoch with a timedelta, avoiding platform limits on
    utcfromtimestamp for large inputs.
    """
    if seconds < MAX_TIME:
        base = datetime.datetime.utcfromtimestamp(seconds)
    else:
        overflow = datetime.timedelta(seconds=(seconds - MAX_TIME))
        base = datetime.datetime.utcfromtimestamp(MAX_TIME) + overflow
    return base.replace(microsecond=micros)
|
||||
|
||||
|
||||
def constructTimeRange(*args):
    """
    Build a python dynamicserialize TimeRange object from the given
    arguments.

    Args:
        args: either a single TimeRange (returned unchanged) or a start/end
            pair of objects convertible via convertToDateTime().

    Returns:
        A TimeRange.

    Raises:
        TypeError: if the argument count is wrong or a value cannot be
            converted to a datetime.
    """
    if len(args) == 1 and isinstance(args[0], TimeRange):
        return args[0]
    if len(args) != 2:
        raise TypeError("constructTimeRange takes exactly 2 arguments, " + str(len(args)) + " provided.")
    start, end = (convertToDateTime(arg) for arg in args)
    return TimeRange(start, end)
|
103
awips/QpidSubscriber.py
Normal file
103
awips/QpidSubscriber.py
Normal file
|
@ -0,0 +1,103 @@
|
|||
#
|
||||
# Provides a Python-based interface for subscribing to qpid queues and topics.
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 11/17/10 njensen Initial Creation.
|
||||
# 08/15/13 2169 bkowal Optionally gzip decompress any data that is read.
|
||||
# 08/04/16 2416 tgurney Add queueStarted property
|
||||
# 02/16/17 6084 bsteffen Support ssl connections
|
||||
# 09/07/17 6175 tgurney Remove "decompressing" log message
|
||||
#
|
||||
#
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import qpid
|
||||
import zlib
|
||||
|
||||
from Queue import Empty
|
||||
from qpid.exceptions import Closed
|
||||
|
||||
|
||||
class QpidSubscriber:
    """Subscribes to qpid queues/topics and hands each received message body
    to a caller-supplied callback.

    Attributes set in __init__:
        host, port: broker address
        decompress: when True, payloads are gzip-decompressed before the
            callback is invoked
        subscribed: receive-loop flag; close() sets it False to stop
    """

    def __init__(self, host='127.0.0.1', port=5672, decompress=False, ssl=None):
        """Connect to the broker and open a session.

        Args:
            host: broker hostname or IP.
            port: broker AMQP port.
            decompress: gzip-decompress message payloads before the callback.
            ssl: True forces SSL; None enables SSL only when the client
                certificate file exists; falsy (non-None) disables SSL.
        """
        self.host = host
        self.port = port
        self.decompress = decompress
        socket = qpid.util.connect(host, port)
        # Certificate locations may be overridden via the environment;
        # otherwise fall back to ~/.qpid/ and the "guest" certificate name.
        if "QPID_SSL_CERT_DB" in os.environ:
            certdb = os.environ["QPID_SSL_CERT_DB"]
        else:
            certdb = os.path.expanduser("~/.qpid/")
        if "QPID_SSL_CERT_NAME" in os.environ:
            certname = os.environ["QPID_SSL_CERT_NAME"]
        else:
            certname = "guest"
        certfile = os.path.join(certdb, certname + ".crt")
        if ssl or (ssl is None and os.path.exists(certfile)):
            # Wrap the plain socket in SSL with the client key/cert pair and
            # the broker's root certificate for trust.
            keyfile = os.path.join(certdb, certname + ".key")
            trustfile = os.path.join(certdb, "root.crt")
            socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
        self.__connection = qpid.connection.Connection(sock=socket, username='guest', password='guest')
        self.__connection.start()
        self.__session = self.__connection.session(str(qpid.datatypes.uuid4()))
        self.subscribed = True
        self.__queueStarted = False

    def topicSubscribe(self, topicName, callback):
        """Bind a fresh exclusive auto-delete queue to topicName on the
        amq.topic exchange and deliver messages to callback.

        Blocks in the receive loop until close() is called.
        """
        # if the queue is edex.alerts, set decompress to true always for now to
        # maintain compatibility with existing python scripts.
        if topicName == 'edex.alerts':
            self.decompress = True

        print("Establishing connection to broker on", self.host)
        # Appending the session name makes the queue unique to this subscriber.
        queueName = topicName + self.__session.name
        self.__session.queue_declare(queue=queueName, exclusive=True, auto_delete=True,
                                     arguments={'qpid.max_count': 100, 'qpid.policy_type': 'ring'})
        self.__session.exchange_bind(exchange='amq.topic', queue=queueName, binding_key=topicName)
        self.__innerSubscribe(queueName, callback)

    def __innerSubscribe(self, serverQueueName, callback):
        """Consume from serverQueueName until unsubscribed, invoking callback
        with each (optionally gzip-decompressed) message body."""
        local_queue_name = 'local_queue_' + serverQueueName
        queue = self.__session.incoming(local_queue_name)
        self.__session.message_subscribe(serverQueueName, destination=local_queue_name)
        queue.start()
        print("Connection complete to broker on", self.host)
        self.__queueStarted = True

        while self.subscribed:
            try:
                message = queue.get(timeout=10)
                content = message.body
                # acknowledge receipt before handing off to the callback
                self.__session.message_accept(qpid.datatypes.RangedSet(message.id))
                if self.decompress:
                    try:
                        # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk
                        d = zlib.decompressobj(16+zlib.MAX_WBITS)
                        content = d.decompress(content)
                    except ValueError:
                        # decompression failed, return the original content
                        pass
                callback(content)
            except Empty:
                # no message within the timeout; loop again so a close()
                # request is noticed promptly
                pass
            except Closed:
                self.close()

    def close(self):
        """Stop the receive loop and close the session (best effort)."""
        self.__queueStarted = False
        self.subscribed = False
        try:
            self.__session.close(timeout=10)
        except ValueError:
            pass

    @property
    def queueStarted(self):
        # True once the local queue has been started and messages can flow.
        return self.__queueStarted
|
141
awips/RadarCommon.py
Normal file
141
awips/RadarCommon.py
Normal file
|
@ -0,0 +1,141 @@
|
|||
#
|
||||
# Common methods for the a2gtrad and a2advrad scripts.
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/13/2014 3393 nabowle Initial creation to contain common
|
||||
# code for a2*radStub scripts.
|
||||
# 03/15/2015 mjames@ucar Edited/added to awips package as RadarCommon
|
||||
#
|
||||
#
|
||||
|
||||
|
||||
def get_datetime_str(record):
    """
    Get the datetime string for a record.

    The record's DataTime is stringified, truncated to the first 19
    characters (YYYY-MM-DD HH:MM:SS), the space replaced by an underscore,
    and ".0" appended.

    Args:
        record: the record to get data for.

    Returns:
        datetime string.
    """
    stamp = str(record.getDataTime())[:19]
    return stamp.replace(" ", "_") + ".0"
|
||||
|
||||
|
||||
def get_data_type(azdat):
    """
    Get the radar file type (radial or raster).

    Args:
        azdat: truthy when angle data is present (radial product).

    Returns:
        "radial" or "raster".
    """
    return "radial" if azdat else "raster"
|
||||
|
||||
|
||||
def get_hdf5_data(idra):
    """Split an HDF5 record list into its radar components.

    Args:
        idra: iterable of HDF5 items, each exposing getName() (and
            getShortData() for the value items).

    Returns:
        (rdat, azdat, depVals, threshVals): the "Data" item, the "Angles"
        item, and the short-data arrays for "DependentValues" and
        "Thresholds". Missing components default to empty lists.
    """
    rdat, azdat = [], []
    depVals, threshVals = [], []
    if idra:
        for item in idra:
            name = item.getName()
            if name == "Data":
                rdat = item
            elif name == "Angles":
                azdat = item
                # dattyp = "radial"
            elif name == "DependentValues":
                depVals = item.getShortData()
            elif name == "Thresholds":
                threshVals = item.getShortData()

    return rdat, azdat, depVals, threshVals
|
||||
|
||||
|
||||
def get_header(record, headerFormat, xLen, yLen, azdat, description):
    """Build the space-separated header line for a radar product.

    Args:
        record: radar record providing position/elevation/VCP getters.
        headerFormat: truthy to include lat/lon/elevation fields.
        xLen, yLen: grid dimensions.
        azdat: truthy for radial products (see get_data_type).
        description: product description string.

    Returns:
        A single newline-terminated header string.
    """
    # Encode dimensions, time, mapping, description, tilt, and VCP
    mytime = get_datetime_str(record)
    dattyp = get_data_type(azdat)

    fields = [str(xLen), str(yLen), mytime, dattyp]
    if headerFormat:
        fields += [str(record.getLatitude()),
                   str(record.getLongitude()),
                   str(record.getElevation()),
                   str(record.getElevationNumber()),
                   description,
                   str(record.getTrueElevationAngle()),
                   str(record.getVolumeCoveragePattern())]
    else:
        fields += [description,
                   str(record.getTrueElevationAngle()),
                   str(record.getVolumeCoveragePattern())]
    return " ".join(fields) + "\n"
|
||||
|
||||
|
||||
def encode_thresh_vals(threshVals):
    """Encode radar threshold shorts into a space-separated display string.

    Each 16-bit value is split into a low data byte and a high flag byte.
    A negative flag byte selects a special code from the spec table; a
    non-negative one selects relational prefixes (> < + -) and a scaling
    for the low byte.

    Args:
        threshVals: sequence of threshold values (16-bit integers).

    Returns:
        One string: a space-prefixed token per value, newline-terminated.
    """
    spec = [".", "TH", "ND", "RF", "BI", "GC", "IC", "GR", "WS", "DS",
            "RA", "HR", "BD", "HA", "UK"]
    msg = ""
    for val in threshVals:
        lo = val % 256
        # Use floor division: the original '/' was Python 2 integer
        # division; in Python 3 it yields a float flag byte.
        hi = val // 256
        msg += " "
        if hi < 0:
            # negative flag word: low byte is a special code index
            if lo > 14:
                msg += "."
            else:
                msg += spec[lo]
            continue
        # relational prefix flags
        if hi % 16 >= 8:
            msg += ">"
        elif hi % 8 >= 4:
            msg += "<"
        if hi % 4 >= 2:
            msg += "+"
        elif hi % 2 >= 1:
            msg += "-"
        # scaling flags select how the low byte is rendered
        if hi >= 64:
            msg += "%.2f" % (lo*0.01)
        elif hi % 64 >= 32:
            msg += "%.2f" % (lo*0.05)
        elif hi % 32 >= 16:
            msg += "%.1f" % (lo*0.1)
        else:
            msg += str(lo)
    msg += "\n"
    return msg
|
||||
|
||||
|
||||
def encode_dep_vals(depVals):
    """Return the dependent values as a list of decimal strings.

    Args:
        depVals: sequence of numeric dependent values.

    Returns:
        list of str, one per input value, in order.
    """
    return [str(val) for val in depVals]
|
||||
|
||||
|
||||
def encode_radial(azVals):
    """Return the azimuth values as a plain list (shallow copy, in order).

    Args:
        azVals: sequence of azimuth angle values.

    Returns:
        list containing the same values.
    """
    return list(azVals)
|
80
awips/ThriftClient.py
Normal file
80
awips/ThriftClient.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
#
|
||||
# Provides a Python-based interface for executing Thrift requests.
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 09/20/10 dgilling Initial Creation.
|
||||
#
|
||||
#
|
||||
|
||||
try:
|
||||
import http.client as httpcl
|
||||
except ImportError:
|
||||
import httplib as httpcl
|
||||
from dynamicserialize import DynamicSerializationManager
|
||||
|
||||
|
||||
class ThriftClient:
    """HTTP client that POSTs dynamicserialize-encoded requests to an EDEX
    thrift service endpoint and deserializes the responses."""

    # How to call this constructor:
    #   1. Pass in all arguments separately (e.g.,
    #      ThriftClient.ThriftClient("localhost", 9581, "/services"))
    #      will return a Thrift client pointed at http://localhost:9581/services.
    #   2. Pass in all arguments through the host string (e.g.,
    #      ThriftClient.ThriftClient("localhost:9581/services"))
    #      will return a Thrift client pointed at http://localhost:9581/services.
    #   3. Pass in host/port arguments through the host string (e.g.,
    #      ThriftClient.ThriftClient("localhost:9581", "/services"))
    #      will return a Thrift client pointed at http://localhost:9581/services.
    def __init__(self, host, port=9581, uri="/services"):
        """Create a client; see the forms documented above.

        Args:
            host: hostname, optionally with ":port" and/or "/path" embedded.
            port: server port; ignored when the path form of host is used.
            uri: base service path; ignored when host embeds a path.
        """
        hostParts = host.split("/", 1)
        if len(hostParts) > 1:
            # Form 2: host string carries "host[:port]/path"
            hostString = hostParts[0]
            self.__uri = "/" + hostParts[1]
            self.__httpConn = httpcl.HTTPConnection(hostString)
        else:
            if port is None:
                # port embedded in host string (or default HTTP port)
                self.__httpConn = httpcl.HTTPConnection(host)
            else:
                self.__httpConn = httpcl.HTTPConnection(host, port)

            self.__uri = uri

        self.__dsm = DynamicSerializationManager.DynamicSerializationManager()

    def sendRequest(self, request, uri="/thrift"):
        """Serialize request, POST it to the service, and return the
        deserialized response object.

        Args:
            request: dynamicserialize-serializable request object.
            uri: path appended to the base service path.

        Raises:
            ThriftRequestException: on a non-200 HTTP status, or when the
                response carries a server-side exception (getException()).
        """
        message = self.__dsm.serializeObject(request)

        self.__httpConn.connect()
        self.__httpConn.request("POST", self.__uri + uri, message)

        response = self.__httpConn.getresponse()
        if response.status != 200:
            raise ThriftRequestException("Unable to post request to server")

        rval = self.__dsm.deserializeBytes(response.read())
        self.__httpConn.close()

        # let's verify we have an instance of ServerErrorResponse
        # IF we do, throw an exception up to the caller along
        # with the original Java stack trace
        # ELSE: we have a valid response and pass it back
        try:
            forceError = rval.getException()
            raise ThriftRequestException(forceError)
        except AttributeError:
            # no getException() attribute: normal (non-error) response
            pass

        return rval
|
||||
|
||||
|
||||
class ThriftRequestException(Exception):
    """Raised by ThriftClient when a request fails at the HTTP level or the
    server returns an error response."""

    def __init__(self, value):
        # keep the originating value (message string or server exception)
        self.parameter = value

    def __str__(self):
        return repr(self.parameter)
|
87
awips/TimeUtil.py
Normal file
87
awips/TimeUtil.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
# ----------------------------------------------------------------------------
|
||||
# This software is in the public domain, furnished "as is", without technical
|
||||
# support, and with no warranty, express or implied, as to its usefulness for
|
||||
# any purpose.
|
||||
#
|
||||
# offsetTime.py
|
||||
# Handles Displaced Real Time for various applications
|
||||
#
|
||||
# Author: hansen/romberg
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
import string
|
||||
import time
|
||||
|
||||
# Given the timeStr, return the offset (in seconds)
|
||||
# from the current time.
|
||||
# Also return the launchStr i.e. Programs launched from this
|
||||
# offset application will use the launchStr as the -z argument.
|
||||
# The offset will be positive for time in the future,
|
||||
# negative for time in the past.
|
||||
#
|
||||
# May still want it to be normalized to the most recent midnight.
|
||||
#
|
||||
# NOTES about synchronizing:
|
||||
# --With synchronizing on, the "current time" for all processes started
|
||||
# within a given hour will be the same.
|
||||
# This guarantees that GFE's have the same current time and ISC grid
|
||||
# time stamps are synchronized and can be exchanged.
|
||||
# Formatters launched from the GFE in this mode will be synchronized as
|
||||
# well by setting the launchStr to use the time difference format
|
||||
# (YYYYMMDD_HHMM,YYYYMMDD_HHMM).
|
||||
# --This does not solve the problem in the general case.
|
||||
# For example, if someone starts the GFE at 12:59 and someone
|
||||
# else starts it at 1:01, they will have different offsets and
|
||||
# current times.
|
||||
# --With synchronizing off, when the process starts, the current time
|
||||
# matches the drtTime in the command line. However, with synchronizing
|
||||
# on, the current time will be offset by the fraction of the hour at
|
||||
# which the process was started. Examples:
|
||||
# Actual Starting time: 20040617_1230
|
||||
# drtTime 20040616_0000
|
||||
# Synchronizing off:
|
||||
# GFE Spatial Editor at StartUp: 20040616_0000
|
||||
# Synchronizing on:
|
||||
# GFE Spatial Editor at StartUp: 20040616_0030
|
||||
#
|
||||
|
||||
|
||||
def determineDrtOffset(timeStr):
    """Compute the Displaced Real Time offset for the given time string.

    Args:
        timeStr: one of
            "t1,t2"         -- two YYYYMMDD_HHMM times; offset is t1 - t2
            "YYYYMMDD_HHMM" -- offset from the current time
            "SYYYYMMDD_HHMM" -- same, but synchronized to the current hour

    Returns:
        (offset, launchStr): offset in seconds (positive = future), and the
        string child programs should use as their -z argument.
    """
    launchStr = timeStr
    # Check for time difference
    if timeStr.find(",") >= 0:
        times = timeStr.split(",")
        t1 = makeTime(times[0])
        t2 = makeTime(times[1])
        return t1-t2, launchStr
    # Check for synchronized mode
    synch = 0
    if timeStr[0] == "S":
        timeStr = timeStr[1:]
        synch = 1
    drt_t = makeTime(timeStr)
    gm = time.gmtime()
    cur_t = time.mktime(gm)

    # Synchronize to most recent hour
    # i.e. "truncate" cur_t to most recent hour.
    if synch:
        cur_t = time.mktime((gm[0], gm[1], gm[2], gm[3], 0, 0, 0, 0, 0))
        # Build launchStr as "<drtTime>,<current hour>" so every process
        # started within this hour shares the same current time.
        curStr = '%4s%2s%2s_%2s00\n' % (repr(gm[0]), repr(gm[1]),
                                        repr(gm[2]), repr(gm[3]))
        curStr = curStr.replace(' ', '0')  # zero-pad the %Ns fields
        launchStr = timeStr + "," + curStr

    offset = drt_t - cur_t
    return int(offset), launchStr
|
||||
|
||||
|
||||
def makeTime(timeStr):
    """Convert a GFE time string to seconds since the epoch.

    Args:
        timeStr: time formatted as YYYYMMDD_HHMM (index 8 is the '_').

    Returns:
        float epoch seconds, per time.mktime (local-time interpretation).
    """
    # int() replaces string.atoi, which was removed in Python 3.
    year = int(timeStr[0:4])
    month = int(timeStr[4:6])
    day = int(timeStr[6:8])
    hour = int(timeStr[9:11])
    minute = int(timeStr[11:13])
    # Do not use daylight savings because gmtime is not in daylight
    # savings time.
    return time.mktime((year, month, day, hour, minute, 0, 0, 0, 0))
|
62
awips/UsageArgumentParser.py
Normal file
62
awips/UsageArgumentParser.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------- -------- --------- ---------------------------------------------
|
||||
# Feb 13, 2017 6092 randerso Added StoreTimeAction
|
||||
#
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import time
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
|
||||
|
||||
TIME_FORMAT = "%Y%m%d_%H%M"
|
||||
|
||||
|
||||
class UsageArgumentParser(argparse.ArgumentParser):
    """ArgumentParser variant whose error() prints the complete help text
    instead of only the usage line before exiting."""

    def error(self, message):
        """Report the error on stderr, print full help, and exit(2)."""
        sys.stderr.write("{}: error: {}\n".format(self.prog, message))
        self.print_help()
        sys.exit(2)
|
||||
|
||||
|
||||
# Custom actions for ArgumentParser objects
|
||||
class StoreDatabaseIDAction(argparse.Action):
    """argparse action that parses its value into a DatabaseID, rejecting
    invalid identifiers via parser.error()."""

    def __call__(self, parser, namespace, values, option_string=None):
        databaseID = DatabaseID(values)
        if not databaseID.isValid():
            # parser.error() exits, so no value is stored for bad input
            parser.error("DatabaseID [" + values + "] not a valid identifier")
        setattr(namespace, self.dest, databaseID)
|
||||
|
||||
|
||||
class AppendParmNameAndLevelAction(argparse.Action):
    """argparse action that parses its value with ParmID.parmNameAndLevel()
    and appends "name_level" to a list on the namespace."""

    def __call__(self, parser, namespace, values, option_string=None):
        parsed = ParmID.parmNameAndLevel(values)
        entry = parsed[0] + '_' + parsed[1]
        existing = getattr(namespace, self.dest, None)
        if existing is None:
            # first occurrence: start a new list
            setattr(namespace, self.dest, [entry])
        else:
            existing.append(entry)
            setattr(namespace, self.dest, existing)
|
||||
|
||||
|
||||
class StoreTimeAction(argparse.Action):
    """
    argparse.Action subclass that validates a GFE-format time string
    (YYYYMMDD_hhmm) and stores it on the namespace as a time.struct_time.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        try:
            parsed = time.strptime(values, TIME_FORMAT)
        except ValueError:
            parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")
        else:
            setattr(namespace, self.dest, parsed)
|
18
awips/UsageOptionParser.py
Normal file
18
awips/UsageOptionParser.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
|
||||
class UsageOptionParser(OptionParser):
    """OptionParser variant whose error() prints the complete help text
    (not just the usage line) before exiting."""

    def error(self, msg):
        """Print full help plus the error message to stderr, then exit(2)."""
        self.print_help(sys.stderr)
        sys.stderr.write("\n%s\n" % msg)
        sys.exit(2)
|
3
awips/__init__.py
Normal file
3
awips/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
# Package namespace marker: no names are re-exported from `awips` itself;
# submodules are imported explicitly by consumers.
__all__ = [
]
|
84
awips/dataaccess/CombinedTimeQuery.py
Normal file
84
awips/dataaccess/CombinedTimeQuery.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
#
|
||||
# Method for performing a DAF time query where all parameter/level/location
|
||||
# combinations must be available at the same time.
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/22/16 #5591 bsteffen Initial Creation.
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataAccessLayer
|
||||
|
||||
|
||||
def getAvailableTimes(request, refTimeOnly=False):
    """Return the times at which every parameter/level/location combination
    in the request has data available (intersection across combinations).

    Args:
        request: the IDataRequest to query.
        refTimeOnly: if True, only unique reference times are requested.

    Returns:
        the intersected available times (a set when multiple combinations
        are intersected; otherwise whatever DataAccessLayer returns).
    """
    return __getAvailableTimesForEachParameter(request, refTimeOnly)
|
||||
|
||||
|
||||
def __getAvailableTimesForEachParameter(request, refTimeOnly=False):
    """Intersect available times across each requested parameter; with no
    parameters set, fall through to the per-level query."""
    parameters = request.getParameters()
    if not parameters:
        return __getAvailableTimesForEachLevel(request, refTimeOnly)
    commonTimes = None
    for parameter in parameters:
        subRequest = __cloneRequest(request)
        subRequest.setParameters(parameter)
        parameterTimes = __getAvailableTimesForEachLevel(subRequest, refTimeOnly)
        if commonTimes is None:
            commonTimes = parameterTimes
        else:
            commonTimes.intersection_update(parameterTimes)
        if not commonTimes:
            # an empty intersection can never grow back; stop early
            break
    return commonTimes
|
||||
else:
|
||||
return __getAvailableTimesForEachLevel(request, refTimeOnly)
|
||||
|
||||
|
||||
def __getAvailableTimesForEachLevel(request, refTimeOnly=False):
    """Intersect available times across each requested level; with no levels
    set, fall through to the per-location query."""
    levels = request.getLevels()
    if not levels:
        return __getAvailableTimesForEachLocation(request, refTimeOnly)
    commonTimes = None
    for level in levels:
        subRequest = __cloneRequest(request)
        subRequest.setLevels(level)
        levelTimes = __getAvailableTimesForEachLocation(subRequest, refTimeOnly)
        if commonTimes is None:
            commonTimes = levelTimes
        else:
            commonTimes.intersection_update(levelTimes)
        if not commonTimes:
            # an empty intersection can never grow back; stop early
            break
    return commonTimes
|
||||
|
||||
|
||||
def __getAvailableTimesForEachLocation(request, refTimeOnly=False):
    """Intersect available times across each requested location name; with
    no locations set, query the DataAccessLayer directly."""
    locations = request.getLocationNames()
    if not locations:
        return DataAccessLayer.getAvailableTimes(request, refTimeOnly)
    commonTimes = None
    for location in locations:
        subRequest = __cloneRequest(request)
        subRequest.setLocationNames(location)
        locationTimes = DataAccessLayer.getAvailableTimes(subRequest, refTimeOnly)
        if commonTimes is None:
            # first result seeds the intersection (copied into a set)
            commonTimes = set(locationTimes)
        else:
            commonTimes.intersection_update(locationTimes)
        if not commonTimes:
            # an empty intersection can never grow back; stop early
            break
    return commonTimes
|
||||
|
||||
|
||||
def __cloneRequest(request):
    # Build an independent copy of the request so that per-parameter/level/
    # location narrowing does not mutate the caller's request object.
    return DataAccessLayer.newDataRequest(datatype=request.getDatatype(),
                                          parameters=request.getParameters(),
                                          levels=request.getLevels(),
                                          locationNames=request.getLocationNames(),
                                          envelope=request.getEnvelope(),
                                          **request.getIdentifiers())
|
391
awips/dataaccess/DataAccessLayer.py
Normal file
391
awips/dataaccess/DataAccessLayer.py
Normal file
|
@ -0,0 +1,391 @@
|
|||
#
|
||||
# Published interface for awips.dataaccess package
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ------- ---------- -------------------------
|
||||
# 12/10/12 njensen Initial Creation.
|
||||
# Feb 14, 2013 1614 bsteffen refactor data access framework to use single request.
|
||||
# 04/10/13 1871 mnash move getLatLonCoords to JGridData and add default args
|
||||
# 05/29/13 2023 dgilling Hook up ThriftClientRouter.
|
||||
# 03/03/14 2673 bsteffen Add ability to query only ref times.
|
||||
# 07/22/14 3185 njensen Added optional/default args to newDataRequest
|
||||
# 07/30/14 3185 njensen Renamed valid identifiers to optional
|
||||
# Apr 26, 2015 4259 njensen Updated for new JEP API
|
||||
# Apr 13, 2016 5379 tgurney Add getIdentifierValues(), getRequiredIdentifiers(),
|
||||
# and getOptionalIdentifiers()
|
||||
# Oct 07, 2016 ---- mjames@ucar Added getForecastRun
|
||||
# Oct 18, 2016 5916 bsteffen Add setLazyLoadGridLatLon
|
||||
# Oct 11, 2018 ---- mjames@ucar Added getMetarObs() getSynopticObs()
|
||||
#
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
THRIFT_HOST = "edex"
|
||||
|
||||
USING_NATIVE_THRIFT = False
|
||||
|
||||
# Select the routing implementation once at import time: inside a JVM
# (jep already loaded) use JepRouter; otherwise talk thrift to THRIFT_HOST.
if 'jep' in sys.modules:
    # intentionally do not catch if this fails to import, we want it to
    # be obvious that something is configured wrong when running from within
    # Java instead of allowing false confidence and fallback behavior
    import JepRouter
    router = JepRouter
else:
    from awips.dataaccess import ThriftClientRouter
    router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
    USING_NATIVE_THRIFT = True
|
||||
|
||||
|
||||
def getRadarProductIDs(availableParms):
    """
    Get only the numeric identifiers for NEXRAD3 products.

    Args:
        availableParms: Full list of radar parameters

    Returns:
        List of parameters that parse as integers, as strings
    """
    productIDs = []
    for parm in availableParms:
        # Original checked `isinstance(int(p), int)`, which is always true
        # when int() succeeds; keeping only the ValueError filter.
        try:
            int(parm)
        except ValueError:
            continue
        productIDs.append(str(parm))

    return productIDs
|
||||
|
||||
|
||||
def getRadarProductNames(availableParms):
    """
    Get only the named identifiers for NEXRAD3 products.

    A parameter counts as "named" when it is longer than 3 characters
    (numeric product IDs are at most 3 digits).

    Args:
        availableParms: Full list of radar parameters

    Returns:
        List of filtered parameters
    """
    return [parm for parm in availableParms if len(parm) > 3]
|
||||
|
||||
|
||||
def getMetarObs(response):
    """
    Processes a DataAccessLayer "obs" response into a dictionary,
    with special consideration for multi-value parameters
    "presWeather", "skyCover", and "skyLayerBase".

    Args:
        response: DAL getGeometry() list

    Returns:
        A dictionary of METAR obs, one list per parameter name; the
        multi-value parameters hold one list per station.
    """
    from datetime import datetime
    single_val_params = ["timeObs", "stationName", "longitude", "latitude",
                         "temperature", "dewpoint", "windDir",
                         "windSpeed", "seaLevelPress"]
    multi_val_params = ["presWeather", "skyCover", "skyLayerBase"]
    params = single_val_params + multi_val_params
    station_names, pres_weather, sky_cov, sky_layer_base = [], [], [], []
    obs = dict({params: [] for params in params})
    for ob in response:
        avail_params = ob.getParameters()
        if "presWeather" in avail_params:
            # multi-value geometry: accumulate until the station record arrives
            pres_weather.append(ob.getString("presWeather"))
        elif "skyCover" in avail_params and "skyLayerBase" in avail_params:
            sky_cov.append(ob.getString("skyCover"))
            sky_layer_base.append(ob.getNumber("skyLayerBase"))
        else:
            # If we already have a record for this stationName, skip
            if ob.getString('stationName') not in station_names:
                station_names.append(ob.getString('stationName'))
                for param in single_val_params:
                    if param in avail_params:
                        if param == 'timeObs':
                            # observation times arrive as epoch milliseconds
                            obs[param].append(datetime.fromtimestamp(ob.getNumber(param) / 1000.0))
                        else:
                            try:
                                obs[param].append(ob.getNumber(param))
                            except TypeError:
                                # not numeric; fall back to the string value
                                obs[param].append(ob.getString(param))
                    else:
                        obs[param].append(None)

                # flush accumulated multi-value lists onto this station's record
                obs['presWeather'].append(pres_weather)
                obs['skyCover'].append(sky_cov)
                obs['skyLayerBase'].append(sky_layer_base)
                pres_weather = []
                sky_cov = []
                sky_layer_base = []
    return obs
|
||||
|
||||
|
||||
def getSynopticObs(response):
    """
    Processes a DataAccessLayer "sfcobs" response into a dictionary
    of available parameters.

    Args:
        response: DAL getGeometry() list (must be non-empty; the first
            element's parameter list defines the keys)

    Returns:
        A dictionary of synop obs, one list per parameter, one entry per
        unique stationId
    """
    from datetime import datetime
    station_names = []
    params = response[0].getParameters()
    sfcobs = dict({params: [] for params in params})
    for sfcob in response:
        # If we already have a record for this stationId, skip
        if sfcob.getString('stationId') not in station_names:
            station_names.append(sfcob.getString('stationId'))
            for param in params:
                if param == 'timeObs':
                    # observation times arrive as epoch milliseconds
                    sfcobs[param].append(datetime.fromtimestamp(sfcob.getNumber(param) / 1000.0))
                else:
                    try:
                        sfcobs[param].append(sfcob.getNumber(param))
                    except TypeError:
                        # not numeric; fall back to the string value
                        sfcobs[param].append(sfcob.getString(param))

    return sfcobs
|
||||
|
||||
|
||||
def getForecastRun(cycle, times):
    """
    Get the latest forecast run (list of objects) from all
    cycles and times returned from a DataAccessLayer "grid" response.

    A time belongs to the run when its first 19 characters (the reference
    time) match the cycle string.

    Args:
        cycle: Forecast cycle reference time
        times: All available times/cycles

    Returns:
        DataTime array for a single forecast run
    """
    cycleStr = str(cycle)
    return [t for t in times if str(t)[:19] == cycleStr]
|
||||
|
||||
|
||||
def getAvailableTimes(request, refTimeOnly=False):
    """
    Get the times of available data to the request.

    Args:
        request: the IDataRequest to get data for
        refTimeOnly: optional, use True if only unique refTimes should be
            returned (without a forecastHr)

    Returns:
        a list of DataTimes
    """
    # Delegates to the router chosen at import time (JepRouter inside a
    # JVM, otherwise a ThriftClientRouter pointed at THRIFT_HOST).
    return router.getAvailableTimes(request, refTimeOnly)
|
||||
|
||||
|
||||
def getGridData(request, times=[]):
    """
    Gets the grid data that matches the request at the specified times. Each
    combination of parameter, level, and dataTime will be returned as a
    separate IGridData.

    Args:
        request: the IDataRequest to get data for
        times: a list of DataTimes, a TimeRange, or None if the data is time
            agnostic

    Returns:
        a list of IGridData
    """
    # NOTE(review): the mutable default [] is never modified here, so it is
    # harmless, but it cannot be changed to None without altering what the
    # router receives by default.
    return router.getGridData(request, times)
|
||||
|
||||
|
||||
def getGeometryData(request, times=[]):
    """
    Gets the geometry data that matches the request at the specified times.
    Each combination of geometry, level, and dataTime will be returned as a
    separate IGeometryData.

    Args:
        request: the IDataRequest to get data for
        times: a list of DataTimes, a TimeRange, or None if the data is time
            agnostic

    Returns:
        a list of IGeometryData
    """
    # NOTE(review): the mutable default [] is never modified here, so it is
    # harmless, but it cannot be changed to None without altering what the
    # router receives by default.
    return router.getGeometryData(request, times)
|
||||
|
||||
|
||||
def getAvailableLocationNames(request):
    """
    Find the location names that match the request, without requesting
    the actual data.

    Args:
        request: the request to find matching location names for

    Returns:
        a list of strings of available location names.
    """
    return router.getAvailableLocationNames(request)
|
||||
|
||||
|
||||
def getAvailableParameters(request):
    """
    Find the parameter names that match the request, without requesting
    the actual data.

    Args:
        request: the request to find matching parameter names for

    Returns:
        a list of strings of available parameter names.
    """
    return router.getAvailableParameters(request)
|
||||
|
||||
|
||||
def getAvailableLevels(request):
    """
    Find the levels that match the request, without requesting the
    actual data.

    Args:
        request: the request to find matching levels for

    Returns:
        a list of strings of available levels.
    """
    return router.getAvailableLevels(request)
|
||||
|
||||
|
||||
def getRequiredIdentifiers(request):
    """
    Look up the required identifiers for this request. These identifiers
    must be set on a request for the request of this datatype to succeed.

    Args:
        request: the request to find required identifiers for

    Returns:
        a list of strings of required identifiers
    """
    # Passing a datatype string here is deprecated; callers should pass
    # an IDataRequest instead.
    if str(request) == request:
        warnings.warn("Use getRequiredIdentifiers(IDataRequest) instead",
                      DeprecationWarning)
    return router.getRequiredIdentifiers(request)
|
||||
|
||||
|
||||
def getOptionalIdentifiers(request):
    """
    Look up the optional identifiers for this request.

    Args:
        request: the request to find optional identifiers for

    Returns:
        a list of strings of optional identifiers
    """
    # Passing a datatype string here is deprecated; callers should pass
    # an IDataRequest instead.
    if str(request) == request:
        warnings.warn("Use getOptionalIdentifiers(IDataRequest) instead",
                      DeprecationWarning)
    return router.getOptionalIdentifiers(request)
|
||||
|
||||
|
||||
def getIdentifierValues(request, identifierKey):
    """
    Look up the allowed values for a particular identifier on this
    datatype.

    Args:
        request: the request to find identifier values for
        identifierKey: the identifier to find values for

    Returns:
        a list of strings of allowed values for the specified identifier
    """
    return router.getIdentifierValues(request, identifierKey)
|
||||
|
||||
|
||||
def newDataRequest(datatype=None, **kwargs):
    """
    Create a new IDataRequest instance suitable for the runtime
    environment. All arguments are optional conveniences.

    Args:
        datatype: the datatype to create a request for
        parameters: a list of parameters to set on the request
        levels: a list of levels to set on the request
        locationNames: a list of locationNames to set on the request
        envelope: an envelope to limit the request
        kwargs: any leftover kwargs will be set as identifiers

    Returns:
        a new IDataRequest
    """
    return router.newDataRequest(datatype, **kwargs)
|
||||
|
||||
|
||||
def getSupportedDatatypes():
    """
    List the datatypes supported by the framework.

    Returns:
        a list of strings of supported datatypes
    """
    return router.getSupportedDatatypes()
|
||||
|
||||
|
||||
def changeEDEXHost(newHostName):
    """
    Point the Data Access Framework at a different EDEX host. Only works
    with the native Python client implementation; otherwise this raises
    TypeError.

    Args:
        newHostName: the EDEX host to connect to

    Raises:
        TypeError: when running via JepRouter, which cannot change hosts.
    """
    global THRIFT_HOST, router
    if not USING_NATIVE_THRIFT:
        raise TypeError("Cannot call changeEDEXHost when using JepRouter.")
    THRIFT_HOST = newHostName
    router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
|
||||
|
||||
|
||||
def setLazyLoadGridLatLon(lazyLoadGridLatLon):
    """
    Hint to the Data Access Framework whether grid lat/lon data should be
    loaded immediately or deferred until needed. This is purely a
    performance-tuning hint and does not change how the framework is
    used; implementations may ignore it.

    Set True when lat/lon information is unused or only conditionally
    needed; set False when all lat/lon data is guaranteed to be needed
    and it is preferable to pay the generation cost up front.

    Args:
        lazyLoadGridLatLon: Boolean value indicating whether to lazy load.
    """
    try:
        router.setLazyLoadGridLatLon(lazyLoadGridLatLon)
    except AttributeError:
        # Routers are not required to support this capability; ignore.
        pass
|
135
awips/dataaccess/DataNotificationLayer.py
Normal file
135
awips/dataaccess/DataNotificationLayer.py
Normal file
|
@ -0,0 +1,135 @@
|
|||
#
|
||||
# Published interface for retrieving data updates via awips.dataaccess package
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# May 26, 2016 2416 rjpeter Initial Creation.
|
||||
# Aug 1, 2016 2416 tgurney Finish implementation
|
||||
#
|
||||
#
|
||||
|
||||
"""
|
||||
Interface for the DAF's data notification feature, which allows continuous
|
||||
retrieval of new data as it is coming into the system.
|
||||
|
||||
There are two ways to access this feature:
|
||||
|
||||
1. The DataQueue module (awips.dataaccess.DataQueue) offers a collection that
|
||||
automatically fills up with new data as it receives notifications. See that
|
||||
module for more information.
|
||||
|
||||
2. Depending on the type of data you want, use either getGridDataUpdates() or
|
||||
getGeometryDataUpdates() in this module. Either one will give you back an
|
||||
object that will retrieve new data for you and will call a function you specify
|
||||
each time new data is received.
|
||||
|
||||
Example code follows. This example prints temperature as observed from KOMA
|
||||
each time a METAR is received from there.
|
||||
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.dataaccess import DataNotificationLayer as DNL
|
||||
|
||||
def process_obs(list_of_data):
|
||||
for item in list_of_data:
|
||||
print(item.getNumber('temperature'))
|
||||
|
||||
request = DAL.newDataRequest('obs')
|
||||
request.setParameters('temperature')
|
||||
request.setLocationNames('KOMA')
|
||||
|
||||
notifier = DNL.getGeometryDataUpdates(request)
|
||||
notifier.subscribe(process_obs)
|
||||
  # process_obs will be called with a list of data each time new data comes in
|
||||
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
from awips.dataaccess.PyGeometryNotification import PyGeometryNotification
|
||||
from awips.dataaccess.PyGridNotification import PyGridNotification
|
||||
|
||||
|
||||
THRIFT_HOST = "edex"
|
||||
|
||||
USING_NATIVE_THRIFT = False
|
||||
|
||||
JMS_HOST_PATTERN = re.compile('tcp://([^:]+):([0-9]+)')
|
||||
|
||||
if 'jep' in sys.modules:
|
||||
# intentionally do not catch if this fails to import, we want it to
|
||||
# be obvious that something is configured wrong when running from within
|
||||
# Java instead of allowing false confidence and fallback behavior
|
||||
import JepRouter
|
||||
router = JepRouter
|
||||
else:
|
||||
from awips.dataaccess import ThriftClientRouter
|
||||
router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
|
||||
USING_NATIVE_THRIFT = True
|
||||
|
||||
|
||||
def _getJmsConnectionInfo(notifFilterResponse):
    """
    Parse the JMS broker host/port out of a notification filter response.

    Args:
        notifFilterResponse: response whose getJmsConnectionInfo() returns
            a "tcp://host:port" connection string

    Returns:
        dict with 'host' and 'port' keys

    Raises:
        RuntimeError: when the server's connection string does not match
            the expected tcp://host:port form.
    """
    connectionString = notifFilterResponse.getJmsConnectionInfo()
    try:
        jmsHost, jmsPort = JMS_HOST_PATTERN.match(connectionString).groups()
    except AttributeError:
        # match() returned None, so .groups() blew up -> malformed string.
        raise RuntimeError('Got bad JMS connection info from server: ' + connectionString)
    return {'host': jmsHost, 'port': jmsPort}
|
||||
|
||||
|
||||
def getGridDataUpdates(request):
    """
    Build a notification object that receives updates to grid data.

    Args:
        request: the IDataRequest specifying the data you want to receive

    Returns:
        an update request object; listen for updates by calling its
        subscribe() method
    """
    response = router.getNotificationFilter(request)
    filterObj = response.getNotificationFilter()
    connectionInfo = _getJmsConnectionInfo(response)
    return PyGridNotification(request, filterObj,
                              requestHost=THRIFT_HOST, **connectionInfo)
|
||||
|
||||
|
||||
def getGeometryDataUpdates(request):
    """
    Build a notification object that receives updates to geometry data.

    Args:
        request: the IDataRequest specifying the data you want to receive

    Returns:
        an update request object; listen for updates by calling its
        subscribe() method
    """
    response = router.getNotificationFilter(request)
    filterObj = response.getNotificationFilter()
    connectionInfo = _getJmsConnectionInfo(response)
    return PyGeometryNotification(request, filterObj,
                                  requestHost=THRIFT_HOST, **connectionInfo)
|
||||
|
||||
|
||||
def changeEDEXHost(newHostName):
    """
    Point the Data Access Framework at a different EDEX host. Only works
    with the native Python client implementation; otherwise this raises
    TypeError.

    Args:
        newHostName: the EDEX host to connect to

    Raises:
        TypeError: when running via JepRouter, which cannot change hosts.
    """
    global THRIFT_HOST, router
    if not USING_NATIVE_THRIFT:
        raise TypeError("Cannot call changeEDEXHost when using JepRouter.")
    THRIFT_HOST = newHostName
    router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
|
190
awips/dataaccess/DataQueue.py
Normal file
190
awips/dataaccess/DataQueue.py
Normal file
|
@ -0,0 +1,190 @@
|
|||
#
|
||||
# Convenience class for using the DAF's notifications feature. This is a
|
||||
# collection that, once connected to EDEX by calling start(), fills with
|
||||
# data as notifications come in. Runs on a separate thread to allow
|
||||
# non-blocking data retrieval.
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 07/29/16 2416 tgurney Initial creation
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataNotificationLayer as DNL
|
||||
|
||||
import time
|
||||
from threading import Thread
|
||||
import sys
|
||||
|
||||
|
||||
if sys.version_info.major == 2:
|
||||
from Queue import Queue, Empty
|
||||
else: # Python 3 module renamed to 'queue'
|
||||
from queue import Queue, Empty
|
||||
|
||||
# Used to indicate a DataQueue that will produce geometry data.
|
||||
GEOMETRY = object()
|
||||
|
||||
# Used to indicate a DataQueue that will produce grid data.
|
||||
GRID = object()
|
||||
|
||||
# Default maximum queue size.
|
||||
_DEFAULT_MAXSIZE = 100
|
||||
|
||||
|
||||
class Closed(Exception):
    """Raised on attempts to get data from a DataQueue that was closed."""
    pass
|
||||
|
||||
|
||||
class DataQueue(object):

    """
    Convenience class for using the DAF's notifications feature. This is a
    collection that, once connected to EDEX by calling start(), fills with
    data as notifications come in.

    Example for getting obs data:

        from DataQueue import DataQueue, GEOMETRY
        request = DataAccessLayer.newDataRequest('obs')
        request.setParameters('temperature')
        request.setLocationNames('KOMA')
        q = DataQueue(GEOMETRY, request)
        q.start()
        for item in q:
            print(item.getNumber('temperature'))
    """

    def __init__(self, dtype, request, maxsize=_DEFAULT_MAXSIZE):
        """
        Create a new DataQueue.

        Args:
            dtype: Either GRID or GEOMETRY; must match the type of data
                requested.
            request: IDataRequest describing the data you want. It must at
                least have datatype set. All data produced will satisfy the
                constraints you specify.
            maxsize: Maximum number of data objects the queue can hold at
                one time. If the limit is reached, any data coming in after
                that will not appear until one or more items are removed
                using DataQueue.get().

        Raises:
            ValueError: if maxsize is not positive or dtype is neither
                GEOMETRY nor GRID.
        """
        # Validate with real exceptions rather than assert: asserts are
        # silently stripped when Python runs with -O.
        if maxsize <= 0:
            raise ValueError('maxsize must be a positive integer')
        if dtype not in (GEOMETRY, GRID):
            raise ValueError('dtype must be GEOMETRY or GRID')
        self._maxsize = maxsize
        self._queue = Queue(maxsize=maxsize)
        self._thread = None
        if dtype is GEOMETRY:
            self._notifier = DNL.getGeometryDataUpdates(request)
        elif dtype is GRID:
            self._notifier = DNL.getGridDataUpdates(request)

    def start(self):
        """Start listening for notifications and requesting data."""
        if self._thread is not None:
            # Already started
            return
        kwargs = {'callback': self._data_received}
        self._thread = Thread(target=self._notifier.subscribe, kwargs=kwargs)
        self._thread.daemon = True
        self._thread.start()
        timer = 0
        while not self._notifier.subscribed:
            time.sleep(0.1)
            timer += 1
            if timer >= 100:  # ten seconds
                raise RuntimeError('timed out when attempting to subscribe')

    def _data_received(self, data):
        # Notifications may deliver single items or lists of items; flatten
        # one level so the queue only ever holds individual data objects.
        for d in data:
            if not isinstance(d, list):
                d = [d]
            for item in d:
                self._queue.put(item)

    def get(self, block=True, timeout=None):
        """
        Get and return the next available data object. By default, if there
        is no data yet available, this method will not return until data
        becomes available.

        Args:
            block: Specifies behavior when the queue is empty. If True, wait
                until an item is available before returning (the default).
                If False, return None immediately if the queue is empty.
            timeout: If block is True, wait this many seconds, and return
                None if data is not received in that time.

        Returns:
            IData, or None when no item was available.

        Raises:
            Closed: if the queue has been closed.
        """
        if self.closed:
            raise Closed
        try:
            return self._queue.get(block, timeout)
        except Empty:
            return None

    def get_all(self):
        """
        Get all data waiting for processing, in a single list. Always
        returns immediately. Returns an empty list if no data has arrived
        yet.

        Returns:
            List of IData
        """
        data = []
        for _ in range(self._maxsize):
            next_item = self.get(False)
            if next_item is None:
                break
            data.append(next_item)
        return data

    def close(self):
        """Close the queue. May not be re-opened after closing."""
        if not self.closed:
            self._notifier.close()
            # Guard against a listener that reports subscribed before
            # start() was ever called, in which case there is no thread.
            if self._thread is not None:
                self._thread.join()

    def qsize(self):
        """Return number of items in the queue."""
        return self._queue.qsize()

    def empty(self):
        """Return True if the queue is empty."""
        return self._queue.empty()

    def full(self):
        """Return True if the queue is full."""
        return self._queue.full()

    @property
    def closed(self):
        """True if the queue has been closed."""
        return not self._notifier.subscribed

    @property
    def maxsize(self):
        """
        Maximum number of data objects the queue can hold at one time.
        If this limit is reached, any data coming in after that will not
        appear until one or more items are removed using get().
        """
        return self._maxsize

    def __iter__(self):
        # Iteration only makes sense after start(); yields until closed.
        if self._thread is not None:
            while not self.closed:
                yield self.get()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *unused):
        self.close()
|
231
awips/dataaccess/ModelSounding.py
Normal file
231
awips/dataaccess/ModelSounding.py
Normal file
|
@ -0,0 +1,231 @@
|
|||
#
|
||||
# Classes for retrieving soundings based on gridded data from the Data Access
|
||||
# Framework
|
||||
#
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/24/15 #4480 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataAccessLayer
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level
|
||||
from shapely.geometry import Point
|
||||
|
||||
|
||||
def getSounding(modelName, weatherElements, levels, samplePoint, timeRange=None):
    """
    Perform a series of Data Access Framework requests and assemble the
    results into a sounding object.

    Args:
        modelName: the grid model datasetid to use as the basis of the
            sounding.
        weatherElements: a list of parameters to return in the sounding.
        levels: a list of levels to sample the given weather elements at
        samplePoint: a lat/lon pair to perform the sampling of data at.
        timeRange: (optional) a list of times, or a TimeRange, specifying
            which forecast hours to use. Defaults to all forecast hours.

    Returns:
        A _SoundingCube instance, which acts as a 3-tiered dictionary keyed
        by DataTime, then by level, and finally by weather element. Returns
        None-like empty cube when no data matches the request parameters.
    """
    (locNames, params, lvls, env, times) = \
        __sanitizeInputs(modelName, weatherElements, levels, samplePoint, timeRange)

    req = DataAccessLayer.newDataRequest(datatype='grid',
                                         locationNames=locNames,
                                         parameters=params,
                                         levels=lvls,
                                         envelope=env)
    geometryData = DataAccessLayer.getGeometryData(req, times)
    return _SoundingCube(geometryData)
|
||||
|
||||
|
||||
def changeEDEXHost(host):
    """
    Point the underlying Data Access Framework at a different EDEX host.

    Args:
        host: the EDEX host to connect to; falsy values are ignored.
    """
    if not host:
        return
    DataAccessLayer.changeEDEXHost(str(host))
|
||||
|
||||
|
||||
def __sanitizeInputs(modelName, weatherElements, levels, samplePoint, timeRange):
    """Normalize raw getSounding() arguments into DAF-request-ready values."""
    return ([str(modelName)],
            __buildStringList(weatherElements),
            __buildStringList(levels),
            Point(samplePoint),
            timeRange)
|
||||
|
||||
|
||||
def __buildStringList(param):
    """Coerce a scalar or non-string iterable into a list of strings."""
    items = param if __notStringIter(param) else [param]
    return [str(item) for item in items]
|
||||
|
||||
|
||||
def __notStringIter(iterable):
    """
    Tell whether the argument is an iterable other than a string.

    Args:
        iterable: any object

    Returns:
        True when the object is iterable and not a str; False for strings
        and for non-iterable objects. (Previously strings fell through and
        implicitly returned None; False is equivalent for the truthiness
        checks callers perform, but explicit.)
    """
    if isinstance(iterable, str):
        return False
    try:
        iter(iterable)
        return True
    except TypeError:
        return False
|
||||
|
||||
|
||||
class _SoundingCube(object):
    """
    Top-level sounding object returned by ModelSounding.getSounding.

    Acts as a 3-tiered dict keyed by time, then level, then parameter
    name. times() returns all valid top-level keys.
    """

    def __init__(self, geometryDataObjects):
        self._dataDict = {}
        self._sortedTimes = []
        for geoData in (geometryDataObjects or []):
            dataTime = geoData.getDataTime()
            level = geoData.getLevel()
            for param in geoData.getParameters():
                self.__addItem(param, dataTime, level, geoData.getNumber(param))

    def __addItem(self, parameter, dataTime, level, value):
        # Create the per-time layer on first sight of this DataTime.
        timeLayer = self._dataDict.setdefault(dataTime, _SoundingTimeLayer(dataTime))
        timeLayer._addItem(parameter, level, value)
        if dataTime not in self._sortedTimes:
            self._sortedTimes.append(dataTime)
            self._sortedTimes.sort()

    def __getitem__(self, key):
        return self._dataDict[key]

    def __len__(self):
        return len(self._dataDict)

    def times(self):
        """
        Return the valid times for this sounding.

        Returns:
            A list containing the valid DataTimes for this sounding in order.
        """
        return self._sortedTimes
|
||||
|
||||
|
||||
class _SoundingTimeLayer(object):
    """
    Second-level sounding object returned by ModelSounding.getSounding.

    Acts as a 2-tiered dict keyed by level, then parameter name.
    levels() returns all valid keys; time() returns this layer's DataTime.
    """

    def __init__(self, dataTime):
        self._dataTime = dataTime
        self._dataDict = {}

    def _addItem(self, parameter, level, value):
        # Levels are keyed by their string form.
        levelKey = str(level)
        layer = self._dataDict.setdefault(
            levelKey, _SoundingTimeAndLevelLayer(self._dataTime, levelKey))
        layer._addItem(parameter, value)

    def __getitem__(self, key):
        levelKey = str(key)
        if levelKey not in self._dataDict:
            raise KeyError("Level " + str(key) + " is not a valid level for this sounding.")
        return self._dataDict[levelKey]

    def __len__(self):
        return len(self._dataDict)

    def time(self):
        """
        Return the DataTime for this sounding cube layer.

        Returns:
            The DataTime for this sounding layer.
        """
        return self._dataTime

    def levels(self):
        """
        Return the valid levels for this sounding.

        Returns:
            A list containing the valid levels for this sounding, ordered
            from closest to the surface to highest above it.
        """
        ordered = sorted(Level(levelKey) for levelKey in self._dataDict)
        return [str(lvl) for lvl in ordered]
|
||||
|
||||
|
||||
class _SoundingTimeAndLevelLayer(object):
    """
    Bottom-level sounding object returned by ModelSounding.getSounding.

    Acts as a dict keyed by parameter name for one (time, level) pair.
    parameters() lists valid keys; time() and level() identify the layer.
    """

    def __init__(self, time, level):
        self._time = time
        self._level = level
        self._parameters = {}

    def _addItem(self, parameter, value):
        self._parameters[parameter] = value

    def __getitem__(self, key):
        return self._parameters[key]

    def __len__(self):
        return len(self._parameters)

    def level(self):
        """
        Return the level for this sounding cube layer.

        Returns:
            The level for this sounding layer.
        """
        return self._level

    def parameters(self):
        """
        Return the valid parameters for this sounding.

        Returns:
            A list containing the valid parameter names.
        """
        return list(self._parameters.keys())

    def time(self):
        """
        Return the DataTime for this sounding cube layer.

        Returns:
            The DataTime for this sounding layer.
        """
        return self._time
|
44
awips/dataaccess/PyData.py
Normal file
44
awips/dataaccess/PyData.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
#
|
||||
# Implements IData for use by native Python clients to the Data Access
|
||||
# Framework.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/03/13 dgilling Initial Creation.
|
||||
# 10/05/18 mjames@ucar Encode/decode attribute names.
|
||||
#
|
||||
#
|
||||
|
||||
from awips.dataaccess import IData
|
||||
import six
|
||||
|
||||
|
||||
class PyData(IData):
    """
    IData implementation for native Python clients of the Data Access
    Framework. Snapshots the record's time, level, location name, and
    attribute map at construction.
    """

    def __init__(self, dataRecord):
        self.__time = dataRecord.getTime()
        self.__level = dataRecord.getLevel()
        self.__locationName = dataRecord.getLocationName()
        self.__attributes = dataRecord.getAttributes()

    def getAttribute(self, key):
        return self.__attributes[key]

    def getAttributes(self):
        return self.__attributes.keys()

    def getDataTime(self):
        return self.__time

    def getLevel(self):
        level = self.__level
        # Under Python 3 the level may arrive as bytes; hand back text.
        if six.PY2 or isinstance(level, str):
            return level
        return level.decode('utf-8')

    def getLocationName(self):
        return self.__locationName
|
81
awips/dataaccess/PyGeometryData.py
Normal file
81
awips/dataaccess/PyGeometryData.py
Normal file
|
@ -0,0 +1,81 @@
|
|||
#
|
||||
# Implements IGeometryData for use by native Python clients to the Data Access
|
||||
# Framework.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/03/13 dgilling Initial Creation.
|
||||
# 01/06/14 2537 bsteffen Share geometry WKT.
|
||||
# 03/19/14 2882 dgilling Raise an exception when getNumber()
|
||||
# is called for data that is not a
|
||||
# numeric Type.
|
||||
# 06/09/16 5574 mapeters Handle 'SHORT' type in getNumber().
|
||||
# 10/05/18 mjames@ucar Encode/decode string, number val, and type
|
||||
#
|
||||
#
|
||||
|
||||
from awips.dataaccess import IGeometryData
|
||||
from awips.dataaccess import PyData
|
||||
import six
|
||||
|
||||
|
||||
class PyGeometryData(IGeometryData, PyData.PyData):
    """
    IGeometryData implementation for native Python clients of the Data
    Access Framework. Wraps a geometry data record plus its (shared)
    geometry object and exposes typed accessors for parameter values.
    """

    def __init__(self, geoDataRecord, geometry):
        PyData.PyData.__init__(self, geoDataRecord)
        self.__geometry = geometry
        # Maps parameter key -> (value, type string, unit string).
        # Keys are bytes under Python 3; accessors encode lookups.
        self.__dataMap = {}
        tempDataMap = geoDataRecord.getDataMap()
        for key, value in list(tempDataMap.items()):
            self.__dataMap[key] = (value[0], value[1], value[2])

    def getGeometry(self):
        return self.__geometry

    def getParameters(self):
        """Return the parameter names as str on both Python 2 and 3."""
        if six.PY2:
            return list(self.__dataMap.keys())
        return [x.decode('utf-8') for x in list(self.__dataMap.keys())]

    def __rawValue(self, param):
        # Shared lookup: map keys are str under Py2, bytes under Py3.
        if six.PY2:
            return self.__dataMap[param][0]
        return self.__dataMap[param.encode('utf-8')][0]

    def getString(self, param):
        """Return the value for param as a str (decoding bytes on Py3)."""
        if six.PY2:
            return self.__dataMap[param][0]
        value = self.__rawValue(param)
        if isinstance(value, bytes):
            return str(value.decode('utf-8'))
        return str(value)

    def getNumber(self, param):
        """
        Return the value for param as an int or float per its stored type.

        Raises:
            TypeError: if the stored type is not numeric.
        """
        t = self.getType(param)
        value = self.__rawValue(param)
        # Collapse the duplicated FLOAT/DOUBLE branches and or-chains into
        # tuple membership tests.
        if t in ('INT', 'SHORT', 'LONG'):
            return int(value)
        if t in ('FLOAT', 'DOUBLE'):
            return float(value)
        raise TypeError("Data for parameter " + param + " is not a numeric type.")

    def getUnit(self, param):
        """Return the unit string for param, or None if unitless."""
        if six.PY2:
            return self.__dataMap[param][2]
        unit = self.__dataMap[param.encode('utf-8')][2]
        if unit is not None:
            return unit.decode('utf-8')
        return unit

    def getType(self, param):
        """Return the stored type string for param (e.g. 'INT', 'DOUBLE')."""
        if six.PY2:
            return self.__dataMap[param][1]
        datatype = self.__dataMap[param.encode('utf-8')][1]
        if datatype is not None:
            return datatype.decode('utf-8')
        return datatype
|
35
awips/dataaccess/PyGeometryNotification.py
Normal file
35
awips/dataaccess/PyGeometryNotification.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
#
|
||||
# Notification object that produces geometry data
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 07/22/16 2416 tgurney Initial creation
|
||||
# 09/07/17 6175 tgurney Override messageReceived
|
||||
#
|
||||
|
||||
import traceback
|
||||
import dynamicserialize
|
||||
from awips.dataaccess.PyNotification import PyNotification
|
||||
|
||||
|
||||
class PyGeometryNotification(PyNotification):
    """Notification listener that retrieves geometry data for accepted URIs."""

    def messageReceived(self, msg):
        uriMsg = dynamicserialize.deserialize(msg)
        # Collect the distinct DataTimes of every URI our filter accepts.
        acceptedTimes = {self.getDataTime(uri)
                         for uri in uriMsg.getDataURIs()
                         if self.notificationFilter.accept(uri)}
        if acceptedTimes:
            try:
                data = self.getData(self.request, list(acceptedTimes))
                self.callback(data)
            except ValueError:
                traceback.print_exc()

    def getData(self, request, dataTimes):
        """Delegate to the DAF geometry retrieval for the request/times."""
        return self.DAL.getGeometryData(request, dataTimes)
|
64
awips/dataaccess/PyGridData.py
Normal file
64
awips/dataaccess/PyGridData.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
#
|
||||
# Implements IGridData for use by native Python clients to the Data Access
|
||||
# Framework.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/03/13 #2023 dgilling Initial Creation.
|
||||
# 10/13/16 #5916 bsteffen Correct grid shape, allow lat/lon
|
||||
# 11/10/16 #5900 bsteffen Correct grid shape
|
||||
# to be requested by a delegate
|
||||
#
|
||||
#
|
||||
|
||||
import numpy
|
||||
import warnings
|
||||
import six
|
||||
|
||||
from awips.dataaccess import IGridData
|
||||
from awips.dataaccess import PyData
|
||||
|
||||
NO_UNIT_CONVERT_WARNING = """
|
||||
The ability to unit convert grid data is not currently available in this version of the Data Access Framework.
|
||||
"""
|
||||
|
||||
|
||||
class PyGridData(IGridData, PyData.PyData):
    """
    IGridData implementation for native Python clients of the Data Access
    Framework. Holds a (ny, nx) array of values plus lat/lon coordinates,
    which may be supplied eagerly (latLonGrid) or lazily (latLonDelegate).
    """

    def __init__(self, gridDataRecord, nx, ny, latLonGrid=None, latLonDelegate=None):
        PyData.PyData.__init__(self, gridDataRecord)
        # Removed the no-op self-assignments `nx = nx` / `ny = ny` that the
        # original carried; the dimensions are only needed for the reshape.
        self.__parameter = gridDataRecord.getParameter()
        self.__unit = gridDataRecord.getUnit()
        # Raw values arrive flat; reshape to (ny, nx), i.e. row-major rows
        # of length nx.
        self.__gridData = numpy.reshape(numpy.array(gridDataRecord.getGridData()), (ny, nx))
        self.__latLonGrid = latLonGrid
        self.__latLonDelegate = latLonDelegate

    def getParameter(self):
        return self.__parameter

    def getUnit(self):
        """Return the unit as text (decoding bytes on Python 3)."""
        if six.PY2:
            return self.__unit
        if self.__unit is not None and not isinstance(self.__unit, str):
            return self.__unit.decode('utf-8')
        return self.__unit

    def getRawData(self, unit=None):
        """Return the (ny, nx) data array; unit conversion is unsupported."""
        # TODO: Find a proper python library that deals will with numpy and
        # javax.measure style unit strings and hook it in to this method to
        # allow end-users to perform unit conversion for grid data.
        if unit is not None:
            warnings.warn(NO_UNIT_CONVERT_WARNING, stacklevel=2)
        return self.__gridData

    def getLatLonCoords(self):
        """Return the lat/lon grid, resolving a lazy delegate if set."""
        if self.__latLonGrid is not None:
            return self.__latLonGrid
        elif self.__latLonDelegate is not None:
            return self.__latLonDelegate()
        return self.__latLonGrid
|
40
awips/dataaccess/PyGridNotification.py
Normal file
40
awips/dataaccess/PyGridNotification.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
#
|
||||
# Notification object that produces grid data
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/03/16 2416 rjpeter Initial Creation.
|
||||
# 09/06/17 6175 tgurney Override messageReceived
|
||||
#
|
||||
|
||||
import dynamicserialize
|
||||
import traceback
|
||||
from awips.dataaccess.PyNotification import PyNotification
|
||||
|
||||
|
||||
class PyGridNotification(PyNotification):
    """Notification subscriber that retrieves grid data for accepted dataURIs."""

    def messageReceived(self, msg):
        """Deserialize a dataURI notification message and fetch matching data."""
        uriMessage = dynamicserialize.deserialize(msg)
        for uri in uriMessage.getDataURIs():
            if not self.notificationFilter.accept(uri):
                continue
            try:
                # Requesting by dataURI performs better than requesting by
                # datatime: only the parameter the notification was received
                # for is fetched, instead of that parameter plus all previous
                # parameters for the same forecast hour.
                # TODO: This utterly fails for derived requests
                uriRequest = self.DAL.newDataRequest(self.request.getDatatype())
                uriRequest.addIdentifier("dataURI", uri)
                uriRequest.setParameters(self.request.getParameters())
                self.callback(self.getData(uriRequest, []))
            except ValueError:
                traceback.print_exc()

    def getData(self, request, dataTimes):
        """Retrieve grid data for the given request and list of times."""
        return self.DAL.getGridData(request, dataTimes)
|
85
awips/dataaccess/PyNotification.py
Normal file
85
awips/dataaccess/PyNotification.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
#
|
||||
# Implements IData for use by native Python clients to the Data Access
|
||||
# Framework.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# Jun 22, 2016 2416 rjpeter Initial creation
|
||||
# Jul 22, 2016 2416 tgurney Finish implementation
|
||||
# Sep 07, 2017 6175 tgurney Override messageReceived in subclasses
|
||||
#
|
||||
|
||||
from six import with_metaclass
|
||||
import abc
|
||||
|
||||
from awips.dataaccess import DataAccessLayer
|
||||
from awips.dataaccess import INotificationSubscriber
|
||||
from awips.QpidSubscriber import QpidSubscriber
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime
|
||||
|
||||
|
||||
class PyNotification(with_metaclass(abc.ABCMeta, INotificationSubscriber)):
    """
    Listens for notifications of new data and retrieves the data matching
    the supplied filtering criteria.
    """

    def __init__(self, request, notificationFilter, host='localhost',
                 port=5672, requestHost='localhost'):
        self.DAL = DataAccessLayer
        self.DAL.changeEDEXHost(requestHost)
        self.request = request
        self.notificationFilter = notificationFilter
        # Subscribe to the EDEX alerts topic; payloads arrive compressed.
        self.__topicSubscriber = QpidSubscriber(host, port, decompress=True)
        self.__topicName = "edex.alerts"
        self.callback = None

    def subscribe(self, callback):
        """
        Start listening for notifications; blocks until close() is called
        from another thread.

        Args:
            callback: callable invoked with a list of received data objects,
                once for each request made for data.
        """
        assert hasattr(callback, '__call__'), 'callback arg must be callable'
        self.callback = callback
        # Does not return until the subscription ends.
        self.__topicSubscriber.topicSubscribe(self.__topicName, self.messageReceived)

    def close(self):
        """Stop listening and release the underlying topic subscription."""
        if self.__topicSubscriber.subscribed:
            self.__topicSubscriber.close()

    def getDataTime(self, dataURI):
        """Extract and return the DataTime component of a dataURI string."""
        return DataTime(dataURI.split('/')[2])

    @abc.abstractmethod
    def messageReceived(self, msg):
        """Handle one message received from QpidSubscriber.

        Implementations must invoke self.callback once for each request
        made for data.
        """
        pass

    @abc.abstractmethod
    def getData(self, request, dataTimes):
        """
        Retrieve and return data.

        Args:
            request: IDataRequest to send to the server
            dataTimes: list of data times

        Returns:
            list of IData
        """
        pass

    @property
    def subscribed(self):
        """True if currently subscribed to notifications."""
        return self.__topicSubscriber.queueStarted
|
257
awips/dataaccess/ThriftClientRouter.py
Normal file
257
awips/dataaccess/ThriftClientRouter.py
Normal file
|
@ -0,0 +1,257 @@
|
|||
#
|
||||
# Routes requests to the Data Access Framework through Python Thrift.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 05/21/13 2023 dgilling Initial Creation.
|
||||
# 01/06/14 2537 bsteffen Share geometry WKT.
|
||||
# 03/03/14 2673 bsteffen Add ability to query only ref times.
|
||||
# 07/22/14 3185 njensen Added optional/default args to newDataRequest
|
||||
# 07/23/14 3185 njensen Added new methods
|
||||
# 07/30/14 3185 njensen Renamed valid identifiers to optional
|
||||
# 06/30/15 4569 nabowle Use hex WKB for geometries.
|
||||
# 04/13/15 5379 tgurney Add getIdentifierValues()
|
||||
# 06/01/16 5587 tgurney Add new signatures for
|
||||
# getRequiredIdentifiers() and
|
||||
# getOptionalIdentifiers()
|
||||
# 08/01/16 2416 tgurney Add getNotificationFilter()
|
||||
# 10/13/16 5916 bsteffen Correct grid shape, allow lazy grid lat/lon
|
||||
# 10/26/16 5919 njensen Speed up geometry creation in getGeometryData()
|
||||
#
|
||||
|
||||
import numpy
|
||||
import six
|
||||
import shapely.wkb
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultDataRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLocationNamesRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGeometryDataRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridDataRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridLatLonRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableParametersRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLevelsRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetRequiredIdentifiersRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetOptionalIdentifiersRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetIdentifierValuesRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetSupportedDatatypesRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetNotificationFilterRequest
|
||||
|
||||
from awips import ThriftClient
|
||||
from awips.dataaccess import PyGeometryData
|
||||
from awips.dataaccess import PyGridData
|
||||
|
||||
|
||||
class LazyGridLatLon(object):
    """Callable that fetches a grid's (lons, lats) arrays on demand and caches them."""

    def __init__(self, client, nx, ny, envelope, crsWkt):
        self._latLonGrid = None
        self._client = client
        self._request = GetGridLatLonRequest()
        self._request.setNx(nx)
        self._request.setNy(ny)
        self._request.setEnvelope(envelope)
        self._request.setCrsWkt(crsWkt)

    def __call__(self):
        # Cache internally so that multiple GridData objects sharing this
        # delegate also share a single request for the lat/lon information.
        if self._latLonGrid is None:
            response = self._client.sendRequest(self._request)
            shape = (response.getNy(), response.getNx())
            latData = numpy.reshape(numpy.array(response.getLats()), shape)
            lonData = numpy.reshape(numpy.array(response.getLons()), shape)
            self._latLonGrid = (lonData, latData)
        return self._latLonGrid
|
||||
|
||||
|
||||
class ThriftClientRouter(object):
    """Routes Data Access Framework requests to EDEX through Python Thrift."""

    def __init__(self, host='localhost'):
        self._client = ThriftClient.ThriftClient(host)
        self._lazyLoadGridLatLon = False

    @staticmethod
    def _decodeStringList(response):
        """Decode a list of byte strings to str on Python 3.

        On Python 2 (or for a None response) the value is returned unchanged.
        Centralizes the decode logic previously duplicated in every
        string-list-returning method.
        """
        if six.PY2:
            return response
        if response is not None:
            return [x.decode('utf-8') for x in response]
        return response

    def setLazyLoadGridLatLon(self, lazyLoadGridLatLon):
        # When True, lat/lon grids are fetched on demand through a
        # LazyGridLatLon delegate instead of with every grid response.
        self._lazyLoadGridLatLon = lazyLoadGridLatLon

    def getAvailableTimes(self, request, refTimeOnly):
        """Return the available times (or only reference times) for a request."""
        timesRequest = GetAvailableTimesRequest()
        timesRequest.setRequestParameters(request)
        timesRequest.setRefTimeOnly(refTimeOnly)
        response = self._client.sendRequest(timesRequest)
        return response

    def getGridData(self, request, times):
        """Return a list of PyGridData for the request at the given times.

        Args:
            request: IDataRequest describing the grid data wanted
            times: either an iterable of DataTime objects or a single
                TimeRange
        """
        gridDataRequest = GetGridDataRequest()
        gridDataRequest.setIncludeLatLonData(not self._lazyLoadGridLatLon)
        gridDataRequest.setRequestParameters(request)
        # If we have an iterable times instance, the user asked for grid data
        # with a list of DataTime objects; otherwise we assume a single
        # TimeRange was meant for the request.
        try:
            iter(times)
            gridDataRequest.setRequestedTimes(times)
        except TypeError:
            gridDataRequest.setRequestedPeriod(times)
        response = self._client.sendRequest(gridDataRequest)

        # Build per-location (nx, ny, latlon-or-delegate) tuples.
        locSpecificData = {}
        locNames = list(response.getSiteNxValues().keys())
        for location in locNames:
            nx = response.getSiteNxValues()[location]
            ny = response.getSiteNyValues()[location]
            if self._lazyLoadGridLatLon:
                envelope = response.getSiteEnvelopes()[location]
                crsWkt = response.getSiteCrsWkt()[location]
                delegate = LazyGridLatLon(
                    self._client, nx, ny, envelope, crsWkt)
                locSpecificData[location] = (nx, ny, delegate)
            else:
                latData = numpy.reshape(numpy.array(
                    response.getSiteLatGrids()[location]), (ny, nx))
                lonData = numpy.reshape(numpy.array(
                    response.getSiteLonGrids()[location]), (ny, nx))
                locSpecificData[location] = (nx, ny, (lonData, latData))
        retVal = []
        for gridDataRecord in response.getGridData():
            locationName = gridDataRecord.getLocationName()
            if locationName is not None:
                if six.PY2:
                    locData = locSpecificData[locationName]
                else:
                    # Python 3: map keys are byte strings.
                    locData = locSpecificData[locationName.encode('utf-8')]
            else:
                locData = locSpecificData[locationName]
            if self._lazyLoadGridLatLon:
                retVal.append(PyGridData.PyGridData(
                    gridDataRecord, locData[0], locData[1],
                    latLonDelegate=locData[2]))
            else:
                retVal.append(PyGridData.PyGridData(
                    gridDataRecord, locData[0], locData[1], locData[2]))
        return retVal

    def getGeometryData(self, request, times):
        """Return a list of PyGeometryData for the request at the given times."""
        geoDataRequest = GetGeometryDataRequest()
        geoDataRequest.setRequestParameters(request)
        # Same iterable-vs-TimeRange convention as getGridData().
        try:
            iter(times)
            geoDataRequest.setRequestedTimes(times)
        except TypeError:
            geoDataRequest.setRequestedPeriod(times)
        response = self._client.sendRequest(geoDataRequest)
        geometries = []
        for wkb in response.getGeometryWKBs():
            # The wkb is a numpy.ndarray of dtype int8; convert it to a byte
            # string and load it.  (tobytes() is the non-deprecated spelling
            # of ndarray.tostring().)
            geometries.append(shapely.wkb.loads(wkb.tobytes()))

        retVal = []
        for geoDataRecord in response.getGeoData():
            geom = geometries[geoDataRecord.getGeometryWKBindex()]
            retVal.append(PyGeometryData.PyGeometryData(geoDataRecord, geom))
        return retVal

    def getAvailableLocationNames(self, request):
        """Return the available location names for the request."""
        locNamesRequest = GetAvailableLocationNamesRequest()
        locNamesRequest.setRequestParameters(request)
        response = self._client.sendRequest(locNamesRequest)
        return self._decodeStringList(response)

    def getAvailableParameters(self, request):
        """Return the available parameter names for the request."""
        paramReq = GetAvailableParametersRequest()
        paramReq.setRequestParameters(request)
        response = self._client.sendRequest(paramReq)
        return self._decodeStringList(response)

    def getAvailableLevels(self, request):
        """Return the available levels for the request."""
        levelReq = GetAvailableLevelsRequest()
        levelReq.setRequestParameters(request)
        response = self._client.sendRequest(levelReq)
        return response

    def getRequiredIdentifiers(self, request):
        """Return identifiers that must be set on requests of this datatype."""
        if str(request) == request:
            # Handle old version getRequiredIdentifiers(str)
            request = self.newDataRequest(request)
        idReq = GetRequiredIdentifiersRequest()
        idReq.setRequest(request)
        response = self._client.sendRequest(idReq)
        return self._decodeStringList(response)

    def getOptionalIdentifiers(self, request):
        """Return identifiers that may optionally be set on requests."""
        if str(request) == request:
            # Handle old version getOptionalIdentifiers(str)
            request = self.newDataRequest(request)
        idReq = GetOptionalIdentifiersRequest()
        idReq.setRequest(request)
        response = self._client.sendRequest(idReq)
        return self._decodeStringList(response)

    def getIdentifierValues(self, request, identifierKey):
        """Return the allowed values for one identifier of the request."""
        idValReq = GetIdentifierValuesRequest()
        idValReq.setIdentifierKey(identifierKey)
        idValReq.setRequestParameters(request)
        response = self._client.sendRequest(idValReq)
        return self._decodeStringList(response)

    def newDataRequest(self, datatype, parameters=None, levels=None,
                       locationNames=None, envelope=None, **kwargs):
        """Build a DefaultDataRequest.

        Args:
            datatype: datatype string, e.g. "grid"
            parameters: optional list of parameter names
            levels: optional list of levels
            locationNames: optional list of location names
            envelope: optional shapely geometry constraining the request
            **kwargs: any leftover keyword args become request identifiers

        Returns:
            a populated DefaultDataRequest

        Note: the previous signature used mutable list defaults ([]), a
        Python anti-pattern; None defaults behave identically here because
        empty lists were never acted on.
        """
        req = DefaultDataRequest()
        if datatype:
            req.setDatatype(datatype)
        if parameters:
            req.setParameters(*parameters)
        if levels:
            req.setLevels(*levels)
        if locationNames:
            req.setLocationNames(*locationNames)
        if envelope:
            req.setEnvelope(envelope)
        if kwargs:
            # any args leftover are assumed to be identifiers
            req.identifiers = kwargs
        return req

    def getSupportedDatatypes(self):
        """Return the datatype names supported by the server."""
        response = self._client.sendRequest(GetSupportedDatatypesRequest())
        return self._decodeStringList(response)

    def getNotificationFilter(self, request):
        """Return the server-built notification filter for the request."""
        notifReq = GetNotificationFilterRequest()
        notifReq.setRequestParameters(request)
        response = self._client.sendRequest(notifReq)
        return response
|
370
awips/dataaccess/__init__.py
Normal file
370
awips/dataaccess/__init__.py
Normal file
|
@ -0,0 +1,370 @@
|
|||
#
|
||||
# __init__.py for awips.dataaccess package
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 12/10/12 njensen Initial Creation.
|
||||
# Feb 14, 2013 1614 bsteffen refactor data access framework
|
||||
# to use single request.
|
||||
# Apr 09, 2013 1871 njensen Add doc strings
|
||||
# Jun 03, 2013 2023 dgilling Add getAttributes to IData, add
|
||||
# getLatLonGrids() to IGridData.
|
||||
# Aug 01, 2016 2416 tgurney Add INotificationSubscriber
|
||||
# and INotificationFilter
|
||||
#
|
||||
#
|
||||
|
||||
# Public API of the awips.dataaccess package.
# ('IGeometryData' was previously listed twice; the duplicate is removed.)
__all__ = [
    'IData',
    'IDataRequest',
    'IGeometryData',
    'IGridData',
    'INotificationFilter',
    'INotificationSubscriber'
]
|
||||
|
||||
import abc
|
||||
from six import with_metaclass
|
||||
|
||||
|
||||
class IDataRequest(with_metaclass(abc.ABCMeta, object)):
    """
    A request for data, to be submitted to the DataAccessLayer for retrieval.
    """

    @abc.abstractmethod
    def setDatatype(self, datatype):
        """
        Set the datatype of this request.

        Args:
            datatype: datatype string, such as "grid", "radar", "gfe", "obs"
        """
        pass

    @abc.abstractmethod
    def addIdentifier(self, key, value):
        """
        Add an identifier to this request.  Identifiers are specific to the
        datatype being requested.

        Args:
            key: string key of the identifier
            value: value of the identifier
        """
        pass

    @abc.abstractmethod
    def setParameters(self, params):
        """
        Set the parameters of data to request.

        Args:
            params: list of parameter name strings to request
        """
        pass

    @abc.abstractmethod
    def setLevels(self, levels):
        """
        Set the levels of data to request.  Not every datatype supports levels.

        Args:
            levels: list of level abbreviation strings to request
        """
        pass

    @abc.abstractmethod
    def setEnvelope(self, env):
        """
        Set the envelope of this request.  When the datatype factory supports
        it, returned data is constrained to what lies within the envelope.

        Args:
            env: a shapely geometry
        """
        pass

    @abc.abstractmethod
    def setLocationNames(self, locationNames):
        """
        Set the location names of this request.

        Args:
            locationNames: list of location name strings to request
        """
        pass

    @abc.abstractmethod
    def getDatatype(self):
        """
        Returns:
            the datatype set on this request
        """
        pass

    @abc.abstractmethod
    def getIdentifiers(self):
        """
        Returns:
            a dictionary of the identifiers set on this request
        """
        pass

    @abc.abstractmethod
    def getLevels(self):
        """
        Returns:
            a list of level strings set on this request
        """
        pass

    @abc.abstractmethod
    def getLocationNames(self):
        """
        Returns:
            a list of location name strings set on this request
        """
        pass

    @abc.abstractmethod
    def getEnvelope(self):
        """
        Returns:
            the rectangular shapely geometry set on this request
        """
        pass
|
||||
|
||||
|
||||
class IData(with_metaclass(abc.ABCMeta, object)):
    """
    A unit of data returned from the DataAccessLayer.
    """

    @abc.abstractmethod
    def getAttribute(self, key):
        """
        Look up one attribute of the data.

        Args:
            key: key of the attribute

        Returns:
            the attribute's value
        """
        pass

    @abc.abstractmethod
    def getAttributes(self):
        """
        Returns:
            a list of the valid attribute name strings for the data
        """
        pass

    @abc.abstractmethod
    def getDataTime(self):
        """
        Returns:
            the data time of the data, or None if none is associated
        """
        pass

    @abc.abstractmethod
    def getLevel(self):
        """
        Returns:
            the level of the data, or None if none is associated
        """
        pass

    @abc.abstractmethod
    def getLocationName(self, param):
        """
        Returns:
            the location name of the data, or None if none is associated

        NOTE(review): the `param` argument appears unused by the contract
        described here — confirm against concrete implementations before
        relying on it.
        """
        pass
|
||||
|
||||
|
||||
class IGridData(IData):
    """
    Grid data returned by the DataAccessLayer.
    """

    @abc.abstractmethod
    def getParameter(self):
        """
        Returns:
            the parameter of the data
        """
        pass

    @abc.abstractmethod
    def getUnit(self):
        """
        Returns:
            the string abbreviation of the unit, or None if no unit is
            associated
        """
        pass

    @abc.abstractmethod
    def getRawData(self):
        """
        Returns:
            the grid values as a numpy array
        """
        pass

    @abc.abstractmethod
    def getLatLonCoords(self):
        """
        Returns:
            a (lons, lats) tuple: the first element is a numpy array of
            longitudes and the second a numpy array of latitudes
        """
        pass
|
||||
|
||||
|
||||
class IGeometryData(IData):
    """
    Geometry data returned by the DataAccessLayer.
    """

    @abc.abstractmethod
    def getGeometry(self):
        """
        Returns:
            the data's geometry as a shapely geometry
        """
        pass

    @abc.abstractmethod
    def getParameters(self):
        """
        Returns:
            a list of the data's parameter name strings
        """
        pass

    @abc.abstractmethod
    def getString(self, param):
        """
        Look up the string value of one parameter.

        Args:
            param: string name of the param

        Returns:
            the param's value as a string
        """
        pass

    @abc.abstractmethod
    def getNumber(self, param):
        """
        Look up the numeric value of one parameter.

        Args:
            param: string name of the param

        Returns:
            the param's value as a number
        """
        pass

    @abc.abstractmethod
    def getUnit(self, param):
        """
        Look up the unit of one parameter.

        Args:
            param: string name of the param

        Returns:
            the string abbreviation of the param's unit
        """
        pass

    @abc.abstractmethod
    def getType(self, param):
        """
        Look up the type of one parameter.

        Args:
            param: string name of the param

        Returns:
            a type string such as "STRING", "INT", "LONG", "FLOAT",
            or "DOUBLE"
        """
        pass
|
||||
|
||||
|
||||
class INotificationSubscriber(with_metaclass(abc.ABCMeta, object)):
    """
    A notification subscriber returned from the DataNotificationLayer.
    """

    @abc.abstractmethod
    def subscribe(self, callback):
        """
        Subscribe to the requested data.  Does not return until close() is
        called from a separate thread.

        Args:
            callback: the callable to invoke with the IGridData/IGeometryData
        """
        pass

    @abc.abstractmethod
    def close(self):
        """Closes the notification subscriber."""
        pass
|
||||
|
||||
|
||||
class INotificationFilter(with_metaclass(abc.ABCMeta, object)):
    """
    Represents data required to filter a set of URIs and
    return a corresponding list of IDataRequest to retrieve data for.
    """

    @abc.abstractmethod
    def accept(self, dataUri):
        """Return whether the given dataURI passes this filter.

        Args:
            dataUri: the dataURI string to test

        Note: the abstract declaration previously omitted ``self`` even
        though callers invoke it as a bound method
        (``self.notificationFilter.accept(dataUri)``).
        """
        pass
|
128
awips/gempak/GridDataRetriever.py
Normal file
128
awips/gempak/GridDataRetriever.py
Normal file
|
@ -0,0 +1,128 @@
|
|||
import os
|
||||
import numpy
|
||||
from datetime import datetime
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridDataRequest
|
||||
|
||||
|
||||
class GridDataRetriever:
    """Requests a single GEMPAK grid from EDEX and returns its float values."""

    # Divisors applied to non-negative level values for vertical coordinates
    # that GEMPAK stores scaled (see the inverse scaling in GridInfoRetriever).
    _VCOORD_DIVISORS = {'SGMA': 10000.0, 'DPTH': 100.0, 'POTV': 1000.0}

    def __init__(self, server, pluginName, modelId, cycle, forecast, level1, level2, vcoord, param, nnx, nny):
        self.pluginName = pluginName
        self.modelId = modelId
        self.cycle = cycle          # GEMPAK cycle string, YYMMDD/HHMM
        self.forecast = forecast    # forecast seconds as a string
        self.level1 = level1
        self.level2 = level2
        self.vcoord = vcoord
        self.param = param
        self.nx = nnx
        self.ny = nny
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    @classmethod
    def _convertLevel(cls, level, vcoord):
        """Convert a GEMPAK level string to the float EDEX expects.

        '-1' maps to the missing sentinel -999999.0; otherwise non-negative
        values are divided by the vcoord-specific scale factor (replaces the
        previously triplicated per-coordinate scaling branches).
        """
        value = -999999.0 if level == '-1' else float(level)
        divisor = cls._VCOORD_DIVISORS.get(vcoord)
        if divisor is not None and value >= 0.0:
            value = value / divisor
        return value

    def getData(self):
        """Sends the ThriftClient request and returns the grid as a list."""
        req = GetGridDataRequest()

        req.setPluginName(self.pluginName)
        req.setModelId(self.modelId)

        # GEMPAK cycle (YYMMDD/HHMM) -> EDEX reference time string.
        dt = datetime.strptime(self.cycle, '%y%m%d/%H%M')
        ct = datetime.strftime(dt, '%Y-%m-%d %H:%M:%S')
        req.setReftime(ct)
        req.setFcstsec(self.forecast)

        req.setLevel1(str(self._convertLevel(self.level1, self.vcoord)))
        req.setLevel2(str(self._convertLevel(self.level2, self.vcoord)))
        req.setVcoord(self.vcoord)
        req.setParm(self.param)

        resp = self.client.sendRequest(req)

        # Get the dimensions of the grid
        kx = int(self.nx)
        ky = int(self.ny)
        kxky = kx * ky

        # Put the data into a NUMPY array
        grid = numpy.asarray(resp.getFloatData())

        # All grids need to be flipped from a GEMPAK point of view:
        # reshape to 2D, flip up-down, then reshape back to 1D.
        grid = numpy.reshape(grid, (ky, kx))
        grid = numpy.flipud(grid)
        grid = numpy.reshape(grid, kxky)

        return [replacemissing(x) for x in grid]
|
||||
|
||||
|
||||
def getgriddata(server, table, model, cycle, forecast, level1,
                level2, vcoord, param, nnx, nny):
    """Convenience wrapper: build a GridDataRetriever and return its data."""
    retriever = GridDataRetriever(server, table, model, cycle, forecast,
                                  level1, level2, vcoord, param, nnx, nny)
    return retriever.getData()
|
||||
|
||||
|
||||
def getheader(server, table, model, cycle, forecast, level1,
              level2, vcoord, param, nnx, nny):
    """Return a placeholder two-word GEMPAK header; all arguments are ignored."""
    return [0, 0]
|
||||
|
||||
|
||||
def replacemissing(x):
    """Map the EDEX missing sentinel (-999999.0) to GEMPAK's (-9999.0)."""
    return -9999.0 if x == -999999.0 else x
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
|
||||
if __name__ == '__main__':
    # Exercise the retriever against the Unidata cloud EDEX server.
    args = ('edex-cloud.unidata.ucar.edu',  # server
            'grid',                         # table
            'GFS20',                        # model
            '131227/0000',                  # cycle (YYMMDD/HHMM)
            '43200',                        # forecast seconds
            '500',                          # level1
            '-1',                           # level2
            'PRES',                         # vertical coordinate
            'HGHT',                         # parameter
            '720',                          # nx
            '361')                          # ny
    print(getheader(*args))
    print(getgriddata(*args))
|
145
awips/gempak/GridInfoRetriever.py
Normal file
145
awips/gempak/GridInfoRetriever.py
Normal file
|
@ -0,0 +1,145 @@
|
|||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridInfoRequest
|
||||
|
||||
|
||||
class GridInfoRetriever:
    """Requests the GEMPAK grid inventory from EDEX and packs it into
    the integer header words GEMPAK expects."""

    def __init__(self, server, pluginName, modelId, cycle=None, forecast=None):
        self.pluginName = pluginName
        self.modelId = modelId
        self.cycle = cycle          # GEMPAK cycle string, YYMMDD/HHMM
        self.forecast = forecast    # forecast seconds as a string
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def getInfo(self):
        """Sends ThriftClient request and returns packed GEMPAK grid headers.

        (Fixed: the function-local ``import sys`` shadowing the module-level
        import has been removed, the docstring is now actually the first
        statement, and ``//`` replaces ``/`` so the forecast hour/minute
        arithmetic stays integral on Python 3.)
        """
        req = GetGridInfoRequest()
        req.setPluginName(self.pluginName)
        req.setModelId(self.modelId)

        req.setReftime(self.cycle)
        if len(self.cycle) > 2:
            # Full GEMPAK cycle (YYMMDD/HHMM) -> EDEX reference time string.
            dt = datetime.strptime(self.cycle, '%y%m%d/%H%M')
            ct = datetime.strftime(dt, '%Y-%m-%d %H:%M:%S')
            req.setReftime(ct)

        req.setFcstsec(self.forecast)
        resp = self.client.sendRequest(req)

        # Take care of bytestring encodings in python3
        for i, rec in enumerate(resp):
            resp[i] = {
                key.decode() if isinstance(key, bytes) else key:
                val.decode() if isinstance(val, bytes) else val
                for key, val in rec.items()
            }

        # Stable double sort: newest reftime first within each fcstsec group.
        sortresp = sorted(sorted(resp, key=itemgetter("reftime"), reverse=True),
                          key=itemgetter("fcstsec"))

        grids = []

        count = 0
        for record in sortresp:
            # Pack the 12-char parameter name into three 4-byte integers in
            # machine byte order.
            s = '{:<12}'.format(record['param'])
            if sys.byteorder == 'little':
                parm1 = (ord(s[3]) << 24) + (ord(s[2]) << 16) + (ord(s[1]) << 8) + ord(s[0])
                parm2 = (ord(s[7]) << 24) + (ord(s[6]) << 16) + (ord(s[5]) << 8) + ord(s[4])
                parm3 = (ord(s[11]) << 24) + (ord(s[10]) << 16) + (ord(s[9]) << 8) + ord(s[8])
            else:
                parm1 = (ord(s[0]) << 24) + (ord(s[1]) << 16) + (ord(s[2]) << 8) + ord(s[3])
                parm2 = (ord(s[4]) << 24) + (ord(s[5]) << 16) + (ord(s[6]) << 8) + ord(s[7])
                parm3 = (ord(s[8]) << 24) + (ord(s[9]) << 16) + (ord(s[10]) << 8) + ord(s[11])

            dt = datetime.strptime(record['reftime'], '%Y-%m-%d %H:%M:%S.%f')
            dattim = dt.month * 100000000 + dt.day * 1000000 + (dt.year % 100) * 10000 + dt.hour * 100 + dt.minute
            # Integer division: fcstsec -> whole hours and leftover minutes.
            # Plain "/" produced floats in this list of ints on Python 3.
            fcsth = (int(record['fcstsec']) // 60) // 60
            fcstm = (int(record['fcstsec']) // 60) % 60
            fcst = 100000 + fcsth * 100 + fcstm

            lv1 = float(record['level1'])
            if lv1 == -999999.0:
                lv1 = -1.0
            lv2 = float(record['level2'])
            if lv2 == -999999.0:
                lv2 = -1.0

            vcd = record['vcoord']
            if vcd == 'NONE':
                ivcd = 0
            elif vcd == 'PRES':
                ivcd = 1
            elif vcd == 'THTA':
                ivcd = 2
            elif vcd == 'HGHT':
                ivcd = 3
            elif vcd == 'SGMA':
                ivcd = 4
                if lv1 >= 0.0:
                    lv1 = lv1 * 10000.0
                if lv2 >= 0.0:
                    lv2 = lv2 * 10000.0
            elif vcd == 'DPTH':
                ivcd = 5
                if lv1 >= 0.0:
                    lv1 = lv1 * 100.0
                if lv2 >= 0.0:
                    lv2 = lv2 * 100.0
            elif vcd == 'HYBL':
                ivcd = 6
            else:
                # Unknown coordinate: pack its 4-char name like the params.
                v = '{:<4}'.format(vcd)
                if sys.byteorder == 'little':
                    ivcd = (ord(v[3]) << 24) + (ord(v[2]) << 16) + (ord(v[1]) << 8) + ord(v[0])
                else:
                    ivcd = (ord(v[0]) << 24) + (ord(v[1]) << 16) + (ord(v[2]) << 8) + ord(v[3])
                if vcd == 'POTV':
                    if lv1 >= 0.0:
                        lv1 = lv1 * 1000.0
                    if lv2 >= 0.0:
                        lv2 = lv2 * 1000.0
            grids.append(9999)
            grids.append(dattim)
            grids.append(fcst)
            grids.append(0)
            grids.append(0)
            grids.append(int(lv1))
            grids.append(int(lv2))
            grids.append(ivcd)
            grids.append(parm1)
            grids.append(parm2)
            grids.append(parm3)
            count += 1
            # GEMPAK's inventory buffer holds at most 29999 records.
            if count > 29998:
                break

        return grids
|
||||
|
||||
|
||||
def getinfo(server, table, model, cycle=None, forecast=None):
    """Retrieve GEMPAK grid-info rows for *model* from an EDEX server.

    Args:
        server: EDEX host name.
        table: data plugin/table name (e.g. 'grid').
        model: model identifier (e.g. 'NAM40').
        cycle: model cycle filter; defaults to None so three-argument
            calls (as in the __main__ test in this file) no longer raise
            TypeError.  NOTE(review): confirm GridInfoRetriever accepts
            None here.
        forecast: forecast-hour filter; same default rationale as cycle.

    Returns:
        The packed int list produced by GridInfoRetriever.getInfo().
    """
    gir = GridInfoRetriever(server, table, model, cycle, forecast)
    return gir.getInfo()
|
||||
|
||||
|
||||
def getrow(server, table, model, cycle=None, forecast=None):
    """Return the constant GEMPAK header row.

    All arguments are accepted only for interface parity with getinfo()
    and are ignored; cycle/forecast default to None so three-argument
    calls (as in the __main__ test in this file) no longer raise
    TypeError.

    Returns:
        list[int]: ``[9999, 1]`` -- GEMPAK row sentinel and row count.
    """
    return [9999, 1]
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
if __name__ == '__main__':
    # Run Test
    srv = 'edex-cloud.unidata.ucar.edu'
    tbl = 'grid'
    mdl = 'NAM40'
    # getrow()/getinfo() take (server, table, model, cycle, forecast);
    # the original calls omitted the last two and raised TypeError.
    # NOTE(review): confirm GridInfoRetriever accepts None placeholders.
    print(getrow(srv, tbl, mdl, None, None))
    print(getinfo(srv, tbl, mdl, None, None))
|
301
awips/gempak/GridNavRetriever.py
Normal file
301
awips/gempak/GridNavRetriever.py
Normal file
|
@ -0,0 +1,301 @@
|
|||
import os
|
||||
import math
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridNavRequest
|
||||
from ctypes import *
|
||||
|
||||
# Projection constants shared by createPolar()/createConic().
EARTH_RADIUS = 6371200.0      # spherical earth radius, meters
DEG_TO_RAD = math.pi / 180.0
RAD_TO_DEG = 180.0 / math.pi
TWOPI = math.pi * 2.0
HALFPI = math.pi / 2.0
PI4TH = math.pi / 4.0
PI3RD = math.pi / 3.0         # 60 deg -- parallel where grid spacing is valid
|
||||
|
||||
|
||||
def _polar_xy_to_latlon(x, y, nsflag, clonr):
    """Invert polar-stereographic plane coords (meters) to (lat, lon) deg.

    nsflag is 'N' or 'S' (projection pole); clonr is the central
    longitude in radians.  Factored out of createPolar(), which computed
    this identically for both corners.
    """
    if nsflag == 'N':
        lat = (HALFPI - 2*math.atan2(math.hypot(x, y), EARTH_RADIUS)) * RAD_TO_DEG
        rtemp = clonr + math.atan2(x, -y)
    else:
        lat = -1 * (HALFPI - 2*math.atan2(math.hypot(x, y), EARTH_RADIUS)) * RAD_TO_DEG
        rtemp = clonr + math.atan2(x, y)

    # Normalize longitude into (-180, 180].
    if rtemp > math.pi:
        lon = (rtemp-TWOPI) * RAD_TO_DEG
    elif rtemp < -math.pi:
        lon = (rtemp+TWOPI) * RAD_TO_DEG
    else:
        lon = rtemp * RAD_TO_DEG
    return lat, lon


def createPolar(nsflag, clon, lat1, lon1, dx, dy, unit, nx, ny):
    """Compute the lat/lon corners of a polar-stereographic grid.

    Args:
        nsflag: 'N' or 'S' -- which pole the projection is centered on.
        clon: central longitude, degrees.
        lat1, lon1: first grid point, degrees.
        dx, dy: grid spacing ('m' for meters, otherwise kilometers).
        unit: spacing unit flag ('m' means meters).
        nx, ny: grid dimensions.

    Returns:
        [latll, lonll, latur, lonur] -- lower-left / upper-right corners
        in degrees.
    """
    clonr = clon * DEG_TO_RAD
    latr = lat1 * DEG_TO_RAD
    lonr = lon1 * DEG_TO_RAD

    # Project the first grid point onto the stereographic plane.
    if nsflag == 'N':
        x1 = EARTH_RADIUS * math.tan(PI4TH - latr/2.0) * math.sin(lonr-clonr)
        y1 = -1 * EARTH_RADIUS * math.tan(PI4TH - latr/2.0) * math.cos(lonr-clonr)
    else:
        x1 = EARTH_RADIUS * math.tan(PI4TH + latr/2.0) * math.sin(lonr-clonr)
        y1 = EARTH_RADIUS * math.tan(PI4TH + latr/2.0) * math.cos(lonr-clonr)

    # Spacing is valid at 60 degrees (PI3RD); rescale to the plane.
    if unit == 'm':
        tdx = dx / (1 + math.sin(PI3RD))
        tdy = dy / (1 + math.sin(PI3RD))
    else:
        tdx = (dx*1000.0) / (1 + math.sin(PI3RD))
        tdy = (dy*1000.0) / (1 + math.sin(PI3RD))

    x2 = x1 + tdx * (nx-1)
    y2 = y1 + tdy * (ny-1)
    xll = min(x1, x2)
    yll = min(y1, y2)
    xur = max(x1, x2)
    yur = max(y1, y2)

    latll, lonll = _polar_xy_to_latlon(xll, yll, nsflag, clonr)
    latur, lonur = _polar_xy_to_latlon(xur, yur, nsflag, clonr)
    return [latll, lonll, latur, lonur]
|
||||
|
||||
|
||||
def _conic_xy_to_latlon(x, y, nsflag, clonr, er, cc):
    """Invert Lambert-conformal plane coords to (lat, lon) degrees.

    er is the scaled earth radius (EARTH_RADIUS / cone constant) and cc
    the cone constant.  Factored out of createConic(), which computed
    this identically for both corners.
    """
    if nsflag == 'N':
        lat = (HALFPI - 2.0 * math.atan(math.pow(math.hypot(x, y)/er, (1/cc)))) * RAD_TO_DEG
        rtemp = math.atan2(x, -y) * (1/cc) + clonr
    else:
        lat = (-1.0 * (HALFPI - 2.0 * math.atan(math.pow(math.hypot(x, y)/er, (1/cc))))) * RAD_TO_DEG
        rtemp = math.atan2(x, y) * (1/cc) + clonr

    # Normalize longitude into (-180, 180].
    if rtemp > math.pi:
        lon = (rtemp-TWOPI) * RAD_TO_DEG
    elif rtemp < -math.pi:
        lon = (rtemp+TWOPI) * RAD_TO_DEG
    else:
        lon = rtemp * RAD_TO_DEG
    return lat, lon


def createConic(nsflag, clon, lat1, lon1, dx, dy, unit, nx, ny, ang1, ang3):
    """Compute the lat/lon corners of a Lambert-conformal grid.

    Args:
        nsflag: 'N' or 'S' hemisphere flag.
        clon: central longitude, degrees.
        lat1, lon1: first grid point, degrees.
        dx, dy: grid spacing ('m' for meters, otherwise kilometers).
        unit: spacing unit flag ('m' means meters).
        nx, ny: grid dimensions.
        ang1, ang3: the two standard parallels, degrees.

    Returns:
        [latll, lonll, latur, lonur] corner coordinates in degrees.
    """
    clonr = clon * DEG_TO_RAD
    latr = lat1 * DEG_TO_RAD
    lonr = lon1 * DEG_TO_RAD

    angle1 = HALFPI - (math.fabs(ang1) * DEG_TO_RAD)
    angle2 = HALFPI - (math.fabs(ang3) * DEG_TO_RAD)

    # Cone constant: tangent cone when the standard parallels coincide,
    # otherwise the secant-cone formula.
    if ang1 == ang3:
        cc = math.cos(angle1)
    else:
        cc = (math.log(math.sin(angle2)) - math.log(math.sin(angle1))) \
            / (math.log(math.tan(angle2/2.0)) - math.log(math.tan(angle1/2.0)))

    er = EARTH_RADIUS / cc

    # Project the first grid point onto the conic plane.
    if nsflag == 'N':
        x1 = er * math.pow(math.tan((HALFPI-latr)/2.0), cc) * math.sin(cc*(lonr-clonr))
        y1 = -1.0 * er * math.pow(math.tan((HALFPI-latr)/2.0), cc) * math.cos(cc*(lonr-clonr))
    else:
        x1 = er * math.pow(math.tan((HALFPI+latr)/2.0), cc) * math.sin(cc*(lonr-clonr))
        y1 = er * math.pow(math.tan((HALFPI+latr)/2.0), cc) * math.cos(cc*(lonr-clonr))

    # Map-scale factor at the first standard parallel.
    alpha = math.pow(math.tan(angle1/2.0), cc) / math.sin(angle1)

    if unit == 'm':
        x2 = x1 + (nx-1) * alpha * dx
        y2 = y1 + (ny-1) * alpha * dy
    else:
        x2 = x1 + (nx-1) * alpha * (dx*1000.0)
        y2 = y1 + (ny-1) * alpha * (dy*1000.0)

    xll = min(x1, x2)
    yll = min(y1, y2)
    xur = max(x1, x2)
    yur = max(y1, y2)

    latll, lonll = _conic_xy_to_latlon(xll, yll, nsflag, clonr, er, cc)
    latur, lonur = _conic_xy_to_latlon(xur, yur, nsflag, clonr, er, cc)
    return [latll, lonll, latur, lonur]
|
||||
|
||||
|
||||
class StringConverter(Union):
    """ctypes union used to reinterpret 4 packed characters as an int or
    a float, so GEMPAK text fields fit into a float navigation array."""
    _fields_ = [("char", c_char*4), ("int", c_int), ("float", c_float)]
|
||||
|
||||
|
||||
class GridNavRetriever:
    """Builds GEMPAK navigation/analysis float blocks for a model grid
    retrieved from an EDEX server."""

    def __init__(self, server, pluginName, modelId, arrayLen):
        self.pluginName = pluginName
        self.modelId = modelId
        # Length of the packed float arrays GEMPAK expects back.
        self.arrayLen = arrayLen
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def getNavBlk(self):
        """Request grid navigation records and pack a GEMPAK nav block."""
        req = GetGridNavRequest()
        req.setPluginName(self.pluginName)
        req.setModelId(self.modelId)
        resp = self.client.sendRequest(req)

        # Normalize any bytes keys/values coming off the wire to str.
        for i, rec in enumerate(resp):
            resp[i] = {
                key.decode() if isinstance(key, bytes) else key:
                val.decode() if isinstance(val, bytes) else val
                for key, val in rec.items()
            }

        nav = []

        for record in resp:
            unit = record['spacingunit']
            sk = record['spatialkey']
            skarr = sk.split('/')

            nx = float(skarr[1])
            ny = float(skarr[2])
            dx = float(skarr[3])
            dy = float(skarr[4])

            sc = StringConverter()
            # Defaults so an unrecognized projtype cannot leave these
            # names unbound (the original raised NameError in that case).
            gemproj = 2.0
            ang1 = ang2 = ang3 = 0.0
            lllat = lllon = urlat = urlon = 0.0

            if record['projtype'] == 'LatLon':
                # c_char arrays require bytes under Python 3; the
                # original assigned str, which raises TypeError.
                sc.char = b'CED '
                gemproj = 2.0
                ang1 = 0.0
                ang2 = 0.0
                ang3 = 0.0

                lllat = float(record['lowerleftlat'])
                lllon = float(record['lowerleftlon'])
                urlat = lllat + (dy * (ny-1))
                urlon = lllon + (dx * (nx-1))
                if lllon > 180:
                    lllon -= 360.0
                if urlon > 180:
                    urlon -= 360.0

            if record['projtype'] == 'Polar Stereographic':
                sc.char = b'STR '
                gemproj = 2.0
                if float(record['standard_parallel_1']) < 0.0:
                    ang1 = -90.0
                    nsflag = 'S'
                else:
                    ang1 = 90.0
                    nsflag = 'N'
                ang2 = float(record['central_meridian'])
                ang3 = 0.0

                lat1 = float(record['lowerleftlat'])
                lon1 = float(record['lowerleftlon'])
                coords = createPolar(nsflag, ang2, lat1, lon1, dx, dy, unit, nx, ny)
                lllat, lllon, urlat, urlon = coords

            if record['projtype'] == 'Lambert Conformal':
                sc.char = b'LCC '
                gemproj = 2.0

                ang1 = float(skarr[7])
                ang2 = float(record['central_meridian'])
                ang3 = float(skarr[8])
                nsflag = 'S' if ang1 < 0.0 else 'N'

                lat1 = float(record['lowerleftlat'])
                lon1 = float(record['lowerleftlon'])
                coords = createConic(nsflag, ang2, lat1, lon1, dx, dy,
                                     unit, nx, ny, ang1, ang3)
                lllat, lllon, urlat, urlon = coords

            # Fill up the output array of floats
            nav.append(gemproj)
            nav.append(sc.float)       # projection name reinterpreted as float
            nav.append(1.0)
            nav.append(1.0)
            nav.append(nx)
            nav.append(ny)
            nav.append(lllat)
            nav.append(lllon)
            nav.append(urlat)
            nav.append(urlon)
            nav.append(ang1)
            nav.append(ang2)
            nav.append(ang3)

        # Zero-pad to the requested block length (13 floats used so far).
        for _ in range(13, int(self.arrayLen)):
            nav.append(0.0)
        return nav

    def getAnlBlk(self):
        """Return a default whole-globe GEMPAK analysis block."""
        anl = []
        # Type
        anl.append(2.0)
        # Delta
        anl.append(1.0)
        # Extend area
        anl.extend([0.0, 0.0, 0.0, 0.0])
        # Grid area
        anl.extend([-90.0, -180.0, 90.0, 180.0])
        # Data area
        anl.extend([-90.0, -180.0, 90.0, 180.0])
        # Zero-pad to the requested block length (18 floats used so far).
        for _ in range(18, int(self.arrayLen)):
            anl.append(0.0)
        return anl
|
||||
|
||||
|
||||
def getnavb(server, table, model, arrlen):
    """Return the GEMPAK navigation block for *model* from *server*."""
    return GridNavRetriever(server, table, model, arrlen).getNavBlk()
|
||||
|
||||
|
||||
def getanlb(server, table, model, arrlen):
    """Return the GEMPAK analysis block for *model* from *server*."""
    return GridNavRetriever(server, table, model, arrlen).getAnlBlk()
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
if __name__ == '__main__':
    # Run Test
    srv = 'edex-cloud.unidata.ucar.edu'
    tbl = 'grid_info'
    mdl = 'NAM40'
    # Navigation block, padded to 256 floats.
    navlen = '256'
    print(getnavb(srv, tbl, mdl, navlen))
    # Analysis block, padded to 128 floats.
    anllen = '128'
    print(getanlb(srv, tbl, mdl, anllen))
|
144
awips/gempak/StationDataRetriever.py
Normal file
144
awips/gempak/StationDataRetriever.py
Normal file
|
@ -0,0 +1,144 @@
|
|||
import os
|
||||
from datetime import datetime
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import StationDataRequest
|
||||
|
||||
|
||||
class StationDataRetriever:
    """ Retrieves all data for a requested station and time """

    def __init__(self, server, pluginName, stationId, refTime, parmList, partNumber):
        self.pluginName = pluginName
        self.stationId = stationId
        self.refTime = refTime        # 'YYMMDD/HHMM' string
        self.parmList = parmList      # comma-separated parameter names
        self.partNumber = partNumber
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def getStationData(self):
        """ Sends ThriftClient request; returns {parm name: value(s)}. """
        dtime = datetime.strptime(self.refTime, "%y%m%d/%H%M")
        trange = TimeRange()
        trange.setStart(dtime)
        trange.setEnd(dtime)
        dataTime = DataTime(refTime=dtime, validPeriod=trange)
        req = StationDataRequest()
        req.setPluginName(self.pluginName)
        req.setStationId(self.stationId)
        req.setRefTime(dataTime)
        req.setParmList(self.parmList)
        req.setPartNumber(self.partNumber)
        resp = self.client.sendRequest(req)

        # Every caller below indexes the result by parameter name, so
        # the response is a mapping.  The original ran a list-style
        # enumerate()/rec.items() decode pass over it, which cannot work
        # on a mapping; decode bytes keys/values directly instead.
        return {
            key.decode() if isinstance(key, bytes) else key:
            val.decode() if isinstance(val, bytes) else val
            for key, val in resp.items()
        }
|
||||
|
||||
|
||||
def getstationdata(server, table, stationId, refTime, parmList, partNumber):
    """Return one value per requested parameter, -9999.00 when missing."""
    retriever = StationDataRetriever(server, table, stationId, refTime,
                                     parmList, partNumber)
    stationdict = retriever.getStationData()
    return [stationdict.get(parm, -9999.00) for parm in parmList.split(',')]
|
||||
|
||||
|
||||
def getleveldata(server, table, stationId, refTime, parmList, partNumber):
    """Return level data row-major: for each level, one value per parameter.

    Levels past the deepest real (non -9999.00) value for every
    parameter are dropped; the sentinel -9999998.0 is mapped to -9999.0.
    """
    retriever = StationDataRetriever(server, table, stationId, refTime,
                                     parmList, partNumber)
    lcldict = retriever.getStationData()
    parms = parmList.split(',')

    # Deepest level index (+1) that still holds real data, per parameter.
    numset = [1]
    for parm in parms:
        if parm in lcldict:
            levels = lcldict[parm]
            pnum = len(levels) - 1
            while pnum >= 0 and levels[pnum] == -9999.00:
                pnum -= 1
            numset.append(pnum)

    rdata = []
    for level in range(max(numset)):
        for parm in parms:
            if parm in lcldict:
                value = lcldict[parm][level]
                rdata.append(-9999.0 if value == -9999998.0 else value)
            else:
                rdata.append(-9999.0)
    return rdata
|
||||
|
||||
|
||||
def getstationtext(server, table, stationId, refTime, parmList, partNumber):
    """Return the raw text product for the station, or a blank string."""
    retriever = StationDataRetriever(server, table, stationId, refTime,
                                     parmList, partNumber)
    return retriever.getStationData().get(parmList, ' ')
|
||||
|
||||
|
||||
def getheader(server, table, stationId, refTime, parmList, partNumber):
    """Return the constant GEMPAK station-data header (arguments unused)."""
    return [0]
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
if __name__ == '__main__':
    # Run Test
    srv = 'edex-cloud.unidata.ucar.edu'
    key = '-'
    # METAR surface obs for one station/time.
    print('OBS - METAR')
    tbl = 'obs'
    stn = 'KLGA'
    time = '130823/1700'
    parm = 'seaLevelPress,temperature,dewpoint,windSpeed,windDir'
    part = '0'
    print(getheader(srv, tbl, stn, time, parm, part))
    print(getstationdata(srv, tbl, stn, time, parm, part))
    parm = 'rawMETAR'
    print(getstationtext(srv, tbl, stn, time, parm, part))
    # Synoptic surface obs.
    print('SFCOBS - SYNOP')
    tbl = 'sfcobs'
    stn = '72403'
    time = '130823/1800'
    parm = 'seaLevelPress,temperature,dewpoint,windSpeed,windDir'
    part = '0'
    print(getheader(srv, tbl, stn, time, parm, part))
    print(getstationdata(srv, tbl, stn, time, parm, part))
    parm = 'rawReport'
    print(getstationtext(srv, tbl, stn, time, parm, part))
    # Upper-air sounding parts: mandatory, sig temperature, sig wind.
    print('UAIR')
    tbl = 'bufrua'
    stn = '72469'
    time = '130823/1200'
    parm = 'prMan,htMan,tpMan,tdMan,wdMan,wsMan'
    part = '2020'
    print(getleveldata(srv, tbl, stn, time, parm, part))
    parm = 'prSigT,tpSigT,tdSigT'
    part = '2022'
    print(getleveldata(srv, tbl, stn, time, parm, part))
    parm = 'htSigW,wsSigW,wdSigW'
    part = '2021'
    print(getleveldata(srv, tbl, stn, time, parm, part))
|
93
awips/gempak/StationRetriever.py
Normal file
93
awips/gempak/StationRetriever.py
Normal file
|
@ -0,0 +1,93 @@
|
|||
import os
|
||||
import sys
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetStationsRequest
|
||||
|
||||
|
||||
class StationRetriever:
    """ Retrieves all requested stations """

    def __init__(self, server, pluginName):
        self.pluginName = pluginName
        self.outdir = os.getcwd()
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    @staticmethod
    def _pack4(text, start=0):
        """Pack 4 characters of *text* (from *start*) into a 32-bit int,
        respecting native byte order.  Replaces four hand-expanded copies
        of the same shift arithmetic in the original."""
        chars = text[start:start + 4]
        if sys.byteorder == 'little':
            chars = chars[::-1]
        value = 0
        for ch in chars:
            value = (value << 8) + ord(ch)
        return value

    def getStations(self):
        """ Sends ThriftClient request; returns packed GEMPAK station ints."""
        req = GetStationsRequest()
        req.setPluginName(self.pluginName)
        resp = self.client.sendRequest(req)

        # NOTE: items are accessed via getters (getStationId() etc.), so
        # they must remain objects.  The original first converted each
        # item to a plain dict, which made every getter call below fail;
        # that conversion pass has been removed.
        stns = []
        for item in resp:
            stationstr = '{:<8}'.format(item.getStationId())
            stnid = self._pack4(stationstr, 0)
            stnid2 = self._pack4(stationstr, 4)

            # The original assigned a single-space string here, which the
            # packing then indexed out of range; use 4 blanks as the
            # formatted branch does.
            if item.getState() is None:
                state = self._pack4('    ')
            else:
                state = self._pack4('{:<4}'.format(item.getState()))

            cntry = self._pack4('{:<4}'.format(item.getCountry()))

            stns.append(9999)
            stns.append(stnid)
            stns.append(item.getWmoIndex())
            stns.append(int(item.getLatitude()*100))
            stns.append(int(item.getLongitude()*100))
            stns.append(int(item.getElevation()))
            stns.append(state)
            stns.append(cntry)
            stns.append(stnid2)
            stns.append(0)
        return stns
|
||||
|
||||
|
||||
def getstations(server, table, key='-', dummy=None, dummy2=None):
    """Return packed GEMPAK station ints for *table* from *server*.

    key, dummy and dummy2 are unused; they default so the
    three-argument calls in the __main__ test below no longer raise
    TypeError.
    """
    sr = StationRetriever(server, table)
    return sr.getStations()
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
if __name__ == '__main__':
    # Run Test
    srv = 'edex-cloud.unidata.ucar.edu'
    key = '-'
    # getstations() takes (server, table, key, dummy, dummy2); the
    # original three-argument calls raised TypeError -- pass explicit
    # placeholders for the two unused trailing arguments.
    print('OBS - METAR')
    tbl = 'obs'
    print(getstations(srv, tbl, key, None, None))
    print('SFCOBS - SYNOP')
    tbl = 'sfcobs'
    print(getstations(srv, tbl, key, None, None))
|
76
awips/gempak/TimeRetriever.py
Normal file
76
awips/gempak/TimeRetriever.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
import os
|
||||
from datetime import datetime
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.java.util import GregorianCalendar
|
||||
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetTimesRequest
|
||||
|
||||
|
||||
class TimeRetriever:
    """ Retrieves all requested times"""

    def __init__(self, server, pluginName, timeField):
        self.pluginName = pluginName
        self.timeField = timeField      # e.g. 'refHour', 'dataTime.refTime'
        self.outdir = os.getcwd()
        self.host = os.getenv("DEFAULT_HOST", server)
        self.port = os.getenv("DEFAULT_PORT", "9581")
        self.client = ThriftClient.ThriftClient(self.host, self.port)

    def getTimes(self):
        """ Sends ThriftClient request; returns GEMPAK-packed time ints."""
        req = GetTimesRequest()
        req.setPluginName(self.pluginName)
        req.setTimeField(self.timeField)
        resp = self.client.sendRequest(req)

        # resp is a response object exposing getTimes(); the original
        # also ran a list-style enumerate()/items() byte-decode pass over
        # it first, which cannot work on this object and was removed.
        timelist = []
        for item in resp.getTimes():
            if isinstance(item, GregorianCalendar):
                tstamp = item.getTimeInMillis()
            else:
                tstamp = item.getTime()
            timelist.append(datetime.utcfromtimestamp(tstamp/1000))

        timelist.sort(reverse=True)      # newest first

        times = []
        for t in timelist:
            times.append(9999)
            times.append((t.year % 100) * 10000 + (t.month * 100) + t.day)
            times.append((t.hour * 100) + t.minute)

        # GEMPAK can only handle up to 200 times, which is 600 elements
        # in this array -- [9999, DATE, TIME] -- repeated
        return times[0:600]
|
||||
|
||||
|
||||
def gettimes(server, table, key, dummy=None, dummy2=None):
    """Return GEMPAK-packed times for *table*/*key* from *server*.

    dummy/dummy2 are unused; they default to None so three-argument
    calls (as in the __main__ test below) no longer raise TypeError.
    """
    tr = TimeRetriever(server, table, key)
    return tr.getTimes()
|
||||
|
||||
|
||||
# This is the standard boilerplate that runs this script as a main
if __name__ == '__main__':
    srv = 'edex-cloud.unidata.ucar.edu'
    # gettimes() takes (server, table, key, dummy, dummy2); the original
    # three-argument calls raised TypeError -- pass explicit placeholders
    # for the two unused trailing arguments.
    print('OBS - METAR')
    tbl = 'obs'
    key = 'refHour'
    print(gettimes(srv, tbl, key, None, None))

    print('SFCOBS - SYNOP')
    tbl = 'sfcobs'
    key = 'refHour'
    print(gettimes(srv, tbl, key, None, None))

    print('BUFRUA')
    tbl = 'bufrua'
    key = 'dataTime.refTime'
    print(gettimes(srv, tbl, key, None, None))
|
99
awips/gempak/ncepGribTables.py
Executable file
99
awips/gempak/ncepGribTables.py
Executable file
|
@ -0,0 +1,99 @@
|
|||
#!/usr/bin/env python
|
||||
# Parse html tables from a given URL and output CSV.
|
||||
# Note: To install a missing python module foo do "easy_install foo"
|
||||
# (or the new way is "pip install foo" but you might have to do
|
||||
# "easy_install pip" first)
|
||||
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
import scrape
|
||||
import urllib.request, urllib.error, urllib.parse
|
||||
import html.entities
|
||||
import re
|
||||
import sys
|
||||
import unicodedata
|
||||
|
||||
|
||||
# from http://stackoverflow.com/questions/1197981/convert-html-entities
|
||||
def asciify2(s):
    """Replace HTML character entities in *s* with their characters.

    Numeric entities (&#65;) become the corresponding code point and
    named entities (&lt;, &gt;, ...) are looked up in html.entities;
    &amp; is substituted last so freshly produced ampersands are not
    re-expanded.  Adapted from
    http://stackoverflow.com/questions/1197981/convert-html-entities
    """
    # Numeric entities: &#NNN;
    matches = re.findall("&#\d+;", s)
    if len(matches) > 0:
        hits = set(matches)
        for hit in hits:
            name = hit[2:-1]
            try:
                entnum = int(name)
                s = s.replace(hit, chr(entnum))
            except ValueError:
                pass

    # Named entities: &name;
    matches = re.findall("&\w+;", s)
    hits = set(matches)
    # The original compared against a bare "&" (an HTML-unescaping
    # artifact of "&amp;"), so the guard never fired; and it replaced
    # named entities with "" instead of their character.
    amp = "&amp;"
    if amp in hits:
        hits.remove(amp)
    for hit in hits:
        name = hit[1:-1]
        if name in html.entities.name2codepoint:
            s = s.replace(hit, chr(html.entities.name2codepoint[name]))
    s = s.replace(amp, "&")
    return s
|
||||
|
||||
|
||||
def opensoup(url):
    """Fetch *url* with a browser-like User-Agent and parse the page."""
    request = urllib.request.Request(url)
    request.add_header("User-Agent", "Mozilla/5.0")
    # To mimic a real browser's user-agent string more exactly, if necessary:
    # Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.14)
    # Gecko/20080418 Ubuntu/7.10 (gutsy) Firefox/2.0.0.14
    pagefile = urllib.request.urlopen(request)
    try:
        return BeautifulSoup(pagefile)
    finally:
        pagefile.close()
|
||||
|
||||
|
||||
def asciify(s):
    """Strip accents: NFKD-decompose *s*, then drop non-ASCII bytes."""
    decomposed = unicodedata.normalize('NFKD', s)
    return decomposed.encode('ascii', 'ignore')
|
||||
|
||||
|
||||
# remove extra whitespace, including stripping leading and trailing whitespace.
|
||||
def condense(s):
    """Collapse every whitespace run in *s* to one space and strip ends.

    Bug fix: the original passed re.DOTALL as re.sub's 4th positional
    argument, which is *count* -- silently limiting the substitution to
    the first 16 whitespace runs (re.DOTALL == 16).  The flag is
    meaningless for \\s anyway, so it is dropped.
    """
    return re.sub(r"\s+", " ", s).strip()
|
||||
|
||||
|
||||
def stripurl(s):
    """Drop hidden spans and &#160; entities, strip ALL tags, condense."""
    cleaned = re.sub(r"\<span\s+style\s*\=\s*\"display\:none[^\"]*\"[^\>]*\>[^\<]*\<\/span\>", "", s)
    cleaned = re.sub(r"\&\#160\;", " ", cleaned)
    cleaned = re.sub(r"\<[^\>]*\>", " ", cleaned)
    return condense(cleaned)
|
||||
|
||||
|
||||
# this gets rid of tags and condenses whitespace
def striptags(s):
    """Drop hidden spans and &#160; entities, then condense whitespace.

    Unlike stripurl(), markup tags themselves are left in place here.
    """
    cleaned = re.sub(r"\<span\s+style\s*\=\s*\"display\:none[^\"]*\"[^\>]*\>[^\<]*\<\/span\>", "", s)
    cleaned = re.sub(r"\&\#160\;", " ", cleaned)
    return condense(cleaned)
|
||||
|
||||
|
||||
def getUrlArgs(parseUrl):
    """Extract the (discipline, category) pair from a grib2 table URL."""
    match = re.search('grib2_table4-2-(\d+)-(\d+).shtml', parseUrl)
    return match.groups()
|
||||
|
||||
|
||||
# Script entry: a URL argument is required.
if len(sys.argv) == 1:
    print("Usage: ", sys.argv[0], " url [n]")
    print(" (where n indicates which html table to parse)")
    exit(1)

url = sys.argv[1]
soup = opensoup(url)
tables = soup.findAll("table")

# Walk every table row; rows whose second cell contains a link are
# section headers -- print them and scrape the linked sub-table page.
# NOTE(review): this relies on BeautifulSoup 3 (py2-era import above);
# confirm the module actually runs under the rest of this py3 codebase.
for table in tables:
    for r in table.findAll('tr'):
        rl = []
        for c in r.findAll(re.compile('td|th')):
            rl.append(striptags(c.renderContents()))
        if len(rl) > 1 and "href" in rl[1]:
            print('! ' + stripurl(rl[1]))
            # Rebuild the absolute sub-table URL from its two numbers.
            scrapeUrl = 'http://www.nco.ncep.noaa.gov/pmb/docs/grib2/grib2_table4-2-' + \
                getUrlArgs(rl[1])[0] + "-" + getUrlArgs(rl[1])[1] + '.shtml'
            scrape.run(scrapeUrl)
|
106
awips/gempak/scrape.py
Executable file
106
awips/gempak/scrape.py
Executable file
|
@ -0,0 +1,106 @@
|
|||
#!/usr/bin/env python
|
||||
# Parse html tables from a given URL and output CSV.
|
||||
# Note: To install a missing python module foo do "easy_install foo"
|
||||
# (or the new way is "pip install foo" but you might have to do
|
||||
# "easy_install pip" first)
|
||||
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
import urllib.request, urllib.error, urllib.parse
|
||||
import html.entities
|
||||
import re
|
||||
import sys
|
||||
import unicodedata
|
||||
|
||||
|
||||
# from http://stackoverflow.com/questions/1197981/convert-html-entities
|
||||
def asciify2(s):
    """Replace HTML character entities in *s* with their characters.

    Same fix as the copy in ncepGribTables.py: numeric entities become
    their code point; named entities are looked up in html.entities
    (the original replaced them with ""); &amp; is handled last, and the
    original's bare "&" guard (an HTML-unescaping artifact) never fired.
    Adapted from
    http://stackoverflow.com/questions/1197981/convert-html-entities
    """
    # Numeric entities: &#NNN;
    matches = re.findall("&#\d+;", s)
    if len(matches) > 0:
        hits = set(matches)
        for hit in hits:
            name = hit[2:-1]
            try:
                entnum = int(name)
                s = s.replace(hit, chr(entnum))
            except ValueError:
                pass

    # Named entities: &name;
    matches = re.findall("&\w+;", s)
    hits = set(matches)
    amp = "&amp;"
    if amp in hits:
        hits.remove(amp)
    for hit in hits:
        name = hit[1:-1]
        if name in html.entities.name2codepoint:
            s = s.replace(hit, chr(html.entities.name2codepoint[name]))
    s = s.replace(amp, "&")
    return s
|
||||
|
||||
|
||||
def opensoup(url):
    """Fetch *url* with a browser-like User-Agent and parse the page."""
    request = urllib.request.Request(url)
    request.add_header("User-Agent", "Mozilla/5.0")
    # To mimic a real browser's user-agent string more exactly, if necessary:
    # Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.14)
    # Gecko/20080418 Ubuntu/7.10 (gutsy) Firefox/2.0.0.14
    pagefile = urllib.request.urlopen(request)
    try:
        return BeautifulSoup(pagefile)
    finally:
        pagefile.close()
|
||||
|
||||
|
||||
def asciify(s):
    """Strip accents: NFKD-decompose *s*, then drop non-ASCII bytes."""
    decomposed = unicodedata.normalize('NFKD', s)
    return decomposed.encode('ascii', 'ignore')
|
||||
|
||||
|
||||
# remove extra whitespace, including stripping leading and trailing whitespace.
|
||||
def condense(s):
    """Collapse every whitespace run in *s* to one space and strip ends.

    Bug fix (same as ncepGribTables.py): re.DOTALL was passed as
    re.sub's *count* argument, capping substitution at 16 runs.
    """
    return re.sub(r"\s+", " ", s).strip()
|
||||
|
||||
|
||||
# this gets rid of tags and condenses whitespace
|
||||
def striptags(s):
    """Drop hidden spans and &#160; entities, strip ALL tags, condense."""
    cleaned = re.sub(r"\<span\s+style\s*\=\s*\"display\:none[^\"]*\"[^\>]*\>[^\<]*\<\/span\>", "", s)
    cleaned = re.sub(r"\&\#160\;", " ", cleaned)
    cleaned = re.sub(r"\<[^\>]*\>", " ", cleaned)
    return condense(cleaned)
|
||||
|
||||
|
||||
# Module-import-time guard: require a URL argument when run as a script.
if len(sys.argv) == 1:  # called with no arguments
    print("Usage: ", sys.argv[0], " url [n]")
    print(" (where n indicates which html table to parse)")
    exit(1)
|
||||
|
||||
|
||||
def getUrlArgs(parseUrl):
    """Extract the (discipline, category) pair from a grib2 table URL."""
    match = re.search('grib2_table4-2-(\d+)-(\d+).shtml', parseUrl)
    return match.groups()
|
||||
|
||||
|
||||
def run(url):
    """Scrape one grib2 sub-table page at *url* and print GEMPAK rows.

    The first row of each table is treated as a header and skipped.
    """
    soup = opensoup(url)
    tables = soup.findAll("table")
    for table in tables:
        ct = 0
        for r in table.findAll('tr'):
            rl = []
            for c in r.findAll(re.compile('td|th')):
                rl.append(striptags(c.renderContents()))
            if ct > 0:
                # Prefix: zero-padded discipline/category from the URL
                # plus the padded entry number.
                rl[0] = getUrlArgs(url)[0].zfill(3) + " " + \
                    getUrlArgs(url)[1].zfill(3) + " " + rl[0].zfill(3) + " 000"
            if len(rl) > 1:
                # Comment out reserved entries with a leading '!'.
                if "Reserved" in rl[1]:
                    rl[0] = '!' + rl[0]
                if "See Table" in rl[2] or "Code table" in rl[2]:
                    rl[2] = "cat"
                # Fixed-width columns for the GEMPAK table format.
                rl[1] = rl[1][:32].ljust(32)
                rl[2] = rl[2].ljust(20)
                rl[3] = rl[3].ljust(12) + " 0 -9999.00"
            if ct:
                print(" ".join(rl))
            ct += 1
|
||||
|
||||
|
||||
# Script entry: scrape the single URL given on the command line.
if __name__ == '__main__':
    run(sys.argv[1])
|
152
awips/gfe/IFPClient.py
Normal file
152
awips/gfe/IFPClient.py
Normal file
|
@ -0,0 +1,152 @@
|
|||
#
|
||||
# Provides a Python-based interface for executing GFE requests.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 07/26/12 dgilling Initial Creation.
|
||||
#
|
||||
#
|
||||
|
||||
from awips import ThriftClient
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import CommitGridsRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetGridInventoryRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetParmListRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetSelectTimeRangeRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.request import CommitGridRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.site.requests import GetActiveSitesRequest
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.message import ServerResponse
|
||||
|
||||
|
||||
class IFPClient(object):
|
||||
def __init__(self, host, port, user, site=None, progName=None):
    """Create an IFP client bound to *host*:*port* as *user*.

    When *site* is None, the first active site reported by the server
    (via getSiteID()) is used as the default.
    """
    self.__thrift = ThriftClient.ThriftClient(host, port)
    self.__wsId = WsId(userName=user, progName=progName)
    # retrieve default site
    if site is None:
        sr = self.getSiteID()
        if len(sr.getPayload()) > 0:
            site = sr.getPayload()[0]
    self.__siteId = site
|
||||
|
||||
def commitGrid(self, request):
    """Commit a single CommitGridRequest or an iterable of them."""
    if isinstance(request, CommitGridRequest):
        return self.__commitGrid([request])
    if self.__isHomogenousIterable(request, CommitGridRequest):
        return self.__commitGrid(list(request))
    raise TypeError("Invalid type: " + str(type(request)) +
                    " for commitGrid(). Only accepts CommitGridRequest or lists of CommitGridRequest.")
|
||||
|
||||
def __commitGrid(self, requests):
    """Send a CommitGridsRequest for *requests*; wrap the messages."""
    batch = CommitGridsRequest()
    batch.setCommits(requests)
    raw = self.__makeRequest(batch)
    ssr = ServerResponse()
    ssr.setMessages(raw.getMessages())
    return ssr
|
||||
|
||||
def getParmList(self, pid):
    """Fetch the parm list for one DatabaseID or an iterable of them."""
    if isinstance(pid, DatabaseID):
        return self.__getParmList([pid])
    if self.__isHomogenousIterable(pid, DatabaseID):
        return self.__getParmList(list(pid))
    raise TypeError("Invalid type: " + str(type(pid)) +
                    " for getParmList(). Only accepts DatabaseID or lists of DatabaseID.")
|
||||
|
||||
def __getParmList(self, ids):
    """Request the parm list for *ids*; payload defaults to []."""
    request = GetParmListRequest()
    request.setDbIds(ids)
    raw = self.__makeRequest(request)
    ssr = ServerResponse()
    ssr.setMessages(raw.getMessages())
    payload = raw.getPayload()
    ssr.setPayload(payload if payload is not None else [])
    return ssr
|
||||
|
||||
def __isHomogenousIterable(self, iterable, classType):
    """True iff *iterable* is iterable and every element is a *classType*."""
    try:
        return all(isinstance(item, classType) for item in iterable)
    except TypeError:
        # Not iterable at all.
        return False
|
||||
|
||||
def getGridInventory(self, parmID):
    """
    Retrieve the grid inventory for one or more parms.

    Args:
        parmID: a ParmID or an iterable of ParmID

    Returns:
        For a single ParmID, a ServerResponse whose payload is that
        parm's list of TimeRanges (empty when absent); for an iterable,
        a ServerResponse whose payload maps each ParmID to its
        TimeRanges.

    Raises:
        TypeError: if parmID is neither a ParmID nor an iterable of
            ParmID
    """
    if isinstance(parmID, ParmID):
        sr = self.__getGridInventory([parmID])
        try:
            inventory = sr.getPayload()[parmID]
        except KeyError:
            # Parm absent from the returned map: report an empty inventory.
            inventory = []
        sr.setPayload(inventory)
        return sr
    if self.__isHomogenousIterable(parmID, ParmID):
        return self.__getGridInventory(list(parmID))
    raise TypeError("Invalid type: " + str(type(parmID)) +
                    " specified to getGridInventory(). Accepts ParmID or lists of ParmID.")
def __getGridInventory(self, parmIDs):
    """Issue a GetGridInventoryRequest for the given ParmIDs and return a
    ServerResponse whose payload defaults to an empty dict."""
    request = GetGridInventoryRequest()
    request.setParmIds(parmIDs)
    rawResponse = self.__makeRequest(request)
    result = ServerResponse()
    result.setMessages(rawResponse.getMessages())
    payload = rawResponse.getPayload()
    # Normalize a missing payload to an empty mapping for callers.
    result.setPayload({} if payload is None else payload)
    return result
def getSelectTR(self, name):
    """Fetch the select time range with the given name from the server
    and return it wrapped in a ServerResponse."""
    request = GetSelectTimeRangeRequest()
    request.setName(name)
    rawResponse = self.__makeRequest(request)
    result = ServerResponse()
    result.setMessages(rawResponse.getMessages())
    result.setPayload(rawResponse.getPayload())
    return result
def getSiteID(self):
    """
    Retrieve the server's active site identifiers.

    Returns:
        ServerResponse whose payload is the list of active site ID
        strings (empty when the server returned no payload).
    """
    ssr = ServerResponse()
    request = GetActiveSitesRequest()
    sr = self.__makeRequest(request)
    ssr.setMessages(sr.getMessages())
    ids = sr.getPayload() if sr.getPayload() is not None else []
    # Bug fix: previously the payload was set on -- and the return value
    # was -- the raw response `sr`, leaving the wrapper `ssr` half-built
    # dead code. Populate and return `ssr`, matching every other wrapper
    # method in this class.
    ssr.setPayload(ids)
    return ssr
def __makeRequest(self, request):
    # Send a request to the server, stamping it with this client's site
    # and workstation IDs when the request type supports them, and fold
    # the outcome into a ServerResponse. Requests lacking the setters are
    # sent as-is.
    try:
        request.setSiteID(self.__siteId)
    except AttributeError:
        pass
    try:
        request.setWorkstationID(self.__wsId)
    except AttributeError:
        pass

    sr = ServerResponse()
    response = None
    try:
        response = self.__thrift.sendRequest(request)
    except ThriftClient.ThriftRequestException as e:
        # Transport/request failure: record the error text as a message.
        # `response` stays None, which the AttributeError handlers below
        # rely on -- do not reorder these try blocks.
        sr.setMessages([str(e)])
    try:
        sr.setPayload(response.getPayload())
    except AttributeError:
        # response is None (failed request) or is a bare value with no
        # getPayload(): store it verbatim as the payload.
        sr.setPayload(response)
    try:
        sr.setMessages(response.getMessages())
    except AttributeError:
        # not a server response, nothing else to do
        # (also preserves the error message recorded above on failure)
        pass

    return sr
3
awips/gfe/__init__.py
Normal file
3
awips/gfe/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
__all__ = [
|
||||
]
|
131
awips/qpidingest.py
Normal file
131
awips/qpidingest.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
# ===============================================================================
|
||||
# qpidingest.py
|
||||
#
|
||||
# @author: Aaron Anderson
|
||||
# @organization: NOAA/WDTB OU/CIMMS
|
||||
# @version: 1.0 02/19/2010
|
||||
# @requires: QPID Python Client available from http://qpid.apache.org/download.html
|
||||
# The Python Client is located under Single Component Package/Client
|
||||
#
|
||||
# From the README.txt Installation Instructions
|
||||
# = INSTALLATION =
|
||||
# Extract the release archive into a directory of your choice and set
|
||||
# your PYTHONPATH accordingly:
|
||||
#
|
||||
# tar -xzf qpid-python-<version>.tar.gz -C <install-prefix>
|
||||
# export PYTHONPATH=<install-prefix>/qpid-<version>/python
|
||||
#
|
||||
# ***EDEX and QPID must be running for this module to work***
|
||||
#
|
||||
# DESCRIPTION:
|
||||
# This module is used to connect to QPID and send messages to the external.dropbox queue
|
||||
# which tells EDEX to ingest a data file from a specified path. This avoids having to copy
|
||||
# a data file into an endpoint. Each message also contains a header which is used to determine
|
||||
# which plugin should be used to decode the file. Each plugin has an xml file located in
|
||||
# $EDEX_HOME/data/utility/edex_static/base/distribution that contains regular expressions
|
||||
# that the header is compared to. When the header matches one of these regular expressions
|
||||
# the file is decoded with that plugin. If you make changes to one of these xml files you
|
||||
# must restart EDEX for the changes to take effect.
|
||||
#
|
||||
# NOTE: If the message is being sent but you do not see it being ingested in the EDEX log
|
||||
# check the xml files to make sure the header you are passing matches one of the regular
|
||||
# expressions. Beware of spaces, some regular expressions require spaces while others use
|
||||
# a wildcard character so a space is optional. It seems you are better off having the space
|
||||
# as this will be matched to both patterns. For the file in the example below,
|
||||
# 20100218_185755_SAUS46KLOX.metar, I use SAUS46 KLOX as the header to make sure it matches.
|
||||
#
|
||||
#
|
||||
# EXAMPLE:
|
||||
# Simple example program:
|
||||
#
|
||||
# ------------------------------------------------------------------------------
|
||||
# import qpidingest
|
||||
# #Tell EDEX to ingest a metar file from data_store. The filepath is
|
||||
# #/data_store/20100218/metar/00/standard/20100218_005920_SAUS46KSEW.metar
|
||||
#
|
||||
# conn=qpidingest.IngestViaQPID() #defaults to localhost port 5672
|
||||
#
|
||||
# #If EDEX is not on the local machine you can make the connection as follows
|
||||
# #conn=qpidingest.IngestViaQPID(host='<MACHINE NAME>',port=<PORT NUMBER>)
|
||||
#
|
||||
# conn.sendmessage('/data_store/20100218/metar/18/standard/20100218_185755_SAUS46KLOX.metar','SAUS46 KLOX')
|
||||
# conn.close()
|
||||
# -------------------------------------------------------------------------------
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# ....
|
||||
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
|
||||
# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569
|
||||
# 02/16/2017 DR 6084 bsteffen Support ssl connections
|
||||
#
|
||||
# ===============================================================================
|
||||
|
||||
import os
|
||||
import os.path
|
||||
|
||||
import qpid
|
||||
from qpid.util import connect
|
||||
from qpid.connection import Connection
|
||||
from qpid.datatypes import Message, uuid4
|
||||
|
||||
QPID_USERNAME = 'guest'
|
||||
QPID_PASSWORD = 'guest'
|
||||
|
||||
|
||||
class IngestViaQPID:
    """Sends file-ingest notifications to EDEX via the QPID message broker.

    Each message placed on the external.dropbox queue names a file path to
    ingest plus a header string EDEX uses to pick the decoder plugin.
    """

    def __init__(self, host='localhost', port=5672, ssl=None):
        """
        Connect to QPID and make bindings to route message to external.dropbox queue
        @param host: string hostname of computer running EDEX and QPID (default localhost)
        @param port: integer port used to connect to QPID (default 5672)
        @param ssl: boolean to determine whether ssl is used, default value of None will use
        ssl only if a client certificate is found.
        """

        try:
            # Open the raw socket first; it may be wrapped with SSL below.
            socket = connect(host, port)
            # Certificate locations: environment overrides, else ~/.qpid/.
            if "QPID_SSL_CERT_DB" in os.environ:
                certdb = os.environ["QPID_SSL_CERT_DB"]
            else:
                certdb = os.path.expanduser("~/.qpid/")
            if "QPID_SSL_CERT_NAME" in os.environ:
                certname = os.environ["QPID_SSL_CERT_NAME"]
            else:
                certname = QPID_USERNAME
            certfile = os.path.join(certdb, certname + ".crt")
            # ssl=None means "auto": use SSL only when a client cert exists.
            if ssl or (ssl is None and os.path.exists(certfile)):
                keyfile = os.path.join(certdb, certname + ".key")
                trustfile = os.path.join(certdb, "root.crt")
                socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
            self.connection = Connection(sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD)
            self.connection.start()
            self.session = self.connection.session(str(uuid4()))
            # Bind the dropbox queue so messages published to amq.direct
            # with this routing key land on external.dropbox.
            self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox')
            print('Connected to Qpid')
        except ValueError:
            # NOTE(review): connection failures typically raise socket/OS
            # errors rather than ValueError, so this handler may never
            # fire and failures may propagate -- confirm intended behavior.
            print('Unable to connect to Qpid')

    def sendmessage(self, filepath, header):
        """
        This function sends a message to the external.dropbox queue providing the path
        to the file to be ingested and a header to determine the plugin to be used to
        decode the file.
        @param filepath: string full path to file to be ingested
        @param header: string header used to determine plugin decoder to use
        """
        props = self.session.delivery_properties(routing_key='external.dropbox')
        # The header travels in the application headers; EDEX matches it
        # against each plugin's distribution patterns to pick a decoder.
        head = self.session.message_properties(application_headers={'header': header},
                                               user_id=QPID_USERNAME)
        self.session.message_transfer(destination='amq.direct', message=Message(props, head, filepath))

    def close(self):
        """
        After all messages are sent call this function to close connection and make sure
        there are no threads left open
        """
        self.session.close(timeout=10)
        print('Connection to Qpid closed')
2025
awips/tables.py
Normal file
2025
awips/tables.py
Normal file
File diff suppressed because it is too large
Load diff
12
awips/test/Record.py
Normal file
12
awips/test/Record.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
import sys
|
||||
|
||||
|
||||
class Record():
    """Minimal stand-in for a logging.LogRecord, used by tests."""

    def __init__(self, level=0, msg='Test Message'):
        # Mirror the LogRecord attributes the code under test reads.
        self.levelno = level
        self.message = msg
        # Capture whatever exception is active at construction time.
        self.exc_info = sys.exc_info()
        self.exc_text = "TEST"

    def getMessage(self):
        """Return the message text, mirroring LogRecord.getMessage()."""
        return self.message
2
awips/test/__init__.py
Normal file
2
awips/test/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
|
||||
__all__ = []
|
2
awips/test/dafTests/__init__.py
Normal file
2
awips/test/dafTests/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
|
||||
__all__ = []
|
51
awips/test/dafTests/baseBufrMosTestCase.py
Normal file
51
awips/test/dafTests/baseBufrMosTestCase.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
#
|
||||
# Base TestCase for BufrMos* tests.
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/15/16 5981 tgurney Add envelope test
|
||||
#
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class BufrMosTestCase(baseDafTestCase.DafTestCase):
    """Base class for testing DAF support of bufrmos data.

    Subclasses supply the concrete datatype (e.g. a specific bufrmos
    model); the tests here exercise the common request paths.
    """

    # Parameters requested by the geometry-data tests below.
    data_params = "temperature", "dewpoint"

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        # Times are queried for a single known station to bound the result.
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames(params.OBS_STATION)
        self.runTimesTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames(params.OBS_STATION)
        req.setParameters(*self.data_params)
        self.runGeometryDataTest(req)

    def testGetGeometryDataWithEnvelope(self):
        # Spatially-constrained request: every returned geometry must fall
        # inside the requested envelope.
        req = DAL.newDataRequest(self.datatype)
        req.setParameters(*self.data_params)
        req.setEnvelope(params.ENVELOPE)
        data = self.runGeometryDataTest(req)
        for item in data:
            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
192
awips/test/dafTests/baseDafTestCase.py
Normal file
192
awips/test/dafTests/baseDafTestCase.py
Normal file
|
@ -0,0 +1,192 @@
|
|||
#
|
||||
# Base TestCase for DAF tests. This class provides helper methods and
|
||||
# tests common to all DAF test cases.
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/13/16 5379 tgurney Add identifier values tests
|
||||
# 04/18/16 5548 tgurney More cleanup, plus new tests
|
||||
# 04/26/16 5587 tgurney Move identifier values tests
|
||||
# to subclasses
|
||||
# 06/01/16 5587 tgurney Add testGet*Identifiers
|
||||
# 06/07/16 5574 tgurney Make geometry/grid data tests
|
||||
# return the retrieved data
|
||||
# 06/10/16 5548 tgurney Make testDatatypeIsSupported
|
||||
# case-insensitive
|
||||
# 08/10/16 2416 tgurney Don't test identifier values
|
||||
# for dataURI
|
||||
# 10/05/16 5926 dgilling Better checks in runGeometryDataTest.
|
||||
# 11/08/16 5985 tgurney Do not check data times on
|
||||
# time-agnostic data
|
||||
# 03/13/17 5981 tgurney Do not check valid period on
|
||||
# data time
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
|
||||
class DafTestCase(unittest.TestCase):
    """Base TestCase for Data Access Framework (DAF) tests.

    Provides run*Test helper methods that subclasses call with a
    configured request, plus tests common to every supported datatype.
    """

    sampleDataLimit = 5
    """
    Maximum number of levels, locations, times, and geometry/grid data to
    display
    """

    numTimesToLimit = 3
    """
    When limiting geometry/grid data requests with times, only retrieve data
    for this many times
    """

    datatype = None
    """Name of the datatype"""

    @classmethod
    def setUpClass(cls):
        # Point the DAF at the EDEX server under test; overridable via
        # the DAF_TEST_HOST environment variable.
        host = os.environ.get('DAF_TEST_HOST')
        if host is None:
            host = 'edex-cloud.unidata.ucar.edu'
        DAL.changeEDEXHost(host)

    @staticmethod
    def getTimesIfSupported(req):
        """Return available times for req. If req refers to a time-agnostic
        datatype, return an empty list instead.
        """
        times = []
        try:
            times = DAL.getAvailableTimes(req)
        except ThriftRequestException as e:
            # Only swallow the "datatype has no times" error; anything
            # else is a genuine failure and is re-raised.
            if 'TimeAgnosticDataException' not in str(e):
                raise
        return times

    def testDatatypeIsSupported(self):
        # Server must advertise this datatype among its supported types.
        allSupported = DAL.getSupportedDatatypes()
        self.assertIn(self.datatype, allSupported)

    def testGetRequiredIdentifiers(self):
        req = DAL.newDataRequest(self.datatype)
        required = DAL.getRequiredIdentifiers(req)
        self.assertIsNotNone(required)
        print("Required identifiers:", required)

    def testGetOptionalIdentifiers(self):
        req = DAL.newDataRequest(self.datatype)
        optional = DAL.getOptionalIdentifiers(req)
        self.assertIsNotNone(optional)
        print("Optional identifiers:", optional)

    def runGetIdValuesTest(self, identifiers):
        # For each identifier, verify the server returns an iterable of
        # possible values. dataURI is skipped deliberately (not queryable).
        for identifier in identifiers:
            if identifier.lower() == 'datauri':
                continue
            req = DAL.newDataRequest(self.datatype)
            idValues = DAL.getIdentifierValues(req, identifier)
            self.assertTrue(hasattr(idValues, '__iter__'))

    def runInvalidIdValuesTest(self):
        # SQL-injection-shaped identifier must be rejected by the server.
        badString = 'id from ' + self.datatype + '; select 1;'
        with self.assertRaises(ThriftRequestException):
            req = DAL.newDataRequest(self.datatype)
            DAL.getIdentifierValues(req, badString)

    def runNonexistentIdValuesTest(self):
        # Unknown identifier names must raise rather than return empty.
        with self.assertRaises(ThriftRequestException):
            req = DAL.newDataRequest(self.datatype)
            DAL.getIdentifierValues(req, 'idthatdoesnotexist')

    def runParametersTest(self, req):
        params = DAL.getAvailableParameters(req)
        self.assertIsNotNone(params)
        print(params)

    def runLevelsTest(self, req):
        levels = DAL.getAvailableLevels(req)
        self.assertIsNotNone(levels)
        print("Number of levels: " + str(len(levels)))
        strLevels = [str(t) for t in levels[:self.sampleDataLimit]]
        print("Sample levels:\n" + str(strLevels))

    def runLocationsTest(self, req):
        locs = DAL.getAvailableLocationNames(req)
        self.assertIsNotNone(locs)
        print("Number of location names: " + str(len(locs)))
        print("Sample location names:\n" + str(locs[:self.sampleDataLimit]))

    def runTimesTest(self, req):
        times = DAL.getAvailableTimes(req)
        self.assertIsNotNone(times)
        print("Number of times: " + str(len(times)))
        strTimes = [str(t) for t in times[:self.sampleDataLimit]]
        print("Sample times:\n" + str(strTimes))

    def runTimeAgnosticTest(self, req):
        # For datatypes without a time dimension, asking for times must
        # fail with a TimeAgnosticDataException from the server.
        with self.assertRaises(ThriftRequestException) as cm:
            DAL.getAvailableTimes(req)
        self.assertIn('TimeAgnosticDataException', str(cm.exception))

    def runGeometryDataTest(self, req, checkDataTimes=True):
        """
        Test that we are able to successfully retrieve geometry data for the
        given request.

        NOTE(review): checkDataTimes is currently unused in this body --
        confirm whether data-time validation was intended here.
        """
        times = DafTestCase.getTimesIfSupported(req)
        # Limit the request to a few times to keep the test fast.
        geomData = DAL.getGeometryData(req, times[:self.numTimesToLimit])
        self.assertIsNotNone(geomData)
        if not geomData:
            raise unittest.SkipTest("No data available")
        print("Number of geometry records: " + str(len(geomData)))
        return geomData

    def runGeometryDataTestWithTimeRange(self, req, timeRange):
        """
        Test that we are able to successfully retrieve geometry data for the
        given request.
        """
        geomData = DAL.getGeometryData(req, timeRange)
        self.assertIsNotNone(geomData)
        if not geomData:
            raise unittest.SkipTest("No data available")
        print("Number of geometry records: " + str(len(geomData)))
        return geomData

    def runGridDataTest(self, req, testSameShape=True):
        """
        Test that we are able to successfully retrieve grid data for the given
        request.

        Args:
            req: the grid request
            testSameShape: whether or not to verify that all the retrieved data
                have the same shape (most data don't change shape)
        """
        times = DafTestCase.getTimesIfSupported(req)
        gridData = DAL.getGridData(req, times[:self.numTimesToLimit])
        self.assertIsNotNone(gridData)
        if not gridData:
            raise unittest.SkipTest("No data available")
        print("Number of grid records: " + str(len(gridData)))
        if len(gridData) > 0:
            print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n")
            print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n")
            print("Sample lat-lon data:\n" + str(gridData[0].getLatLonCoords()) + "\n")

        if testSameShape:
            # Every record's raw data must match the shape of the first
            # record's lat/lon grid.
            correctGridShape = gridData[0].getLatLonCoords()[0].shape
            for record in gridData:
                rawData = record.getRawData()
                self.assertIsNotNone(rawData)
                self.assertEqual(rawData.shape, correctGridShape)
        return gridData
167
awips/test/dafTests/baseRadarTestCase.py
Normal file
167
awips/test/dafTests/baseRadarTestCase.py
Normal file
|
@ -0,0 +1,167 @@
|
|||
#
|
||||
# Tests common to all radar factories
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 04/26/16 5587 tgurney Move identifier values tests
|
||||
# out of base class
|
||||
# 06/01/16 5587 tgurney Update testGetIdentifierValues
|
||||
# 06/08/16 5574 mapeters Add advanced query tests
|
||||
# 06/13/16 5574 tgurney Fix checks for None
|
||||
# 06/14/16 5548 tgurney Undo previous change (broke
|
||||
# test)
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 08/25/16 2671 tgurney Rename to baseRadarTestCase
|
||||
# and move factory-specific
|
||||
# tests
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class BaseRadarTestCase(baseDafTestCase.DafTestCase):
    """Tests common to all radar factories.

    Subclasses must set datatype and implement runConstraintTest, which
    issues a request constrained on the given key/operator/value and
    returns the retrieved records.
    """

    # datatype is specified by subclass
    datatype = None

    # Radar site identifier, lowercased to match the stored 'icao' values.
    radarLoc = params.RADAR.lower()

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableLevels(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLevelsTest(req)

    def testGetAvailableLevelsWithInvalidLevelIdentifierThrowsException(self):
        # A bogus level field must surface an IncompatibleRequestException.
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('level.one.field', 'invalidLevelField')
        with self.assertRaises(ThriftRequestException) as cm:
            self.runLevelsTest(req)
        self.assertIn('IncompatibleRequestException', str(cm.exception))

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        self.runTimesTest(req)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        requiredIds = set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(optionalIds | requiredIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def runConstraintTest(self, key, operator, value):
        # Implemented by each concrete radar factory test case.
        raise NotImplementedError

    def testGetDataWithEqualsString(self):
        gridData = self.runConstraintTest('icao', '=', self.radarLoc)
        for record in gridData:
            self.assertEqual(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithEqualsInt(self):
        gridData = self.runConstraintTest('icao', '=', 1000)
        for record in gridData:
            self.assertEqual(record.getAttribute('icao'), 1000)

    def testGetDataWithEqualsLong(self):
        # NOTE(review): identical to testGetDataWithEqualsInt; a Python 2
        # long literal (1000L) was presumably intended here -- confirm.
        gridData = self.runConstraintTest('icao', '=', 1000)
        for record in gridData:
            self.assertEqual(record.getAttribute('icao'), 1000)

    def testGetDataWithEqualsFloat(self):
        gridData = self.runConstraintTest('icao', '=', 1.0)
        for record in gridData:
            self.assertEqual(round(record.getAttribute('icao'), 1), 1.0)

    def testGetDataWithEqualsNone(self):
        gridData = self.runConstraintTest('icao', '=', None)
        for record in gridData:
            self.assertIsNone(record.getAttribute('icao'))

    def testGetDataWithNotEquals(self):
        gridData = self.runConstraintTest('icao', '!=', self.radarLoc)
        for record in gridData:
            self.assertNotEqual(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithNotEqualsNone(self):
        gridData = self.runConstraintTest('icao', '!=', None)
        for record in gridData:
            self.assertIsNotNone(record.getAttribute('icao'))

    def testGetDataWithGreaterThan(self):
        gridData = self.runConstraintTest('icao', '>', self.radarLoc)
        for record in gridData:
            self.assertGreater(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithLessThan(self):
        gridData = self.runConstraintTest('icao', '<', self.radarLoc)
        for record in gridData:
            self.assertLess(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithGreaterThanEquals(self):
        gridData = self.runConstraintTest('icao', '>=', self.radarLoc)
        for record in gridData:
            self.assertGreaterEqual(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithLessThanEquals(self):
        gridData = self.runConstraintTest('icao', '<=', self.radarLoc)
        for record in gridData:
            self.assertLessEqual(record.getAttribute('icao'), self.radarLoc)

    def testGetDataWithInTuple(self):
        gridData = self.runConstraintTest('icao', 'in', (self.radarLoc, 'tpbi'))
        for record in gridData:
            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))

    def testGetDataWithInList(self):
        gridData = self.runConstraintTest('icao', 'in', [self.radarLoc, 'tpbi'])
        for record in gridData:
            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))

    def testGetDataWithInGenerator(self):
        # 'in' constraints must also accept single-pass generators.
        generator = (item for item in (self.radarLoc, 'tpbi'))
        gridData = self.runConstraintTest('icao', 'in', generator)
        for record in gridData:
            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))

    def testGetDataWithNotInList(self):
        gridData = self.runConstraintTest('icao', 'not in', ['zzzz', self.radarLoc])
        for record in gridData:
            self.assertNotIn(record.getAttribute('icao'), ('zzzz', self.radarLoc))

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self.runConstraintTest('icao', 'junk', self.radarLoc)

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        # Unhashable/unsupported constraint values are rejected client-side.
        with self.assertRaises(TypeError):
            self.runConstraintTest('icao', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self.runConstraintTest('icao', 'in', [])
23
awips/test/dafTests/params.py
Normal file
23
awips/test/dafTests/params.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
#
|
||||
# Site-specific parameters for DAF tests
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 12/07/16 5981 tgurney Initial creation
|
||||
# 12/15/16 5981 tgurney Add ENVELOPE
|
||||
#
|
||||
#
|
||||
|
||||
from shapely.geometry import box
|
||||
|
||||
|
||||
# Identifiers for the default test site, used throughout the DAF tests.
AIRPORT = 'OMA'  # airport identifier
OBS_STATION = 'KOMA'  # surface observation station (used with setLocationNames)
SITE_ID = 'OAX'  # site identifier
STATION_ID = '72558'  # numeric station identifier (presumably upper-air) -- confirm
RADAR = 'KOAX'  # radar site identifier (lowercased by radar tests for 'icao')
SAMPLE_AREA = (-97.0, 41.0, -96.0, 42.0)  # lon/lat bounds: (minx, miny, maxx, maxy)

ENVELOPE = box(*SAMPLE_AREA)  # shapely polygon used for envelope-constrained requests
39
awips/test/dafTests/testAcars.py
Normal file
39
awips/test/dafTests/testAcars.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
#
|
||||
# Test DAF support for ACARS data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class AcarsTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for ACARS data"""

    datatype = "acars"

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        self.runTimesTest(req)

    def testGetGeometryData(self):
        # Request two representative ACARS parameters and verify records
        # come back via the shared geometry-data helper.
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("flightLevel", "tailNumber")
        self.runGeometryDataTest(req)
146
awips/test/dafTests/testAirep.py
Normal file
146
awips/test/dafTests/testAirep.py
Normal file
|
@ -0,0 +1,146 @@
|
|||
#
|
||||
# Test DAF support for airep data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class AirepTestCase(baseDafTestCase.DafTestCase):
|
||||
"""Test DAF support for airep data"""
|
||||
|
||||
datatype = "airep"
|
||||
|
||||
def testGetAvailableParameters(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
self.runParametersTest(req)
|
||||
|
||||
def testGetAvailableLocations(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
self.runLocationsTest(req)
|
||||
|
||||
def testGetAvailableTimes(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
self.runTimesTest(req)
|
||||
|
||||
def testGetGeometryData(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
req.setParameters("flightLevel", "reportType")
|
||||
self.runGeometryDataTest(req)
|
||||
|
||||
def testGetIdentifierValues(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
optionalIds = set(DAL.getOptionalIdentifiers(req))
|
||||
self.runGetIdValuesTest(optionalIds)
|
||||
|
||||
def testGetInvalidIdentifierValuesThrowsException(self):
|
||||
self.runInvalidIdValuesTest()
|
||||
|
||||
def testGetNonexistentIdentifierValuesThrowsException(self):
|
||||
self.runNonexistentIdValuesTest()
|
||||
|
||||
def _runConstraintTest(self, key, operator, value):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
constraint = RequestConstraint.new(operator, value)
|
||||
req.setParameters("flightLevel", "reportType")
|
||||
req.addIdentifier(key, constraint)
|
||||
return self.runGeometryDataTest(req)
|
||||
|
||||
def testGetDataWithEqualsString(self):
|
||||
geometryData = self._runConstraintTest('reportType', '=', 'AIREP')
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getString('reportType'), 'AIREP')
|
||||
|
||||
# No numeric tests since no numeric identifiers are available.
|
||||
|
||||
def testGetDataWithEqualsNone(self):
|
||||
geometryData = self._runConstraintTest('reportType', '=', None)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getType('reportType'), 'NULL')
|
||||
|
||||
def testGetDataWithNotEquals(self):
|
||||
geometryData = self._runConstraintTest('reportType', '!=', 'AIREP')
|
||||
for record in geometryData:
|
||||
self.assertNotEqual(record.getString('reportType'), 'AIREP')
|
||||
|
||||
def testGetDataWithNotEqualsNone(self):
|
||||
geometryData = self._runConstraintTest('reportType', '!=', None)
|
||||
for record in geometryData:
|
||||
self.assertNotEqual(record.getType('reportType'), 'NULL')
|
||||
|
||||
def testGetDataWithGreaterThan(self):
|
||||
geometryData = self._runConstraintTest('reportType', '>', 'AIREP')
|
||||
for record in geometryData:
|
||||
self.assertGreater(record.getString('reportType'), 'AIREP')
|
||||
|
||||
def testGetDataWithLessThan(self):
|
||||
geometryData = self._runConstraintTest('reportType', '<', 'AIREP')
|
||||
for record in geometryData:
|
||||
self.assertLess(record.getString('reportType'), 'AIREP')
|
||||
|
||||
def testGetDataWithGreaterThanEquals(self):
|
||||
geometryData = self._runConstraintTest('reportType', '>=', 'AIREP')
|
||||
for record in geometryData:
|
||||
self.assertGreaterEqual(record.getString('reportType'), 'AIREP')
|
||||
|
||||
def testGetDataWithLessThanEquals(self):
    """'<=' admits records at or below the given reportType."""
    for rec in self._runConstraintTest('reportType', '<=', 'AIREP'):
        self.assertLessEqual(rec.getString('reportType'), 'AIREP')
|
||||
|
||||
def testGetDataWithInTuple(self):
    """'in' accepts a tuple of allowed values."""
    allowed = ('AIREP', 'AMDAR')
    for rec in self._runConstraintTest('reportType', 'in', allowed):
        self.assertIn(rec.getString('reportType'), allowed)
|
||||
|
||||
def testGetDataWithInList(self):
    """'in' accepts a list of allowed values."""
    allowed = ['AIREP', 'AMDAR']
    for rec in self._runConstraintTest('reportType', 'in', allowed):
        self.assertIn(rec.getString('reportType'), allowed)
|
||||
|
||||
def testGetDataWithInGenerator(self):
    """'in' accepts any iterable, including a generator."""
    allowed = ('AIREP', 'AMDAR')
    gen = (value for value in allowed)
    for rec in self._runConstraintTest('reportType', 'in', gen):
        self.assertIn(rec.getString('reportType'), allowed)
|
||||
|
||||
def testGetDataWithNotInList(self):
    """'not in' excludes every listed reportType."""
    excluded = ['AMDAR']
    for rec in self._runConstraintTest('reportType', 'not in', excluded):
        self.assertNotIn(rec.getString('reportType'), excluded)
|
||||
|
||||
def testGetDataWithInvalidConstraintTypeThrowsException(self):
    """An unknown constraint operator raises ValueError."""
    with self.assertRaises(ValueError):
        self._runConstraintTest('reportType', 'junk', 'AIREP')
|
||||
|
||||
def testGetDataWithInvalidConstraintValueThrowsException(self):
    """An unsupported constraint value type (dict) raises TypeError."""
    with self.assertRaises(TypeError):
        self._runConstraintTest('reportType', '=', {})
|
||||
|
||||
def testGetDataWithEmptyInConstraintThrowsException(self):
    """An empty 'in' collection raises ValueError."""
    with self.assertRaises(ValueError):
        self._runConstraintTest('reportType', 'in', [])
|
||||
|
||||
def testGetDataWithNestedInConstraintThrowsException(self):
    """A nested collection inside an 'in' constraint raises TypeError."""
    values = ('AIREP', 'AMDAR', ())
    with self.assertRaises(TypeError):
        self._runConstraintTest('reportType', 'in', values)
|
170
awips/test/dafTests/testBinLightning.py
Normal file
170
awips/test/dafTests/testBinLightning.py
Normal file
|
@ -0,0 +1,170 @@
|
|||
#
|
||||
# Test DAF support for binlightning data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 04/21/16 5551 tgurney Add tests to verify #5551
|
||||
# 04/25/16 5587 tgurney Enable skipped test added in
|
||||
# #5551
|
||||
# 04/26/16 5587 tgurney Move identifier values tests
|
||||
# out of base class
|
||||
# 06/01/16 5587 tgurney Update testGetIdentifierValues
|
||||
# 06/03/16 5574 tgurney Add advanced query tests
|
||||
# 06/13/16 5574 tgurney Typo
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 11/08/16 5985 tgurney Do not check data times
|
||||
#
|
||||
#
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class BinLightningTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for binlightning data.

    Data times are not checked for this datatype, so all geometry-data
    helpers are invoked with ``checkDataTimes=False``.
    """

    datatype = "binlightning"
    source = "GLMfl"

    def testGetAvailableParameters(self):
        self.runParametersTest(DAL.newDataRequest(self.datatype))

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('source', self.source)
        self.runTimesTest(req)

    def testGetGeometryDataSingleSourceSingleParameter(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('source', self.source)
        req.setParameters('intensity')
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataInvalidParamRaisesIncompatibleRequestException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('source', self.source)
        req.setParameters('blahblahblah')
        with self.assertRaises(ThriftRequestException) as ctx:
            self.runGeometryDataTest(req)
        self.assertIn('IncompatibleRequestException', str(ctx.exception))

    def testGetGeometryDataSingleSourceAllParameters(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('source', self.source)
        req.setParameters(*DAL.getAvailableParameters(req))
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        allIds = set(DAL.getOptionalIdentifiers(req)) | set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(allIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Run a geometry query constrained on one identifier."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        req.setParameters('intensity')
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetDataWithEqualsString(self):
        for rec in self._runConstraintTest('source', '=', self.source):
            self.assertEqual(rec.getAttribute('source'), self.source)

    def testGetDataWithEqualsInt(self):
        for rec in self._runConstraintTest('source', '=', 1000):
            self.assertEqual(rec.getAttribute('source'), 1000)

    def testGetDataWithEqualsLong(self):
        for rec in self._runConstraintTest('source', '=', 1000):
            self.assertEqual(rec.getAttribute('source'), 1000)

    def testGetDataWithEqualsFloat(self):
        for rec in self._runConstraintTest('source', '=', 1.0):
            self.assertEqual(round(rec.getAttribute('source'), 1), 1.0)

    def testGetDataWithEqualsNone(self):
        for rec in self._runConstraintTest('source', '=', None):
            self.assertIsNone(rec.getAttribute('source'))

    def testGetDataWithNotEquals(self):
        for rec in self._runConstraintTest('source', '!=', self.source):
            self.assertNotEqual(rec.getAttribute('source'), self.source)

    def testGetDataWithNotEqualsNone(self):
        for rec in self._runConstraintTest('source', '!=', None):
            self.assertIsNotNone(rec.getAttribute('source'))

    def testGetDataWithGreaterThan(self):
        for rec in self._runConstraintTest('source', '>', self.source):
            self.assertGreater(rec.getAttribute('source'), self.source)

    def testGetDataWithLessThan(self):
        for rec in self._runConstraintTest('source', '<', self.source):
            self.assertLess(rec.getAttribute('source'), self.source)

    def testGetDataWithGreaterThanEquals(self):
        for rec in self._runConstraintTest('source', '>=', self.source):
            self.assertGreaterEqual(rec.getAttribute('source'), self.source)

    def testGetDataWithLessThanEquals(self):
        for rec in self._runConstraintTest('source', '<=', self.source):
            self.assertLessEqual(rec.getAttribute('source'), self.source)

    def testGetDataWithInTuple(self):
        allowed = (self.source, 'GLMev')
        for rec in self._runConstraintTest('source', 'in', allowed):
            self.assertIn(rec.getAttribute('source'), allowed)

    def testGetDataWithInList(self):
        allowed = [self.source, 'GLMev']
        for rec in self._runConstraintTest('source', 'in', allowed):
            self.assertIn(rec.getAttribute('source'), (self.source, 'GLMev'))

    def testGetDataWithInGenerator(self):
        allowed = (self.source, 'GLMev')
        gen = (value for value in allowed)
        for rec in self._runConstraintTest('source', 'in', gen):
            self.assertIn(rec.getAttribute('source'), allowed)

    def testGetDataWithNotInList(self):
        excluded = [self.source, 'blah']
        for rec in self._runConstraintTest('source', 'not in', excluded):
            self.assertNotIn(rec.getAttribute('source'), (self.source, 'blah'))

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('source', 'junk', self.source)

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('source', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('source', 'in', [])
|
22
awips/test/dafTests/testBufrMosGfs.py
Normal file
22
awips/test/dafTests/testBufrMosGfs.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
#
|
||||
# Test DAF support for bufrmosGFS data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
#
|
||||
#
|
||||
|
||||
from awips.test.dafTests import baseBufrMosTestCase
|
||||
|
||||
|
||||
class BufrMosGfsTestCase(baseBufrMosTestCase.BufrMosTestCase):
    """Test DAF support for bufrmosGFS data.

    All tests are inherited from the shared bufrmos base class; only the
    datatype differs.
    """

    datatype = "bufrmosGFS"
|
195
awips/test/dafTests/testBufrUa.py
Normal file
195
awips/test/dafTests/testBufrUa.py
Normal file
|
@ -0,0 +1,195 @@
|
|||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
#
|
||||
# Test DAF support for bufrua data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/15/16 5981 tgurney Add envelope test
|
||||
#
|
||||
#
|
||||
|
||||
|
||||
class BufrUaTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for bufrua data."""

    datatype = "bufrua"
    location = params.STATION_ID

    def testGetAvailableParameters(self):
        self.runParametersTest(DAL.newDataRequest(self.datatype))

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier("reportType", "2020")
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames(self.location)
        req.addIdentifier("reportType", "2020")
        self.runTimesTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames(self.location)
        req.addIdentifier("reportType", "2020")
        req.setParameters("sfcPressure", "staName", "rptType", "tdMan")

        print("Testing getGeometryData()")

        records = DAL.getGeometryData(req)
        self.assertIsNotNone(records)
        print("Number of geometry records: " + str(len(records)))
        print("Sample geometry data:")
        for rec in records[:self.sampleDataLimit]:
            print("level=", rec.getLevel(), end="")
            # One dimensional parameters are reported on the 0.0UNKNOWN level.
            # 2D parameters are reported on MB levels from pressure.
            if rec.getLevel() == "0.0UNKNOWN":
                print(" sfcPressure=" + rec.getString("sfcPressure") + rec.getUnit("sfcPressure"), end="")
                print(" staName=" + rec.getString("staName"), end="")
                print(" rptType=" + rec.getString("rptType") + rec.getUnit("rptType"), end="")
            else:
                print(" tdMan=" + str(rec.getNumber("tdMan")) + rec.getUnit("tdMan"), end="")
            print(" geometry=", rec.getGeometry())

        print("getGeometryData() complete\n\n")

    def testGetGeometryDataWithEnvelope(self):
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("staName", "rptType")
        req.setEnvelope(params.ENVELOPE)
        for rec in self.runGeometryDataTest(req):
            self.assertTrue(params.ENVELOPE.contains(rec.getGeometry()))

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        self.runGetIdValuesTest(set(DAL.getOptionalIdentifiers(req)))

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Run a geometry query constrained on one identifier."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        # As an identifier it is "reportType" but as a parameter it is
        # "rptType"... this is weird...
        req.setParameters("staName", "rptType")
        return self.runGeometryDataTest(req)

    def testGetDataWithEqualsString(self):
        for rec in self._runConstraintTest('reportType', '=', '2022'):
            self.assertEqual(rec.getString('rptType'), '2022')

    def testGetDataWithEqualsInt(self):
        for rec in self._runConstraintTest('reportType', '=', 2022):
            self.assertEqual(rec.getString('rptType'), '2022')

    def testGetDataWithEqualsLong(self):
        for rec in self._runConstraintTest('reportType', '=', 2022):
            self.assertEqual(rec.getString('rptType'), '2022')

    # No float test because no float identifiers are available

    def testGetDataWithEqualsNone(self):
        for rec in self._runConstraintTest('reportType', '=', None):
            self.assertEqual(rec.getType('rptType'), 'NULL')

    def testGetDataWithNotEquals(self):
        for rec in self._runConstraintTest('reportType', '!=', 2022):
            self.assertNotEqual(rec.getString('rptType'), '2022')

    def testGetDataWithNotEqualsNone(self):
        for rec in self._runConstraintTest('reportType', '!=', None):
            self.assertNotEqual(rec.getType('rptType'), 'NULL')

    def testGetDataWithGreaterThan(self):
        for rec in self._runConstraintTest('reportType', '>', 2022):
            self.assertGreater(rec.getString('rptType'), '2022')

    def testGetDataWithLessThan(self):
        for rec in self._runConstraintTest('reportType', '<', 2022):
            self.assertLess(rec.getString('rptType'), '2022')

    def testGetDataWithGreaterThanEquals(self):
        for rec in self._runConstraintTest('reportType', '>=', 2022):
            self.assertGreaterEqual(rec.getString('rptType'), '2022')

    def testGetDataWithLessThanEquals(self):
        for rec in self._runConstraintTest('reportType', '<=', 2022):
            self.assertLessEqual(rec.getString('rptType'), '2022')

    def testGetDataWithInTuple(self):
        allowed = ('2022', '2032')
        for rec in self._runConstraintTest('reportType', 'in', allowed):
            self.assertIn(rec.getString('rptType'), allowed)

    def testGetDataWithInList(self):
        allowed = ['2022', '2032']
        for rec in self._runConstraintTest('reportType', 'in', allowed):
            self.assertIn(rec.getString('rptType'), allowed)

    def testGetDataWithInGenerator(self):
        allowed = ('2022', '2032')
        gen = (value for value in allowed)
        for rec in self._runConstraintTest('reportType', 'in', gen):
            self.assertIn(rec.getString('rptType'), allowed)

    def testGetDataWithNotInList(self):
        excluded = ('2022', '2032')
        for rec in self._runConstraintTest('reportType', 'not in', excluded):
            self.assertNotIn(rec.getString('rptType'), excluded)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', '2022')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('reportType', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('rptType', 'in', [])

    def testGetDataWithNestedInConstraintThrowsException(self):
        values = ('2022', '2032', ())
        with self.assertRaises(TypeError):
            self._runConstraintTest('rptType', 'in', values)
|
49
awips/test/dafTests/testCombinedTimeQuery.py
Normal file
49
awips/test/dafTests/testCombinedTimeQuery.py
Normal file
|
@ -0,0 +1,49 @@
|
|||
#
|
||||
# Test the CombinedTimedQuery module
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/24/16 5591 bsteffen Initial Creation.
|
||||
# 11/08/16 5895 tgurney Change grid model
|
||||
#
|
||||
#
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.dataaccess import CombinedTimeQuery as CTQ
|
||||
|
||||
import unittest
|
||||
import os
|
||||
|
||||
|
||||
class CombinedTimeQueryTestCase(unittest.TestCase):
    """Test the CombinedTimeQuery module against a live EDEX server."""

    # Grid model whose inventory is queried in both tests.
    modelName = "RAP13"

    @classmethod
    def setUpClass(cls):
        """Point the DAL at the EDEX host once for the whole test class.

        BUG FIX: this hook was previously a ``@classmethod`` named
        ``setUp``; unittest's per-test ``setUp`` hook is an *instance*
        method, so the ``@classmethod`` one-time-configuration intent
        matches ``setUpClass``.
        """
        # DAF_TEST_HOST overrides the default public EDEX server.
        host = os.environ.get('DAF_TEST_HOST', 'edex-cloud.unidata.ucar.edu')
        DAL.changeEDEXHost(host)

    def testSuccessfulQuery(self):
        """Parameters available on all requested levels yield some times."""
        req = DAL.newDataRequest('grid')
        req.setLocationNames(self.modelName)
        req.setParameters('T', 'GH')
        req.setLevels('300MB', '500MB', '700MB')
        times = CTQ.getAvailableTimes(req)
        self.assertNotEqual(len(times), 0)

    def testNonIntersectingQuery(self):
        """
        Test that when a parameter is only available on one of the levels that no times are returned.
        """
        req = DAL.newDataRequest('grid')
        req.setLocationNames(self.modelName)
        req.setParameters('T', 'GH', 'LgSP1hr')
        req.setLevels('300MB', '500MB', '700MB', '0.0SFC')
        times = CTQ.getAvailableTimes(req)
        self.assertEqual(len(times), 0)
|
150
awips/test/dafTests/testCommonObsSpatial.py
Normal file
150
awips/test/dafTests/testCommonObsSpatial.py
Normal file
|
@ -0,0 +1,150 @@
|
|||
#
|
||||
# Test DAF support for common_obs_spatial data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 05/26/16 5587 njensen Added testGetIdentifierValues()
|
||||
# 06/01/16 5587 tgurney Move testIdentifiers() to
|
||||
# superclass
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/21/16 5548 tgurney Skip tests that cause errors
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 01/06/17 5981 tgurney Do not check data times
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for common_obs_spatial data.

    This datatype is time-agnostic, so geometry-data helpers are invoked
    with ``checkDataTimes=False``.
    """

    datatype = "common_obs_spatial"

    def testGetAvailableParameters(self):
        self.runParametersTest(DAL.newDataRequest(self.datatype))

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier("country", ["US", "CN"])
        self.runLocationsTest(req)

    def testGetIdentifierValues(self):
        self.runGetIdValuesTest(['country'])

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setParameters("name", "stationid")
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testRequestingTimesThrowsTimeAgnosticDataException(self):
        self.runTimeAgnosticTest(DAL.newDataRequest(self.datatype))

    def _runConstraintTest(self, key, operator, value):
        """Run a geometry query constrained on one identifier."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        req.setParameters('catalogtype', 'elevation', 'state')
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetDataWithEqualsString(self):
        for rec in self._runConstraintTest('state', '=', 'NE'):
            self.assertEqual(rec.getString('state'), 'NE')

    def testGetDataWithEqualsInt(self):
        for rec in self._runConstraintTest('catalogtype', '=', 32):
            self.assertEqual(rec.getNumber('catalogtype'), 32)

    def testGetDataWithEqualsLong(self):
        for rec in self._runConstraintTest('elevation', '=', 0):
            self.assertEqual(rec.getNumber('elevation'), 0)

    # No float test since there are no float identifiers available. Attempting
    # to filter a non-float identifier on a float value raises an exception.

    def testGetDataWithEqualsNone(self):
        for rec in self._runConstraintTest('state', '=', None):
            self.assertEqual(rec.getType('state'), 'NULL')

    def testGetDataWithNotEquals(self):
        for rec in self._runConstraintTest('state', '!=', 'NE'):
            self.assertNotEqual(rec.getString('state'), 'NE')

    def testGetDataWithNotEqualsNone(self):
        for rec in self._runConstraintTest('state', '!=', None):
            self.assertNotEqual(rec.getType('state'), 'NULL')

    def testGetDataWithGreaterThan(self):
        for rec in self._runConstraintTest('elevation', '>', 500):
            self.assertGreater(rec.getNumber('elevation'), 500)

    def testGetDataWithLessThan(self):
        for rec in self._runConstraintTest('elevation', '<', 100):
            self.assertLess(rec.getNumber('elevation'), 100)

    def testGetDataWithGreaterThanEquals(self):
        for rec in self._runConstraintTest('elevation', '>=', 500):
            self.assertGreaterEqual(rec.getNumber('elevation'), 500)

    def testGetDataWithLessThanEquals(self):
        for rec in self._runConstraintTest('elevation', '<=', 100):
            self.assertLessEqual(rec.getNumber('elevation'), 100)

    def testGetDataWithInTuple(self):
        allowed = ('NE', 'TX')
        for rec in self._runConstraintTest('state', 'in', allowed):
            self.assertIn(rec.getString('state'), allowed)

    def testGetDataWithInList(self):
        allowed = ['NE', 'TX']
        for rec in self._runConstraintTest('state', 'in', allowed):
            self.assertIn(rec.getString('state'), allowed)

    def testGetDataWithInGenerator(self):
        allowed = ('NE', 'TX')
        gen = (value for value in allowed)
        for rec in self._runConstraintTest('state', 'in', gen):
            self.assertIn(rec.getString('state'), allowed)

    def testGetDataWithNotInList(self):
        excluded = ('NE', 'TX')
        for rec in self._runConstraintTest('state', 'not in', excluded):
            self.assertNotIn(rec.getString('state'), excluded)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'junk', 'NE')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('state', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'in', [])
|
114
awips/test/dafTests/testDataTime.py
Normal file
114
awips/test/dafTests/testDataTime.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
#
|
||||
# Unit tests for Python implementation of RequestConstraint
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/02/16 2416 tgurney Initial creation
|
||||
#
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime
|
||||
|
||||
import unittest
|
||||
|
||||
|
||||
class DataTimeTestCase(unittest.TestCase):
    """Round-trip tests for DataTime's string constructor and str() output."""

    def _assertBothForms(self, s, expected):
        # Every accepted time string has a space-separated and an
        # underscore-separated spelling; both must format back to `expected`.
        self.assertEqual(expected, str(DataTime(s)))
        self.assertEqual(expected, str(DataTime(s.replace(' ', '_'))))

    def testFromStrRefTimeOnly(self):
        s = '2016-08-02 01:23:45'
        self._assertBothForms(s, s)

    def testFromStrRefTimeOnlyZeroMillis(self):
        # result of str() will always drop trailing .0 milliseconds
        self._assertBothForms('2016-08-02 01:23:45.0',
                              '2016-08-02 01:23:45')

    def testFromStrRefTimeOnlyWithMillis(self):
        self._assertBothForms('2016-08-02 01:23:45.1',
                              '2016-08-02 01:23:45.001000')

    def testFromStrWithFcstTimeHr(self):
        s = '2016-08-02 01:23:45 (17)'
        self._assertBothForms(s, s)

    def testFromStrWithFcstTimeHrZeroMillis(self):
        self._assertBothForms('2016-08-02 01:23:45.0 (17)',
                              '2016-08-02 01:23:45 (17)')

    def testFromStrWithFcstTimeHrAndMillis(self):
        self._assertBothForms('2016-08-02 01:23:45.1 (17)',
                              '2016-08-02 01:23:45.001000 (17)')

    def testFromStrWithFcstTimeHrMin(self):
        s = '2016-08-02 01:23:45 (17:34)'
        self._assertBothForms(s, s)

    def testFromStrWithFcstTimeHrMinZeroMillis(self):
        self._assertBothForms('2016-08-02 01:23:45.0 (17:34)',
                              '2016-08-02 01:23:45 (17:34)')

    def testFromStrWithPeriod(self):
        s = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]'
        self._assertBothForms(s, s)

    def testFromStrWithPeriodZeroMillis(self):
        self._assertBothForms(
            '2016-08-02 01:23:45.0[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]',
            '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]')

    def testFromStrWithEverything(self):
        self._assertBothForms(
            '2016-08-02 01:23:45.0_(17:34)[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]',
            '2016-08-02 01:23:45 (17:34)[2016-08-02 02:34:45--2016-08-02 03:45:56]')

    def testDataTimeReconstructItselfFromString(self):
        times = [
            '2016-08-02 01:23:45',
            '2016-08-02 01:23:45.0',
            '2016-08-02 01:23:45.1',
            '2016-08-02 01:23:45.123000',
            '2016-08-02 01:23:45 (17)',
            '2016-08-02 01:23:45.0 (17)',
            '2016-08-02 01:23:45.1 (17)',
            '2016-08-02 01:23:45 (17:34)',
            '2016-08-02 01:23:45.0 (17:34)',
            '2016-08-02 01:23:45.1 (17:34)',
            '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]',
            '2016-08-02 01:23:45.0[2016-08-02_02:34:45.123--2016-08-02_03:45:56.456]',
            '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]'
        ]
        # Parsing the str() of a parsed time must yield an equal DataTime.
        for t in times:
            self.assertEqual(DataTime(t), DataTime(str(DataTime(t))), t)
|
194
awips/test/dafTests/testGfe.py
Normal file
194
awips/test/dafTests/testGfe.py
Normal file
|
@ -0,0 +1,194 @@
|
|||
#
|
||||
# Test DAF support for GFE data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 05/23/16 5637 bsteffen Test vectors
|
||||
# 05/31/16 5587 tgurney Add getIdentifierValues tests
|
||||
# 06/01/16 5587 tgurney Update testGetIdentifierValues
|
||||
# 06/17/16 5574 mapeters Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 11/07/16 5991 bsteffen Improve vector tests
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/15/16 6040 tgurney Add testGetGridDataWithDbType
|
||||
# 12/20/16 5981 tgurney Add envelope test
|
||||
# 10/19/17 6491 tgurney Add test for dbtype identifier
|
||||
# 11/10/17 6491 tgurney Replace modelName with
|
||||
# parmId.dbId.modelName
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from shapely.geometry import box, Point
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
import unittest
|
||||
|
||||
|
||||
class GfeTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for GFE data."""

    datatype = 'gfe'

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
        self.runTimesTest(req)

    def testGetGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
        req.setParameters('T')
        # The Fcst database has no dbType, so the attribute comes back empty.
        for grid in self.runGridDataTest(req):
            self.assertEqual(grid.getAttribute('parmId.dbId.dbType'), '')

    def testGetGridDataWithEnvelope(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
        req.setParameters('T')
        req.setEnvelope(params.ENVELOPE)
        grids = self.runGridDataTest(req)
        if not grids:
            raise unittest.SkipTest('no data available')
        lons, lats = grids[0].getLatLonCoords()

        # Ensure all points are within one degree of the original box
        # to allow slight margin of error for reprojection distortion.
        bounds = params.ENVELOPE.bounds
        testEnv = box(bounds[0] - 1, bounds[1] - 1, bounds[2] + 1, bounds[3] + 1)
        for lon, lat in zip(lons.reshape(-1), lats.reshape(-1)):
            self.assertTrue(testEnv.contains(Point(lon, lat)))

    def testGetVectorGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
        req.setParameters('Wind')
        times = DAL.getAvailableTimes(req)
        if not times:
            raise unittest.SkipTest('No Wind Data available for testing')
        rawWind = None
        rawDir = None
        # A 'Wind' request returns both a magnitude and a direction grid.
        for grid in DAL.getGridData(req, [times[0]]):
            param = grid.getParameter()
            if param == 'Wind':
                self.assertEqual(grid.getUnit(), 'kts')
                rawWind = grid.getRawData()
            elif param == 'WindDirection':
                self.assertEqual(grid.getUnit(), 'deg')
                rawDir = grid.getRawData()
        self.assertIsNotNone(rawWind, 'Wind Magnitude grid is not present')
        self.assertIsNotNone(rawDir, 'Wind Direction grid is not present')
        # rawWind and rawDir are numpy.ndarrays so comparison will result in boolean ndarrays.
        self.assertTrue((rawWind >= 0).all(), 'Wind Speed should not contain negative values')
        self.assertTrue((rawDir >= 0).all(), 'Wind Direction should not contain negative values')
        self.assertTrue((rawDir <= 360).all(), 'Wind Direction should be less than or equal to 360')
        self.assertFalse((rawDir == rawWind).all(), 'Wind Direction should be different from Wind Speed')

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        allIds = set(DAL.getOptionalIdentifiers(req)) | set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(allIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Build a request constrained by key/operator/value and run it."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        req.setLocationNames(params.SITE_ID)
        req.setParameters('T')
        return self.runGridDataTest(req)

    def testGetDataWithModelNameEqualsString(self):
        for record in self._runConstraintTest('parmId.dbId.modelName', '=', 'Fcst'):
            self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst')

    def testGetDataWithDbTypeEqualsString(self):
        for record in self._runConstraintTest('parmId.dbId.dbType', '=', 'Prac'):
            self.assertEqual(record.getAttribute('parmId.dbId.dbType'), 'Prac')

    # No numeric tests since no numeric identifiers are available.

    def testGetDataWithEqualsNone(self):
        for record in self._runConstraintTest('parmId.dbId.modelName', '=', None):
            self.assertIsNone(record.getAttribute('parmId.dbId.modelName'))

    def testGetDataWithNotEquals(self):
        for record in self._runConstraintTest('parmId.dbId.modelName', '!=', 'Fcst'):
            self.assertNotEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst')

    def testGetDataWithNotEqualsNone(self):
        for record in self._runConstraintTest('parmId.dbId.modelName', '!=', None):
            self.assertIsNotNone(record.getAttribute('parmId.dbId.modelName'))

    def testGetDataWithInTuple(self):
        collection = ('Fcst', 'SAT')
        for record in self._runConstraintTest('parmId.dbId.modelName', 'in', collection):
            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)

    def testGetDataWithInList(self):
        collection = ['Fcst', 'SAT']
        for record in self._runConstraintTest('parmId.dbId.modelName', 'in', collection):
            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('Fcst', 'SAT')
        generator = (item for item in collection)
        for record in self._runConstraintTest('parmId.dbId.modelName', 'in', generator):
            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)

    def testGetDataWithNotInList(self):
        collection = ('Fcst', 'SAT')
        for record in self._runConstraintTest('parmId.dbId.modelName', 'not in', collection):
            self.assertNotIn(record.getAttribute('parmId.dbId.modelName'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('parmId.dbId.modelName', 'junk', 'Fcst')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('parmId.dbId.modelName', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('parmId.dbId.modelName', 'in', [])
|
261
awips/test/dafTests/testGrid.py
Normal file
261
awips/test/dafTests/testGrid.py
Normal file
|
@ -0,0 +1,261 @@
|
|||
#
|
||||
# Test DAF support for grid data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 tgurney Typo in id values test
|
||||
# 07/06/16 5728 mapeters Add advanced query tests
|
||||
# 08/03/16 5728 mapeters Add additional identifiers to testGetDataWith*
|
||||
# tests to shorten run time and prevent EOFError
|
||||
# 10/13/16 5942 bsteffen Test envelopes
|
||||
# 11/08/16 5985 tgurney Skip certain tests when no
|
||||
# data is available
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 01/06/17 5981 tgurney Skip envelope test when no
|
||||
# data is available
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
from shapely.geometry import box, Point
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
import unittest
|
||||
|
||||
|
||||
class GridTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for grid data."""

    datatype = 'grid'

    model = 'GFS160'

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runLocationsTest(req)

    def testGetAvailableLevels(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runLevelsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        req.setLevels('2FHAG')
        self.runTimesTest(req)

    def testGetGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        req.setLevels('2FHAG')
        req.setParameters('T')
        self.runGridDataTest(req)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', 'ENSEMBLE')
        req.setLevels('2FHAG')
        req.setParameters('T')
        idValues = DAL.getIdentifierValues(req, 'info.ensembleId')
        self.assertTrue(hasattr(idValues, '__iter__'))
        if not idValues:
            raise unittest.SkipTest("no data available")
        self.assertIn('ctl1', idValues)
        self.assertIn('p1', idValues)
        self.assertIn('n1', idValues)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def testGetDataWithEnvelope(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        req.setLevels('2FHAG')
        req.setParameters('T')
        req.setEnvelope(params.ENVELOPE)
        grids = self.runGridDataTest(req)
        if len(grids) == 0:
            raise unittest.SkipTest("No data available")
        lons, lats = grids[0].getLatLonCoords()

        # Ensure all points are within one degree of the original box
        # to allow slight margin of error for reprojection distortion.
        bounds = params.ENVELOPE.bounds
        testEnv = box(bounds[0] - 1, bounds[1] - 1, bounds[2] + 1, bounds[3] + 1)
        for lon, lat in zip(lons.reshape(-1), lats.reshape(-1)):
            self.assertTrue(testEnv.contains(Point(lon, lat)))

    def _runConstraintTest(self, key, operator, value):
        """Build a request constrained by key/operator/value and run it."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        # Fix the remaining identifiers to keep the result set small.
        req.addIdentifier('info.datasetId', self.model)
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.setParameters('T')
        return self.runGridDataTest(req)

    def testGetDataWithEqualsString(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '=', '2000.0'):
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithEqualsInt(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '=', 2000):
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)

    def testGetDataWithEqualsLong(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '=', 2000):
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)

    def testGetDataWithEqualsFloat(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '=', 2000.0):
            self.assertEqual(round(record.getAttribute('info.level.levelonevalue'), 1), 2000.0)

    def testGetDataWithEqualsNone(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '=', None):
            self.assertIsNone(record.getAttribute('info.level.levelonevalue'))

    def testGetDataWithNotEquals(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '!=', 2000.0):
            self.assertNotEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithNotEqualsNone(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '!=', None):
            self.assertIsNotNone(record.getAttribute('info.level.levelonevalue'))

    def testGetDataWithGreaterThan(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '>', 2000.0):
            self.assertGreater(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithLessThan(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '<', 2000.0):
            self.assertLess(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithGreaterThanEquals(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '>=', 2000.0):
            self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithLessThanEquals(self):
        for record in self._runConstraintTest('info.level.levelonevalue', '<=', 2000.0):
            self.assertLessEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithInList(self):
        collection = [2000.0, 1000.0]
        for record in self._runConstraintTest('info.level.levelonevalue', 'in', collection):
            self.assertIn(record.getAttribute('info.level.levelonevalue'), collection)

    def testGetDataWithNotInList(self):
        collection = [2000.0, 1000.0]
        for record in self._runConstraintTest('info.level.levelonevalue', 'not in', collection):
            self.assertNotIn(record.getAttribute('info.level.levelonevalue'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('info.level.levelonevalue', 'junk', '2000.0')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('info.level.levelonevalue', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('info.level.levelonevalue', 'in', [])

    def testGetDataWithLevelOneAndLevelTwoConstraints(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.level.levelonevalue', RequestConstraint.new('>=', 2000.0))
        req.addIdentifier('info.level.leveltwovalue', RequestConstraint.new('in', (4000.0, 5000.0)))
        req.addIdentifier('info.datasetId', self.model)
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.setParameters('T')
        for record in self.runGridDataTest(req):
            self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)
            self.assertIn(record.getAttribute('info.level.leveltwovalue'), (4000.0, 5000.0))

    def testGetDataWithMasterLevelNameInConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.level.masterLevel.name', RequestConstraint.new('in', ('FHAG', 'K')))
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        for record in self.runGridDataTest(req):
            self.assertIn(record.getAttribute('info.level.masterLevel.name'), ('FHAG', 'K'))

    def testGetDataWithDatasetIdInConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        # gfs160 is alias for GFS160 in this namespace
        req.addIdentifier('namespace', 'gfeParamInfo')
        req.addIdentifier('info.datasetId', RequestConstraint.new('in', ('gfs160', 'HRRR')))
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.setParameters('T')
        # Grids from different models may differ in shape.
        for record in self.runGridDataTest(req, testSameShape=False):
            self.assertIn(record.getAttribute('info.datasetId'), ('gfs160', 'HRRR'))

    def testGetDataWithMasterLevelNameLessThanEqualsConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.level.masterLevel.name', RequestConstraint.new('<=', 'K'))
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        for record in self.runGridDataTest(req):
            self.assertLessEqual(record.getAttribute('info.level.masterLevel.name'), 'K')

    def testGetDataWithComplexConstraintAndNamespaceThrowsException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('namespace', 'grib')
        req.addIdentifier('info.level.masterLevel.name', RequestConstraint.new('<=', 'K'))
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        with self.assertRaises(ThriftRequestException) as cm:
            self.runGridDataTest(req)
        self.assertIn('IncompatibleRequestException', str(cm.exception))
        self.assertIn('info.level.masterLevel.name', str(cm.exception))
|
193
awips/test/dafTests/testMaps.py
Normal file
193
awips/test/dafTests/testMaps.py
Normal file
|
@ -0,0 +1,193 @@
|
|||
#
|
||||
# Test DAF support for maps data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 04/26/16 5587 tgurney Add identifier values tests
|
||||
# 06/13/16 5574 mapeters Add advanced query tests
|
||||
# 06/21/16 5548 tgurney Skip tests that cause errors
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 01/06/17 5981 tgurney Do not check data times
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class MapsTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for maps data."""

    datatype = 'maps'

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.county')
        req.addIdentifier('geomField', 'the_geom')
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.county')
        req.addIdentifier('geomField', 'the_geom')
        req.addIdentifier('locationField', 'cwa')
        self.runLocationsTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.county')
        req.addIdentifier('geomField', 'the_geom')
        req.addIdentifier('inLocation', 'true')
        req.addIdentifier('locationField', 'cwa')
        req.setLocationNames('OAX')
        req.addIdentifier('cwa', 'OAX')
        req.setParameters('countyname', 'state', 'fips')
        # Maps data is time-agnostic, so skip the data-time checks.
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testRequestingTimesThrowsTimeAgnosticDataException(self):
        req = DAL.newDataRequest(self.datatype)
        self.runTimeAgnosticTest(req)

    def testGetTableIdentifierValues(self):
        self.runGetIdValuesTest(['table'])

    def testGetGeomFieldIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.county')
        for idValue in DAL.getIdentifierValues(req, 'geomField'):
            self.assertTrue(idValue.startswith('the_geom'))

    def testGetGeomFieldIdValuesWithoutTableThrowsException(self):
        with self.assertRaises(ThriftRequestException):
            self.runGetIdValuesTest(['geomField'])

    def testGetColumnIdValuesWithTable(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.county')
        req.addIdentifier('geomField', 'the_geom')
        idValues = DAL.getIdentifierValues(req, 'state')
        self.assertIn('NE', idValues)

    def testGetColumnIdValuesWithoutTableThrowsException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('geomField', 'the_geom')
        with self.assertRaises(ThriftRequestException):
            DAL.getIdentifierValues(req, 'state')

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Build a request constrained by key/operator/value and run it."""
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'mapdata.ffmp_basins')
        req.addIdentifier('geomField', 'the_geom')
        req.addIdentifier('cwa', 'OAX')
        req.addIdentifier(key, RequestConstraint.new(operator, value))
        req.setParameters('state', 'reservoir', 'area_sq_mi')
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetDataWithEqualsString(self):
        for record in self._runConstraintTest('state', '=', 'NE'):
            self.assertEqual(record.getString('state'), 'NE')

    def testGetDataWithEqualsInt(self):
        for record in self._runConstraintTest('reservoir', '=', 1):
            self.assertEqual(record.getNumber('reservoir'), 1)

    def testGetDataWithEqualsLong(self):
        for record in self._runConstraintTest('reservoir', '=', 1):
            self.assertEqual(record.getNumber('reservoir'), 1)

    def testGetDataWithEqualsFloat(self):
        for record in self._runConstraintTest('area_sq_mi', '=', 5.00):
            self.assertEqual(round(record.getNumber('area_sq_mi'), 2), 5.00)

    def testGetDataWithEqualsNone(self):
        for record in self._runConstraintTest('state', '=', None):
            self.assertEqual(record.getType('state'), 'NULL')

    def testGetDataWithNotEquals(self):
        for record in self._runConstraintTest('state', '!=', 'NE'):
            self.assertNotEqual(record.getString('state'), 'NE')

    def testGetDataWithNotEqualsNone(self):
        for record in self._runConstraintTest('state', '!=', None):
            self.assertNotEqual(record.getType('state'), 'NULL')

    def testGetDataWithGreaterThan(self):
        for record in self._runConstraintTest('area_sq_mi', '>', 5):
            self.assertGreater(record.getNumber('area_sq_mi'), 5)

    def testGetDataWithLessThan(self):
        for record in self._runConstraintTest('area_sq_mi', '<', 5):
            self.assertLess(record.getNumber('area_sq_mi'), 5)

    def testGetDataWithGreaterThanEquals(self):
        for record in self._runConstraintTest('area_sq_mi', '>=', 5):
            self.assertGreaterEqual(record.getNumber('area_sq_mi'), 5)

    def testGetDataWithLessThanEquals(self):
        for record in self._runConstraintTest('area_sq_mi', '<=', 5):
            self.assertLessEqual(record.getNumber('area_sq_mi'), 5)

    def testGetDataWithInTuple(self):
        collection = ('NE', 'TX')
        for record in self._runConstraintTest('state', 'in', collection):
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithInList(self):
        collection = ['NE', 'TX']
        for record in self._runConstraintTest('state', 'in', collection):
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('NE', 'TX')
        generator = (item for item in collection)
        for record in self._runConstraintTest('state', 'in', generator):
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithNotInList(self):
        collection = ['IA', 'TX']
        for record in self._runConstraintTest('state', 'not in', collection):
            self.assertNotIn(record.getString('state'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'junk', 'NE')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('state', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'in', [])
|
193
awips/test/dafTests/testModelSounding.py
Normal file
193
awips/test/dafTests/testModelSounding.py
Normal file
|
@ -0,0 +1,193 @@
|
|||
#
|
||||
# Test DAF support for modelsounding data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 11/10/16 5985 tgurney Mark expected failures prior
|
||||
# to 17.3.1
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/19/16 5981 tgurney Remove pre-17.3 expected fails
|
||||
# 12/20/16 5981 tgurney Add envelope test
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
|
||||
"""Test DAF support for modelsounding data"""
|
||||
|
||||
datatype = "modelsounding"
|
||||
|
||||
reporttype = "ETA"
|
||||
|
||||
def testGetAvailableParameters(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
self.runParametersTest(req)
|
||||
|
||||
def testGetAvailableLocations(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
req.addIdentifier("reportType", self.reporttype)
|
||||
self.runLocationsTest(req)
|
||||
|
||||
def testGetAvailableTimes(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
req.addIdentifier("reportType", self.reporttype)
|
||||
req.setLocationNames(params.OBS_STATION)
|
||||
self.runTimesTest(req)
|
||||
|
||||
def testGetGeometryData(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
req.addIdentifier("reportType", self.reporttype)
|
||||
req.setLocationNames(params.OBS_STATION)
|
||||
req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2")
|
||||
print("Testing getGeometryData()")
|
||||
geomData = DAL.getGeometryData(req)
|
||||
print("Number of geometry records: " + str(len(geomData)))
|
||||
print("Sample geometry data:")
|
||||
for record in geomData[:self.sampleDataLimit]:
|
||||
print("level=" + record.getLevel(), end="")
|
||||
# One dimensional parameters are reported on the 0.0UNKNOWN level.
|
||||
# 2D parameters are reported on MB levels from pressure.
|
||||
if record.getLevel() == "0.0UNKNOWN":
|
||||
print(" sfcPress=" + record.getString("sfcPress") +
|
||||
record.getUnit("sfcPress"), end="")
|
||||
print(" temp2=" + record.getString("temp2") +
|
||||
record.getUnit("temp2"), end="")
|
||||
print(" q2=" + record.getString("q2") +
|
||||
record.getUnit("q2"), end="")
|
||||
else:
|
||||
print(" pressure=" + record.getString("pressure") +
|
||||
record.getUnit("pressure"), end="")
|
||||
print(" temperature=" + record.getString("temperature") +
|
||||
record.getUnit("temperature"), end="")
|
||||
print(" specHum=" + record.getString("specHum") +
|
||||
record.getUnit("specHum"), end="")
|
||||
print(" geometry=" + str(record.getGeometry()))
|
||||
print("getGeometryData() complete\n\n")
|
||||
|
||||
def testGetGeometryDataWithEnvelope(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
req.addIdentifier("reportType", self.reporttype)
|
||||
req.setEnvelope(params.ENVELOPE)
|
||||
req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2")
|
||||
print("Testing getGeometryData()")
|
||||
data = DAL.getGeometryData(req)
|
||||
for item in data:
|
||||
self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
|
||||
|
||||
def testGetIdentifierValues(self):
|
||||
req = DAL.newDataRequest(self.datatype)
|
||||
optionalIds = set(DAL.getOptionalIdentifiers(req))
|
||||
self.runGetIdValuesTest(optionalIds)
|
||||
|
||||
def testGetInvalidIdentifierValuesThrowsException(self):
    """An invalid identifier name must raise an exception."""
    self.runInvalidIdValuesTest()
|
||||
|
||||
def testGetNonexistentIdentifierValuesThrowsException(self):
    """A nonexistent identifier name must raise an exception."""
    self.runNonexistentIdValuesTest()
|
||||
|
||||
def _runConstraintTest(self, key, operator, value):
    """Run the standard geometry-data test with a single identifier
    constraint (key, operator, value) applied; return the records."""
    request = DAL.newDataRequest(self.datatype)
    request.setParameters('dataURI')
    request.setLocationNames(params.OBS_STATION, 'KORD', 'KOFK', 'KLNK')
    request.addIdentifier(key, RequestConstraint.new(operator, value))
    return self.runGeometryDataTest(request)
|
||||
|
||||
# We can filter on reportType but it is not possible to retrieve the value
|
||||
# of reportType directly. We can look inside the dataURI instead.
|
||||
#
|
||||
# For cases like '<=' and '>' the best we can do is send the request and
|
||||
# see if it throws back an exception.
|
||||
#
|
||||
# Can also eyeball the number of returned records.
|
||||
|
||||
def testGetDataWithEqualsString(self):
    """'=' constraint: every returned dataURI must contain the type."""
    for record in self._runConstraintTest('reportType', '=', 'ETA'):
        self.assertIn('/ETA/', record.getString('dataURI'))
|
||||
|
||||
# No numeric tests since no numeric identifiers are available.
|
||||
|
||||
def testGetDataWithEqualsNone(self):
    """'=' with None (IS NULL) must be accepted without error."""
    self._runConstraintTest('reportType', '=', None)
|
||||
|
||||
def testGetDataWithNotEquals(self):
    """'!=' constraint: no returned dataURI may contain the excluded type."""
    for record in self._runConstraintTest('reportType', '!=', 'ETA'):
        self.assertNotIn('/ETA/', record.getString('dataURI'))
|
||||
|
||||
def testGetDataWithNotEqualsNone(self):
    """'!=' with None (IS NOT NULL) must be accepted without error."""
    self._runConstraintTest('reportType', '!=', None)
|
||||
|
||||
def testGetDataWithGreaterThan(self):
    """'>' constraint: only checks that the request does not throw."""
    self._runConstraintTest('reportType', '>', 'ETA')
|
||||
|
||||
def testGetDataWithLessThan(self):
    """'<' constraint: only checks that the request does not throw."""
    self._runConstraintTest('reportType', '<', 'ETA')
|
||||
|
||||
def testGetDataWithGreaterThanEquals(self):
    """'>=' constraint: only checks that the request does not throw."""
    self._runConstraintTest('reportType', '>=', 'ETA')
|
||||
|
||||
def testGetDataWithLessThanEquals(self):
    """'<=' constraint: only checks that the request does not throw."""
    self._runConstraintTest('reportType', '<=', 'ETA')
|
||||
|
||||
def testGetDataWithInTuple(self):
    """'in' with a tuple: each dataURI must match one of the members."""
    members = ('ETA', 'GFS')
    for record in self._runConstraintTest('reportType', 'in', members):
        uri = record.getString('dataURI')
        self.assertTrue(any('/%s/' % m in uri for m in members))
|
||||
|
||||
def testGetDataWithInList(self):
    """'in' with a list: each dataURI must match one of the members."""
    members = ['ETA', 'GFS']
    for record in self._runConstraintTest('reportType', 'in', members):
        uri = record.getString('dataURI')
        self.assertTrue(any('/%s/' % m in uri for m in members))
|
||||
|
||||
def testGetDataWithInGenerator(self):
    """'in' with a generator: each dataURI must match one of the members."""
    members = ('ETA', 'GFS')
    records = self._runConstraintTest('reportType', 'in',
                                      (m for m in members))
    for record in records:
        uri = record.getString('dataURI')
        self.assertTrue(any('/%s/' % m in uri for m in members))
|
||||
|
||||
def testGetDataWithNotInList(self):
    """'not in': no dataURI may match any of the excluded members."""
    members = ['ETA', 'GFS']
    for record in self._runConstraintTest('reportType', 'not in', members):
        uri = record.getString('dataURI')
        self.assertFalse(any('/%s/' % m in uri for m in members))
|
||||
|
||||
def testGetDataWithInvalidConstraintTypeThrowsException(self):
    """An unrecognized constraint operator must raise ValueError."""
    with self.assertRaises(ValueError):
        self._runConstraintTest('reportType', 'junk', 'ETA')
|
||||
|
||||
def testGetDataWithInvalidConstraintValueThrowsException(self):
    """An unsupported constraint value type must raise TypeError."""
    with self.assertRaises(TypeError):
        self._runConstraintTest('reportType', '=', {})
|
||||
|
||||
def testGetDataWithEmptyInConstraintThrowsException(self):
    """An empty 'in' collection must raise ValueError."""
    with self.assertRaises(ValueError):
        self._runConstraintTest('reportType', 'in', [])
|
||||
|
||||
def testGetDataWithNestedInConstraintThrowsException(self):
    """A nested collection inside an 'in' constraint must raise TypeError."""
    with self.assertRaises(TypeError):
        self._runConstraintTest('reportType', 'in', ('ETA', 'GFS', ()))
|
160
awips/test/dafTests/testObs.py
Normal file
160
awips/test/dafTests/testObs.py
Normal file
|
@ -0,0 +1,160 @@
|
|||
#
|
||||
# Test DAF support for obs data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/20/16 5981 tgurney Add envelope test
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class ObsTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for obs data."""

    # DAF datatype exercised by every request in this case.
    datatype = "obs"

    def testGetAvailableParameters(self):
        """Parameter listing succeeds for the obs datatype."""
        self.runParametersTest(DAL.newDataRequest(self.datatype))

    def testGetAvailableLocations(self):
        """Location listing succeeds for the obs datatype."""
        self.runLocationsTest(DAL.newDataRequest(self.datatype))

    def testGetAvailableTimes(self):
        """Time listing succeeds for the configured test station."""
        request = DAL.newDataRequest(self.datatype)
        request.setLocationNames(params.OBS_STATION)
        self.runTimesTest(request)

    def testGetGeometryData(self):
        """Geometry retrieval succeeds for a few surface parameters."""
        request = DAL.newDataRequest(self.datatype)
        request.setParameters("temperature", "seaLevelPress", "dewpoint")
        request.setLocationNames(params.OBS_STATION)
        self.runGeometryDataTest(request)

    def testGetGeometryDataWithEnvelope(self):
        """Every geometry returned for an envelope request lies inside it."""
        request = DAL.newDataRequest(self.datatype)
        request.setEnvelope(params.ENVELOPE)
        request.setParameters("temperature", "seaLevelPress", "dewpoint")
        for record in self.runGeometryDataTest(request):
            self.assertTrue(params.ENVELOPE.contains(record.getGeometry()))

    def testGetIdentifierValues(self):
        """Values can be listed for every optional identifier."""
        request = DAL.newDataRequest(self.datatype)
        self.runGetIdValuesTest(set(DAL.getOptionalIdentifiers(request)))

    def testGetInvalidIdentifierValuesThrowsException(self):
        """An invalid identifier name must raise an exception."""
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        """A nonexistent identifier name must raise an exception."""
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Run the standard geometry test with one identifier constraint."""
        request = DAL.newDataRequest(self.datatype)
        request.setParameters("temperature", "reportType")
        request.setLocationNames(params.OBS_STATION)
        request.addIdentifier(key, RequestConstraint.new(operator, value))
        return self.runGeometryDataTest(request)

    def testGetDataWithEqualsString(self):
        """'=' constraint: every record carries the requested reportType."""
        for record in self._runConstraintTest('reportType', '=', 'METAR'):
            self.assertEqual(record.getString('reportType'), 'METAR')

    # No numeric tests since no numeric identifiers are available.

    def testGetDataWithEqualsNone(self):
        """'=' None (IS NULL): every record's reportType is typed NULL."""
        for record in self._runConstraintTest('reportType', '=', None):
            self.assertEqual(record.getType('reportType'), 'NULL')

    def testGetDataWithNotEquals(self):
        """'!=' constraint: no record carries the excluded reportType."""
        for record in self._runConstraintTest('reportType', '!=', 'METAR'):
            self.assertNotEqual(record.getString('reportType'), 'METAR')

    def testGetDataWithNotEqualsNone(self):
        """'!=' None (IS NOT NULL): no record's reportType is typed NULL."""
        for record in self._runConstraintTest('reportType', '!=', None):
            self.assertNotEqual(record.getType('reportType'), 'NULL')

    def testGetDataWithGreaterThan(self):
        """'>' constraint holds lexicographically on every record."""
        for record in self._runConstraintTest('reportType', '>', 'METAR'):
            self.assertGreater(record.getString('reportType'), 'METAR')

    def testGetDataWithLessThan(self):
        """'<' constraint holds lexicographically on every record."""
        for record in self._runConstraintTest('reportType', '<', 'METAR'):
            self.assertLess(record.getString('reportType'), 'METAR')

    def testGetDataWithGreaterThanEquals(self):
        """'>=' constraint holds lexicographically on every record."""
        for record in self._runConstraintTest('reportType', '>=', 'METAR'):
            self.assertGreaterEqual(record.getString('reportType'), 'METAR')

    def testGetDataWithLessThanEquals(self):
        """'<=' constraint holds lexicographically on every record."""
        for record in self._runConstraintTest('reportType', '<=', 'METAR'):
            self.assertLessEqual(record.getString('reportType'), 'METAR')

    def testGetDataWithInTuple(self):
        """'in' with a tuple: every record matches one of the members."""
        members = ('METAR', 'SPECI')
        for record in self._runConstraintTest('reportType', 'in', members):
            self.assertIn(record.getString('reportType'), members)

    def testGetDataWithInList(self):
        """'in' with a list: every record matches one of the members."""
        members = ['METAR', 'SPECI']
        for record in self._runConstraintTest('reportType', 'in', members):
            self.assertIn(record.getString('reportType'), members)

    def testGetDataWithInGenerator(self):
        """'in' with a generator: every record matches one of the members."""
        members = ('METAR', 'SPECI')
        records = self._runConstraintTest('reportType', 'in',
                                          (m for m in members))
        for record in records:
            self.assertIn(record.getString('reportType'), members)

    def testGetDataWithNotInList(self):
        """'not in': no record matches any of the excluded members."""
        members = ['METAR', 'SPECI']
        records = self._runConstraintTest('reportType', 'not in', members)
        for record in records:
            self.assertNotIn(record.getString('reportType'), members)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        """An unrecognized constraint operator must raise ValueError."""
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', 'METAR')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        """An unsupported constraint value type must raise TypeError."""
        with self.assertRaises(TypeError):
            self._runConstraintTest('reportType', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        """An empty 'in' collection must raise ValueError."""
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'in', [])

    def testGetDataWithNestedInConstraintThrowsException(self):
        """A nested collection inside 'in' must raise TypeError."""
        with self.assertRaises(TypeError):
            self._runConstraintTest('reportType', 'in',
                                    ('METAR', 'SPECI', ()))
|
59
awips/test/dafTests/testProfiler.py
Normal file
59
awips/test/dafTests/testProfiler.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
#
|
||||
# Test DAF support for profiler data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class ProfilerTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for profiler data"""

    # DAF datatype exercised by every request in this case.
    datatype = "profiler"

    def testGetAvailableParameters(self):
        """Parameter listing succeeds for the profiler datatype."""
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        """Location listing succeeds for the profiler datatype."""
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        """Time listing succeeds for the profiler datatype."""
        req = DAL.newDataRequest(self.datatype)
        self.runTimesTest(req)

    def testGetGeometryData(self):
        """Retrieve profiler geometry data and print a small sample.

        Only asserts that a non-None result comes back; the per-record
        output is for eyeballing when the test is run interactively.
        """
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("temperature", "pressure", "uComponent", "vComponent")

        print("Testing getGeometryData()")

        geomData = DAL.getGeometryData(req)
        self.assertIsNotNone(geomData)
        print("Number of geometry records: " + str(len(geomData)))
        print("Sample geometry data:")
        # Print at most sampleDataLimit records (limit defined on the base class).
        for record in geomData[:self.sampleDataLimit]:
            print("level:", record.getLevel(), end="")
            # One dimensional parameters are reported on the 0.0UNKNOWN level.
            # 2D parameters are reported on MB levels from pressure.
            if record.getLevel() == "0.0UNKNOWN":
                print(" temperature=" + record.getString("temperature") + record.getUnit("temperature"), end="")
                print(" pressure=" + record.getString("pressure") + record.getUnit("pressure"), end="")
            else:
                print(" uComponent=" + record.getString("uComponent") + record.getUnit("uComponent"), end="")
                print(" vComponent=" + record.getString("vComponent") + record.getUnit("vComponent"), end="")
            print(" geometry:", record.getGeometry())

        print("getGeometryData() complete\n\n")
|
74
awips/test/dafTests/testRadarGraphics.py
Normal file
74
awips/test/dafTests/testRadarGraphics.py
Normal file
|
@ -0,0 +1,74 @@
|
|||
#
|
||||
# Test DAF support for radar graphics data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/25/16 2671 tgurney Initial creation.
|
||||
# 08/31/16 2671 tgurney Add mesocyclone
|
||||
# 09/08/16 2671 tgurney Add storm track
|
||||
# 09/27/16 2671 tgurney Add hail index
|
||||
# 09/30/16 2671 tgurney Add TVS
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 12/19/16 5981 tgurney Do not check data times on
|
||||
# returned data
|
||||
#
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from awips.test.dafTests import baseRadarTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class RadarGraphicsTestCase(baseRadarTestCase.BaseRadarTestCase):
    """Test DAF support for radar data"""

    # DAF datatype exercised by every request in this case.
    datatype = 'radar'

    def runConstraintTest(self, key, operator, value):
        """Run the geometry-data test with one identifier constraint.

        Overrides the base class hook; returns the retrieved records.
        """
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters('166')
        # TODO: Cannot check datatimes on the result because the times returned
        # by getAvailableTimes have level = -1.0, while the time on the actual
        # data has the correct level set (>= 0.0).
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def _runProductTest(self, productCode):
        """Build and run a geometry-data request for one radar product code.

        Shared by the per-product tests below so the identical request
        construction is not repeated five times.
        """
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters(productCode)
        # Data times cannot be checked; see runConstraintTest above.
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataMeltingLayer(self):
        self._runProductTest('166')

    def testGetGeometryDataMesocyclone(self):
        self._runProductTest('141')

    def testGetGeometryDataStormTrack(self):
        self._runProductTest('58')

    def testGetGeometryDataHailIndex(self):
        self._runProductTest('59')

    def testGetGeometryDataTVS(self):
        self._runProductTest('61')
|
40
awips/test/dafTests/testRadarGrid.py
Normal file
40
awips/test/dafTests/testRadarGrid.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
#
|
||||
# Test DAF support for radar grid data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/25/16 2671 tgurney Initial creation
|
||||
#
|
||||
#
|
||||
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseRadarTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class RadarTestCase(baseRadarTestCase.BaseRadarTestCase):
    """Test DAF support for radar data"""

    # DAF datatype exercised by every request in this case.
    datatype = 'radar'

    # Radar product code(s) requested by every test.
    parameterList = ['94']

    def runConstraintTest(self, key, operator, value):
        """Run the grid-data test with one identifier constraint applied.

        Overrides the base class hook; returns the retrieved records.
        """
        request = DAL.newDataRequest(self.datatype)
        request.setParameters(*self.parameterList)
        request.addIdentifier(key, RequestConstraint.new(operator, value))
        # Don't test shapes since they may differ.
        return self.runGridDataTest(request, testSameShape=False)

    def testGetGridData(self):
        """Gridded radar retrieval succeeds for the configured site."""
        request = DAL.newDataRequest(self.datatype)
        request.setLocationNames(self.radarLoc)
        request.setEnvelope(params.ENVELOPE)
        request.setParameters(*self.parameterList)
        # Don't test shapes since they may differ.
        self.runGridDataTest(request, testSameShape=False)
|
153
awips/test/dafTests/testRadarSpatial.py
Normal file
153
awips/test/dafTests/testRadarSpatial.py
Normal file
|
@ -0,0 +1,153 @@
|
|||
#
|
||||
# Test DAF support for radar_spatial data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 05/26/16 5587 njensen Added testGetIdentifierValues()
|
||||
# 06/01/16 5587 tgurney Move testIdentifiers() to
|
||||
# superclass
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 12/07/16 5981 tgurney Parameterize
|
||||
# 01/06/17 5981 tgurney Do not check data times
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
from awips.test.dafTests import params
|
||||
|
||||
|
||||
class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for radar_spatial data"""

    # DAF datatype exercised by every request in this case.
    datatype = "radar_spatial"

    def testGetAvailableLocations(self):
        """Location listing succeeds inside the test envelope."""
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        self.runLocationsTest(req)

    def testGetAvailableParameters(self):
        """Parameter listing succeeds for the radar_spatial datatype."""
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetIdentifierValues(self):
        """Values can be listed for the wfo_id identifier."""
        self.runGetIdValuesTest(['wfo_id'])

    def testGetGeometryData(self):
        """Geometry retrieval succeeds for two named radar sites."""
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames("TORD", "TMDW")
        req.setParameters("wfo_id", "name", "elevmeter")
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testRequestingTimesThrowsTimeAgnosticDataException(self):
        """radar_spatial is time-agnostic; asking for times must raise."""
        req = DAL.newDataRequest(self.datatype)
        self.runTimeAgnosticTest(req)

    def _runConstraintTest(self, key, operator, value):
        """Run the standard geometry test with one identifier constraint."""
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters('elevmeter', 'eqp_elv', 'wfo_id', 'immutablex')
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetDataWithEqualsString(self):
        """'=' constraint: every record carries the requested wfo_id."""
        geometryData = self._runConstraintTest('wfo_id', '=', params.SITE_ID)
        for record in geometryData:
            self.assertEqual(record.getString('wfo_id'), params.SITE_ID)

    def testGetDataWithEqualsInt(self):
        """'=' with an int value matches numerically."""
        geometryData = self._runConstraintTest('immutablex', '=', 57)
        for record in geometryData:
            self.assertEqual(record.getNumber('immutablex'), 57)

    def testGetDataWithEqualsLong(self):
        """Kept for parity with the original Py2 long test; same as int."""
        geometryData = self._runConstraintTest('immutablex', '=', 57)
        for record in geometryData:
            self.assertEqual(record.getNumber('immutablex'), 57)

    def testGetDataWithEqualsFloat(self):
        """'=' with a float value matches to one decimal place."""
        geometryData = self._runConstraintTest('immutablex', '=', 57.0)
        for record in geometryData:
            self.assertEqual(round(record.getNumber('immutablex'), 1), 57.0)

    def testGetDataWithEqualsNone(self):
        """'=' None (IS NULL): every record's wfo_id is typed NULL."""
        geometryData = self._runConstraintTest('wfo_id', '=', None)
        for record in geometryData:
            self.assertEqual(record.getType('wfo_id'), 'NULL')

    def testGetDataWithNotEquals(self):
        """'!=' constraint: no record carries the excluded wfo_id."""
        geometryData = self._runConstraintTest('wfo_id', '!=', params.SITE_ID)
        for record in geometryData:
            # assertNotEqual, not the deprecated assertNotEquals alias
            # (removed in Python 3.12).
            self.assertNotEqual(record.getString('wfo_id'), params.SITE_ID)

    def testGetDataWithNotEqualsNone(self):
        """'!=' None (IS NOT NULL): no record's wfo_id is typed NULL."""
        geometryData = self._runConstraintTest('wfo_id', '!=', None)
        for record in geometryData:
            self.assertNotEqual(record.getType('wfo_id'), 'NULL')

    def testGetDataWithGreaterThan(self):
        """'>' constraint holds numerically on every record."""
        geometryData = self._runConstraintTest('elevmeter', '>', 1000)
        for record in geometryData:
            self.assertGreater(record.getNumber('elevmeter'), 1000)

    def testGetDataWithLessThan(self):
        """'<' constraint holds numerically on every record."""
        geometryData = self._runConstraintTest('elevmeter', '<', 1000)
        for record in geometryData:
            self.assertLess(record.getNumber('elevmeter'), 1000)

    def testGetDataWithGreaterThanEquals(self):
        """'>=' constraint holds numerically on every record."""
        geometryData = self._runConstraintTest('eqp_elv', '>=', 1295)
        for record in geometryData:
            self.assertGreaterEqual(record.getNumber('eqp_elv'), 1295)

    def testGetDataWithLessThanEquals(self):
        """'<=' constraint holds numerically on every record."""
        geometryData = self._runConstraintTest('eqp_elv', '<=', 138)
        for record in geometryData:
            self.assertLessEqual(record.getNumber('eqp_elv'), 138)

    def testGetDataWithInTuple(self):
        """'in' with a tuple: every record matches one of the members."""
        collection = (params.SITE_ID, 'GID')
        geometryData = self._runConstraintTest('wfo_id', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('wfo_id'), collection)

    def testGetDataWithInList(self):
        """'in' with a list: every record matches one of the members."""
        collection = [params.SITE_ID, 'GID']
        geometryData = self._runConstraintTest('wfo_id', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('wfo_id'), collection)

    def testGetDataWithInGenerator(self):
        """'in' with a generator: every record matches one of the members."""
        collection = (params.SITE_ID, 'GID')
        generator = (item for item in collection)
        geometryData = self._runConstraintTest('wfo_id', 'in', generator)
        for record in geometryData:
            self.assertIn(record.getString('wfo_id'), collection)

    def testGetDataWithNotInList(self):
        """'not in': no record matches any of the excluded members."""
        collection = [params.SITE_ID, 'GID']
        geometryData = self._runConstraintTest('wfo_id', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('wfo_id'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        """An unrecognized constraint operator must raise ValueError."""
        with self.assertRaises(ValueError):
            self._runConstraintTest('wfo_id', 'junk', params.SITE_ID)

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        """An unsupported constraint value type must raise TypeError."""
        with self.assertRaises(TypeError):
            self._runConstraintTest('wfo_id', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        """An empty 'in' collection must raise ValueError."""
        with self.assertRaises(ValueError):
            self._runConstraintTest('wfo_id', 'in', [])
|
158
awips/test/dafTests/testRequestConstraint.py
Normal file
158
awips/test/dafTests/testRequestConstraint.py
Normal file
|
@ -0,0 +1,158 @@
|
|||
#
|
||||
# Unit tests for Python implementation of RequestConstraint
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 07/22/16 2416 tgurney Initial creation
|
||||
#
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
import unittest
|
||||
|
||||
|
||||
class RequestConstraintTestCase(unittest.TestCase):
    """Unit tests for the Python RequestConstraint.evaluate() logic,
    one test method per constraint operator."""

    def _newRequestConstraint(self, constraintType, constraintValue):
        """Build a RequestConstraint directly, bypassing RequestConstraint.new()
        (needed for types like BETWEEN that new() does not support)."""
        constraint = RequestConstraint()
        constraint.constraintType = constraintType
        constraint.constraintValue = constraintValue
        return constraint

    def testEvaluateEquals(self):
        """'=' compares with numeric string coercion and tolerant float equality."""
        new = RequestConstraint.new
        self.assertTrue(new('=', 3).evaluate(3))
        self.assertTrue(new('=', 3).evaluate('3'))
        self.assertTrue(new('=', '3').evaluate(3))
        self.assertTrue(new('=', 12345).evaluate(12345))
        self.assertTrue(new('=', 'a').evaluate('a'))
        self.assertTrue(new('=', 'a').evaluate(u'a'))
        self.assertTrue(new('=', 1.0001).evaluate(2.0 - 0.999999))
        self.assertTrue(new('=', 1.00001).evaluate(1))
        self.assertFalse(new('=', 'a').evaluate(['a']))
        self.assertFalse(new('=', 'a').evaluate(['b']))
        self.assertFalse(new('=', 3).evaluate(4))
        self.assertFalse(new('=', 4).evaluate(3))
        self.assertFalse(new('=', 'a').evaluate('z'))

    def testEvaluateNotEquals(self):
        """'!=' is the exact negation of the '=' cases above."""
        new = RequestConstraint.new
        self.assertTrue(new('!=', 'a').evaluate(['a']))
        self.assertTrue(new('!=', 'a').evaluate(['b']))
        self.assertTrue(new('!=', 3).evaluate(4))
        self.assertTrue(new('!=', 4).evaluate(3))
        self.assertTrue(new('!=', 'a').evaluate('z'))
        self.assertFalse(new('!=', 3).evaluate('3'))
        self.assertFalse(new('!=', '3').evaluate(3))
        self.assertFalse(new('!=', 3).evaluate(3))
        self.assertFalse(new('!=', 12345).evaluate(12345))
        self.assertFalse(new('!=', 'a').evaluate('a'))
        self.assertFalse(new('!=', 'a').evaluate(u'a'))
        self.assertFalse(new('!=', 1.0001).evaluate(2.0 - 0.9999))

    def testEvaluateGreaterThan(self):
        """'>' is strict: equal values evaluate False."""
        new = RequestConstraint.new
        self.assertTrue(new('>', 1.0001).evaluate(1.0002))
        self.assertTrue(new('>', 'a').evaluate('b'))
        self.assertTrue(new('>', 3).evaluate(4))
        self.assertFalse(new('>', 20).evaluate(3))
        self.assertFalse(new('>', 12345).evaluate(12345))
        self.assertFalse(new('>', 'a').evaluate('a'))
        self.assertFalse(new('>', 'z').evaluate('a'))
        self.assertFalse(new('>', 4).evaluate(3))

    def testEvaluateGreaterThanEquals(self):
        """'>=' accepts equal values."""
        new = RequestConstraint.new
        self.assertTrue(new('>=', 3).evaluate(3))
        self.assertTrue(new('>=', 12345).evaluate(12345))
        self.assertTrue(new('>=', 'a').evaluate('a'))
        self.assertTrue(new('>=', 1.0001).evaluate(1.0002))
        self.assertTrue(new('>=', 'a').evaluate('b'))
        self.assertTrue(new('>=', 3).evaluate(20))
        self.assertFalse(new('>=', 1.0001).evaluate(1.0))
        self.assertFalse(new('>=', 'z').evaluate('a'))
        self.assertFalse(new('>=', 40).evaluate(3))

    def testEvaluateLessThan(self):
        """'<' is strict: equal values evaluate False."""
        new = RequestConstraint.new
        self.assertTrue(new('<', 'z').evaluate('a'))
        self.assertTrue(new('<', 30).evaluate(4))
        self.assertFalse(new('<', 3).evaluate(3))
        self.assertFalse(new('<', 12345).evaluate(12345))
        self.assertFalse(new('<', 'a').evaluate('a'))
        self.assertFalse(new('<', 1.0001).evaluate(1.0002))
        self.assertFalse(new('<', 'a').evaluate('b'))
        self.assertFalse(new('<', 3).evaluate(40))

    def testEvaluateLessThanEquals(self):
        """'<=' accepts equal values."""
        new = RequestConstraint.new
        self.assertTrue(new('<=', 'z').evaluate('a'))
        self.assertTrue(new('<=', 20).evaluate(3))
        self.assertTrue(new('<=', 3).evaluate(3))
        self.assertTrue(new('<=', 12345).evaluate(12345))
        self.assertTrue(new('<=', 'a').evaluate('a'))
        self.assertFalse(new('<=', 1.0001).evaluate(1.0002))
        self.assertFalse(new('<=', 'a').evaluate('b'))
        self.assertFalse(new('<=', 4).evaluate(30))

    def testEvaluateIsNull(self):
        """'=' None matches None and the literal string 'null', nothing else
        (falsy values like 0, False, () are NOT null)."""
        new = RequestConstraint.new
        self.assertTrue(new('=', None).evaluate(None))
        self.assertTrue(new('=', None).evaluate('null'))
        self.assertFalse(new('=', None).evaluate(()))
        self.assertFalse(new('=', None).evaluate(0))
        self.assertFalse(new('=', None).evaluate(False))

    def testEvaluateIsNotNull(self):
        """'!=' None is the exact negation of the IS NULL cases."""
        new = RequestConstraint.new
        self.assertTrue(new('!=', None).evaluate(()))
        self.assertTrue(new('!=', None).evaluate(0))
        self.assertTrue(new('!=', None).evaluate(False))
        self.assertFalse(new('!=', None).evaluate(None))
        self.assertFalse(new('!=', None).evaluate('null'))

    def testEvaluateIn(self):
        """'in' membership applies the same coercion rules as '='
        element-wise; a bare string is treated as a one-element set."""
        new = RequestConstraint.new
        self.assertTrue(new('in', [3]).evaluate(3))
        self.assertTrue(new('in', ['a', 'b', 3]).evaluate(3))
        self.assertTrue(new('in', 'a').evaluate('a'))
        self.assertTrue(new('in', [3, 4, 5]).evaluate('5'))
        self.assertTrue(new('in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999))
        self.assertFalse(new('in', ['a', 'b', 'c']).evaluate('d'))
        self.assertFalse(new('in', 'a').evaluate('b'))

    def testEvaluateNotIn(self):
        """'not in' is the exact negation of the 'in' cases."""
        new = RequestConstraint.new
        self.assertTrue(new('not in', ['a', 'b', 'c']).evaluate('d'))
        self.assertTrue(new('not in', [3, 4, 5]).evaluate(6))
        self.assertTrue(new('not in', 'a').evaluate('b'))
        self.assertFalse(new('not in', [3]).evaluate(3))
        self.assertFalse(new('not in', ['a', 'b', 3]).evaluate(3))
        self.assertFalse(new('not in', 'a').evaluate('a'))
        self.assertFalse(new('not in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999))

    def testEvaluateBetween(self):
        """BETWEEN takes a 'lo--hi' string and is inclusive at both ends."""
        # cannot make "between" with RequestConstraint.new()
        new = self._newRequestConstraint
        self.assertTrue(new('BETWEEN', '1--1').evaluate(1))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(1))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(5))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(10))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.0))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.05))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.1))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('a'))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('j'))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('x'))
        self.assertFalse(new('BETWEEN', '1--1').evaluate(2))
        self.assertFalse(new('BETWEEN', '1--2').evaluate(10))
        self.assertFalse(new('BETWEEN', '1--10').evaluate(0))
        self.assertFalse(new('BETWEEN', '1--10').evaluate(11))
        self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(0.99))
        self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(1.11))
        self.assertFalse(new('BETWEEN', 'a--x').evaluate(' '))
        self.assertFalse(new('BETWEEN', 'a--x').evaluate('z'))
|
||||
|
166
awips/test/dafTests/testSatellite.py
Normal file
166
awips/test/dafTests/testSatellite.py
Normal file
|
@ -0,0 +1,166 @@
|
|||
#
|
||||
# Test DAF support for satellite data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 04/26/16 5587 tgurney Move identifier values tests
|
||||
# out of base class
|
||||
# 06/01/16 5587 tgurney Update testGetIdentifierValues
|
||||
# 06/07/16 5574 tgurney Add advanced query tests
|
||||
# 06/13/16 5574 tgurney Typo
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class SatelliteTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for satellite data.

    NOTE(review): these are integration tests — the base class presumably
    talks to a live EDEX server; confirm before running outside that context.
    """

    # DAF datatype name used for every request built in this class.
    datatype = "satellite"

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames("West CONUS")
        self.runTimesTest(req)

    def testGetGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("Imager 11 micron IR")
        req.setLocationNames("West CONUS")
        self.runGridDataTest(req)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        requiredIds = set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(optionalIds | requiredIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Issue a grid-data request constrained by key/operator/value.

        Returns whatever the base class's runGridDataTest returns so the
        callers below can inspect the matching records.
        """
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters("Imager 11 micron IR")
        req.setLocationNames("West CONUS")
        return self.runGridDataTest(req)

    def testGetDataWithEqualsString(self):
        gridData = self._runConstraintTest('creatingEntity', '=', 'Composite')
        for record in gridData:
            self.assertEqual(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithEqualsInt(self):
        gridData = self._runConstraintTest('creatingEntity', '=', 1000)
        for record in gridData:
            self.assertEqual(record.getAttribute('creatingEntity'), 1000)

    def testGetDataWithEqualsLong(self):
        # Same value as the int test; the int/long distinction only matters
        # on the Python 2 / serialization side.
        gridData = self._runConstraintTest('creatingEntity', '=', 1000)
        for record in gridData:
            self.assertEqual(record.getAttribute('creatingEntity'), 1000)

    def testGetDataWithEqualsFloat(self):
        gridData = self._runConstraintTest('creatingEntity', '=', 1.0)
        for record in gridData:
            self.assertEqual(round(record.getAttribute('creatingEntity'), 1), 1.0)

    def testGetDataWithEqualsNone(self):
        gridData = self._runConstraintTest('creatingEntity', '=', None)
        for record in gridData:
            self.assertIsNone(record.getAttribute('creatingEntity'))

    def testGetDataWithNotEquals(self):
        gridData = self._runConstraintTest('creatingEntity', '!=', 'Composite')
        for record in gridData:
            self.assertNotEqual(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithNotEqualsNone(self):
        gridData = self._runConstraintTest('creatingEntity', '!=', None)
        for record in gridData:
            self.assertIsNotNone(record.getAttribute('creatingEntity'))

    def testGetDataWithGreaterThan(self):
        gridData = self._runConstraintTest('creatingEntity', '>', 'Composite')
        for record in gridData:
            self.assertGreater(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithLessThan(self):
        gridData = self._runConstraintTest('creatingEntity', '<', 'Composite')
        for record in gridData:
            self.assertLess(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithGreaterThanEquals(self):
        gridData = self._runConstraintTest('creatingEntity', '>=', 'Composite')
        for record in gridData:
            self.assertGreaterEqual(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithLessThanEquals(self):
        gridData = self._runConstraintTest('creatingEntity', '<=', 'Composite')
        for record in gridData:
            self.assertLessEqual(record.getAttribute('creatingEntity'), 'Composite')

    def testGetDataWithInTuple(self):
        collection = ('Composite', 'Miscellaneous')
        gridData = self._runConstraintTest('creatingEntity', 'in', collection)
        for record in gridData:
            self.assertIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithInList(self):
        # NOTE(review): despite the name this uses a tuple, same as the
        # tuple test above — possibly an oversight in the original.
        collection = ('Composite', 'Miscellaneous')
        gridData = self._runConstraintTest('creatingEntity', 'in', collection)
        for record in gridData:
            self.assertIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('Composite', 'Miscellaneous')
        generator = (item for item in collection)
        gridData = self._runConstraintTest('creatingEntity', 'in', generator)
        for record in gridData:
            self.assertIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithNotInList(self):
        collection = ('Composite', 'Miscellaneous')
        gridData = self._runConstraintTest('creatingEntity', 'not in', collection)
        for record in gridData:
            self.assertNotIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('creatingEntity', 'junk', 'Composite')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        # A dict is not a serializable constraint value.
        with self.assertRaises(TypeError):
            self._runConstraintTest('creatingEntity', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('creatingEntity', 'in', [])

    def testGetDataWithNestedInConstraintThrowsException(self):
        # Nested collections inside an 'in' constraint are rejected.
        collection = ('Composite', 'Miscellaneous', ())
        with self.assertRaises(TypeError):
            self._runConstraintTest('creatingEntity', 'in', collection)
|
166
awips/test/dafTests/testSfcObs.py
Normal file
166
awips/test/dafTests/testSfcObs.py
Normal file
|
@ -0,0 +1,166 @@
|
|||
#
|
||||
# Test DAF support for sfcobs data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
|
||||
# 06/13/16 5574 tgurney Add advanced query tests
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 01/20/17 6095 tgurney Add null identifiers test
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class SfcObsTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for sfcobs (surface observation) data.

    NOTE(review): integration tests — presumably require a live EDEX server
    reachable through the base class; confirm before running standalone.
    """

    # DAF datatype name used for every request built in this class.
    datatype = "sfcobs"

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames("14547")
        self.runTimesTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames("14547")
        req.setParameters("temperature", "seaLevelPress", "dewpoint")
        self.runGeometryDataTest(req)

    def testGetGeometryDataNullIdentifiers(self):
        # Explicitly null identifiers must be tolerated by the request path.
        req = DAL.newDataRequest(self.datatype)
        req.setLocationNames("14547")
        req.setParameters("temperature", "seaLevelPress", "dewpoint")
        req.identifiers = None
        self.runGeometryDataTest(req)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        self.runGetIdValuesTest(optionalIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Issue a geometry-data request constrained by key/operator/value.

        Returns the base class's runGeometryDataTest result so callers can
        inspect the matching records.
        """
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters("temperature", "reportType")
        return self.runGeometryDataTest(req)

    def testGetDataWithEqualsString(self):
        geometryData = self._runConstraintTest('reportType', '=', '1004')
        for record in geometryData:
            self.assertEqual(record.getString('reportType'), '1004')

    def testGetDataWithEqualsInt(self):
        # Numeric constraint value still matches the string-typed column.
        geometryData = self._runConstraintTest('reportType', '=', 1004)
        for record in geometryData:
            self.assertEqual(record.getString('reportType'), '1004')

    def testGetDataWithEqualsLong(self):
        geometryData = self._runConstraintTest('reportType', '=', 1004)
        for record in geometryData:
            self.assertEqual(record.getString('reportType'), '1004')

    # No float test because no float identifiers are available

    def testGetDataWithEqualsNone(self):
        # '= None' selects records whose column is NULL.
        geometryData = self._runConstraintTest('reportType', '=', None)
        for record in geometryData:
            self.assertEqual(record.getType('reportType'), 'NULL')

    def testGetDataWithNotEquals(self):
        geometryData = self._runConstraintTest('reportType', '!=', 1004)
        for record in geometryData:
            self.assertNotEqual(record.getString('reportType'), '1004')

    def testGetDataWithNotEqualsNone(self):
        geometryData = self._runConstraintTest('reportType', '!=', None)
        for record in geometryData:
            self.assertNotEqual(record.getType('reportType'), 'NULL')

    def testGetDataWithGreaterThan(self):
        geometryData = self._runConstraintTest('reportType', '>', 1004)
        for record in geometryData:
            self.assertGreater(record.getString('reportType'), '1004')

    def testGetDataWithLessThan(self):
        geometryData = self._runConstraintTest('reportType', '<', 1004)
        for record in geometryData:
            self.assertLess(record.getString('reportType'), '1004')

    def testGetDataWithGreaterThanEquals(self):
        geometryData = self._runConstraintTest('reportType', '>=', 1004)
        for record in geometryData:
            self.assertGreaterEqual(record.getString('reportType'), '1004')

    def testGetDataWithLessThanEquals(self):
        geometryData = self._runConstraintTest('reportType', '<=', 1004)
        for record in geometryData:
            self.assertLessEqual(record.getString('reportType'), '1004')

    def testGetDataWithInTuple(self):
        collection = ('1004', '1005')
        geometryData = self._runConstraintTest('reportType', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('reportType'), collection)

    def testGetDataWithInList(self):
        collection = ['1004', '1005']
        geometryData = self._runConstraintTest('reportType', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('reportType'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('1004', '1005')
        generator = (item for item in collection)
        geometryData = self._runConstraintTest('reportType', 'in', generator)
        for record in geometryData:
            self.assertIn(record.getString('reportType'), collection)

    def testGetDataWithNotInList(self):
        collection = ['1004', '1005']
        geometryData = self._runConstraintTest('reportType', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('reportType'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', '1004')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        # A dict is not a serializable constraint value.
        with self.assertRaises(TypeError):
            self._runConstraintTest('reportType', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'in', [])

    def testGetDataWithNestedInConstraintThrowsException(self):
        # Nested collections inside an 'in' constraint are rejected.
        collection = ('1004', '1005', ())
        with self.assertRaises(TypeError):
            self._runConstraintTest('reportType', 'in', collection)
|
74
awips/test/dafTests/testTopo.py
Normal file
74
awips/test/dafTests/testTopo.py
Normal file
|
@ -0,0 +1,74 @@
|
|||
#
|
||||
# Test DAF support for topo data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 05/26/16 5587 tgurney Add test for
|
||||
# getIdentifierValues()
|
||||
# 06/01/16 5587 tgurney Update testGetIdentifierValues
|
||||
# 07/18/17 6253 randerso Removed referenced to GMTED
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from awips.ThriftClient import ThriftRequestException
|
||||
import shapely.geometry
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
|
||||
|
||||
class TopoTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for topo data.

    NOTE(review): integration tests — presumably require a live EDEX server;
    the grid test prints sample data for manual inspection.
    """

    # DAF datatype name used for every request built in this class.
    datatype = "topo"

    def testGetGridData(self):
        print("defaultTopo")
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier("group", "/")
        req.addIdentifier("dataset", "full")
        # Small envelope over southern New England keeps the response small.
        poly = shapely.geometry.LinearRing(((-70, 40), (-71, 40), (-71, 42), (-70, 42)))
        req.setEnvelope(poly)
        gridData = DAL.getGridData(req)
        self.assertIsNotNone(gridData)
        print("Number of grid records: " + str(len(gridData)))
        print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n")
        print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n")

        # Repeat the request against each named topo source file.
        for topoFile in ["gtopo30"]:
            print("\n" + topoFile)
            req.addIdentifier("topoFile", topoFile)
            gridData = DAL.getGridData(req)
            self.assertIsNotNone(gridData)
            print("Number of grid records: " + str(len(gridData)))
            print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n")
            print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n")

    def testRequestingTooMuchDataThrowsResponseTooLargeException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier("group", "/")
        req.addIdentifier("dataset", "full")
        # Whole-globe envelope is intentionally far beyond the response limit.
        points = ((-180, 90), (180, 90), (180, -90), (-180, -90))
        poly = shapely.geometry.LinearRing(points)
        req.setEnvelope(poly)

        with self.assertRaises(ThriftRequestException) as cm:
            DAL.getGridData(req)
        # The server-side exception type is only visible in the message text.
        self.assertIn('ResponseTooLargeException', str(cm.exception))

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        requiredIds = set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(optionalIds | requiredIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()
|
208
awips/test/dafTests/testWarning.py
Normal file
208
awips/test/dafTests/testWarning.py
Normal file
|
@ -0,0 +1,208 @@
|
|||
#
|
||||
# Test DAF support for warning data
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/19/16 4795 mapeters Initial Creation.
|
||||
# 04/11/16 5548 tgurney Cleanup
|
||||
# 04/18/16 5548 tgurney More cleanup
|
||||
# 04/26/16 5587 tgurney Add identifier values tests
|
||||
# 06/08/16 5574 tgurney Add advanced query tests
|
||||
# 06/10/16 5548 tgurney Clean up references to name
|
||||
# of data type
|
||||
# 06/13/16 5574 tgurney Fix checks for None
|
||||
# 06/21/16 5548 tgurney Skip tests that cause errors
|
||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||
# 12/12/16 5981 tgurney Improve test performance
|
||||
#
|
||||
#
|
||||
|
||||
from __future__ import print_function
|
||||
from awips.dataaccess import DataAccessLayer as DAL
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||
|
||||
from awips.test.dafTests import baseDafTestCase
|
||||
import unittest
|
||||
|
||||
|
||||
class WarningTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for warning data.

    NOTE(review): integration tests — they query a live EDEX server
    (DAL.THRIFT_HOST is referenced below); confirm before running standalone.
    """

    # DAF datatype name used for every request built in this class.
    datatype = "warning"

    def _getLocationNames(self):
        """Return all location names available for the warning datatype."""
        req = DAL.newDataRequest()
        req.setDatatype(self.datatype)
        return DAL.getAvailableLocationNames(req)

    def _getAllRecords(self):
        """Return every warning record, fetching only the 'id' parameter."""
        req = DAL.newDataRequest()
        req.setDatatype(self.datatype)
        req.setParameters('id')
        return DAL.getGeometryData(req)

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("etn", "wmoid")
        self.runTimesTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("etn", "wmoid")
        self.runGeometryDataTest(req)

    def testFilterOnLocationName(self):
        allLocationNames = self._getLocationNames()
        if len(allLocationNames) == 0:
            # No data on the server — skip rather than fail.
            errmsg = "No {0} data exists on {1}. Try again with {0} data."
            raise unittest.SkipTest(errmsg.format(self.datatype, DAL.THRIFT_HOST))
        testCount = 3  # number of different location names to test
        for locationName in allLocationNames[:testCount]:
            req = DAL.newDataRequest()
            req.setDatatype(self.datatype)
            req.setParameters('id')
            req.setLocationNames(locationName)
            geomData = DAL.getGeometryData(req)
            for geom in geomData:
                self.assertEqual(geom.getLocationName(), locationName)

    def testFilterOnNonexistentLocationReturnsEmpty(self):
        req = DAL.newDataRequest()
        req.setDatatype(self.datatype)
        req.setParameters('id')
        req.setLocationNames('ZZZZ')
        self.assertEqual(len(DAL.getGeometryData(req)), 0)

    def testFilterOnInvalidLocationThrowsIncompatibleRequestException(self):
        req = DAL.newDataRequest()
        req.setDatatype(self.datatype)
        req.setParameters('id')
        # SQL-injection-shaped location name must be rejected, not executed.
        req.setLocationNames(') and 0=1')
        with self.assertRaises(Exception) as cm:
            DAL.getGeometryData(req)
        self.assertIn('IncompatibleRequestException', str(cm.exception))

    def testGetColumnIdentifierValues(self):
        self.runGetIdValuesTest(['act'])

    @unittest.skip('avoid EDEX error')
    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    @unittest.skip('avoid EDEX error')
    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        """Issue a geometry-data request constrained by key/operator/value.

        Returns the base class's runGeometryDataTest result so callers can
        inspect the matching records.
        """
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters("etn", "wmoid", "sig")
        return self.runGeometryDataTest(req)

    def testGetDataWithEqualsString(self):
        geometryData = self._runConstraintTest('sig', '=', 'Y')
        for record in geometryData:
            self.assertEqual(record.getString('sig'), 'Y')

    def testGetDataWithEqualsInt(self):
        geometryData = self._runConstraintTest('etn', '=', 1000)
        for record in geometryData:
            self.assertEqual(record.getString('etn'), '1000')

    def testGetDataWithEqualsLong(self):
        geometryData = self._runConstraintTest('etn', '=', 1000)
        for record in geometryData:
            self.assertEqual(record.getString('etn'), '1000')

    def testGetDataWithEqualsFloat(self):
        geometryData = self._runConstraintTest('etn', '=', 1.0)
        for record in geometryData:
            self.assertEqual(round(float(record.getString('etn')), 1), 1.0)

    def testGetDataWithEqualsNone(self):
        # '= None' selects records whose column is NULL.
        geometryData = self._runConstraintTest('sig', '=', None)
        for record in geometryData:
            self.assertEqual(record.getType('sig'), 'NULL')

    def testGetDataWithNotEquals(self):
        geometryData = self._runConstraintTest('sig', '!=', 'Y')
        for record in geometryData:
            self.assertNotEqual(record.getString('sig'), 'Y')

    def testGetDataWithNotEqualsNone(self):
        geometryData = self._runConstraintTest('sig', '!=', None)
        for record in geometryData:
            self.assertNotEqual(record.getType('sig'), 'NULL')

    def testGetDataWithGreaterThan(self):
        geometryData = self._runConstraintTest('sig', '>', 'Y')
        for record in geometryData:
            self.assertGreater(record.getString('sig'), 'Y')

    def testGetDataWithLessThan(self):
        geometryData = self._runConstraintTest('sig', '<', 'Y')
        for record in geometryData:
            self.assertLess(record.getString('sig'), 'Y')

    def testGetDataWithGreaterThanEquals(self):
        geometryData = self._runConstraintTest('sig', '>=', 'Y')
        for record in geometryData:
            self.assertGreaterEqual(record.getString('sig'), 'Y')

    def testGetDataWithLessThanEquals(self):
        geometryData = self._runConstraintTest('sig', '<=', 'Y')
        for record in geometryData:
            self.assertLessEqual(record.getString('sig'), 'Y')

    def testGetDataWithInTuple(self):
        collection = ('Y', 'A')
        geometryData = self._runConstraintTest('sig', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('sig'), collection)

    def testGetDataWithInList(self):
        collection = ['Y', 'A']
        geometryData = self._runConstraintTest('sig', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getString('sig'), collection)

    def testGetDataWithInGenerator(self):
        collection = ('Y', 'A')
        generator = (item for item in collection)
        geometryData = self._runConstraintTest('sig', 'in', generator)
        for record in geometryData:
            self.assertIn(record.getString('sig'), collection)

    def testGetDataWithNotInList(self):
        collection = ['Y', 'W']
        geometryData = self._runConstraintTest('sig', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('sig'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('sig', 'junk', 'Y')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        # A dict is not a serializable constraint value.
        with self.assertRaises(TypeError):
            self._runConstraintTest('sig', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('sig', 'in', [])

    def testGetDataWithNestedInConstraintThrowsException(self):
        # Nested collections inside an 'in' constraint are rejected.
        collection = ('Y', 'A', ())
        with self.assertRaises(TypeError):
            self._runConstraintTest('sig', 'in', collection)
|
45
dynamicserialize/DynamicSerializationManager.py
Normal file
45
dynamicserialize/DynamicSerializationManager.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
#
|
||||
# A port of the Java DynamicSerializeManager. Should be used to read/write
|
||||
# DynamicSerialize binary data.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/09/10 njensen Initial Creation.
|
||||
#
|
||||
|
||||
from thrift.transport import TTransport
|
||||
from . import SelfDescribingBinaryProtocol, ThriftSerializationContext
|
||||
|
||||
|
||||
class DynamicSerializationManager:
    """Reads/writes DynamicSerialize binary data via a thrift memory buffer.

    Each serialize/deserialize call builds a fresh in-memory transport and a
    SelfDescribingBinaryProtocol over it; the actual per-type work is done by
    ThriftSerializationContext.
    """

    def __init__(self):
        # The most recently built thrift transport; serializeObject() reads
        # the final bytes back out of it.
        self.transport = None

    def _deserialize(self, ctx):
        """Delegate payload deserialization to the context."""
        return ctx.deserializeMessage()

    def deserializeBytes(self, sbytes):
        """Decode one DynamicSerialize message from raw bytes and return the object."""
        ctx = self._buildSerializationContext(sbytes)
        ctx.readMessageStart()
        obj = self._deserialize(ctx)
        ctx.readMessageEnd()
        return obj

    def _buildSerializationContext(self, sbytes=None):
        """Create a serialization context over a new in-memory transport.

        When sbytes is provided the buffer is pre-loaded for reading;
        otherwise it starts empty for writing.
        """
        self.transport = TTransport.TMemoryBuffer(sbytes)
        protocol = SelfDescribingBinaryProtocol.SelfDescribingBinaryProtocol(self.transport)
        return ThriftSerializationContext.ThriftSerializationContext(self, protocol)

    def serializeObject(self, obj):
        """Encode obj as one DynamicSerialize message and return the raw bytes."""
        ctx = self._buildSerializationContext()
        ctx.writeMessageStart("dynamicSerialize")
        self._serialize(ctx, obj)
        ctx.writeMessageEnd()
        return self.transport.getvalue()

    def _serialize(self, ctx, obj):
        """Delegate payload serialization to the context."""
        ctx.serializeMessage(obj)
|
114
dynamicserialize/SelfDescribingBinaryProtocol.py
Normal file
114
dynamicserialize/SelfDescribingBinaryProtocol.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
#
|
||||
# Partially compatible AWIPS-II Thrift Binary Protocol
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 11/11/09 chammack Initial Creation.
|
||||
# 06/09/10 njensen Added float, list methods
|
||||
# Apr 24, 2015 4425 nabowle Add F64List support.
|
||||
#
|
||||
#
|
||||
|
||||
import struct
|
||||
import numpy
|
||||
from thrift.protocol.TProtocol import *
|
||||
from thrift.protocol.TBinaryProtocol import *
|
||||
|
||||
# Custom type code for 32-bit floats; thrift's TType has no single-precision
# float, so DynamicSerialize reserves 64 for it.
FLOAT = 64

# Big-endian (network-order) dtypes used for bulk list (de)serialization.
intList = numpy.dtype(numpy.int32).newbyteorder('>')
floatList = numpy.dtype(numpy.float32).newbyteorder('>')
longList = numpy.dtype(numpy.int64).newbyteorder('>')
shortList = numpy.dtype(numpy.int16).newbyteorder('>')
byteList = numpy.dtype(numpy.int8).newbyteorder('>')
doubleList = numpy.dtype(numpy.float64).newbyteorder('>')
|
||||
|
||||
|
||||
class SelfDescribingBinaryProtocol(TBinaryProtocol):
    """Thrift binary protocol variant used by AWIPS DynamicSerialize.

    Unlike stock TBinaryProtocol it writes the *names* of structs and fields
    into the stream (self-describing), adds single-precision float support
    (see module-level FLOAT), and bulk-reads/writes homogeneous numeric lists
    through numpy using the big-endian dtypes defined at module level.
    """

    def readFieldBegin(self):
        """Read a field header: returns (name, type, id); name is None at STOP."""
        ftype = self.readByte()
        if ftype == TType.STOP:
            return None, ftype, 0
        name = self.readString()
        fid = self.readI16()
        return name, ftype, fid

    def readStructBegin(self):
        """Structs are prefixed with their fully-qualified name string."""
        return self.readString()

    def writeStructBegin(self, name):
        self.writeString(name)

    def writeFieldBegin(self, name, ftype, fid):
        # Order matters and differs from stock thrift: type, then name, then id.
        self.writeByte(ftype)
        self.writeString(name)
        self.writeI16(fid)

    def readFloat(self):
        """Read a single-precision float (transported as a raw I32 bit pattern)."""
        d = self.readI32()
        dAsBytes = struct.pack('i', d)
        f = struct.unpack('f', dAsBytes)
        return f[0]

    def writeFloat(self, f):
        """Write a single-precision float as its raw I32 bit pattern."""
        dAsBytes = struct.pack('f', f)
        i = struct.unpack('i', dAsBytes)
        self.writeI32(i[0])

    def readI32List(self, sz):
        """Bulk-read sz big-endian int32 values as a numpy array."""
        buff = self.trans.readAll(4*sz)
        val = numpy.frombuffer(buff, dtype=intList, count=sz)
        return val

    def readF32List(self, sz):
        """Bulk-read sz big-endian float32 values as a numpy array."""
        buff = self.trans.readAll(4*sz)
        val = numpy.frombuffer(buff, dtype=floatList, count=sz)
        return val

    def readF64List(self, sz):
        """Bulk-read sz big-endian float64 values as a numpy array."""
        buff = self.trans.readAll(8*sz)
        val = numpy.frombuffer(buff, dtype=doubleList, count=sz)
        return val

    def readI64List(self, sz):
        """Bulk-read sz big-endian int64 values as a numpy array."""
        buff = self.trans.readAll(8*sz)
        val = numpy.frombuffer(buff, dtype=longList, count=sz)
        return val

    def readI16List(self, sz):
        """Bulk-read sz big-endian int16 values as a numpy array."""
        buff = self.trans.readAll(2*sz)
        val = numpy.frombuffer(buff, dtype=shortList, count=sz)
        return val

    def readI8List(self, sz):
        """Bulk-read sz int8 values as a numpy array."""
        buff = self.trans.readAll(sz)
        val = numpy.frombuffer(buff, dtype=byteList, count=sz)
        return val

    # BUG FIX: the write*List methods previously used numpy.getbuffer(b),
    # which exists only on Python 2 and was removed from numpy; it raised
    # AttributeError under Python 3.  ndarray.tobytes() produces the same
    # raw big-endian byte stream on both Python 2 (numpy >= 1.9) and 3.

    def writeI32List(self, buff):
        """Bulk-write a sequence as big-endian int32 bytes."""
        b = numpy.asarray(buff, intList)
        self.trans.write(b.tobytes())

    def writeF32List(self, buff):
        """Bulk-write a sequence as big-endian float32 bytes."""
        b = numpy.asarray(buff, floatList)
        self.trans.write(b.tobytes())

    def writeF64List(self, buff):
        """Bulk-write a sequence as big-endian float64 bytes."""
        b = numpy.asarray(buff, doubleList)
        self.trans.write(b.tobytes())

    def writeI64List(self, buff):
        """Bulk-write a sequence as big-endian int64 bytes."""
        b = numpy.asarray(buff, longList)
        self.trans.write(b.tobytes())

    def writeI16List(self, buff):
        """Bulk-write a sequence as big-endian int16 bytes."""
        b = numpy.asarray(buff, shortList)
        self.trans.write(b.tobytes())

    def writeI8List(self, buff):
        """Bulk-write a sequence as int8 bytes."""
        b = numpy.asarray(buff, byteList)
        self.trans.write(b.tobytes())
|
433
dynamicserialize/ThriftSerializationContext.py
Normal file
433
dynamicserialize/ThriftSerializationContext.py
Normal file
|
@ -0,0 +1,433 @@
|
|||
#
|
||||
# A port of the Java ThriftSerializationContext, used for reading/writing
|
||||
# DynamicSerialize objects to/from thrift.
|
||||
#
|
||||
# For serialization, it has no knowledge of the expected types in other
|
||||
# languages, it is instead all based on inspecting the types of the objects
|
||||
# passed to it. Therefore, ensure the types of python objects and primitives
|
||||
# match what they should be in the destination language.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/09/10 njensen Initial Creation.
|
||||
# 06/12/13 #2099 dgilling Implement readObject() and
|
||||
# writeObject().
|
||||
# Apr 24, 2015 4425 nabowle Add Double support
|
||||
# Oct 17, 2016 5919 njensen Optimized for speed
|
||||
# Sep 06, 2018 mjames@ucar Python3 compliance
|
||||
#
|
||||
#
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
import types
|
||||
import six
|
||||
import numpy
|
||||
from thrift.Thrift import TType
|
||||
import dynamicserialize
|
||||
from dynamicserialize import dstypes, adapters
|
||||
from dynamicserialize import SelfDescribingBinaryProtocol
|
||||
|
||||
DS_LEN = len('dynamicserialize.dstypes.')
|
||||
|
||||
dsObjTypes = {}
|
||||
|
||||
|
||||
def buildObjMap(module):
    """Recursively import a dstypes package and register its classes.

    Packages declare their children via __all__; leaf modules contain a
    class whose name matches the module basename. Each leaf class is
    stored in dsObjTypes keyed by its dotted path relative to
    dynamicserialize.dstypes.
    """
    if '__all__' in module.__dict__:
        # package level: import and recurse into every declared submodule
        for child in module.__all__:
            childName = module.__name__ + '.' + child
            __import__(childName)
            buildObjMap(sys.modules[childName])
    else:
        # leaf module: the contained class shares the module's basename
        className = module.__name__[module.__name__.rfind('.') + 1:]
        clazz = module.__dict__[className]
        key = module.__name__[DS_LEN:]
        dsObjTypes[key] = clazz


buildObjMap(dstypes)
|
||||
|
||||
# Map python types to the thrift wire type used to encode them.
if six.PY2:
    pythonToThriftMap = {
        types.StringType: TType.STRING,
        types.IntType: TType.I32,
        types.LongType: TType.I64,
        types.ListType: TType.LIST,
        unicode: TType.STRING,
        types.DictionaryType: TType.MAP,
        set: TType.SET,
        types.FloatType: SelfDescribingBinaryProtocol.FLOAT,
        types.BooleanType: TType.BOOL,
        types.InstanceType: TType.STRUCT,
        types.NoneType: TType.VOID,
        numpy.float32: SelfDescribingBinaryProtocol.FLOAT,
        numpy.int32: TType.I32,
        numpy.ndarray: TType.LIST,
        numpy.object_: TType.STRING,  # making an assumption here
        numpy.string_: TType.STRING,
        numpy.float64: TType.DOUBLE,
        numpy.int16: TType.I16,
        numpy.int8: TType.BYTE,
        numpy.int64: TType.I64
    }
else:
    pythonToThriftMap = {
        bytes: TType.STRING,
        # BUG FIX: the original listed int twice (I32 then I64); the
        # duplicate dict key meant only the I64 entry ever took effect.
        # Python 3 ints are unbounded, so I64 is the surviving, correct
        # mapping; the dead I32 entry has been removed.
        int: TType.I64,
        list: TType.LIST,
        dict: TType.MAP,
        set: TType.SET,
        float: SelfDescribingBinaryProtocol.FLOAT,
        bool: TType.BOOL,
        object: TType.STRUCT,
        str: TType.STRING,
        type(None): TType.VOID,
        numpy.float32: SelfDescribingBinaryProtocol.FLOAT,
        numpy.int32: TType.I32,
        numpy.ndarray: TType.LIST,
        numpy.object_: TType.STRING,  # making an assumption here
        numpy.string_: TType.STRING,
        numpy.float64: TType.DOUBLE,
        numpy.int16: TType.I16,
        numpy.int8: TType.BYTE,
        numpy.int64: TType.I64
    }

# Element types that have bulk numpy-backed list read/write support.
primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64,
                    SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE)
|
||||
|
||||
|
||||
class ThriftSerializationContext(object):
    """Reads/writes DynamicSerialize objects to/from a thrift protocol.

    Serialization is driven purely by inspecting the python types of the
    objects passed in (via pythonToThriftMap); there is no schema, so
    python values must already match the types expected by the
    destination language.
    """

    def __init__(self, serializationManager, selfDescribingBinaryProtocol):
        self.serializationManager = serializationManager
        self.protocol = selfDescribingBinaryProtocol
        # Dispatch tables keyed by thrift type constant; built per
        # instance because entries are bound methods of self.protocol.
        self.typeDeserializationMethod = {
            TType.STRING: self.protocol.readString,
            TType.I16: self.protocol.readI16,
            TType.I32: self.protocol.readI32,
            TType.LIST: self._deserializeArray,
            TType.MAP: self._deserializeMap,
            TType.SET: self._deserializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat,
            TType.BYTE: self.protocol.readByte,
            TType.I64: self.protocol.readI64,
            TType.DOUBLE: self.protocol.readDouble,
            TType.BOOL: self.protocol.readBool,
            TType.STRUCT: self.deserializeMessage,
            TType.VOID: lambda: None
        }
        self.typeSerializationMethod = {
            TType.STRING: self.protocol.writeString,
            TType.I16: self.protocol.writeI16,
            TType.I32: self.protocol.writeI32,
            TType.LIST: self._serializeArray,
            TType.MAP: self._serializeMap,
            TType.SET: self._serializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat,
            TType.BYTE: self.protocol.writeByte,
            TType.I64: self.protocol.writeI64,
            TType.DOUBLE: self.protocol.writeDouble,
            TType.BOOL: self.protocol.writeBool,
            TType.STRUCT: self.serializeMessage,
            TType.VOID: lambda x: None
        }
        # Bulk numpy-backed handlers for lists of primitive elements.
        self.listDeserializationMethod = {
            TType.BYTE: self.protocol.readI8List,
            TType.I16: self.protocol.readI16List,
            TType.I32: self.protocol.readI32List,
            TType.I64: self.protocol.readI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List,
            TType.DOUBLE: self.protocol.readF64List
        }
        self.listSerializationMethod = {
            TType.BYTE: self.protocol.writeI8List,
            TType.I16: self.protocol.writeI16List,
            TType.I32: self.protocol.writeI32List,
            TType.I64: self.protocol.writeI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List,
            TType.DOUBLE: self.protocol.writeF64List
        }

    def readMessageStart(self):
        """Read a thrift message header and return the message name."""
        msg = self.protocol.readMessageBegin()
        return msg[0]

    def readMessageEnd(self):
        self.protocol.readMessageEnd()

    def deserializeMessage(self):
        """Deserialize one struct from the stream.

        The struct name encodes what to build: a bare number is a
        primitive type id, a name containing '$' is assumed to be a Java
        enum inner class, otherwise it is looked up in the adapter
        registry and then in the dynamicserialize type map.
        """
        name = self.protocol.readStructBegin()
        # cp437 maps every byte value to a character, so decoding the
        # raw name bytes cannot fail
        name = name.decode('cp437')
        name = name.replace('_', '.')
        if name.isdigit():
            return self._deserializeType(int(name))
        if name in adapters.classAdapterRegistry:
            return adapters.classAdapterRegistry[name].deserialize(self)
        elif '$' in name:
            # inner class: assume it is an enum serialized as a single
            # __enumValue__ string field
            fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
            if fieldName.decode('utf8') != '__enumValue__':
                raise dynamicserialize.SerializationException(
                    "Expected to find enum payload. Found: " + fieldName)
            obj = self.protocol.readString()
            self.protocol.readFieldEnd()
            return obj
        else:
            clz = dsObjTypes[name]
            obj = clz()

        while self._deserializeField(obj):
            pass

        self.protocol.readStructEnd()
        return obj

    def _deserializeType(self, b):
        """Dispatch deserialization by thrift type constant b."""
        try:
            return self.typeDeserializationMethod[b]()
        except KeyError:
            raise dynamicserialize.SerializationException(
                "Unsupported type value " + str(b))

    def _deserializeField(self, obj):
        """Read one field and apply it to obj via its set<Name> method.

        Returns False when the field STOP sentinel is reached, True
        otherwise (including VOID fields, which carry no payload).
        """
        fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
        if fieldType == TType.STOP:
            return False
        elif fieldType != TType.VOID:
            result = self._deserializeType(fieldType)
            fn_str = bytes.decode(fieldName)
            lookingFor = "set" + fn_str[0].upper() + fn_str[1:]

            try:
                setMethod = getattr(obj, lookingFor)
                setMethod(result)
            except AttributeError:
                # BUG FIX: getattr raises AttributeError (not ValueError)
                # for a missing attribute, so the original handler never
                # fired and a missing setter surfaced as a bare
                # AttributeError instead of this message.
                raise dynamicserialize.SerializationException(
                    "Couldn't find setter method " + lookingFor)

        self.protocol.readFieldEnd()
        return True

    def _deserializeArray(self):
        """Read a thrift list; primitive element types use the bulk
        numpy readers, everything else is read element-by-element."""
        listType, size = self.protocol.readListBegin()
        result = []
        if size:
            if listType not in primitiveSupport:
                m = self.typeDeserializationMethod[listType]
                result = [m() for __ in range(size)]
            else:
                result = self.listDeserializationMethod[listType](size)
        self.protocol.readListEnd()
        return result

    def _deserializeMap(self):
        """Read a thrift map into a dict.

        Keys and values are read as full structs because Java generics
        erasure makes dynamicserialize write them with VOID type tags.
        """
        keyType, valueType, size = self.protocol.readMapBegin()
        result = {}
        for __ in range(size):
            key = self.typeDeserializationMethod[TType.STRUCT]()
            value = self.typeDeserializationMethod[TType.STRUCT]()
            result[key] = value
        self.protocol.readMapEnd()
        return result

    def _deserializeSet(self):
        """Read a thrift set into a python set (elements as structs)."""
        setType, setSize = self.protocol.readSetBegin()
        result = set([])
        for __ in range(setSize):
            result.add(self.typeDeserializationMethod[TType.STRUCT]())
        self.protocol.readSetEnd()
        return result

    def _lookupType(self, obj):
        """Map a python object to its thrift wire type constant."""
        pyt = type(obj)
        if pyt in pythonToThriftMap:
            return pythonToThriftMap[pyt]
        elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'):
            # any dstypes class serializes as a STRUCT
            if six.PY2:
                return pythonToThriftMap[types.InstanceType]
            return pythonToThriftMap[object]
        raise dynamicserialize.SerializationException(
            "Don't know how to serialize object of type: " + str(pyt))

    def serializeMessage(self, obj):
        """Serialize obj as a thrift struct (or a wrapped basic type)."""
        tt = self._lookupType(obj)

        if tt == TType.STRUCT:
            fqn = obj.__module__[DS_LEN:]
            if fqn in adapters.classAdapterRegistry:
                # get proper class name when writing class name to the
                # stream, in case we have a special inner-class case
                m = sys.modules[adapters.classAdapterRegistry[fqn].__name__]
                if isinstance(m.ClassAdapter, list):
                    fqn = m.ClassAdapter[0]
                self.protocol.writeStructBegin(fqn)
                adapters.classAdapterRegistry[fqn].serialize(self, obj)
                return
            else:
                self.protocol.writeStructBegin(fqn)
                # reflect over get* methods to find the fields to write
                methods = inspect.getmembers(obj, inspect.ismethod)
                fid = 1
                for m in methods:
                    methodName = m[0]
                    if methodName.startswith('get'):
                        fieldname = methodName[3].lower() + methodName[4:]
                        val = m[1]()
                        ft = self._lookupType(val)
                        # the original had an if/else here with two
                        # identical branches; collapsed to one call
                        self._serializeField(fieldname, ft, fid, val)
                        fid += 1
                self.protocol.writeFieldStop()

                self.protocol.writeStructEnd()
        else:
            # basic types are wrapped in a struct named by their type id
            self.protocol.writeStructBegin(str(tt))
            self._serializeType(obj, tt)
            self.protocol.writeStructEnd()

    def _serializeField(self, fieldName, fieldType, fieldId, fieldValue):
        """Write one named, typed field."""
        self.protocol.writeFieldBegin(fieldName, fieldType, fieldId)
        self._serializeType(fieldValue, fieldType)
        self.protocol.writeFieldEnd()

    def _serializeType(self, fieldValue, fieldType):
        """Dispatch serialization by thrift type constant."""
        if fieldType in self.typeSerializationMethod:
            return self.typeSerializationMethod[fieldType](fieldValue)
        else:
            raise dynamicserialize.SerializationException(
                "Unsupported type value " + str(fieldType))

    def _serializeArray(self, obj):
        """Write a list/ndarray; primitive element types go through the
        bulk numpy writers, everything else element-by-element."""
        size = len(obj)
        if size:
            if isinstance(obj, numpy.ndarray):
                t = pythonToThriftMap[obj.dtype.type]
                size = obj.size  # flattened element count for multi-dim
            else:
                t = self._lookupType(obj[0])
        else:
            t = TType.STRUCT  # arbitrary tag for an empty list
        self.protocol.writeListBegin(t, size)
        if t == TType.STRING:
            if isinstance(obj, numpy.ndarray):
                # ndarray string elements are stripped of padding
                if len(obj.shape) == 1:
                    for x in obj:
                        self.typeSerializationMethod[t](str(x).strip())
                else:
                    for row in obj:
                        for y in row:
                            self.typeSerializationMethod[t](str(y).strip())
            else:
                for x in obj:
                    self.typeSerializationMethod[t](str(x))
        elif t not in primitiveSupport:
            for x in obj:
                self.typeSerializationMethod[t](x)
        else:
            self.listSerializationMethod[t](obj)
        self.protocol.writeListEnd()

    def _serializeMap(self, obj):
        """Write a dict; keys/values tagged VOID (see _deserializeMap)."""
        self.protocol.writeMapBegin(TType.VOID, TType.VOID, len(obj))
        for k in list(obj.keys()):
            self.typeSerializationMethod[TType.STRUCT](k)
            self.typeSerializationMethod[TType.STRUCT](obj[k])
        self.protocol.writeMapEnd()

    def _serializeSet(self, obj):
        """Write a set; elements serialized as structs."""
        self.protocol.writeSetBegin(TType.VOID, len(obj))
        for x in obj:
            self.typeSerializationMethod[TType.STRUCT](x)
        self.protocol.writeSetEnd()

    def writeMessageStart(self, name):
        self.protocol.writeMessageBegin(name, TType.VOID, 0)

    def writeMessageEnd(self):
        self.protocol.writeMessageEnd()

    def readBool(self):
        return self.protocol.readBool()

    def writeBool(self, b):
        self.protocol.writeBool(b)

    def readByte(self):
        return self.protocol.readByte()

    def writeByte(self, b):
        self.protocol.writeByte(b)

    def readDouble(self):
        return self.protocol.readDouble()

    def writeDouble(self, d):
        self.protocol.writeDouble(d)

    def readFloat(self):
        return self.protocol.readFloat()

    def writeFloat(self, f):
        self.protocol.writeFloat(f)

    def readI16(self):
        return self.protocol.readI16()

    def writeI16(self, i):
        self.protocol.writeI16(i)

    def readI32(self):
        return self.protocol.readI32()

    def writeI32(self, i):
        self.protocol.writeI32(i)

    def readI64(self):
        return self.protocol.readI64()

    def writeI64(self, i):
        self.protocol.writeI64(i)

    def readString(self):
        return self.protocol.readString()

    def writeString(self, s):
        self.protocol.writeString(s)

    def readBinary(self):
        """Read a length-prefixed byte array."""
        numBytes = self.protocol.readI32()
        return self.protocol.readI8List(numBytes)

    def writeBinary(self, b):
        """Write a length-prefixed byte array (inverse of readBinary).

        NOTE(review): this method was missing from the original class
        even though GeomDataRespAdapter.serialize() calls
        context.writeBinary(); added symmetric to readBinary -- confirm
        against EDEX's expected wire format before relying on it.
        """
        self.protocol.writeI32(len(b))
        self.protocol.writeI8List(b)

    def readFloatArray(self):
        """Read a length-prefixed float32 array."""
        size = self.protocol.readI32()
        return self.protocol.readF32List(size)

    def writeFloatArray(self, floats):
        """Write a length-prefixed float32 array."""
        self.protocol.writeI32(len(floats))
        self.protocol.writeF32List(floats)

    def readObject(self):
        """Read one complete object (struct) from the stream."""
        return self.deserializeMessage()

    def writeObject(self, obj):
        """Write one complete object (struct) to the stream."""
        self.serializeMessage(obj)
|
35
dynamicserialize/__init__.py
Normal file
35
dynamicserialize/__init__.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/20/10 njensen Initial Creation.
|
||||
#
|
||||
#
|
||||
|
||||
__all__ = ['SerializationException']
|
||||
|
||||
from . import dstypes, adapters
|
||||
from . import DynamicSerializationManager
|
||||
|
||||
|
||||
class SerializationException(Exception):
    """Raised when dynamicserialize cannot serialize or deserialize."""

    def __init__(self, message=None):
        self.message = message

    def __str__(self):
        # empty string when no message was supplied
        return self.message if self.message else ""
|
||||
|
||||
|
||||
def serialize(obj):
    """Serialize obj to thrift bytes via a fresh manager instance."""
    manager = DynamicSerializationManager.DynamicSerializationManager()
    return manager.serializeObject(obj)
|
||||
|
||||
|
||||
def deserialize(objbytes):
    """Deserialize thrift bytes to an object via a fresh manager."""
    manager = DynamicSerializationManager.DynamicSerializationManager()
    return manager.deserializeBytes(objbytes)
|
21
dynamicserialize/adapters/ByteBufferAdapter.py
Normal file
21
dynamicserialize/adapters/ByteBufferAdapter.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
#
|
||||
# Adapter for java.nio.ByteBuffer
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/03/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
ClassAdapter = ['java.nio.ByteBuffer', 'java.nio.HeapByteBuffer']


def serialize(context, bufferset):
    """Serializing a ByteBuffer from python is unsupported."""
    raise NotImplementedError("Serialization of ByteBuffers is not supported.")


def deserialize(context):
    """Return the ByteBuffer payload as raw bytes."""
    return context.readBinary()
|
25
dynamicserialize/adapters/CalendarAdapter.py
Normal file
25
dynamicserialize/adapters/CalendarAdapter.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
#
|
||||
# Adapter for java.util.Calendar
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 09/29/10 wldougher Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.util import Calendar
|
||||
|
||||
ClassAdapter = 'java.util.Calendar'


def serialize(context, calendar):
    """Write the calendar as its epoch time in milliseconds."""
    context.writeI64(calendar.getTimeInMillis())


def deserialize(context):
    """Rebuild a Calendar from epoch milliseconds."""
    cal = Calendar()
    cal.setTimeInMillis(context.readI64())
    return cal
|
25
dynamicserialize/adapters/CommutativeTimestampAdapter.py
Normal file
25
dynamicserialize/adapters/CommutativeTimestampAdapter.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
#
|
||||
# Adapter for CommutativeTimestamp
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 9/21/2015 4486 rjpeter Initial creation.
|
||||
# Jun 23, 2016 5696 rjpeter Handle CommutativeTimestamp.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import CommutativeTimestamp
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.time.CommutativeTimestamp'


def serialize(context, date):
    """Write the timestamp as epoch milliseconds."""
    context.writeI64(date.getTime())


def deserialize(context):
    """Rebuild a CommutativeTimestamp from epoch milliseconds."""
    ts = CommutativeTimestamp()
    ts.setTime(context.readI64())
    return ts
|
28
dynamicserialize/adapters/CoordAdapter.py
Normal file
28
dynamicserialize/adapters/CoordAdapter.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
#
|
||||
# Adapter for com.vividsolutions.jts.geom.Coordinate
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/20/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Coordinate
|
||||
|
||||
ClassAdapter = 'com.vividsolutions.jts.geom.Coordinate'


def serialize(context, coordinate):
    """Write the coordinate as two doubles: x then y."""
    context.writeDouble(coordinate.getX())
    context.writeDouble(coordinate.getY())


def deserialize(context):
    """Read x then y doubles and rebuild a Coordinate."""
    xVal = context.readDouble()
    yVal = context.readDouble()
    coord = Coordinate()
    coord.setX(xVal)
    coord.setY(yVal)
    return coord
|
23
dynamicserialize/adapters/DatabaseIDAdapter.py
Normal file
23
dynamicserialize/adapters/DatabaseIDAdapter.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 03/29/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID'


def serialize(context, dbId):
    """Write the DatabaseID as its string form."""
    context.writeString(str(dbId))


def deserialize(context):
    """Rebuild a DatabaseID from its string form."""
    return DatabaseID(context.readString())
|
24
dynamicserialize/adapters/DateAdapter.py
Normal file
24
dynamicserialize/adapters/DateAdapter.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
#
|
||||
# Adapter for java.util.Date
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 12/06/10 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.util import Date
|
||||
|
||||
ClassAdapter = 'java.util.Date'


def serialize(context, date):
    """Write the date as epoch milliseconds."""
    context.writeI64(date.getTime())


def deserialize(context):
    """Rebuild a Date from epoch milliseconds."""
    d = Date()
    d.setTime(context.readI64())
    return d
|
32
dynamicserialize/adapters/EnumSetAdapter.py
Normal file
32
dynamicserialize/adapters/EnumSetAdapter.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
#
|
||||
# Adapter for java.util.EnumSet
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 07/28/11 dgilling Initial Creation.
|
||||
# 12/02/13 2537 bsteffen Serialize empty enum sets.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.util import EnumSet
|
||||
|
||||
ClassAdapter = ['java.util.EnumSet', 'java.util.RegularEnumSet']


def serialize(context, bufferset):
    """Write the set size, the enum class name, then each value name."""
    context.writeI32(len(bufferset))
    context.writeString(bufferset.getEnumClass())
    for val in bufferset:
        context.writeString(val)


def deserialize(context):
    """Read size, enum class name and value names; rebuild the EnumSet."""
    size = context.readI32()
    enumClassName = context.readString()
    values = [context.readString() for __ in range(size)]
    return EnumSet(enumClassName, values)
|
21
dynamicserialize/adapters/FloatBufferAdapter.py
Normal file
21
dynamicserialize/adapters/FloatBufferAdapter.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
#
|
||||
# Adapter for java.nio.FloatBuffer
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/01/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
ClassAdapter = ['java.nio.FloatBuffer', 'java.nio.HeapFloatBuffer']


def serialize(context, bufferset):
    """Serializing a FloatBuffer from python is unsupported."""
    raise NotImplementedError("Serialization of FloatBuffers is not supported.")


def deserialize(context):
    """Return the FloatBuffer payload as a float array."""
    return context.readFloatArray()
|
24
dynamicserialize/adapters/FormattedDateAdapter.py
Normal file
24
dynamicserialize/adapters/FormattedDateAdapter.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
#
|
||||
# Adapter for FormattedDate
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 9/21/2015 4486 rjpeter Initial creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import FormattedDate
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.time.FormattedDate'


def serialize(context, date):
    """Write the date as epoch milliseconds."""
    context.writeI64(date.getTime())


def deserialize(context):
    """Rebuild a FormattedDate from epoch milliseconds."""
    d = FormattedDate()
    d.setTime(context.readI64())
    return d
|
94
dynamicserialize/adapters/GeomDataRespAdapter.py
Normal file
94
dynamicserialize/adapters/GeomDataRespAdapter.py
Normal file
|
@ -0,0 +1,94 @@
|
|||
#
|
||||
# Efficient adapter for GetGeometryDataResponse
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# Oct 17, 2016 5919 njensen Initial creation
|
||||
#
|
||||
#
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import GeometryResponseData
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import GetGeometryDataResponse
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.dataaccess.response.GetGeometryDataResponse'


def serialize(context, resp):
    """Hand-rolled serialization of GetGeometryDataResponse for speed."""
    wkbs = resp.getGeometryWKBs()
    # length-prefixed list of WKB byte arrays
    context.writeI32(len(wkbs))
    for wkb in wkbs:
        # NOTE(review): depends on the context providing a writeBinary()
        # counterpart to readBinary() -- confirm it exists.
        context.writeBinary(wkb)

    geoData = resp.getGeoData()
    context.writeI32(len(geoData))
    for geo in geoData:
        context.writeI32(geo.getGeometryWKBindex())
        context.writeObject(geo.getTime())
        context.writeObject(geo.getLevel())
        context.writeObject(geo.getLocationName())
        context.writeObject(geo.getAttributes())

        # data map: name -> [value, value-type-name, unit]
        params = geo.getDataMap()
        context.writeI32(len(params))
        for name in params:
            context.writeString(name)
            value = params[name]
            context.writeObject(value[0])        # actual value
            context.writeString(str(value[1]))   # value type as string
            context.writeObject(value[2])        # unit


def deserialize(context):
    """Rebuild a GetGeometryDataResponse from the stream."""
    wkbCount = context.readI32()
    wkbs = [context.readBinary() for __ in range(wkbCount)]

    geoData = []
    geoCount = context.readI32()
    for _ in range(geoCount):
        data = GeometryResponseData()
        data.setGeometryWKBindex(context.readI32())
        data.setTime(context.readObject())
        data.setLevel(context.readObject())
        data.setLocationName(context.readObject())
        data.setAttributes(context.readObject())

        # parameters: name -> [value, value-type-name, unit]
        paramMap = {}
        paramCount = context.readI32()
        for __ in range(paramCount):
            paramName = context.readString()
            value = context.readObject()
            typeName = context.readString()
            unit = context.readObject()
            paramMap[paramName] = [value, typeName, unit]
        data.setDataMap(paramMap)
        geoData.append(data)

    # assemble the response object
    resp = GetGeometryDataResponse()
    resp.setGeometryWKBs(wkbs)
    resp.setGeoData(geoData)
    return resp
|
36
dynamicserialize/adapters/GeometryTypeAdapter.py
Normal file
36
dynamicserialize/adapters/GeometryTypeAdapter.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
#
|
||||
# Adapter for com.vividsolutions.jts.geom.Polygon
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/20/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
import dynamicserialize
|
||||
|
||||
# TODO: Implement serialization/make deserialization useful.
|
||||
# Deserialization was simply implemented to allow GridLocation objects to be
|
||||
# passed through thrift, but the resulting Geometry object will not be transformed into
|
||||
# useful data; the base byte array is passed to a worthless Geometry class.
|
||||
|
||||
from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Geometry
|
||||
|
||||
# NOTE: At the moment, EDEX serializes Polygon, MultiPolygons, Points, and
|
||||
# Geometrys with the tag of the base class Geometry. Java's serialization
|
||||
# adapter is smarter and can determine the exact object by reading the binary
|
||||
# data. This adapter doesn't need this _yet_, so it has not been implemented.
|
||||
ClassAdapter = 'com.vividsolutions.jts.geom.Geometry'


def serialize(context, coordinate):
    """Serialization from python is not implemented."""
    raise dynamicserialize.SerializationException('Not implemented yet')


def deserialize(context):
    """Wrap the raw geometry bytes in a placeholder Geometry object.

    The bytes are not parsed into useful geometry data; the caller gets
    the untouched payload via the Geometry's binary data field.
    """
    geom = Geometry()
    geom.setBinaryData(context.readBinary())
    return geom
|
25
dynamicserialize/adapters/GregorianCalendarAdapter.py
Normal file
25
dynamicserialize/adapters/GregorianCalendarAdapter.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
#
|
||||
# Adapter for java.util.Calendar
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 09/29/10 wldougher Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.util import GregorianCalendar
|
||||
|
||||
ClassAdapter = 'java.util.GregorianCalendar'


def serialize(context, calendar):
    """Write the calendar as its epoch time in milliseconds."""
    context.writeI64(calendar.getTimeInMillis())


def deserialize(context):
    """Rebuild a GregorianCalendar from epoch milliseconds."""
    cal = GregorianCalendar()
    cal.setTimeInMillis(context.readI64())
    return cal
|
26
dynamicserialize/adapters/GridDataHistoryAdapter.py
Normal file
26
dynamicserialize/adapters/GridDataHistoryAdapter.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.dataplugin.gfe.GridDataHistory
|
||||
#
|
||||
# TODO: REWRITE THIS ADAPTER when serialization/deserialization of this
|
||||
# class has been finalized.
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 03/29/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe import GridDataHistory
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.GridDataHistory'


def serialize(context, history):
    """Write the history as its coded string form."""
    context.writeString(history.getCodedString())


def deserialize(context):
    """Rebuild a GridDataHistory from its coded string form."""
    return GridDataHistory(context.readString())
|
30
dynamicserialize/adapters/JTSEnvelopeAdapter.py
Normal file
30
dynamicserialize/adapters/JTSEnvelopeAdapter.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
#
|
||||
# Adapter for com.vividsolutions.jts.geom.Envelope
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 05/29/13 2023 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope
|
||||
|
||||
ClassAdapter = 'com.vividsolutions.jts.geom.Envelope'


def serialize(context, envelope):
    """Write the envelope as four doubles: minX, maxX, minY, maxY."""
    context.writeDouble(envelope.getMinX())
    context.writeDouble(envelope.getMaxX())
    context.writeDouble(envelope.getMinY())
    context.writeDouble(envelope.getMaxY())


def deserialize(context):
    """Read minX, maxX, minY, maxY and rebuild an Envelope."""
    env = Envelope()
    env.setMinX(context.readDouble())
    env.setMaxX(context.readDouble())
    env.setMinY(context.readDouble())
    env.setMaxY(context.readDouble())
    return env
|
|
@ -0,0 +1,31 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.localization.LocalizationContext$LocalizationLevel
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/11/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.localization import LocalizationLevel
|
||||
|
||||
ClassAdapter = [
    'com.raytheon.uf.common.localization.LocalizationContext$LocalizationLevel',
    'com.raytheon.uf.common.localization.LocalizationLevel'
]


def serialize(context, level):
    """Write the level's text, numeric order, and system-level flag."""
    context.writeString(level.getText())
    context.writeI32(level.getOrder())
    context.writeBool(level.isSystemLevel())


def deserialize(context):
    """Read text, order, and system flag; rebuild a LocalizationLevel."""
    text = context.readString()
    order = context.readI32()
    isSystem = context.readBool()
    return LocalizationLevel(text, order, systemLevel=isSystem)
|
|
@ -0,0 +1,26 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.localization.LocalizationContext$LocalizationType
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 01/11/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.localization import LocalizationType
|
||||
|
||||
ClassAdapter = [
    'com.raytheon.uf.common.localization.LocalizationContext$LocalizationType',
    'com.raytheon.uf.common.localization.LocalizationType'
]


def serialize(context, ltype):
    """Write the localization type as its text form."""
    context.writeString(ltype.getText())


def deserialize(context):
    """Rebuild a LocalizationType from its text form."""
    return LocalizationType(context.readString())
|
23
dynamicserialize/adapters/ParmIDAdapter.py
Normal file
23
dynamicserialize/adapters/ParmIDAdapter.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 03/29/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID'
|
||||
|
||||
|
||||
def serialize(context, parmId):
|
||||
context.writeString(str(parmId))
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
result = ParmID(context.readString())
|
||||
return result
|
28
dynamicserialize/adapters/PointAdapter.py
Normal file
28
dynamicserialize/adapters/PointAdapter.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
#
|
||||
# Adapter for java.awt.Point
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/31/10 njensen Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.awt import Point
|
||||
|
||||
ClassAdapter = 'java.awt.Point'
|
||||
|
||||
|
||||
def serialize(context, point):
|
||||
context.writeI32(point.getX())
|
||||
context.writeI32(point.getY())
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
x = context.readI32()
|
||||
y = context.readI32()
|
||||
point = Point()
|
||||
point.setX(x)
|
||||
point.setY(y)
|
||||
return point
|
28
dynamicserialize/adapters/StackTraceElementAdapter.py
Normal file
28
dynamicserialize/adapters/StackTraceElementAdapter.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
#
|
||||
# Adapter for java.lang.StackTraceElement[]
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 09/21/10 njensen Initial Creation.
|
||||
#
|
||||
|
||||
import dynamicserialize
|
||||
from dynamicserialize.dstypes.java.lang import StackTraceElement
|
||||
|
||||
ClassAdapter = 'java.lang.StackTraceElement'
|
||||
|
||||
|
||||
def serialize(context, obj):
|
||||
raise dynamicserialize.SerializationException('Not implemented yet')
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
result = StackTraceElement()
|
||||
result.setDeclaringClass(context.readString())
|
||||
result.setMethodName(context.readString())
|
||||
result.setFileName(context.readString())
|
||||
result.setLineNumber(context.readI32())
|
||||
return result
|
25
dynamicserialize/adapters/TimeConstraintsAdapter.py
Normal file
25
dynamicserialize/adapters/TimeConstraintsAdapter.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 03/20/13 #1774 randerso Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import TimeConstraints
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints'
|
||||
|
||||
|
||||
def serialize(context, timeConstraints):
|
||||
context.writeI32(timeConstraints.getDuration())
|
||||
context.writeI32(timeConstraints.getRepeatInterval())
|
||||
context.writeI32(timeConstraints.getStartTime())
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
result = TimeConstraints(context.readI32(), context.readI32(), context.readI32())
|
||||
return result
|
40
dynamicserialize/adapters/TimeRangeTypeAdapter.py
Normal file
40
dynamicserialize/adapters/TimeRangeTypeAdapter.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.message.WsId
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 09/16/10 dgilling Initial Creation.
|
||||
# 01/22/14 2667 bclement use method to get millis from time range
|
||||
# 02/28/14 2667 bclement deserialize now converts millis to micros
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.time.TimeRange'
|
||||
|
||||
MICROS_IN_MILLISECOND = 1000
|
||||
MILLIS_IN_SECOND = 1000
|
||||
|
||||
|
||||
def serialize(context, timeRange):
|
||||
context.writeI64(timeRange.getStartInMillis())
|
||||
context.writeI64(timeRange.getEndInMillis())
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
startTime = context.readI64()
|
||||
endTime = context.readI64()
|
||||
|
||||
timeRange = TimeRange()
|
||||
# java uses milliseconds, python uses microseconds
|
||||
startSeconds = startTime // MILLIS_IN_SECOND
|
||||
endSeconds = endTime // MILLIS_IN_SECOND
|
||||
startExtraMicros = (startTime % MILLIS_IN_SECOND) * MICROS_IN_MILLISECOND
|
||||
endExtraMicros = (endTime % MILLIS_IN_SECOND) * MICROS_IN_MILLISECOND
|
||||
timeRange.setStart(startSeconds, startExtraMicros)
|
||||
timeRange.setEnd(endSeconds, endExtraMicros)
|
||||
|
||||
return timeRange
|
23
dynamicserialize/adapters/TimestampAdapter.py
Normal file
23
dynamicserialize/adapters/TimestampAdapter.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
#
|
||||
# Adapter for java.sql.Timestamp
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 06/30/11 dgilling Initial Creation.
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.java.sql import Timestamp
|
||||
|
||||
ClassAdapter = 'java.sql.Timestamp'
|
||||
|
||||
|
||||
def serialize(context, timestamp):
|
||||
context.writeI64(timestamp.getTime())
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
result = Timestamp(context.readI64())
|
||||
return result
|
32
dynamicserialize/adapters/WsIdAdapter.py
Normal file
32
dynamicserialize/adapters/WsIdAdapter.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
#
|
||||
# Adapter for com.raytheon.uf.common.message.WsId
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------- -------- --------- ---------------------------------------------
|
||||
# Sep 16, 2010 dgilling Initial Creation.
|
||||
# Apr 25, 2012 545 randerso Repurposed the lockKey field as threadId
|
||||
# Feb 06, 2017 5959 randerso Removed Java .toString() calls
|
||||
#
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId
|
||||
|
||||
ClassAdapter = 'com.raytheon.uf.common.message.WsId'
|
||||
|
||||
|
||||
def serialize(context, wsId):
|
||||
context.writeString(str(wsId))
|
||||
|
||||
|
||||
def deserialize(context):
|
||||
wsIdString = context.readString()
|
||||
wsIdParts = wsIdString.split(":", 5)
|
||||
wsId = WsId()
|
||||
wsId.setNetworkId(wsIdParts[0])
|
||||
wsId.setUserName(wsIdParts[1])
|
||||
wsId.setProgName(wsIdParts[2])
|
||||
wsId.setPid(wsIdParts[3])
|
||||
wsId.setThreadId(int(wsIdParts[4]))
|
||||
return wsId
|
99
dynamicserialize/adapters/__init__.py
Normal file
99
dynamicserialize/adapters/__init__.py
Normal file
|
@ -0,0 +1,99 @@
|
|||
#
|
||||
# __init__.py for Dynamic Serialize adapters.
|
||||
#
|
||||
# Plugins can contribute to dynamicserialize.adapters by either including their
|
||||
# classes directly in pythonPackages/dynamicserialize/adapters/ within their
|
||||
# plugin. The plugin's adapter will automatically be added to __all__ at runtime
|
||||
# and registered.
|
||||
# Plugins should not include a custom __init__.py in
|
||||
# pythonPackages/dynamicserialize/adapters/ because it will overwrite this file.
|
||||
# If custom package initialization is needed, a subpackage should be created
|
||||
# with an __init__.py that includes the following:
|
||||
#
|
||||
# __all__ = ['CustomAdapter1', 'CustomAdapter2']
|
||||
# from dynamicserialize.adapters import registerAdapters
|
||||
# registerAdapters(__name__, __all__)
|
||||
#
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 08/31/10 njensen Initial Creation.
|
||||
# 03/20/13 #1774 randerso Added TimeConstraintsAdapter
|
||||
# 04/22/13 #1949 rjpeter Added LockTableAdapter
|
||||
# 02/06/14 #2672 bsteffen Added JTSEnvelopeAdapter
|
||||
# 09/21/2015 #4486 rjpeter Added FormattedDateAdapter
|
||||
# 06/23/2016 #5696 rjpeter Added CommutativeTimestampAdapter
|
||||
# 10/17/2016 #5919 njensen Added GeomDataRespAdapter
|
||||
# 01/09/2017 #5997 nabowle Allow contribution from plugins.
|
||||
#
|
||||
|
||||
__all__ = [
|
||||
'PointAdapter',
|
||||
'StackTraceElementAdapter',
|
||||
'WsIdAdapter',
|
||||
'CalendarAdapter',
|
||||
'GregorianCalendarAdapter',
|
||||
'DateAdapter',
|
||||
'FormattedDateAdapter',
|
||||
'LocalizationLevelSerializationAdapter',
|
||||
'LocalizationTypeSerializationAdapter',
|
||||
'GeometryTypeAdapter',
|
||||
'CoordAdapter',
|
||||
'TimeRangeTypeAdapter',
|
||||
'ParmIDAdapter',
|
||||
'DatabaseIDAdapter',
|
||||
'TimestampAdapter',
|
||||
'CommutativeTimestampAdapter',
|
||||
'EnumSetAdapter',
|
||||
'FloatBufferAdapter',
|
||||
'ByteBufferAdapter',
|
||||
'TimeConstraintsAdapter',
|
||||
'JTSEnvelopeAdapter'
|
||||
]
|
||||
|
||||
classAdapterRegistry = {}
|
||||
|
||||
|
||||
def getAdapterRegistry():
|
||||
import pkgutil
|
||||
|
||||
discoveredPackages = []
|
||||
# allow other plugins to contribute to adapters by dropping their adapter or
|
||||
# package into the dynamicserialize.adapters package
|
||||
for _, modname, ispkg in pkgutil.iter_modules(__path__):
|
||||
if ispkg:
|
||||
discoveredPackages.append(modname)
|
||||
else:
|
||||
if modname not in __all__:
|
||||
__all__.append(modname)
|
||||
|
||||
registerAdapters(__name__, __all__)
|
||||
|
||||
for pkg in discoveredPackages:
|
||||
__import__(__name__ + '.' + pkg)
|
||||
|
||||
|
||||
def registerAdapters(package, modules):
|
||||
import sys
|
||||
if not package.endswith('.'):
|
||||
package += '.'
|
||||
for x in modules:
|
||||
# TODO: use importlib
|
||||
exec('import ' + package + x)
|
||||
m = sys.modules[package + x]
|
||||
d = m.__dict__
|
||||
if 'ClassAdapter' in d:
|
||||
if isinstance(m.ClassAdapter, list):
|
||||
for clz in m.ClassAdapter:
|
||||
classAdapterRegistry[clz] = m
|
||||
else:
|
||||
clzName = m.ClassAdapter
|
||||
classAdapterRegistry[clzName] = m
|
||||
else:
|
||||
raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' +
|
||||
'and cannot be registered.')
|
||||
|
||||
|
||||
getAdapterRegistry()
|
6
dynamicserialize/dstypes/__init__.py
Normal file
6
dynamicserialize/dstypes/__init__.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
|
||||
__all__ = [
|
||||
'com',
|
||||
'gov',
|
||||
'java'
|
||||
]
|
5
dynamicserialize/dstypes/com/__init__.py
Normal file
5
dynamicserialize/dstypes/com/__init__.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
|
||||
__all__ = [
|
||||
'raytheon',
|
||||
'vividsolutions'
|
||||
]
|
4
dynamicserialize/dstypes/com/raytheon/__init__.py
Normal file
4
dynamicserialize/dstypes/com/raytheon/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
|
||||
__all__ = [
|
||||
'uf'
|
||||
]
|
4
dynamicserialize/dstypes/com/raytheon/uf/__init__.py
Normal file
4
dynamicserialize/dstypes/com/raytheon/uf/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
|
||||
__all__ = [
|
||||
'common'
|
||||
]
|
17
dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py
Normal file
17
dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
|
||||
__all__ = [
|
||||
'alertviz',
|
||||
'auth',
|
||||
'dataaccess',
|
||||
'dataplugin',
|
||||
'dataquery',
|
||||
'datastorage',
|
||||
'localization',
|
||||
'management',
|
||||
'message',
|
||||
'pointdata',
|
||||
'pypies',
|
||||
'serialization',
|
||||
'site',
|
||||
'time'
|
||||
]
|
63
dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py
Executable file
63
dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py
Executable file
|
@ -0,0 +1,63 @@
|
|||
# Jul 27, 2015 4654 skorolev Added filters
|
||||
|
||||
|
||||
class AlertVizRequest(object):
|
||||
|
||||
def __init__(self):
|
||||
self.message = None
|
||||
self.machine = None
|
||||
self.priority = None
|
||||
self.sourceKey = None
|
||||
self.category = None
|
||||
self.audioFile = None
|
||||
self.filters = None
|
||||
|
||||
def getMessage(self):
|
||||
return self.message
|
||||
|
||||
def setMessage(self, message):
|
||||
self.message = message
|
||||
|
||||
def getMachine(self):
|
||||
return self.machine
|
||||
|
||||
def setMachine(self, machine):
|
||||
self.machine = machine
|
||||
|
||||
def getPriority(self):
|
||||
return self.priority
|
||||
|
||||
def setPriority(self, priority):
|
||||
self.priority = priority
|
||||
|
||||
def getSourceKey(self):
|
||||
return self.sourceKey
|
||||
|
||||
def setSourceKey(self, sourceKey):
|
||||
self.sourceKey = sourceKey
|
||||
|
||||
def getCategory(self):
|
||||
return self.category
|
||||
|
||||
def setCategory(self, category):
|
||||
self.category = category
|
||||
|
||||
def getAudioFile(self):
|
||||
return self.audioFile
|
||||
|
||||
def setAudioFile(self, audioFile):
|
||||
self.audioFile = audioFile
|
||||
|
||||
def getFilters(self):
|
||||
return self.filters
|
||||
|
||||
def setFilters(self, filters):
|
||||
if filters is None:
|
||||
self.filters = {}
|
||||
elif not(None in filters
|
||||
or filters.values().count(None) > 0
|
||||
or '' in filters
|
||||
or filters.values().count('') > 0):
|
||||
self.filters = filters
|
||||
else:
|
||||
raise ValueError('Filters must not contain None or empty keys or values: %s' % filters)
|
|
@ -0,0 +1,7 @@
|
|||
|
||||
__all__ = [
|
||||
'AlertVizRequest'
|
||||
]
|
||||
|
||||
from .AlertVizRequest import AlertVizRequest
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
__all__ = [
|
||||
'resp',
|
||||
'user'
|
||||
]
|
|
@ -0,0 +1,14 @@
|
|||
from six import with_metaclass
|
||||
import abc
|
||||
|
||||
|
||||
class AbstractFailedResponse(with_metaclass(abc.ABCMeta, object)):
|
||||
@abc.abstractmethod
|
||||
def __init__(self):
|
||||
self.request = None
|
||||
|
||||
def getRequest(self):
|
||||
return self.request
|
||||
|
||||
def setRequest(self, request):
|
||||
self.request = request
|
|
@ -0,0 +1,10 @@
|
|||
# nothing to implement here that isn't already covered by ServerErrorResponse
|
||||
# Just need the separate class for de-serialization.
|
||||
|
||||
from dynamicserialize.dstypes.com.raytheon.uf.common.serialization.comm.response import ServerErrorResponse
|
||||
|
||||
|
||||
class AuthServerErrorResponse(ServerErrorResponse):
|
||||
|
||||
def __init__(self):
|
||||
super(AuthServerErrorResponse, self).__init__()
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue