mirror of https://github.com/Unidata/python-awips.git
synced 2025-02-23 14:57:56 -05:00

merge master_17.1.1p2 source code changes into master-python2

parent 940ec627f8
commit e8e27fe0b7

40 changed files with 1868 additions and 90 deletions
@@ -1,34 +1,17 @@
 ##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name: Raytheon Company
-# Contractor Address: 6825 Pine Street, Suite 340
-# Mail Stop B8
-# Omaha, NE 68106
-# 402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
 ##

 from string import Template

 import ctypes
-import stomp
+from . import stomp
 import socket
 import sys
 import time
 import threading
 import xml.etree.ElementTree as ET

-import ThriftClient
+from . import ThriftClient
 from dynamicserialize.dstypes.com.raytheon.uf.common.alertviz import AlertVizRequest
 from dynamicserialize import DynamicSerializationManager

@@ -92,8 +75,8 @@ class NotificationMessage:
             priorityInt = int(5)

         if (priorityInt < 0 or priorityInt > 5):
-            print "Error occurred, supplied an invalid Priority value: " + str(priorityInt)
-            print "Priority values are 0, 1, 2, 3, 4 and 5."
+            print("Error occurred, supplied an invalid Priority value: " + str(priorityInt))
+            print("Priority values are 0, 1, 2, 3, 4 and 5.")
             sys.exit(1)

         if priorityInt is not None:
@@ -103,8 +86,8 @@ class NotificationMessage:

     def connection_timeout(self, connection):
         if (connection is not None and not connection.is_connected()):
-            print "Connection Retry Timeout"
-            for tid, tobj in threading._active.items():
+            print("Connection Retry Timeout")
+            for tid, tobj in list(threading._active.items()):
                 if tobj.name is "MainThread":
                     res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(SystemExit))
                     if res != 0 and res != 1:
@@ -155,14 +138,14 @@ class NotificationMessage:
         serverResponse = None
         try:
             serverResponse = thriftClient.sendRequest(alertVizRequest)
-        except Exception, ex:
-            print "Caught exception submitting AlertVizRequest: ", str(ex)
+        except Exception as ex:
+            print("Caught exception submitting AlertVizRequest: ", str(ex))

         if (serverResponse != "None"):
-            print "Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse
+            print("Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse)
             sys.exit(1)
         else:
-            print "Response: " + str(serverResponse)
+            print("Response: " + str(serverResponse))

 def createRequest(message, priority, source, category, audioFile, filters):
     obj = AlertVizRequest()
@@ -29,6 +29,7 @@
 # ------------ ---------- ----------- --------------------------
 # 11/17/10 njensen Initial Creation.
 # 08/15/13 2169 bkowal Optionally gzip decompress any data that is read.
+# 08/04/16 2416 tgurney Add queueStarted property
 #
 #
@@ -49,6 +50,7 @@ class QpidSubscriber:
         self.__connection.start()
         self.__session = self.__connection.session(str(qpid.datatypes.uuid4()))
         self.subscribed = True
+        self.__queueStarted = False

     def topicSubscribe(self, topicName, callback):
         # if the queue is edex.alerts, set decompress to true always for now to
@@ -68,6 +70,7 @@ class QpidSubscriber:
         self.__session.message_subscribe(serverQueueName, destination=local_queue_name)
         queue.start()
         print "Connection complete to broker on", self.host
+        self.__queueStarted = True

         while self.subscribed:
             try:
@@ -80,7 +83,7 @@ class QpidSubscriber:
                 # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk
                 d = zlib.decompressobj(16+zlib.MAX_WBITS)
                 content = d.decompress(content)
-            except:
+            except Exception:
                 # decompression failed, return the original content
                 pass
             callback(content)
@@ -90,8 +93,13 @@ class QpidSubscriber:
             self.close()

     def close(self):
+        self.__queueStarted = False
         self.subscribed = False
         try:
             self.__session.close(timeout=10)
-        except:
+        except Exception:
             pass

+    @property
+    def queueStarted(self):
+        return self.__queueStarted
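The new queueStarted flag gives callers a way to tell when the blocking topicSubscribe() call has actually attached its local queue to the broker. The sketch below is illustrative only: the broker host name is hypothetical, and the keyword arguments to QpidSubscriber are assumed from the QpidSubscriber(host, port, decompress=True) call made by PyNotification later in this commit. DataQueue.start(), also added in this commit, waits on the same flag.

import threading
import time

from awips.QpidSubscriber import QpidSubscriber

def print_message(msg):
    # placeholder consumer for raw messages from the edex.alerts topic
    print(msg)

# 'edex-broker' and port 5672 are assumptions, not values from this commit
subscriber = QpidSubscriber(host='edex-broker', port=5672, decompress=True)

# topicSubscribe() blocks, so run it on its own thread...
thread = threading.Thread(target=subscriber.topicSubscribe,
                          args=('edex.alerts', print_message))
thread.daemon = True
thread.start()

# ...and use the new property to wait until the subscription is really live.
while not subscriber.queueStarted:
    time.sleep(0.1)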
awips/dataaccess/DataNotificationLayer.py (new file, 157 lines)

@@ -0,0 +1,157 @@
# #
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
# #

#
# Published interface for retrieving data updates via awips.dataaccess package
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# May 26, 2016 2416 rjpeter Initial Creation.
# Aug 1, 2016 2416 tgurney Finish implementation
#
#

"""
Interface for the DAF's data notification feature, which allows continuous
retrieval of new data as it is coming into the system.

There are two ways to access this feature:

1. The DataQueue module (awips.dataaccess.DataQueue) offers a collection that
automatically fills up with new data as it receives notifications. See that
module for more information.

2. Depending on the type of data you want, use either getGridDataUpdates() or
getGeometryDataUpdates() in this module. Either one will give you back an
object that will retrieve new data for you and will call a function you specify
each time new data is received.

Example code follows. This example prints temperature as observed from KOMA
each time a METAR is received from there.

  from awips.dataaccess import DataAccessLayer as DAL
  from awips.dataaccess import DataNotificationLayer as DNL

  def process_obs(list_of_data):
      for item in list_of_data:
          print(item.getNumber('temperature'))

  request = DAL.newDataRequest('obs')
  request.setParameters('temperature')
  request.setLocationNames('KOMA')

  notifier = DNL.getGeometryDataUpdates(request)
  notifier.subscribe(process_obs)
  # process_obs will be called with a list of data each time new data comes in

"""

import re
import sys
import subprocess

from awips.dataaccess.PyGeometryNotification import PyGeometryNotification
from awips.dataaccess.PyGridNotification import PyGridNotification


THRIFT_HOST = subprocess.check_output(
    "source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST",
    shell=True).strip()

USING_NATIVE_THRIFT = False

JMS_HOST_PATTERN = re.compile('tcp://([^:]+):([0-9]+)')

if sys.modules.has_key('jep'):
    # intentionally do not catch if this fails to import, we want it to
    # be obvious that something is configured wrong when running from within
    # Java instead of allowing false confidence and fallback behavior
    import JepRouter
    router = JepRouter
else:
    from awips.dataaccess import ThriftClientRouter
    router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
    USING_NATIVE_THRIFT = True


def _getJmsConnectionInfo(notifFilterResponse):
    serverString = notifFilterResponse.getJmsConnectionInfo()
    try:
        host, port = JMS_HOST_PATTERN.match(serverString).groups()
    except AttributeError as e:
        raise RuntimeError('Got bad JMS connection info from server: ' + serverString)
    return {'host': host, 'port': port}


def getGridDataUpdates(request):
    """
    Get a notification object that receives updates to grid data.

    Args:
        request: the IDataRequest specifying the data you want to receive

    Returns:
        an update request object that you can listen for updates to by
        calling its subscribe() method
    """
    response = router.getNotificationFilter(request)
    filter = response.getNotificationFilter()
    jmsInfo = _getJmsConnectionInfo(response)
    notifier = PyGridNotification(request, filter, requestHost=THRIFT_HOST, **jmsInfo)
    return notifier


def getGeometryDataUpdates(request):
    """
    Get a notification object that receives updates to geometry data.

    Args:
        request: the IDataRequest specifying the data you want to receive

    Returns:
        an update request object that you can listen for updates to by
        calling its subscribe() method
    """
    response = router.getNotificationFilter(request)
    filter = response.getNotificationFilter()
    jmsInfo = _getJmsConnectionInfo(response)
    notifier = PyGeometryNotification(request, filter, requestHost=THRIFT_HOST, **jmsInfo)
    return notifier


def changeEDEXHost(newHostName):
    """
    Changes the EDEX host the Data Access Framework is communicating with. Only
    works if using the native Python client implementation, otherwise, this
    method will throw a TypeError.

    Args:
        newHostName: the EDEX host to connect to
    """
    if USING_NATIVE_THRIFT:
        global THRIFT_HOST
        THRIFT_HOST = newHostName
        global router
        router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
    else:
        raise TypeError("Cannot call changeEDEXHost when using JepRouter.")
awips/dataaccess/DataQueue.py (new file, 213 lines)

@@ -0,0 +1,213 @@
# #
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
# #

#
# Convenience class for using the DAF's notifications feature. This is a
# collection that, once connected to EDEX by calling start(), fills with
# data as notifications come in. Runs on a separate thread to allow
# non-blocking data retrieval.
#
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 07/29/16 2416 tgurney Initial creation
#

from awips.dataaccess import DataNotificationLayer as DNL

import time
from threading import Thread
import sys

if sys.version_info.major == 2:
    from Queue import Queue, Empty
else:  # Python 3 module renamed to 'queue'
    from queue import Queue, Empty


"""Used to indicate a DataQueue that will produce geometry data."""
GEOMETRY = object()

"""Used to indicate a DataQueue that will produce grid data."""
GRID = object()

"""Default maximum queue size."""
_DEFAULT_MAXSIZE = 100


class Closed(Exception):
    """Raised when attempting to get data from a closed queue."""
    pass


class DataQueue(object):

    """
    Convenience class for using the DAF's notifications feature. This is a
    collection that, once connected to EDEX by calling start(), fills with
    data as notifications come in.

    Example for getting obs data:

      from DataQueue import DataQueue, GEOMETRY
      request = DataAccessLayer.newDataRequest('obs')
      request.setParameters('temperature')
      request.setLocationNames('KOMA')
      q = DataQueue(GEOMETRY, request)
      q.start()
      for item in q:
          print(item.getNumber('temperature'))
    """

    def __init__(self, dtype, request, maxsize=_DEFAULT_MAXSIZE):
        """
        Create a new DataQueue.

        Args:
            dtype: Either GRID or GEOMETRY; must match the type of data
              requested.
            request: IDataRequest describing the data you want. It must at
              least have datatype set. All data produced will satisfy the
              constraints you specify.
            maxsize: Maximum number of data objects the queue can hold at
              one time. If the limit is reached, any data coming in after
              that will not appear until one or more items are removed using
              DataQueue.get().
        """
        assert maxsize > 0
        assert dtype in (GEOMETRY, GRID)
        self._maxsize = maxsize
        self._queue = Queue(maxsize=maxsize)
        self._thread = None
        if dtype is GEOMETRY:
            self._notifier = DNL.getGeometryDataUpdates(request)
        elif dtype is GRID:
            self._notifier = DNL.getGridDataUpdates(request)

    def start(self):
        """Start listening for notifications and requesting data."""
        if self._thread is not None:
            # Already started
            return
        kwargs = {'callback': self._data_received}
        self._thread = Thread(target=self._notifier.subscribe, kwargs=kwargs)
        self._thread.daemon = True
        self._thread.start()
        timer = 0
        while not self._notifier.subscribed:
            time.sleep(0.1)
            timer += 1
            if timer >= 100:  # ten seconds
                raise RuntimeError('timed out when attempting to subscribe')

    def _data_received(self, data):
        for d in data:
            if not isinstance(d, list):
                d = [d]
            for item in d:
                self._queue.put(item)

    def get(self, block=True, timeout=None):
        """
        Get and return the next available data object. By default, if there is
        no data yet available, this method will not return until data becomes
        available.

        Args:
            block: Specifies behavior when the queue is empty. If True, wait
              until an item is available before returning (the default). If
              False, return None immediately if the queue is empty.
            timeout: If block is True, wait this many seconds, and return None
              if data is not received in that time.
        Returns:
            IData
        """
        if self.closed:
            raise Closed
        try:
            return self._queue.get(block, timeout)
        except Empty:
            return None

    def get_all(self):
        """
        Get all data waiting for processing, in a single list. Always returns
        immediately. Returns an empty list if no data has arrived yet.

        Returns:
            List of IData
        """
        data = []
        for _ in range(self._maxsize):
            next_item = self.get(False)
            if next_item is None:
                break
            data.append(next_item)
        return data

    def close(self):
        """Close the queue. May not be re-opened after closing."""
        if not self.closed:
            self._notifier.close()
            self._thread.join()

    def qsize(self):
        """Return number of items in the queue."""
        return self._queue.qsize()

    def empty(self):
        """Return True if the queue is empty."""
        return self._queue.empty()

    def full(self):
        """Return True if the queue is full."""
        return self._queue.full()

    @property
    def closed(self):
        """True if the queue has been closed."""
        return not self._notifier.subscribed

    @property
    def maxsize(self):
        """
        Maximum number of data objects the queue can hold at one time.
        If this limit is reached, any data coming in after that will not appear
        until one or more items are removed using get().
        """
        return self._maxsize

    def __iter__(self):
        if self._thread is not None:
            while not self.closed:
                yield self.get()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *unused):
        self.close()
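Because DataQueue defines __enter__ and __exit__, it can also be driven as a context manager. The following is a brief sketch rather than part of the commit; it reuses the 'obs'/'temperature'/KOMA request from the docstring example above, and the 60-second timeout is an arbitrary illustration.

from awips.dataaccess import DataAccessLayer as DAL
from awips.dataaccess.DataQueue import DataQueue, GEOMETRY

request = DAL.newDataRequest('obs')
request.setParameters('temperature')
request.setLocationNames('KOMA')

# __enter__() calls start() and __exit__() calls close(), so the notifier
# thread is shut down even if the body raises.
with DataQueue(GEOMETRY, request, maxsize=50) as q:
    first = q.get(timeout=60)   # block up to 60 seconds for the first report
    if first is not None:
        print(first.getNumber('temperature'))
    backlog = q.get_all()       # drain anything else queued, without blocking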
awips/dataaccess/PyGeometryNotification.py (new file, 38 lines)

@@ -0,0 +1,38 @@
# #
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
# #

#
# Notification object that produces geometry data
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 07/22/16 2416 tgurney Initial creation
#

from awips.dataaccess.PyNotification import PyNotification
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint


class PyGeometryNotification(PyNotification):

    def getData(self, request, dataTimes):
        return self.DAL.getGeometryData(request, dataTimes)
awips/dataaccess/PyGridNotification.py (new file, 38 lines)

@@ -0,0 +1,38 @@
# #
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
# #

#
# Notification object that produces grid data
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/03/16 2416 rjpeter Initial Creation.
#

from awips.dataaccess.PyNotification import PyNotification
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint


class PyGridNotification(PyNotification):

    def getData(self, request, dataTimes):
        return self.DAL.getGridData(request, dataTimes)
awips/dataaccess/PyNotification.py (new file, 132 lines)

@@ -0,0 +1,132 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

#
# Implements IData for use by native Python clients to the Data Access
# Framework.
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# Jun 22, 2016 2416 rjpeter Initial creation
# Jul 22, 2016 2416 tgurney Finish implementation
#


import abc
import time
import traceback

import dynamicserialize
from awips.dataaccess import DataAccessLayer
from awips.dataaccess import INotificationSubscriber
from awips.QpidSubscriber import QpidSubscriber
from awips.ThriftClient import ThriftRequestException
from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime


class PyNotification(INotificationSubscriber):
    """
    Receives notifications for new data and retrieves the data that meets
    specified filtering criteria.
    """

    __metaclass__ = abc.ABCMeta

    def __init__(self, request, filter, host='localhost', port=5672, requestHost='localhost'):
        self.DAL = DataAccessLayer
        self.DAL.changeEDEXHost(requestHost)
        self.__request = request
        self.__notificationFilter = filter
        self.__topicSubscriber = QpidSubscriber(host, port, decompress=True)
        self.__topicName = "edex.alerts"
        self.__callback = None

    def subscribe(self, callback):
        """
        Start listening for notifications.

        Args:
            callback: Function to call with a list of received data objects.
              Will be called once for each request made for data.
        """
        assert hasattr(callback, '__call__'), 'callback arg must be callable'
        self.__callback = callback
        self.__topicSubscriber.topicSubscribe(self.__topicName, self._messageReceived)
        # Blocks here

    def close(self):
        if self.__topicSubscriber.subscribed:
            self.__topicSubscriber.close()

    def _getDataTime(self, dataURI):
        dataTimeStr = dataURI.split('/')[2]
        return DataTime(dataTimeStr)

    def _messageReceived(self, msg):
        dataUriMsg = dynamicserialize.deserialize(msg)
        dataUris = dataUriMsg.getDataURIs()
        dataTimes = [
            self._getDataTime(dataUri)
            for dataUri in dataUris
            if self.__notificationFilter.accept(dataUri)
        ]
        if dataTimes:
            secondTry = False
            while True:
                try:
                    data = self.getData(self.__request, dataTimes)
                    break
                except ThriftRequestException:
                    if secondTry:
                        try:
                            self.close()
                        except Exception:
                            pass
                        raise
                    else:
                        secondTry = True
                        time.sleep(5)
            try:
                self.__callback(data)
            except Exception as e:
                # don't want callback to blow up the notifier itself.
                traceback.print_exc()
        # TODO: This utterly fails for derived requests

    @abc.abstractmethod
    def getData(self, request, dataTimes):
        """
        Retrieve and return data

        Args:
            request: IDataRequest to send to the server
            dataTimes: list of data times
        Returns:
            list of IData
        """
        pass

    @property
    def subscribed(self):
        """True if currently subscribed to notifications."""
        return self.__topicSubscriber.queueStarted
@@ -38,6 +38,7 @@
 # 06/01/16 5587 tgurney Add new signatures for
 # getRequiredIdentifiers() and
 # getOptionalIdentifiers()
+# 08/01/16 2416 tgurney Add getNotificationFilter()
 # 11/10/16 5900 bsteffen Correct grid shape
 #
@@ -56,6 +57,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import G
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetOptionalIdentifiersRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetIdentifierValuesRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetSupportedDatatypesRequest
+from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetNotificationFilterRequest

 from awips import ThriftClient
 from awips.dataaccess import PyGeometryData
@@ -193,3 +195,9 @@ class ThriftClientRouter(object):
     def getSupportedDatatypes(self):
         response = self._client.sendRequest(GetSupportedDatatypesRequest())
         return response
+
+    def getNotificationFilter(self, request):
+        notifReq = GetNotificationFilterRequest()
+        notifReq.setRequestParameters(request)
+        response = self._client.sendRequest(notifReq)
+        return response
@@ -33,6 +33,8 @@
 # Apr 09, 2013 1871 njensen Add doc strings
 # Jun 03, 2013 2023 dgilling Add getAttributes to IData, add
 # getLatLonGrids() to IGridData.
+# Aug 01, 2016 2416 tgurney Add INotificationSubscriber
+# and INotificationFilter
 #
 #
@@ -351,3 +353,37 @@ class IGeometryData(IData):
         """
         return
+
+
+class INotificationSubscriber(object):
+    """
+    An INotificationSubscriber representing a notification filter returned from
+    the DataNotificationLayer.
+    """
+    __metaclass__ = abc.ABCMeta
+
+    @abc.abstractmethod
+    def subscribe(self, callback):
+        """
+        Subscribes to the requested data. Method will not return until close is
+        called in a separate thread.
+
+        Args:
+            callback: the method to call with the IGridData/IGeometryData
+
+        """
+        pass
+
+    @abc.abstractmethod
+    def close(self):
+        """Closes the notification subscriber"""
+        pass
+
+class INotificationFilter(object):
+    """
+    Represents data required to filter a set of URIs and
+    return a corresponding list of IDataRequest to retrieve data for.
+    """
+    __metaclass__ = abc.ABCMeta
+    @abc.abstractmethod
+    def accept(dataUri):
+        pass
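The two interfaces above only fix the contract; the concrete filter used at runtime is built server side and returned by the GetNotificationFilterRequest added to ThriftClientRouter earlier in this commit. Purely as an illustrative sketch (this class is hypothetical and not part of the commit), a client-side INotificationFilter that accepts dataURIs for a single plugin might look like:

from awips.dataaccess import INotificationFilter


class PluginNameFilter(INotificationFilter):
    """Hypothetical filter: accept only dataURIs belonging to one plugin."""

    def __init__(self, pluginName):
        self.pluginName = pluginName

    def accept(self, dataUri):
        # dataURIs have the form '/<pluginName>/<dataTime>/...'; PyNotification
        # (above) relies on the same layout when it takes field 2 as the time.
        return dataUri.split('/')[1] == self.pluginName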
@@ -45,6 +45,8 @@ import unittest
 # return the retrieved data
 # 06/10/16 5548 tgurney Make testDatatypeIsSupported
 # case-insensitive
+# 08/10/16 2416 tgurney Don't test identifier values
+# for dataURI
 # 10/05/16 5926 dgilling Better checks in runGeometryDataTest.
 # 11/08/16 5985 tgurney Do not check data times on
 # time-agnostic data
@@ -70,7 +72,7 @@ class DafTestCase(unittest.TestCase):
     """Name of the datatype"""

     @classmethod
-    def setUp(cls):
+    def setUpClass(cls):
         host = os.environ.get('DAF_TEST_HOST')
         if host is None:
             host = 'localhost'
@@ -107,6 +109,8 @@ class DafTestCase(unittest.TestCase):

     def runGetIdValuesTest(self, identifiers):
         for id in identifiers:
+            if id.lower() == 'datauri':
+                continue
             req = DAL.newDataRequest(self.datatype)
             idValues = DAL.getIdentifierValues(req, id)
             self.assertTrue(hasattr(idValues, '__iter__'))
@@ -37,6 +37,7 @@ import unittest
 # 04/18/16 5548 tgurney More cleanup
 # 06/09/16 5587 bsteffen Add getIdentifierValues tests
 # 06/13/16 5574 tgurney Add advanced query tests
+# 06/30/16 5725 tgurney Add test for NOT IN
 #
 #
@@ -147,6 +148,12 @@ class AirepTestCase(baseDafTestCase.DafTestCase):
         for record in geometryData:
             self.assertIn(record.getString('reportType'), collection)

+    def testGetDataWithNotInList(self):
+        collection = ['AMDAR']
+        geometryData = self._runConstraintTest('reportType', 'not in', collection)
+        for record in geometryData:
+            self.assertNotIn(record.getString('reportType'), collection)
+
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
             self._runConstraintTest('reportType', 'junk', 'AIREP')
@@ -45,6 +45,7 @@ import unittest
 # 06/01/16 5587 tgurney Update testGetIdentifierValues
 # 06/03/16 5574 tgurney Add advanced query tests
 # 06/13/16 5574 tgurney Typo
+# 06/30/16 5725 tgurney Add test for NOT IN
 # 11/08/16 5985 tgurney Do not check data times
 #
 #
@@ -179,6 +180,11 @@ class BinLightningTestCase(baseDafTestCase.DafTestCase):
         for record in geomData:
             self.assertIn(record.getAttribute('source'), ('NLDN', 'ENTLN'))

+    def testGetDataWithNotInList(self):
+        geomData = self._runConstraintTest('source', 'not in', ['NLDN', 'blah'])
+        for record in geomData:
+            self.assertNotIn(record.getAttribute('source'), ('NLDN', 'blah'))
+
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
             self._runConstraintTest('source', 'junk', 'NLDN')
@@ -37,6 +37,7 @@ import unittest
 # 04/18/16 5548 tgurney More cleanup
 # 06/09/16 5587 bsteffen Add getIdentifierValues tests
 # 06/13/16 5574 tgurney Add advanced query tests
+# 06/30/16 5725 tgurney Add test for NOT IN
 #
 #
@@ -187,6 +188,12 @@ class BufrUaTestCase(baseDafTestCase.DafTestCase):
         for record in geometryData:
             self.assertIn(record.getString('rptType'), collection)

+    def testGetDataWithNotInList(self):
+        collection = ('2022', '2032')
+        geometryData = self._runConstraintTest('reportType', 'not in', collection)
+        for record in geometryData:
+            self.assertNotIn(record.getString('rptType'), collection)
+
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
             self._runConstraintTest('reportType', 'junk', '2022')
@@ -42,6 +42,7 @@ import unittest
 # 06/09/16 5574 mapeters Add advanced query tests, Short parameter test
 # 06/13/16 5574 tgurney Fix checks for None
 # 06/21/16 5548 tgurney Skip tests that cause errors
+# 06/30/16 5725 tgurney Add test for NOT IN
 # 10/06/16 5926 dgilling Add additional time and location tests.
 #
 #
@@ -319,6 +320,12 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         for record in geometryData:
             self.assertIn(record.getString('station_code'), collection)

+    def testGetDataWithNotInList(self):
+        collection = ['KORD', 'KABR']
+        geometryData = self._runConstraintTest('station_code', 'not in', collection)
+        for record in geometryData:
+            self.assertNotIn(record.getString('station_code'), collection)
+
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
             self._runConstraintTest('station_code', 'junk', 'KOMA')
@@ -49,7 +49,7 @@ class CombinedTimeQueryTestCase(unittest.TestCase):

     def testSuccessfulQuery(self):
         req = DAL.newDataRequest('grid')
-        req.setLocationNames('RUC130')
+        req.setLocationNames('RAP13')
         req.setParameters('T','GH')
         req.setLevels('300MB', '500MB','700MB')
         times = CTQ.getAvailableTimes(req);
@@ -60,7 +60,7 @@ class CombinedTimeQueryTestCase(unittest.TestCase):
         Test that when a parameter is only available on one of the levels that no times are returned.
         """
         req = DAL.newDataRequest('grid')
-        req.setLocationNames('RUC130')
+        req.setLocationNames('RAP13')
         req.setParameters('T','GH', 'LgSP1hr')
         req.setLevels('300MB', '500MB','700MB','0.0SFC')
         times = CTQ.getAvailableTimes(req);
@@ -41,6 +41,7 @@ import unittest
 # superclass
 # 06/13/16 5574 tgurney Add advanced query tests
 # 06/21/16 5548 tgurney Skip tests that cause errors
+# 06/30/16 5725 tgurney Add test for NOT IN
 #

@@ -166,6 +167,12 @@ class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
         for record in geometryData:
             self.assertIn(record.getString('state'), collection)

+    def testGetDataWithNotInList(self):
+        collection = ('NE', 'TX')
+        geometryData = self._runConstraintTest('state', 'not in', collection)
+        for record in geometryData:
+            self.assertNotIn(record.getString('state'), collection)
+
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
             self._runConstraintTest('state', 'junk', 'NE')
awips/test/dafTests/testDataTime.py (new file, 134 lines)

@@ -0,0 +1,134 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime

import unittest

#
# Unit tests for Python implementation of RequestConstraint
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 08/02/16 2416 tgurney Initial creation
#
#


class DataTimeTestCase(unittest.TestCase):

    def testFromStrRefTimeOnly(self):
        s = '2016-08-02 01:23:45'
        expected = s
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrRefTimeOnlyZeroMillis(self):
        s = '2016-08-02 01:23:45.0'
        # result of str() will always drop trailing .0 milliseconds
        expected = '2016-08-02 01:23:45'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrRefTimeOnlyWithMillis(self):
        s = '2016-08-02 01:23:45.1'
        expected = '2016-08-02 01:23:45.001000'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithFcstTimeHr(self):
        s = '2016-08-02 01:23:45 (17)'
        expected = s
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithFcstTimeHrZeroMillis(self):
        s = '2016-08-02 01:23:45.0 (17)'
        expected = '2016-08-02 01:23:45 (17)'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithFcstTimeHrAndMillis(self):
        s = '2016-08-02 01:23:45.1 (17)'
        expected = '2016-08-02 01:23:45.001000 (17)'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithFcstTimeHrMin(self):
        s = '2016-08-02 01:23:45 (17:34)'
        expected = s
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithFcstTimeHrMinZeroMillis(self):
        s = '2016-08-02 01:23:45.0 (17:34)'
        expected = '2016-08-02 01:23:45 (17:34)'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithPeriod(self):
        s = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]'
        expected = s
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithPeriodZeroMillis(self):
        s = '2016-08-02 01:23:45.0[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]'
        expected = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testFromStrWithEverything(self):
        s = '2016-08-02 01:23:45.0_(17:34)[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]'
        expected = '2016-08-02 01:23:45 (17:34)[2016-08-02 02:34:45--2016-08-02 03:45:56]'
        self.assertEqual(expected, str(DataTime(s)))
        s = s.replace(' ', '_')
        self.assertEqual(expected, str(DataTime(s)))

    def testDataTimeReconstructItselfFromString(self):
        times = [
            '2016-08-02 01:23:45',
            '2016-08-02 01:23:45.0',
            '2016-08-02 01:23:45.1',
            '2016-08-02 01:23:45.123000',
            '2016-08-02 01:23:45 (17)',
            '2016-08-02 01:23:45.0 (17)',
            '2016-08-02 01:23:45.1 (17)',
            '2016-08-02 01:23:45 (17:34)',
            '2016-08-02 01:23:45.0 (17:34)',
            '2016-08-02 01:23:45.1 (17:34)',
            '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]',
            '2016-08-02 01:23:45.0[2016-08-02_02:34:45.123--2016-08-02_03:45:56.456]',
            '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]'
        ]
        for time in times:
            self.assertEqual(DataTime(time), DataTime(str(DataTime(time))), time)
@ -19,6 +19,7 @@
|
||||||
##
|
##
|
||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
|
||||||
from awips.dataaccess import DataAccessLayer as DAL
|
from awips.dataaccess import DataAccessLayer as DAL
|
||||||
|
|
||||||
import baseDafTestCase
|
import baseDafTestCase
|
||||||
|
@ -37,6 +38,13 @@ import unittest
|
||||||
# 04/18/16 5587 tgurney Add test for sane handling of
|
# 04/18/16 5587 tgurney Add test for sane handling of
|
||||||
# zero records returned
|
# zero records returned
|
||||||
# 06/20/16 5587 tgurney Add identifier values tests
|
# 06/20/16 5587 tgurney Add identifier values tests
|
||||||
|
# 07/01/16 5728 mapeters Add advanced query tests,
|
||||||
|
# include huc and accumHrs in
|
||||||
|
# id values tests, test that
|
||||||
|
# accumHrs id is never required
|
||||||
|
# 08/03/16 5728 mapeters Fixed minor bugs, replaced
|
||||||
|
# PRTM parameter since it isn't
|
||||||
|
# configured for ec-oma
|
||||||
# 11/08/16 5985 tgurney Do not check data times
|
# 11/08/16 5985 tgurney Do not check data times
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
@ -45,14 +53,14 @@ import unittest
|
||||||
class FfmpTestCase(baseDafTestCase.DafTestCase):
|
class FfmpTestCase(baseDafTestCase.DafTestCase):
|
||||||
"""Test DAF support for ffmp data"""
|
"""Test DAF support for ffmp data"""
|
||||||
|
|
||||||
datatype = "ffmp"
|
datatype = 'ffmp'
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def addIdentifiers(req):
|
def addIdentifiers(req):
|
||||||
req.addIdentifier("wfo", "OAX")
|
req.addIdentifier('wfo', 'OAX')
|
||||||
req.addIdentifier("siteKey", "hpe")
|
req.addIdentifier('siteKey', 'hpe')
|
||||||
req.addIdentifier("dataKey", "hpe")
|
req.addIdentifier('dataKey', 'hpe')
|
||||||
req.addIdentifier("huc", "ALL")
|
req.addIdentifier('huc', 'ALL')
|
||||||
|
|
||||||
def testGetAvailableParameters(self):
|
def testGetAvailableParameters(self):
|
||||||
req = DAL.newDataRequest(self.datatype)
|
req = DAL.newDataRequest(self.datatype)
|
||||||
|
@ -66,18 +74,19 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
|
||||||
def testGetAvailableTimes(self):
|
def testGetAvailableTimes(self):
|
||||||
req = DAL.newDataRequest(self.datatype)
|
req = DAL.newDataRequest(self.datatype)
|
||||||
self.addIdentifiers(req)
|
self.addIdentifiers(req)
|
||||||
|
req.setParameters('DHRMOSAIC')
|
||||||
self.runTimesTest(req)
|
self.runTimesTest(req)
|
||||||
|
|
||||||
def testGetGeometryData(self):
|
def testGetGeometryData(self):
|
||||||
req = DAL.newDataRequest(self.datatype)
|
req = DAL.newDataRequest(self.datatype)
|
||||||
self.addIdentifiers(req)
|
self.addIdentifiers(req)
|
||||||
req.setParameters("PRTM")
|
req.setParameters('DHRMOSAIC')
|
||||||
self.runGeometryDataTest(req, checkDataTimes=False)
|
self.runGeometryDataTest(req, checkDataTimes=False)
|
||||||
|
|
||||||
def testGetGeometryDataEmptyResult(self):
|
def testGetGeometryDataEmptyResult(self):
|
||||||
req = DAL.newDataRequest(self.datatype)
|
req = DAL.newDataRequest(self.datatype)
|
||||||
self.addIdentifiers(req)
|
self.addIdentifiers(req)
|
||||||
req.setParameters("blah blah blah") # force 0 records returned
|
req.setParameters('blah blah blah') # force 0 records returned
|
||||||
result = self.runGeometryDataTest(req, checkDataTimes=False)
|
result = self.runGeometryDataTest(req, checkDataTimes=False)
|
||||||
self.assertEqual(len(result), 0)
|
self.assertEqual(len(result), 0)
|
||||||
|
|
||||||
|
@@ -86,13 +95,130 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        requiredIds = set(DAL.getRequiredIdentifiers(req))
        ids = requiredIds | optionalIds
        for id in ids:
            req = DAL.newDataRequest(self.datatype)
            if id == 'accumHrs':
                req.setParameters('ARI6H2YR')
            req.addIdentifier('wfo', 'OAX')
            req.addIdentifier('siteKey', 'koax')
            req.addIdentifier('huc', 'ALL')
            idValues = DAL.getIdentifierValues(req, id)
            self.assertTrue(hasattr(idValues, '__iter__'))
            print(id + " values: " + str(idValues))

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.addIdentifier('wfo', 'OAX')
        req.addIdentifier('huc', 'ALL')
        req.setParameters('QPFSCAN')
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetDataWithEqualsString(self):
        geometryData = self._runConstraintTest('siteKey', '=', 'koax')
        for record in geometryData:
            self.assertEqual(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithEqualsUnicode(self):
        geometryData = self._runConstraintTest('siteKey', '=', u'koax')
        for record in geometryData:
            self.assertEqual(record.getAttribute('siteKey'), 'koax')

    # No numeric tests since no numeric identifiers are available that support
    # RequestConstraints.

    def testGetDataWithEqualsNone(self):
        geometryData = self._runConstraintTest('siteKey', '=', None)
        for record in geometryData:
            self.assertIsNone(record.getAttribute('siteKey'))

    def testGetDataWithNotEquals(self):
        geometryData = self._runConstraintTest('siteKey', '!=', 'koax')
        for record in geometryData:
            self.assertNotEqual(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithNotEqualsNone(self):
        geometryData = self._runConstraintTest('siteKey', '!=', None)
        for record in geometryData:
            self.assertIsNotNone(record.getAttribute('siteKey'))

    def testGetDataWithGreaterThan(self):
        geometryData = self._runConstraintTest('siteKey', '>', 'koax')
        for record in geometryData:
            self.assertGreater(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithLessThan(self):
        geometryData = self._runConstraintTest('siteKey', '<', 'koax')
        for record in geometryData:
            self.assertLess(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithGreaterThanEquals(self):
        geometryData = self._runConstraintTest('siteKey', '>=', 'koax')
        for record in geometryData:
            self.assertGreaterEqual(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithLessThanEquals(self):
        geometryData = self._runConstraintTest('siteKey', '<=', 'koax')
        for record in geometryData:
            self.assertLessEqual(record.getAttribute('siteKey'), 'koax')

    def testGetDataWithInList(self):
        collection = ['koax', 'kuex']
        geometryData = self._runConstraintTest('siteKey', 'in', collection)
        for record in geometryData:
            self.assertIn(record.getAttribute('siteKey'), collection)

    def testGetDataWithNotInList(self):
        collection = ['koax', 'kuex']
        geometryData = self._runConstraintTest('siteKey', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getAttribute('siteKey'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('siteKey', 'junk', 'koax')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('siteKey', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('siteKey', 'in', [])

    def testGetDataWithSiteKeyAndDataKeyConstraints(self):
        siteKeys = ['koax', 'hpe']
        dataKeys = ['kuex', 'kdmx']

        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('wfo', 'OAX')
        req.addIdentifier('huc', 'ALL')

        siteKeysConstraint = RequestConstraint.new('in', siteKeys)
        req.addIdentifier('siteKey', siteKeysConstraint)
        dataKeysConstraint = RequestConstraint.new('in', dataKeys)
        req.addIdentifier('dataKey', dataKeysConstraint)

        req.setParameters('QPFSCAN')
        geometryData = self.runGeometryDataTest(req, checkDataTimes=False)
        for record in geometryData:
            self.assertIn(record.getAttribute('siteKey'), siteKeys)
            # dataKey attr. is comma-separated list of dataKeys that had data
            for dataKey in record.getAttribute('dataKey').split(','):
                self.assertIn(dataKey, dataKeys)

    def testGetGuidanceDataWithoutAccumHrsIdentifierSet(self):
        # Test that accumHrs identifier is not required for guidance data
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('wfo', 'OAX')
        req.addIdentifier('siteKey', 'koax')
        req.addIdentifier('huc', 'ALL')
        req.setParameters('FFG0124hr')
        self.runGeometryDataTest(req, checkDataTimes=False)
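The _runConstraintTest helper above mirrors how a client script would build an FFMP request by hand. A minimal sketch of that pattern outside the test harness, assuming a reachable EDEX server (the host name here is a placeholder, not part of this commit):

    from awips.dataaccess import DataAccessLayer as DAL
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    DAL.changeEDEXHost("edex.example.com")  # placeholder host
    req = DAL.newDataRequest('ffmp')
    req.addIdentifier('wfo', 'OAX')
    req.addIdentifier('huc', 'ALL')
    # restrict siteKey to a set of radars with an IN constraint
    req.addIdentifier('siteKey', RequestConstraint.new('in', ['koax', 'kuex']))
    req.setParameters('QPFSCAN')
    times = DAL.getAvailableTimes(req)
    data = DAL.getGeometryData(req, times[-1:])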
@@ -39,6 +39,7 @@ import unittest
# 05/31/16 5587 tgurney Add getIdentifierValues tests
# 06/01/16 5587 tgurney Update testGetIdentifierValues
# 06/17/16 5574 mapeters Add advanced query tests
# 06/30/16 5725 tgurney Add test for NOT IN
# 11/07/16 5991 bsteffen Improve vector tests
#
#
@@ -183,6 +184,12 @@ class GfeTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getAttribute('modelName'), collection)

    def testGetDataWithNotInList(self):
        collection = ('Fcst', 'SAT')
        geometryData = self._runConstraintTest('modelName', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getAttribute('modelName'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('modelName', 'junk', 'Fcst')
@@ -19,8 +19,10 @@
##

from __future__ import print_function
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from shapely.geometry import box, Point
from awips.dataaccess import DataAccessLayer as DAL
from awips.ThriftClient import ThriftRequestException

import baseDafTestCase
import unittest
@@ -36,6 +38,9 @@ import unittest
# 04/11/16 5548 tgurney Cleanup
# 04/18/16 5548 tgurney More cleanup
# 06/09/16 5587 tgurney Typo in id values test
# 07/06/16 5728 mapeters Add advanced query tests
# 08/03/16 5728 mapeters Add additional identifiers to testGetDataWith*
#                        tests to shorten run time and prevent EOFError
# 10/13/16 5942 bsteffen Test envelopes
# 11/08/16 5985 tgurney Skip certain tests when no
#                       data is available
@@ -45,45 +50,45 @@ import unittest
class GridTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for grid data"""

    datatype = 'grid'

    model = 'GFS160'

    envelope = box(-97.0, 41.0, -96.0, 42.0)

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runLocationsTest(req)

    def testGetAvailableLevels(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        self.runLevelsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        req.setLevels('2FHAG')
        self.runTimesTest(req)

    def testGetGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', self.model)
        req.setLevels('2FHAG')
        req.setParameters('T')
        self.runGridDataTest(req)

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('info.datasetId', 'ENSEMBLE')
        req.setLevels('2FHAG')
        req.setParameters('T')
        idValues = DAL.getIdentifierValues(req, 'info.ensembleId')
        self.assertTrue(hasattr(idValues, '__iter__'))
        if idValues:
@@ -121,3 +126,161 @@ class GridTestCase(baseDafTestCase.DafTestCase):
        for i in range(len(lons)):
            self.assertTrue(testEnv.contains(Point(lons[i], lats[i])))

    def _runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.addIdentifier('info.datasetId', self.model)
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.setParameters('T')
        return self.runGridDataTest(req)

    def testGetDataWithEqualsString(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', '2000.0')
        for record in gridData:
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithEqualsUnicode(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', u'2000.0')
        for record in gridData:
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithEqualsInt(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000)
        for record in gridData:
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)

    def testGetDataWithEqualsLong(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000L)
        for record in gridData:
            self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)

    def testGetDataWithEqualsFloat(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000.0)
        for record in gridData:
            self.assertEqual(round(record.getAttribute('info.level.levelonevalue'), 1), 2000.0)

    def testGetDataWithEqualsNone(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '=', None)
        for record in gridData:
            self.assertIsNone(record.getAttribute('info.level.levelonevalue'))

    def testGetDataWithNotEquals(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '!=', 2000.0)
        for record in gridData:
            self.assertNotEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithNotEqualsNone(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '!=', None)
        for record in gridData:
            self.assertIsNotNone(record.getAttribute('info.level.levelonevalue'))

    def testGetDataWithGreaterThan(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '>', 2000.0)
        for record in gridData:
            self.assertGreater(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithLessThan(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '<', 2000.0)
        for record in gridData:
            self.assertLess(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithGreaterThanEquals(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '>=', 2000.0)
        for record in gridData:
            self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithLessThanEquals(self):
        gridData = self._runConstraintTest('info.level.levelonevalue', '<=', 2000.0)
        for record in gridData:
            self.assertLessEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)

    def testGetDataWithInList(self):
        collection = [2000.0, 1000.0]
        gridData = self._runConstraintTest('info.level.levelonevalue', 'in', collection)
        for record in gridData:
            self.assertIn(record.getAttribute('info.level.levelonevalue'), collection)

    def testGetDataWithNotInList(self):
        collection = [2000.0, 1000.0]
        gridData = self._runConstraintTest('info.level.levelonevalue', 'not in', collection)
        for record in gridData:
            self.assertNotIn(record.getAttribute('info.level.levelonevalue'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('info.level.levelonevalue', 'junk', '2000.0')

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest('info.level.levelonevalue', '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('info.level.levelonevalue', 'in', [])

    def testGetDataWithLevelOneAndLevelTwoConstraints(self):
        req = DAL.newDataRequest(self.datatype)
        levelOneConstraint = RequestConstraint.new('>=', 2000.0)
        req.addIdentifier('info.level.levelonevalue', levelOneConstraint)
        levelTwoConstraint = RequestConstraint.new('in', (4000.0, 5000.0))
        req.addIdentifier('info.level.leveltwovalue', levelTwoConstraint)
        req.addIdentifier('info.datasetId', self.model)
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.setParameters('T')
        gridData = self.runGridDataTest(req)
        for record in gridData:
            self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0)
            self.assertIn(record.getAttribute('info.level.leveltwovalue'), (4000.0, 5000.0))

    def testGetDataWithMasterLevelNameInConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        masterLevelConstraint = RequestConstraint.new('in', ('FHAG', 'K'))
        req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint)
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        gridData = self.runGridDataTest(req)
        for record in gridData:
            self.assertIn(record.getAttribute('info.level.masterLevel.name'), ('FHAG', 'K'))

    def testGetDataWithDatasetIdInConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        # gfs160 is alias for GFS160 in this namespace
        req.addIdentifier('namespace', 'gfeParamInfo')
        datasetIdConstraint = RequestConstraint.new('in', ('gfs160', 'HRRR'))
        req.addIdentifier('info.datasetId', datasetIdConstraint)
        req.addIdentifier('info.level.masterLevel.name', 'FHAG')
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.setParameters('T')
        gridData = self.runGridDataTest(req, testSameShape=False)
        for record in gridData:
            self.assertIn(record.getAttribute('info.datasetId'), ('gfs160', 'HRRR'))

    def testGetDataWithMasterLevelNameLessThanEqualsConstraint(self):
        req = DAL.newDataRequest(self.datatype)
        masterLevelConstraint = RequestConstraint.new('<=', 'K')
        req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint)
        req.addIdentifier('info.level.levelonevalue', 2000.0)
        req.addIdentifier('info.level.leveltwovalue', 3000.0)
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        gridData = self.runGridDataTest(req)
        for record in gridData:
            self.assertLessEqual(record.getAttribute('info.level.masterLevel.name'), 'K')

    def testGetDataWithComplexConstraintAndNamespaceThrowsException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('namespace', 'grib')
        masterLevelConstraint = RequestConstraint.new('<=', 'K')
        req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint)
        req.addIdentifier('info.datasetId', 'GFS160')
        req.setParameters('T')
        with self.assertRaises(ThriftRequestException) as cm:
            self.runGridDataTest(req)
        self.assertIn('IncompatibleRequestException', str(cm.exception))
        self.assertIn('info.level.masterLevel.name', str(cm.exception))
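For reference, the same advanced-query identifiers can be combined in ordinary client code, not just in the test harness. A minimal sketch, assuming the model and parameter below exist on the target server:

    from awips.dataaccess import DataAccessLayer as DAL
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    req = DAL.newDataRequest('grid')
    req.addIdentifier('info.datasetId', 'GFS160')              # assumed model
    req.addIdentifier('info.level.masterLevel.name', 'FHAG')
    # only levels whose levelonevalue is at least 2000.0
    req.addIdentifier('info.level.levelonevalue', RequestConstraint.new('>=', 2000.0))
    req.setParameters('T')
    gridData = DAL.getGridData(req, DAL.getAvailableTimes(req)[-1:])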
@@ -43,6 +43,7 @@ import unittest
# 06/09/16 5574 tgurney Add advanced query tests
# 06/13/16 5574 tgurney Fix checks for None
# 06/21/16 5548 tgurney Skip tests that cause errors
# 06/30/16 5725 tgurney Add test for NOT IN
# 10/06/16 5926 dgilling Add additional location tests.
#
#
@@ -231,6 +232,12 @@ class HydroTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getNumber('value'), collection)

    def testGetDataWithNotInList(self):
        collection = [3, 4]
        geometryData = self._runConstraintTest('value', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getNumber('value'), collection)

    def testGetDataWithTimeRange(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier('table', 'height')
@@ -35,6 +35,7 @@ import unittest
# 01/19/16 4795 mapeters Initial Creation.
# 04/11/16 5548 tgurney Cleanup
# 04/18/16 5548 tgurney More cleanup
# 01/20/17 6095 tgurney Add null identifiers test
#
#
@@ -75,3 +76,10 @@ class LdadMesonetTestCase(baseDafTestCase.DafTestCase):
        req.setParameters("highLevelCloud", "pressure")
        req.setEnvelope(self.getReqEnvelope())
        self.runGeometryDataTest(req)

    def testGetGeometryDataNullIdentifiers(self):
        req = DAL.newDataRequest(self.datatype)
        req.setParameters("highLevelCloud", "pressure")
        req.setEnvelope(self.getReqEnvelope())
        req.identifiers = None
        self.runGeometryDataTest(req)
@@ -39,6 +39,7 @@ import unittest
# 04/26/16 5587 tgurney Add identifier values tests
# 06/13/16 5574 mapeters Add advanced query tests
# 06/21/16 5548 tgurney Skip tests that cause errors
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -214,6 +215,12 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getString('state'), collection)

    def testGetDataWithNotInList(self):
        collection = ['IA', 'TX']
        geometryData = self._runConstraintTest('state', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('state'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('state', 'junk', 'NE')
@@ -37,6 +37,7 @@ import unittest
# 04/18/16 5548 tgurney More cleanup
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
# 06/13/16 5574 tgurney Add advanced query tests
# 06/30/16 5725 tgurney Add test for NOT IN
# 11/10/16 5985 tgurney Mark expected failures prior
#                       to 17.3.1
#
@@ -191,6 +192,13 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
            dataURI = record.getString('dataURI')
            self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI)

    def testGetDataWithNotInList(self):
        collection = ['ETA', 'GFS']
        geometryData = self._runConstraintTest('reportType', 'not in', collection)
        for record in geometryData:
            dataURI = record.getString('dataURI')
            self.assertTrue('/ETA/' not in dataURI and '/GFS/' not in dataURI)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', 'ETA')
@@ -37,6 +37,7 @@ import unittest
# 04/18/16 5548 tgurney More cleanup
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
# 06/13/16 5574 tgurney Add advanced query tests
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -150,6 +151,12 @@ class ObsTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getString('reportType'), collection)

    def testGetDataWithNotInList(self):
        collection = ['METAR', 'SPECI']
        geometryData = self._runConstraintTest('reportType', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('reportType'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', 'METAR')
@@ -44,6 +44,7 @@ import unittest
# 06/13/16 5574 tgurney Fix checks for None
# 06/14/16 5548 tgurney Undo previous change (broke
#                       test)
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -184,6 +185,11 @@ class RadarTestCase(baseDafTestCase.DafTestCase):
        for record in gridData:
            self.assertIn(record.getAttribute('icao'), ('koax', 'tpbi'))

    def testGetDataWithNotInList(self):
        gridData = self._runConstraintTest('icao', 'not in', ['zzzz', 'koax'])
        for record in gridData:
            self.assertNotIn(record.getAttribute('icao'), ('zzzz', 'koax'))

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('icao', 'junk', 'koax')
@@ -40,6 +40,7 @@ import unittest
# 06/01/16 5587 tgurney Move testIdentifiers() to
#                       superclass
# 06/13/16 5574 tgurney Add advanced query tests
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -162,6 +163,12 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getString('wfo_id'), collection)

    def testGetDataWithNotInList(self):
        collection = ['OAX', 'GID']
        geometryData = self._runConstraintTest('wfo_id', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('wfo_id'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('wfo_id', 'junk', 'OAX')
awips/test/dafTests/testRequestConstraint.py (new file, 245 added lines)
@@ -0,0 +1,245 @@
##
##

from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

import unittest

#
# Unit tests for Python implementation of RequestConstraint
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 07/22/16 2416 tgurney Initial creation
#
#


class RequestConstraintTestCase(unittest.TestCase):

    def _newRequestConstraint(self, constraintType, constraintValue):
        constraint = RequestConstraint()
        constraint.constraintType = constraintType
        constraint.constraintValue = constraintValue
        return constraint

    def testEvaluateEquals(self):
        new = RequestConstraint.new
        self.assertTrue(new('=', 3).evaluate(3))
        self.assertTrue(new('=', 3).evaluate('3'))
        self.assertTrue(new('=', '3').evaluate(3))
        self.assertTrue(new('=', 12345).evaluate(12345L))
        self.assertTrue(new('=', 'a').evaluate('a'))
        self.assertTrue(new('=', 'a').evaluate(u'a'))
        self.assertTrue(new('=', 1.0001).evaluate(2.0 - 0.999999))
        self.assertTrue(new('=', 1.00001).evaluate(1))
        self.assertFalse(new('=', 'a').evaluate(['a']))
        self.assertFalse(new('=', 'a').evaluate(['b']))
        self.assertFalse(new('=', 3).evaluate(4))
        self.assertFalse(new('=', 4).evaluate(3))
        self.assertFalse(new('=', 'a').evaluate('z'))

    def testEvaluateNotEquals(self):
        new = RequestConstraint.new
        self.assertTrue(new('!=', 'a').evaluate(['a']))
        self.assertTrue(new('!=', 'a').evaluate(['b']))
        self.assertTrue(new('!=', 3).evaluate(4))
        self.assertTrue(new('!=', 4).evaluate(3))
        self.assertTrue(new('!=', 'a').evaluate('z'))
        self.assertFalse(new('!=', 3).evaluate('3'))
        self.assertFalse(new('!=', '3').evaluate(3))
        self.assertFalse(new('!=', 3).evaluate(3))
        self.assertFalse(new('!=', 12345).evaluate(12345L))
        self.assertFalse(new('!=', 'a').evaluate('a'))
        self.assertFalse(new('!=', 'a').evaluate(u'a'))
        self.assertFalse(new('!=', 1.0001).evaluate(2.0 - 0.9999))

    def testEvaluateGreaterThan(self):
        new = RequestConstraint.new
        self.assertTrue(new('>', 1.0001).evaluate(1.0002))
        self.assertTrue(new('>', 'a').evaluate('b'))
        self.assertTrue(new('>', 3).evaluate(4))
        self.assertFalse(new('>', 20).evaluate(3))
        self.assertFalse(new('>', 12345).evaluate(12345L))
        self.assertFalse(new('>', 'a').evaluate('a'))
        self.assertFalse(new('>', 'z').evaluate('a'))
        self.assertFalse(new('>', 4).evaluate(3))

    def testEvaluateGreaterThanEquals(self):
        new = RequestConstraint.new
        self.assertTrue(new('>=', 3).evaluate(3))
        self.assertTrue(new('>=', 12345).evaluate(12345L))
        self.assertTrue(new('>=', 'a').evaluate('a'))
        self.assertTrue(new('>=', 1.0001).evaluate(1.0002))
        self.assertTrue(new('>=', 'a').evaluate('b'))
        self.assertTrue(new('>=', 3).evaluate(20))
        self.assertFalse(new('>=', 1.0001).evaluate(1.0))
        self.assertFalse(new('>=', 'z').evaluate('a'))
        self.assertFalse(new('>=', 40).evaluate(3))

    def testEvaluateLessThan(self):
        new = RequestConstraint.new
        self.assertTrue(new('<', 'z').evaluate('a'))
        self.assertTrue(new('<', 30).evaluate(4))
        self.assertFalse(new('<', 3).evaluate(3))
        self.assertFalse(new('<', 12345).evaluate(12345L))
        self.assertFalse(new('<', 'a').evaluate('a'))
        self.assertFalse(new('<', 1.0001).evaluate(1.0002))
        self.assertFalse(new('<', 'a').evaluate('b'))
        self.assertFalse(new('<', 3).evaluate(40))

    def testEvaluateLessThanEquals(self):
        new = RequestConstraint.new
        self.assertTrue(new('<=', 'z').evaluate('a'))
        self.assertTrue(new('<=', 20).evaluate(3))
        self.assertTrue(new('<=', 3).evaluate(3))
        self.assertTrue(new('<=', 12345).evaluate(12345L))
        self.assertTrue(new('<=', 'a').evaluate('a'))
        self.assertFalse(new('<=', 1.0001).evaluate(1.0002))
        self.assertFalse(new('<=', 'a').evaluate('b'))
        self.assertFalse(new('<=', 4).evaluate(30))

    def testEvaluateIsNull(self):
        new = RequestConstraint.new
        self.assertTrue(new('=', None).evaluate(None))
        self.assertTrue(new('=', None).evaluate('null'))
        self.assertFalse(new('=', None).evaluate(()))
        self.assertFalse(new('=', None).evaluate(0))
        self.assertFalse(new('=', None).evaluate(False))

    def testEvaluateIsNotNull(self):
        new = RequestConstraint.new
        self.assertTrue(new('!=', None).evaluate(()))
        self.assertTrue(new('!=', None).evaluate(0))
        self.assertTrue(new('!=', None).evaluate(False))
        self.assertFalse(new('!=', None).evaluate(None))
        self.assertFalse(new('!=', None).evaluate('null'))

    def testEvaluateIn(self):
        new = RequestConstraint.new
        self.assertTrue(new('in', [3]).evaluate(3))
        self.assertTrue(new('in', ['a', 'b', 3]).evaluate(3))
        self.assertTrue(new('in', 'a').evaluate('a'))
        self.assertTrue(new('in', [3, 4, 5]).evaluate('5'))
        self.assertTrue(new('in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999))
        self.assertFalse(new('in', ['a', 'b', 'c']).evaluate('d'))
        self.assertFalse(new('in', 'a').evaluate('b'))

    def testEvaluateNotIn(self):
        new = RequestConstraint.new
        self.assertTrue(new('not in', ['a', 'b', 'c']).evaluate('d'))
        self.assertTrue(new('not in', [3, 4, 5]).evaluate(6))
        self.assertTrue(new('not in', 'a').evaluate('b'))
        self.assertFalse(new('not in', [3]).evaluate(3))
        self.assertFalse(new('not in', ['a', 'b', 3]).evaluate(3))
        self.assertFalse(new('not in', 'a').evaluate('a'))
        self.assertFalse(new('not in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999))

    def testEvaluateLike(self):
        # cannot make "like" with RequestConstraint.new()
        new = self._newRequestConstraint
        self.assertTrue(new('LIKE', 'a').evaluate('a'))
        self.assertTrue(new('LIKE', 'a%').evaluate('a'))
        self.assertTrue(new('LIKE', 'a%').evaluate('abcd'))
        self.assertTrue(new('LIKE', '%a').evaluate('a'))
        self.assertTrue(new('LIKE', '%a').evaluate('bcda'))
        self.assertTrue(new('LIKE', '%').evaluate(''))
        self.assertTrue(new('LIKE', '%').evaluate('anything'))
        self.assertTrue(new('LIKE', 'a%d').evaluate('ad'))
        self.assertTrue(new('LIKE', 'a%d').evaluate('abcd'))
        self.assertTrue(new('LIKE', 'aa.()!{[]^%$').evaluate('aa.()!{[]^zzz$'))
        self.assertTrue(new('LIKE', 'a__d%').evaluate('abcdefg'))
        self.assertFalse(new('LIKE', 'a%').evaluate('b'))
        self.assertFalse(new('LIKE', 'a%').evaluate('ba'))
        self.assertFalse(new('LIKE', '%a').evaluate('b'))
        self.assertFalse(new('LIKE', '%a').evaluate('ab'))
        self.assertFalse(new('LIKE', 'a%').evaluate('A'))
        self.assertFalse(new('LIKE', 'A%').evaluate('a'))
        self.assertFalse(new('LIKE', 'a%d').evaluate('da'))
        self.assertFalse(new('LIKE', 'a__d%').evaluate('abccdefg'))
        self.assertFalse(new('LIKE', '....').evaluate('aaaa'))
        self.assertFalse(new('LIKE', '.*').evaluate('anything'))

    def testEvaluateILike(self):
        # cannot make "ilike" with RequestConstraint.new()
        new = self._newRequestConstraint
        self.assertTrue(new('ILIKE', 'a').evaluate('a'))
        self.assertTrue(new('ILIKE', 'a%').evaluate('a'))
        self.assertTrue(new('ILIKE', 'a%').evaluate('abcd'))
        self.assertTrue(new('ILIKE', '%a').evaluate('a'))
        self.assertTrue(new('ILIKE', '%a').evaluate('bcda'))
        self.assertTrue(new('ILIKE', '%').evaluate(''))
        self.assertTrue(new('ILIKE', '%').evaluate('anything'))
        self.assertTrue(new('ILIKE', 'a%d').evaluate('ad'))
        self.assertTrue(new('ILIKE', 'a%d').evaluate('abcd'))
        self.assertTrue(new('ILIKE', 'a').evaluate('A'))
        self.assertTrue(new('ILIKE', 'a%').evaluate('A'))
        self.assertTrue(new('ILIKE', 'a%').evaluate('ABCD'))
        self.assertTrue(new('ILIKE', '%a').evaluate('A'))
        self.assertTrue(new('ILIKE', '%a').evaluate('BCDA'))
        self.assertTrue(new('ILIKE', '%').evaluate(''))
        self.assertTrue(new('ILIKE', '%').evaluate('anything'))
        self.assertTrue(new('ILIKE', 'a%d').evaluate('AD'))
        self.assertTrue(new('ILIKE', 'a%d').evaluate('ABCD'))
        self.assertTrue(new('ILIKE', 'A').evaluate('a'))
        self.assertTrue(new('ILIKE', 'A%').evaluate('a'))
        self.assertTrue(new('ILIKE', 'A%').evaluate('abcd'))
        self.assertTrue(new('ILIKE', '%A').evaluate('a'))
        self.assertTrue(new('ILIKE', '%A').evaluate('bcda'))
        self.assertTrue(new('ILIKE', '%').evaluate(''))
        self.assertTrue(new('ILIKE', '%').evaluate('anything'))
        self.assertTrue(new('ILIKE', 'A%D').evaluate('ad'))
        self.assertTrue(new('ILIKE', 'A%D').evaluate('abcd'))
        self.assertTrue(new('ILIKE', 'aa.()!{[]^%$').evaluate('AA.()!{[]^zzz$'))
        self.assertTrue(new('ILIKE', 'a__d%').evaluate('abcdefg'))
        self.assertTrue(new('ILIKE', 'a__d%').evaluate('ABCDEFG'))
        self.assertFalse(new('ILIKE', 'a%').evaluate('b'))
        self.assertFalse(new('ILIKE', 'a%').evaluate('ba'))
        self.assertFalse(new('ILIKE', '%a').evaluate('b'))
        self.assertFalse(new('ILIKE', '%a').evaluate('ab'))
        self.assertFalse(new('ILIKE', 'a%d').evaluate('da'))
        self.assertFalse(new('ILIKE', 'a__d%').evaluate('abccdefg'))
        self.assertFalse(new('ILIKE', '....').evaluate('aaaa'))
        self.assertFalse(new('ILIKE', '.*').evaluate('anything'))

    def testEvaluateBetween(self):
        # cannot make "between" with RequestConstraint.new()
        new = self._newRequestConstraint
        self.assertTrue(new('BETWEEN', '1--1').evaluate(1))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(1))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(5))
        self.assertTrue(new('BETWEEN', '1--10').evaluate(10))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.0))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.05))
        self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.1))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('a'))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('j'))
        self.assertTrue(new('BETWEEN', 'a--x').evaluate('x'))
        self.assertFalse(new('BETWEEN', '1--1').evaluate(2))
        self.assertFalse(new('BETWEEN', '1--2').evaluate(10))
        self.assertFalse(new('BETWEEN', '1--10').evaluate(0))
        self.assertFalse(new('BETWEEN', '1--10').evaluate(11))
        self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(0.99))
        self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(1.11))
        self.assertFalse(new('BETWEEN', 'a--x').evaluate(' '))
        self.assertFalse(new('BETWEEN', 'a--x').evaluate('z'))
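As the comments in the LIKE, ILIKE, and BETWEEN tests note, RequestConstraint.new() does not build those constraint types, so the tests assign constraintType and constraintValue directly. A client needing the same behavior could follow the helper pattern used above; a sketch only, with pattern and inputs chosen for illustration:

    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    constraint = RequestConstraint()
    constraint.constraintType = 'ILIKE'   # case-insensitive SQL-style pattern match
    constraint.constraintValue = 'k%'     # % and _ wildcards, as exercised in the tests
    print(constraint.evaluate('KOAX'))    # True
    print(constraint.evaluate('PABC'))    # False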
@@ -41,6 +41,7 @@ import unittest
# 06/01/16 5587 tgurney Update testGetIdentifierValues
# 06/07/16 5574 tgurney Add advanced query tests
# 06/13/16 5574 tgurney Typo
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -168,6 +169,12 @@ class SatelliteTestCase(baseDafTestCase.DafTestCase):
        for record in gridData:
            self.assertIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithNotInList(self):
        collection = ('Composite', 'Miscellaneous')
        gridData = self._runConstraintTest('creatingEntity', 'not in', collection)
        for record in gridData:
            self.assertNotIn(record.getAttribute('creatingEntity'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('creatingEntity', 'junk', 'Composite')
@@ -37,6 +37,8 @@ import unittest
# 04/18/16 5548 tgurney More cleanup
# 06/09/16 5587 bsteffen Add getIdentifierValues tests
# 06/13/16 5574 tgurney Add advanced query tests
# 06/30/16 5725 tgurney Add test for NOT IN
# 01/20/17 6095 tgurney Add null identifiers test
#
#
|
@ -65,6 +67,13 @@ class SfcObsTestCase(baseDafTestCase.DafTestCase):
|
||||||
req.setParameters("temperature", "seaLevelPress", "dewpoint")
|
req.setParameters("temperature", "seaLevelPress", "dewpoint")
|
||||||
self.runGeometryDataTest(req)
|
self.runGeometryDataTest(req)
|
||||||
|
|
||||||
|
def testGetGeometryDataNullIdentifiers(self):
|
||||||
|
req = DAL.newDataRequest(self.datatype)
|
||||||
|
req.setLocationNames("14547")
|
||||||
|
req.setParameters("temperature", "seaLevelPress", "dewpoint")
|
||||||
|
req.identifiers = None
|
||||||
|
self.runGeometryDataTest(req)
|
||||||
|
|
||||||
def testGetIdentifierValues(self):
|
def testGetIdentifierValues(self):
|
||||||
req = DAL.newDataRequest(self.datatype)
|
req = DAL.newDataRequest(self.datatype)
|
||||||
optionalIds = set(DAL.getOptionalIdentifiers(req))
|
optionalIds = set(DAL.getOptionalIdentifiers(req))
|
||||||
|
@@ -159,6 +168,12 @@ class SfcObsTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getString('reportType'), collection)

    def testGetDataWithNotInList(self):
        collection = ['1004', '1005']
        geometryData = self._runConstraintTest('reportType', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('reportType'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('reportType', 'junk', '1004')
@@ -41,6 +41,7 @@ import unittest
#                       of data type
# 06/13/16 5574 tgurney Fix checks for None
# 06/21/16 5548 tgurney Skip tests that cause errors
# 06/30/16 5725 tgurney Add test for NOT IN
#
#
@@ -210,6 +211,12 @@ class WarningTestCase(baseDafTestCase.DafTestCase):
        for record in geometryData:
            self.assertIn(record.getString('sig'), collection)

    def testGetDataWithNotInList(self):
        collection = ['Y', 'W']
        geometryData = self._runConstraintTest('sig', 'not in', collection)
        for record in geometryData:
            self.assertNotIn(record.getString('sig'), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest('sig', 'junk', 'Y')
DefaultNotificationFilter.py (new file)
@@ -0,0 +1,60 @@
##
##

# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to sub-class IDataRequest.
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/03/16 2416 rjpeter Initial Creation.
# 08/01/16 2416 tgurney Implement accept()
#
#

from awips.dataaccess import INotificationFilter
import sys

if sys.version_info.major == 2:
    from itertools import izip
    # shadowing built-in zip
    zip = izip

class DefaultNotificationFilter(INotificationFilter):

    def __init__(self):
        self.constraints = None

    def getConstraints(self):
        return self.constraints

    def setConstraints(self, constraints):
        self.constraints = constraints

    def accept(self, dataUri):
        tokens = dataUri.split('/')[1:]
        if len(self.constraints) != len(tokens):
            return False
        for constraint, token in zip(self.constraints, tokens):
            if not constraint.evaluate(token):
                return False
        return True
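accept() tokenizes a dataURI on '/' (dropping the leading empty token) and requires every token to satisfy the corresponding constraint, in order. A small illustration, with a made-up dataURI layout and constraint list, and an assumed import path for the class defined above:

    from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultNotificationFilter  # assumed package path
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    filt = DefaultNotificationFilter()
    filt.setConstraints([
        RequestConstraint.new('=', 'obs'),             # first token: plugin name
        RequestConstraint.new('!=', None),             # second token: any non-null value
        RequestConstraint.new('in', ['KOAX', 'KLNK'])  # third token: station id
    ])
    print(filt.accept('/obs/2016-07-22_12:00:00.0/KOAX'))    # True
    print(filt.accept('/radar/2016-07-22_12:00:00.0/KOAX'))  # False: first token fails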
@@ -21,8 +21,9 @@
# File auto-generated by PythonFileGenerator

__all__ = [
            'DefaultDataRequest',
            'DefaultNotificationFilter'
          ]

from DefaultDataRequest import DefaultDataRequest
from DefaultNotificationFilter import DefaultNotificationFilter
GetNotificationFilterRequest.py (new file)
@@ -0,0 +1,38 @@
##
##

# File auto-generated against equivalent DynamicSerialize Java class
# and then modified post-generation to make it sub class
# AbstractDataAccessRequest.
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/26/16 2416 rjpeter Initial Creation.
#
#

from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest

class GetNotificationFilterRequest(AbstractDataAccessRequest):

    def __init__(self):
        super(GetNotificationFilterRequest, self).__init__()
@@ -29,6 +29,7 @@ __all__ = [
    'GetAvailableTimesRequest',
    'GetGeometryDataRequest',
    'GetGridDataRequest',
    'GetNotificationFilterRequest',
    'GetRequiredIdentifiersRequest',
    'GetSupportedDatatypesRequest',
    'GetOptionalIdentifiersRequest',

@@ -43,6 +44,7 @@ from GetAvailableParametersRequest import GetAvailableParametersRequest
from GetAvailableTimesRequest import GetAvailableTimesRequest
from GetGeometryDataRequest import GetGeometryDataRequest
from GetGridDataRequest import GetGridDataRequest
from GetNotificationFilterRequest import GetNotificationFilterRequest
from GetRequiredIdentifiersRequest import GetRequiredIdentifiersRequest
from GetSupportedDatatypesRequest import GetSupportedDatatypesRequest
from GetOptionalIdentifiersRequest import GetOptionalIdentifiersRequest
GetNotificationFilterResponse.py (new file)
@@ -0,0 +1,39 @@
##
##

# File auto-generated against equivalent DynamicSerialize Java class

class GetNotificationFilterResponse(object):

    def __init__(self):
        self.notificationFilter = None
        self.jmsConnectionInfo = None

    def getNotificationFilter(self):
        return self.notificationFilter

    def setNotificationFilter(self, notificationFilter):
        self.notificationFilter = notificationFilter

    def getJmsConnectionInfo(self):
        return self.jmsConnectionInfo

    def setJmsConnectionInfo(self, jmsConnectionInfo):
        self.jmsConnectionInfo = jmsConnectionInfo
@@ -25,7 +25,8 @@ __all__ = [
    'GeometryResponseData',
    'GetGeometryDataResponse',
    'GetGridDataResponse',
    'GridResponseData',
    'GetNotificationFilterResponse'
]

from AbstractResponseData import AbstractResponseData

@@ -33,4 +34,4 @@ from GeometryResponseData import GeometryResponseData
from GetGeometryDataResponse import GetGeometryDataResponse
from GetGridDataResponse import GetGridDataResponse
from GridResponseData import GridResponseData
from GetNotificationFilterResponse import GetNotificationFilterResponse
@ -24,11 +24,21 @@
|
||||||
# Date Ticket# Engineer Description
|
# Date Ticket# Engineer Description
|
||||||
# ------------ ---------- ----------- --------------------------
|
# ------------ ---------- ----------- --------------------------
|
||||||
# Jun 01, 2016 5574 tgurney Initial creation
|
# Jun 01, 2016 5574 tgurney Initial creation
|
||||||
|
# Jun 27, 2016 5725 tgurney Add NOT IN
|
||||||
|
# Jul 22, 2016 2416 tgurney Add evaluate()
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
|
import re
|
||||||
|
from ...time import DataTime
|
||||||
|
|
||||||
|
|
||||||
class RequestConstraint(object):
|
class RequestConstraint(object):
|
||||||
|
|
||||||
|
TOLERANCE = 0.0001
|
||||||
|
|
||||||
|
IN_PATTERN = re.compile(',\s?')
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.constraintValue = None
|
self.constraintValue = None
|
||||||
self.constraintType = None
|
self.constraintType = None
|
||||||
|
@@ -37,14 +47,172 @@ class RequestConstraint(object):
         return self.constraintValue

     def setConstraintValue(self, constraintValue):
+        if hasattr(self, '_evalValue'):
+            del self._evalValue
         self.constraintValue = constraintValue

     def getConstraintType(self):
         return self.constraintType

     def setConstraintType(self, constraintType):
+        if hasattr(self, '_evalValue'):
+            del self._evalValue
         self.constraintType = constraintType

+    def evaluate(self, value):
+        if not hasattr(self, '_evalValue'):
+            self._setupEvalValue()
+
+        if self.constraintType == 'EQUALS':
+            return self._evalEquals(value)
+        elif self.constraintType == 'NOT_EQUALS':
+            return not self._evalEquals(value)
+        elif self.constraintType == 'GREATER_THAN':
+            return self._evalGreaterThan(value)
+        elif self.constraintType == 'GREATER_THAN_EQUALS':
+            return self._evalGreaterThanEquals(value)
+        elif self.constraintType == 'LESS_THAN':
+            return self._evalLessThan(value)
+        elif self.constraintType == 'LESS_THAN_EQUALS':
+            return self._evalLessThanEquals(value)
+        elif self.constraintType == 'BETWEEN':
+            return self._evalBetween(value)
+        elif self.constraintType == 'IN':
+            return self._evalIn(value)
+        elif self.constraintType == 'NOT_IN':
+            return not self._evalIn(value)
+        elif self.constraintType == 'LIKE':
+            return self._evalLike(value)
+        # setupConstraintType already adds correct flags for ilike
+        # on regex pattern
+        elif self.constraintType == 'ILIKE':
+            return self._evalLike(value)
+        elif self.constraintType == 'ISNULL':
+            return self._evalIsNull(value)
+        elif self.constraintType == 'ISNOTNULL':
+            return not self._evalIsNull(value)
+        else:
+            errmsg = '{} is not a valid constraint type.'
+            raise ValueError(errmsg.format(self.constraintType))
+
+    def _makeRegex(self, pattern, flags):
+        """Make a pattern using % wildcard into a regex"""
+        pattern = re.escape(pattern)
+        pattern = pattern.replace('\\%', '.*')
+        pattern = pattern.replace('\\_', '.')
+        pattern = pattern + '$'
+        return re.compile(pattern, flags)
+
+    def _setupEvalValue(self):
+        if self.constraintType == 'BETWEEN':
+            self._evalValue = self.constraintValue.split('--')
+            self._evalValue[0] = self._adjustValueType(self._evalValue[0])
+            self._evalValue[1] = self._adjustValueType(self._evalValue[1])
+        elif self.constraintType in ('IN', 'NOT_IN'):
+            splitValue = self.IN_PATTERN.split(self.constraintValue)
+            self._evalValue = {
+                self._adjustValueType(value)
+                for value in splitValue
+            }
+            # if collection now contains multiple types we have to force
+            # everything to string instead
+            initialType = next(iter(self._evalValue)).__class__
+            for item in self._evalValue:
+                if item.__class__ is not initialType:
+                    self._evalValue = {str(value) for value in splitValue}
+                    break
+        elif self.constraintType == 'LIKE':
+            self._evalValue = self._makeRegex(self.constraintValue, re.DOTALL)
+        elif self.constraintType == 'ILIKE':
+            self._evalValue = self._makeRegex(self.constraintValue, re.IGNORECASE | re.DOTALL)
+        elif self.constraintValue is None:
+            self._evalValue = None
+        else:
+            self._evalValue = self._adjustValueType(self.constraintValue)
+
+    def _adjustValueType(self, value):
+        '''
+        Try to take part of a constraint value, encoded as a string, and
+        return it as its 'true type'.
+
+        _adjustValueType('3.0') -> 3.0
+        _adjustValueType('3') -> 3.0
+        _adjustValueType('a string') -> 'a string'
+        '''
+        try:
+            return float(value)
+        except Exception:
+            pass
+        try:
+            return DataTime(value)
+        except Exception:
+            pass
+        return value
+
+    def _matchType(self, value, otherValue):
+        '''
+        Return value coerced to be the same type as otherValue. If this is
+        not possible, just return value unmodified.
+        '''
+        # cannot use type() because otherValue might be an instance of an
+        # old-style class (then it would just be of type "instance")
+        if not isinstance(value, otherValue.__class__):
+            try:
+                return otherValue.__class__(value)
+            except Exception:
+                pass
+        return value
+
+    def _evalEquals(self, value):
+        value = self._matchType(value, self._evalValue)
+        if isinstance(value, float):
+            return abs(float(self._evalValue) - value) < self.TOLERANCE
+        else:
+            return value == self._evalValue
+
+    def _evalGreaterThan(self, value):
+        value = self._matchType(value, self._evalValue)
+        return value > self._evalValue
+
+    def _evalGreaterThanEquals(self, value):
+        value = self._matchType(value, self._evalValue)
+        return value >= self._evalValue
+
+    def _evalLessThan(self, value):
+        value = self._matchType(value, self._evalValue)
+        return value < self._evalValue
+
+    def _evalLessThanEquals(self, value):
+        value = self._matchType(value, self._evalValue)
+        return value <= self._evalValue
+
+    def _evalBetween(self, value):
+        value = self._matchType(value, self._evalValue[0])
+        return value >= self._evalValue[0] and value <= self._evalValue[1]
+
+    def _evalIn(self, value):
+        anEvalValue = next(iter(self._evalValue))
+        if isinstance(anEvalValue, float):
+            for otherValue in self._evalValue:
+                try:
+                    if abs(otherValue - float(value)) < self.TOLERANCE:
+                        return True
+                except Exception:
+                    pass
+            return False
+        else:
+            value = self._matchType(value, anEvalValue)
+            return value in self._evalValue
+
+    def _evalLike(self, value):
+        value = self._matchType(value, self._evalValue)
+        if self.constraintValue == '%':
+            return True
+        return self._evalValue.match(value) is not None
+
+    def _evalIsNull(self, value):
+        return value is None or 'null' == value
+
     # DAF-specific stuff begins here ##########################################

     CONSTRAINT_MAP = {'=': 'EQUALS',
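
A brief, standalone usage sketch of the evaluate() hook added above. The import path is an assumption based on the dynamicserialize package layout and is not shown in this hunk; the constraint values follow the '--' and comma conventions that _setupEvalValue() parses.

    # Hedged usage sketch of RequestConstraint.evaluate(); the import path is
    # assumed from the dynamicserialize package layout, not taken from this diff.
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    between = RequestConstraint()
    between.setConstraintType('BETWEEN')
    between.setConstraintValue('5.0--10.0')   # BETWEEN bounds are '--'-separated
    print(between.evaluate('7'))              # True: '7' is coerced to 7.0
    print(between.evaluate(12.5))             # False: outside the range

    ilike = RequestConstraint()
    ilike.setConstraintType('ILIKE')
    ilike.setConstraintValue('KOAX%')         # '%' wildcard becomes '.*' in the regex
    print(ilike.evaluate('koax 94 text'))     # True: case-insensitive prefix match
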
@@ -54,7 +222,7 @@ class RequestConstraint(object):
                       '<': 'LESS_THAN',
                       '<=': 'LESS_THAN_EQUALS',
                       'IN': 'IN',
-                      #'NOT IN': 'NOT_IN'
+                      'NOT IN': 'NOT_IN'
                       }

     @staticmethod
@@ -69,22 +237,22 @@ class RequestConstraint(object):
                             'are not allowed')

     @classmethod
-    def _construct_in(cls, constraintType, constraintValue):
-        """Build a new "IN" constraint from an iterable."""
+    def _constructIn(cls, constraintType, constraintValue):
+        """Build a new "IN" or "NOT IN" constraint from an iterable."""
         try:
             iterator = iter(constraintValue)
         except TypeError:
-            raise TypeError("value for IN constraint must be an iterable")
+            raise TypeError("value for IN / NOT IN constraint must be an iterable")
         stringValue = ', '.join(cls._stringify(item) for item in iterator)
         if len(stringValue) == 0:
-            raise ValueError('cannot use IN with empty collection')
+            raise ValueError('cannot use IN / NOT IN with empty collection')
         obj = cls()
         obj.setConstraintType(constraintType)
         obj.setConstraintValue(stringValue)
         return obj

     @classmethod
-    def _construct_eq_not_eq(cls, constraintType, constraintValue):
+    def _constructEq(cls, constraintType, constraintValue):
         """Build a new = or != constraint. Handle None specially by making an
         "is null" or "is not null" instead.
         """
@@ -116,9 +284,10 @@ class RequestConstraint(object):
             errmsg = '{} is not a valid operator. Valid operators are: {}'
             validOperators = list(sorted(cls.CONSTRAINT_MAP.keys()))
             raise ValueError(errmsg.format(operator, validOperators))
-        if constraintType == 'IN':
-            return cls._construct_in(constraintType, constraintValue)
+        if constraintType in ('IN', 'NOT_IN'):
+            return cls._constructIn(constraintType, constraintValue)
         elif constraintType in {'EQUALS', 'NOT_EQUALS'}:
-            return cls._construct_eq_not_eq(constraintType, constraintValue)
+            return cls._constructEq(constraintType, constraintValue)
         else:
             return cls._construct(constraintType, constraintValue)
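
For context, a hedged sketch of how the renamed helpers surface through the public new() factory now that 'NOT IN' is uncommented in CONSTRAINT_MAP. The import path is an assumption, and the ISNULL result for new('=', None) is inferred from the docstring's None handling rather than shown in this hunk.

    # Hedged sketch of the new() factory with the NOT IN operator enabled;
    # the import path and the ISNULL outcome are assumptions, not shown here.
    from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

    not_in = RequestConstraint.new('NOT IN', ['KOMA', 'KLNK'])
    print(not_in.getConstraintType())    # NOT_IN
    print(not_in.getConstraintValue())   # 'KOMA, KLNK'
    print(not_in.evaluate('KOAX'))       # True: not in the excluded set
    print(not_in.evaluate('KOMA'))       # False

    # '=' with None is documented to build an "is null" constraint instead
    is_null = RequestConstraint.new('=', None)
    print(is_null.getConstraintType())   # expected to be ISNULL
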
@@ -34,27 +34,54 @@
 # 06/24/15        4480          dgilling       implement __hash__ and __eq__,
 #                                              replace __cmp__ with rich comparison
 #                                              operators.
-#
+# 05/26/16        2416          rjpeter        Added str based constructor.
+# 08/02/16        2416          tgurney        Forecast time regex bug fix,
+#                                              plus misc cleanup

 import calendar
 import datetime
 import numpy
-import time
+import re
 import StringIO
+import time

 from dynamicserialize.dstypes.java.util import Date
 from dynamicserialize.dstypes.java.util import EnumSet

 from TimeRange import TimeRange

+_DATE = r'(\d{4}-\d{2}-\d{2})'
+_TIME = r'(\d{2}:\d{2}:\d{2})'
+_MILLIS = '(?:\.(\d{1,3})(?:\d{1,4})?)?'  # might have microsecond but that is thrown out
+REFTIME_PATTERN_STR = _DATE + '[ _]' + _TIME + _MILLIS
+FORECAST_PATTERN_STR = r'(?:[ _]\((\d+)(?::(\d{1,2}))?\))?'
+VALID_PERIOD_PATTERN_STR = r'(?:\[' + REFTIME_PATTERN_STR + '--' + REFTIME_PATTERN_STR + r'\])?'
+STR_PATTERN = re.compile(REFTIME_PATTERN_STR + FORECAST_PATTERN_STR + VALID_PERIOD_PATTERN_STR)
+

 class DataTime(object):

     def __init__(self, refTime=None, fcstTime=None, validPeriod=None):
-        self.fcstTime = int(fcstTime) if fcstTime is not None else 0
-        self.refTime = refTime if refTime is not None else None
+        """
+        Construct a new DataTime.
+        May also be called as DataTime(str) to parse a string and create a
+        DataTime from it. Some examples of valid DataTime strings:
+
+            '2016-08-02 01:23:45.0'
+            '2016-08-02 01:23:45.123'
+            '2016-08-02 01:23:45.0 (17)'
+            '2016-08-02 01:23:45.0 (17:34)'
+            '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]'
+            '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]'
+        """
+        if fcstTime is not None:
+            self.fcstTime = int(fcstTime)
+        else:
+            self.fcstTime = 0
+        self.refTime = refTime
         if validPeriod is not None and type(validPeriod) is not TimeRange:
-            ValueError("Invalid validPeriod object specified for DataTime.")
-        self.validPeriod = validPeriod if validPeriod is not None else None
+            raise ValueError("Invalid validPeriod object specified for DataTime.")
+        self.validPeriod = validPeriod
         self.utilityFlags = EnumSet('com.raytheon.uf.common.time.DataTime$FLAG')
         self.levelValue = numpy.float64(-1.0)
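
A minimal sketch of the string-based constructor documented above, assuming the import path below and reading back the attributes this diff sets directly (fcstTime, validPeriod):

    # Hedged usage sketch of DataTime(str); the import path is assumed from the
    # dynamicserialize package layout, not taken from this diff.
    from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime

    # Reference time only; fractional seconds beyond milliseconds are dropped.
    dt = DataTime('2016-08-02 01:23:45.123')

    # Reference time plus a forecast offset of 17 hours 34 minutes.
    dt_fcst = DataTime('2016-08-02 01:23:45.0 (17:34)')
    print(dt_fcst.fcstTime)                   # 63240 == 17 * 3600 + 34 * 60 seconds

    # Reference time plus an explicit valid period.
    dt_period = DataTime(
        '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]')
    print(dt_period.validPeriod.isValid())    # True
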
@@ -68,7 +95,37 @@ class DataTime(object):
                 # This is expected for java Date
                 self.refTime = long(self.refTime.getTime())
             else:
-                self.refTime = long(refTime)
+                try:
+                    self.refTime = long(self.refTime)
+                except ValueError:
+                    # Assume first arg is a string. Attempt to parse.
+                    match = STR_PATTERN.match(self.refTime)
+                    if match is None:
+                        raise ValueError('Could not parse DataTime info from '
+                                         + str(refTime))
+
+                    groups = match.groups()
+                    rDate = groups[0]
+                    rTime = groups[1]
+                    rMillis = groups[2] or 0
+                    fcstTimeHr = groups[3]
+                    fcstTimeMin = groups[4]
+                    periodStart = groups[5], groups[6], (groups[7] or 0)
+                    periodEnd = groups[8], groups[9], (groups[10] or 0)
+                    self.refTime = self._getTimeAsEpochMillis(rDate, rTime, rMillis)
+
+                    if fcstTimeHr is not None:
+                        self.fcstTime = long(fcstTimeHr) * 3600
+                        if fcstTimeMin is not None:
+                            self.fcstTime += long(fcstTimeMin) * 60
+
+                    if periodStart[0] is not None:
+                        self.validPeriod = TimeRange()
+                        periodStartTime = self._getTimeAsEpochMillis(*periodStart)
+                        self.validPeriod.setStart(periodStartTime / 1000)
+                        periodEndTime = self._getTimeAsEpochMillis(*periodEnd)
+                        self.validPeriod.setEnd(periodEndTime / 1000)
+
             self.refTime = Date(self.refTime)

             if self.validPeriod is None:
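
To make the eleven capture groups easier to follow, here is a standalone illustration that rebuilds the same pattern locally from the constants introduced earlier; nothing is imported from the package.

    # Standalone illustration of the capture-group layout used by the parser;
    # the pattern is copied from the module-level constants added above.
    import re

    _DATE = r'(\d{4}-\d{2}-\d{2})'
    _TIME = r'(\d{2}:\d{2}:\d{2})'
    _MILLIS = r'(?:\.(\d{1,3})(?:\d{1,4})?)?'   # microseconds are discarded
    REFTIME = _DATE + '[ _]' + _TIME + _MILLIS
    FORECAST = r'(?:[ _]\((\d+)(?::(\d{1,2}))?\))?'
    PERIOD = r'(?:\[' + REFTIME + '--' + REFTIME + r'\])?'
    PATTERN = re.compile(REFTIME + FORECAST + PERIOD)

    m = PATTERN.match(
        '2016-08-02 01:23:45.456_(17:34)'
        '[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]')
    # groups 0-2: reference date, time, millis
    # groups 3-4: forecast hours and optional minutes
    # groups 5-7 and 8-10: valid period start and end (date, time, millis)
    print(m.groups())
    # ('2016-08-02', '01:23:45', '456', '17', '34',
    #  '2016-08-02', '02:34:45', '0', '2016-08-02', '03:45:56', '0')
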
@@ -78,7 +135,7 @@ class DataTime(object):
                 self.validPeriod.setEnd(validTimeMillis / 1000)

         # figure out utility flags
-        if fcstTime:
+        if self.fcstTime:
             self.utilityFlags.add("FCST_USED")
         if self.validPeriod and self.validPeriod.isValid():
             self.utilityFlags.add("PERIOD_USED")
@@ -121,6 +178,7 @@ class DataTime(object):
         micros = (self.refTime.getTime() % 1000) * 1000
         dtObj = datetime.datetime.utcfromtimestamp(refTimeInSecs)
         dtObj = dtObj.replace(microsecond=micros)
+        # This won't be compatible with java or string from java since its to microsecond
         buffer.write(dtObj.isoformat(' '))

         if "FCST_USED" in self.utilityFlags:
@@ -224,3 +282,8 @@ class DataTime(object):
             return NotImplemented

         return self.__gt__(other) or self.__eq__(other)
+
+    def _getTimeAsEpochMillis(self, dateStr, timeStr, millis):
+        t = time.strptime(dateStr + ' ' + timeStr, '%Y-%m-%d %H:%M:%S')
+        epochSeconds = calendar.timegm(t)
+        return long(epochSeconds * 1000) + long(millis)
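
As a quick standalone check of the epoch-millisecond helper added above, the same conversion can be reproduced with the standard library; this mirrors _getTimeAsEpochMillis() rather than importing it.

    # Standalone check mirroring _getTimeAsEpochMillis(); nothing is imported
    # from the package, and int() stands in for Python 2's long().
    import calendar
    import time

    def time_as_epoch_millis(date_str, time_str, millis):
        t = time.strptime(date_str + ' ' + time_str, '%Y-%m-%d %H:%M:%S')
        epoch_seconds = calendar.timegm(t)   # interprets the struct_time as UTC
        return epoch_seconds * 1000 + int(millis)

    print(time_as_epoch_millis('2016-08-02', '01:23:45', '456'))   # 1470101025456
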