mirror of https://github.com/Unidata/python-awips.git (synced 2025-02-23 22:57:56 -05:00)

Commit 3837f21015: "ufpy updates from master_19.1.1"
Parent: 5c51ad599e
49 changed files with 2413 additions and 489 deletions
@@ -1,19 +1,33 @@
 ##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
 ##
 
-from string import Template
+from __future__ import print_function
 
-import ctypes
-from . import stomp
+import stomp
 import socket
 import sys
 import time
-import threading
 import xml.etree.ElementTree as ET
 
-from . import ThriftClient
+import ThriftClient
 from dynamicserialize.dstypes.com.raytheon.uf.common.alertviz import AlertVizRequest
-from dynamicserialize import DynamicSerializationManager
 
 #
 # Provides a capability of constructing notification messages and sending

@@ -32,6 +46,7 @@ from dynamicserialize import DynamicSerializationManager
 #                                                 value
 #    07/27/15        4654          skorolev       Added filters
 #    11/11/15        5120          rferrel        Cannot serialize empty filters.
+#    03/05/18        6899          dgilling       Update to latest version of stomp.py API.
 #
 class NotificationMessage:
 
@@ -75,7 +90,7 @@ class NotificationMessage:
             priorityInt = int(5)
 
         if (priorityInt < 0 or priorityInt > 5):
-            print("Error occurred, supplied an invalid Priority value: " + str(priorityInt))
+            print("Error occurred, supplied an invalid Priority value:", str(priorityInt))
             print("Priority values are 0, 1, 2, 3, 4 and 5.")
             sys.exit(1)
 
@@ -84,16 +99,6 @@ class NotificationMessage:
         else:
             self.priority = priority
 
-    def connection_timeout(self, connection):
-        if (connection is not None and not connection.is_connected()):
-            print("Connection Retry Timeout")
-            for tid, tobj in list(threading._active.items()):
-                if tobj.name is "MainThread":
-                    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(SystemExit))
-                    if res != 0 and res != 1:
-                        # problem, reset state
-                        ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, 0)
-
     def send(self):
         # depending on the value of the port number indicates the distribution
         # of the message to AlertViz
@@ -101,32 +106,26 @@
         # 61999 is local distribution
         if (int(self.port) == 61999):
             # use stomp.py
-            conn = stomp.Connection(host_and_ports=[(self.host, 61999)])
-            timeout = threading.Timer(5.0, self.connection_timeout, [conn])
+            conn = stomp.Connection(host_and_ports=[(self.host, 61999)],
+                                    timeout=5.)
 
             try:
-                timeout.start();
                 conn.start()
-            finally:
-                timeout.cancel()
-
-            conn.connect()
-
-            sm = ET.Element("statusMessage")
-            sm.set("machine", socket.gethostname())
-            sm.set("priority", self.priority)
-            sm.set("category", self.category)
-            sm.set("sourceKey", self.source)
-            sm.set("audioFile", self.audioFile)
-            if self.filters is not None and len(self.filters) > 0:
-                sm.set("filters", self.filters)
-            msg = ET.SubElement(sm, "message")
-            msg.text = self.message
-            details = ET.SubElement(sm, "details")
-            msg = ET.tostring(sm, "UTF-8")
-
-            try :
-                conn.send(msg, destination='/queue/messages')
+                conn.connect()
+
+                sm = ET.Element("statusMessage")
+                sm.set("machine", socket.gethostname())
+                sm.set("priority", self.priority)
+                sm.set("category", self.category)
+                sm.set("sourceKey", self.source)
+                sm.set("audioFile", self.audioFile)
+                if self.filters:
+                    sm.set("filters", self.filters)
+                msg = ET.SubElement(sm, "message")
+                msg.text = self.message
+                msg = ET.tostring(sm, "UTF-8")
+
+                conn.send(destination='/queue/messages', body=msg, content_type='application/xml;charset=utf-8')
                 time.sleep(2)
             finally:
                 conn.stop()

@@ -139,13 +138,13 @@
         try:
             serverResponse = thriftClient.sendRequest(alertVizRequest)
         except Exception as ex:
-            print("Caught exception submitting AlertVizRequest: ", str(ex))
+            print("Caught exception submitting AlertVizRequest:", str(ex))
 
         if (serverResponse != "None"):
-            print("Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse)
+            print("Error occurred submitting Notification Message to AlertViz receiver:", serverResponse)
             sys.exit(1)
         else:
-            print("Response: " + str(serverResponse))
+            print("Response:", str(serverResponse))
 
 def createRequest(message, priority, source, category, audioFile, filters):
     obj = AlertVizRequest()
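The rewritten send() path relies on the newer stomp.py interface: the connection timeout is passed to stomp.Connection directly instead of being enforced by a threading.Timer watchdog, and the payload is sent with keyword arguments. A minimal standalone sketch of that call pattern, assuming the stomp.py 4.x API targeted by ticket 6899 (host, queue, and message content here are illustrative):

import socket
import xml.etree.ElementTree as ET

import stomp  # assumes stomp.py 4.x, as targeted by ticket 6899

def send_local_alert(message, host="localhost", priority="1", category="LOCAL"):
    # Build the same statusMessage document that NotificationMessage.send() builds.
    sm = ET.Element("statusMessage")
    sm.set("machine", socket.gethostname())
    sm.set("priority", priority)
    sm.set("category", category)
    msg_elem = ET.SubElement(sm, "message")
    msg_elem.text = message
    body = ET.tostring(sm, "UTF-8")

    # The connection-level timeout replaces the old threading.Timer watchdog.
    conn = stomp.Connection(host_and_ports=[(host, 61999)], timeout=5.0)
    try:
        conn.start()
        conn.connect()
        conn.send(destination='/queue/messages', body=body,
                  content_type='application/xml;charset=utf-8')
    finally:
        conn.stop()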
@@ -30,9 +30,13 @@
 #    11/17/10                      njensen        Initial Creation.
 #    08/15/13        2169          bkowal         Optionally gzip decompress any data that is read.
 #    08/04/16        2416          tgurney        Add queueStarted property
+#    02/16/17        6084          bsteffen       Support ssl connections
+#    09/07/17        6175          tgurney        Remove "decompressing" log message
 #
 #
 
+import os
+import os.path
 import qpid
 import zlib
 
@@ -41,11 +45,24 @@ from qpid.exceptions import Closed
 
 class QpidSubscriber:
 
-    def __init__(self, host='127.0.0.1', port=5672, decompress=False):
+    def __init__(self, host='127.0.0.1', port=5672, decompress=False, ssl=None):
         self.host = host
         self.port = port
         self.decompress = decompress;
         socket = qpid.util.connect(host, port)
+        if "QPID_SSL_CERT_DB" in os.environ:
+            certdb = os.environ["QPID_SSL_CERT_DB"]
+        else:
+            certdb = os.path.expanduser("~/.qpid/")
+        if "QPID_SSL_CERT_NAME" in os.environ:
+            certname = os.environ["QPID_SSL_CERT_NAME"]
+        else:
+            certname = "guest"
+        certfile = os.path.join(certdb, certname + ".crt")
+        if ssl or (ssl is None and os.path.exists(certfile)):
+            keyfile = os.path.join(certdb, certname + ".key")
+            trustfile = os.path.join(certdb, "root.crt")
+            socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
         self.__connection = qpid.connection.Connection(sock=socket, username='guest', password='guest')
         self.__connection.start()
         self.__session = self.__connection.session(str(qpid.datatypes.uuid4()))

@@ -78,7 +95,6 @@ class QpidSubscriber:
         content = message.body
         self.__session.message_accept(qpid.datatypes.RangedSet(message.id))
         if (self.decompress):
-            print "Decompressing received content"
             try:
                 # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk
                 d = zlib.decompressobj(16+zlib.MAX_WBITS)

@@ -103,3 +119,4 @@ class QpidSubscriber:
     @property
     def queueStarted(self):
         return self.__queueStarted
+
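The new ssl handling follows a simple rule: an explicit ssl=True forces TLS, ssl=False disables it, and the default of None turns TLS on only when a client certificate is found in the certificate directory. A small sketch of that decision logic in isolation (the environment variable names and directory layout come from the added code; the helper function name is illustrative):

import os
import os.path

def resolve_qpid_certs(ssl=None, certname="guest"):
    """Return (use_ssl, certfile, keyfile, trustfile) using the same rules
    as the updated QpidSubscriber.__init__."""
    certdb = os.environ.get("QPID_SSL_CERT_DB", os.path.expanduser("~/.qpid/"))
    certname = os.environ.get("QPID_SSL_CERT_NAME", certname)
    certfile = os.path.join(certdb, certname + ".crt")
    use_ssl = ssl or (ssl is None and os.path.exists(certfile))
    keyfile = os.path.join(certdb, certname + ".key")
    trustfile = os.path.join(certdb, "root.crt")
    return use_ssl, certfile, keyfile, trustfile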
@@ -17,13 +17,23 @@
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
+#
+# SOFTWARE HISTORY
+#
+# Date           Ticket#  Engineer  Description
+# -------------  -------- --------- ---------------------------------------------
+# Feb 13, 2017   6092     randerso  Added StoreTimeAction
+#
+##
 
 import argparse
 import sys
+import time
 
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
 
+TIME_FORMAT = "%Y%m%d_%H%M"
+
 class UsageArgumentParser(argparse.ArgumentParser):
     """

@@ -56,3 +66,16 @@ class AppendParmNameAndLevelAction(argparse.Action):
         else:
             setattr(namespace, self.dest, [comp])
 
+
+class StoreTimeAction(argparse.Action):
+    """
+    argparse.Action subclass to validate GFE formatted time strings
+    and parse them to time.struct_time
+    """
+    def __call__(self, parser, namespace, values, option_string=None):
+        try:
+            timeStruct = time.strptime(values, TIME_FORMAT)
+        except:
+            parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")
+
+        setattr(namespace, self.dest, timeStruct)
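StoreTimeAction plugs into argparse like any other Action subclass: the raw string is validated against TIME_FORMAT ("%Y%m%d_%H%M") and the parsed time.struct_time is stored on the namespace. A hedged usage sketch (the --startTime option name is illustrative and not taken from this diff):

import argparse
import time

TIME_FORMAT = "%Y%m%d_%H%M"

class StoreTimeAction(argparse.Action):
    # Same behavior as the class added in this commit.
    def __call__(self, parser, namespace, values, option_string=None):
        try:
            timeStruct = time.strptime(values, TIME_FORMAT)
        except ValueError:
            parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")
        setattr(namespace, self.dest, timeStruct)

parser = argparse.ArgumentParser()
parser.add_argument("--startTime", action=StoreTimeAction, dest="startTime")
args = parser.parse_args(["--startTime", "20170213_1200"])
print(args.startTime.tm_year)  # 2017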
@@ -90,6 +90,7 @@ def __getAvailableTimesForEachLocation(request, refTimeOnly=False):
     else:
         return DataAccessLayer.getAvailableTimes(request, refTimeOnly)
 
+
 def __cloneRequest(request):
     return DataAccessLayer.newDataRequest(datatype = request.getDatatype(),
                                           parameters = request.getParameters(),
@@ -41,7 +41,7 @@
 #                                               getRequiredIdentifiers() and
 #                                               getOptionalIdentifiers()
 #    10/07/16         ----         mjames@ucar  Added getForecastRun
-#
+#    Oct 18, 2016     5916         bsteffen     Add setLazyLoadGridLatLon
 #
 #
 
@@ -252,3 +252,26 @@ def changeEDEXHost(newHostName):
         router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
     else:
         raise TypeError("Cannot call changeEDEXHost when using JepRouter.")
+
+def setLazyLoadGridLatLon(lazyLoadGridLatLon):
+    """
+    Provide a hint to the Data Access Framework indicating whether to load the
+    lat/lon data for a grid immediately or wait until it is needed. This is
+    provided as a performance tuning hint and should not affect the way the
+    Data Access Framework is used. Depending on the internal implementation of
+    the Data Access Framework this hint might be ignored. Examples of when this
+    should be set to True are when the lat/lon information is not used or when
+    it is used only if certain conditions within the data are met. It could be
+    set to False if it is guaranteed that all lat/lon information is needed and
+    it would be better to get any performance overhead for generating the
+    lat/lon data out of the way during the initial request.
+
+    Args:
+        lazyLoadGridLatLon: Boolean value indicating whether to lazy load.
+    """
+    try:
+        router.setLazyLoadGridLatLon(lazyLoadGridLatLon)
+    except AttributeError:
+        # The router is not required to support this capability.
+        pass
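The hint is purely optional: callers that never touch lat/lon grids can avoid transferring them with the initial grid response, and everything else behaves the same. A hedged usage sketch (the host, model, and parameter names are illustrative, and getRawData() is assumed from the existing PyGridData API rather than shown in this diff):

from awips.dataaccess import DataAccessLayer

DataAccessLayer.changeEDEXHost("edex-host")        # hypothetical host
# Ask the framework to defer lat/lon generation until getLatLonCoords() is called.
DataAccessLayer.setLazyLoadGridLatLon(True)

request = DataAccessLayer.newDataRequest("grid")
request.setLocationNames("RAP13")                  # illustrative model name
request.setParameters("T")
times = DataAccessLayer.getAvailableTimes(request)
grids = DataAccessLayer.getGridData(request, times[-1:])

data = grids[0].getRawData()                       # no lat/lon cost paid yet
lons, lats = grids[0].getLatLonCoords()            # lat/lon fetched on first use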
@@ -27,12 +27,28 @@
 #    Date            Ticket#       Engineer       Description
 #    ------------    ----------    -----------    --------------------------
 #    07/22/16        2416          tgurney        Initial creation
+#    09/07/17        6175          tgurney        Override messageReceived
 #
 
+import dynamicserialize
 from awips.dataaccess.PyNotification import PyNotification
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 
 class PyGeometryNotification(PyNotification):
 
+    def messageReceived(self, msg):
+        dataUriMsg = dynamicserialize.deserialize(msg)
+        dataUris = dataUriMsg.getDataURIs()
+        dataTimes = set()
+        for dataUri in dataUris:
+            if self.notificationFilter.accept(dataUri):
+                dataTimes.add(self.getDataTime(dataUri))
+        if dataTimes:
+            try:
+                data = self.getData(self.request, list(dataTimes))
+                self.callback(data)
+            except Exception as e:
+                traceback.print_exc()
+
     def getData(self, request, dataTimes):
         return self.DAL.getGeometryData(request, dataTimes)
@@ -28,7 +28,9 @@
 #    Date            Ticket#       Engineer       Description
 #    ------------    ----------    -----------    --------------------------
 #    06/03/13         #2023        dgilling       Initial Creation.
+#    10/13/16         #5916        bsteffen       Correct grid shape, allow lat/lon
 #    11/10/16         #5900        bsteffen       Correct grid shape
+#                                                 to be requested by a delegate
 #
 #

@@ -46,7 +48,7 @@ The ability to unit convert grid data is not currently available in this version
 
 class PyGridData(IGridData, PyData.PyData):
 
-    def __init__(self, gridDataRecord, nx, ny, latLonGrid):
+    def __init__(self, gridDataRecord, nx, ny, latLonGrid = None, latLonDelegate = None):
         PyData.PyData.__init__(self, gridDataRecord)
         nx = nx
         ny = ny

@@ -54,6 +56,8 @@ class PyGridData(IGridData, PyData.PyData):
         self.__unit = gridDataRecord.getUnit()
         self.__gridData = numpy.reshape(numpy.array(gridDataRecord.getGridData()), (ny, nx))
         self.__latLonGrid = latLonGrid
+        self.__latLonDelegate = latLonDelegate
+
 
     def getParameter(self):
         return self.__parameter

@@ -70,4 +74,8 @@ class PyGridData(IGridData, PyData.PyData):
         return self.__gridData
 
     def getLatLonCoords(self):
+        if self.__latLonGrid is not None:
+            return self.__latLonGrid
+        elif self.__latLonDelegate is not None:
+            return self.__latLonDelegate()
         return self.__latLonGrid
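getLatLonCoords() now prefers an eagerly supplied grid but falls back to calling the delegate, so several PyGridData objects can share one deferred lat/lon fetch. A minimal sketch of that pattern with a stand-in delegate (the class and values below are illustrative, not part of the commit):

class CountingLatLonDelegate(object):
    """Stand-in for LazyGridLatLon: fetches (lons, lats) once, then caches."""
    def __init__(self, fetch):
        self._fetch = fetch
        self._latLonGrid = None
        self.calls = 0

    def __call__(self):
        if self._latLonGrid is None:
            self.calls += 1
            self._latLonGrid = self._fetch()
        return self._latLonGrid

delegate = CountingLatLonDelegate(lambda: ([[0.0]], [[0.0]]))
# Both grids could share the delegate; the expensive fetch happens at most once:
# gridA = PyGridData(recordA, nx, ny, latLonDelegate=delegate)
# gridB = PyGridData(recordB, nx, ny, latLonDelegate=delegate)
lons, lats = delegate()
lons, lats = delegate()
assert delegate.calls == 1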
@@ -26,13 +26,34 @@
 #
 #    Date            Ticket#       Engineer       Description
 #    ------------    ----------    -----------    --------------------------
 #    06/03/16        2416          rjpeter        Initial Creation.
+#    09/06/17        6175          tgurney        Override messageReceived
 #
 
+import dynamicserialize
 from awips.dataaccess.PyNotification import PyNotification
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 
 class PyGridNotification(PyNotification):
 
+    def messageReceived(self, msg):
+        dataUriMsg = dynamicserialize.deserialize(msg)
+        dataUris = dataUriMsg.getDataURIs()
+        for dataUri in dataUris:
+            if not self.notificationFilter.accept(dataUri):
+                continue
+            try:
+                # This improves performance over requesting by datatime since it requests only the
+                # parameter that the notification was received for (instead of this and all previous
+                # parameters for the same forecast hour)
+                # TODO: This utterly fails for derived requests
+                newReq = self.DAL.newDataRequest(self.request.getDatatype())
+                newReq.addIdentifier("dataURI", dataUri)
+                newReq.setParameters(self.request.getParameters())
+                data = self.getData(newReq, [])
+                self.callback(data)
+            except Exception as e:
+                traceback.print_exc()
+
     def getData(self, request, dataTimes):
         return self.DAL.getGridData(request, dataTimes)
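The grid notification now builds a fresh request keyed on the dataURI carried by the alert message rather than re-requesting by data time, which pulls back only the newly arrived parameter. A hedged sketch of that request shape outside the notification class (the function name and the empty time list follow the new code; datatype and parameters come from whatever original request the caller holds):

from awips.dataaccess import DataAccessLayer

def request_for_uri(original_request, data_uri):
    """Mirror the per-URI request built in PyGridNotification.messageReceived()."""
    new_req = DataAccessLayer.newDataRequest(original_request.getDatatype())
    # Constrain the request to exactly the product that triggered the alert.
    new_req.addIdentifier("dataURI", data_uri)
    new_req.setParameters(original_request.getParameters())
    # The new code passes an empty time list when requesting by dataURI.
    return DataAccessLayer.getGridData(new_req, [])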
@@ -29,6 +29,7 @@
 #    ------------    ----------    -----------    --------------------------
 #    Jun 22, 2016    2416          rjpeter        Initial creation
 #    Jul 22, 2016    2416          tgurney        Finish implementation
+#    Sep 07, 2017    6175          tgurney        Override messageReceived in subclasses
 #
 
 
@@ -55,11 +56,11 @@ class PyNotification(INotificationSubscriber):
     def __init__(self, request, filter, host='localhost', port=5672, requestHost='localhost'):
         self.DAL = DataAccessLayer
         self.DAL.changeEDEXHost(requestHost)
-        self.__request = request
-        self.__notificationFilter = filter
+        self.request = request
+        self.notificationFilter = filter
         self.__topicSubscriber = QpidSubscriber(host, port, decompress=True)
         self.__topicName = "edex.alerts"
-        self.__callback = None
+        self.callback = None
 
     def subscribe(self, callback):
         """

@@ -70,48 +71,25 @@ class PyNotification(INotificationSubscriber):
         Will be called once for each request made for data.
         """
         assert hasattr(callback, '__call__'), 'callback arg must be callable'
-        self.__callback = callback
-        self.__topicSubscriber.topicSubscribe(self.__topicName, self._messageReceived)
+        self.callback = callback
+        self.__topicSubscriber.topicSubscribe(self.__topicName, self.messageReceived)
         # Blocks here
 
     def close(self):
         if self.__topicSubscriber.subscribed:
             self.__topicSubscriber.close()
 
-    def _getDataTime(self, dataURI):
+    def getDataTime(self, dataURI):
         dataTimeStr = dataURI.split('/')[2]
         return DataTime(dataTimeStr)
 
-    def _messageReceived(self, msg):
-        dataUriMsg = dynamicserialize.deserialize(msg)
-        dataUris = dataUriMsg.getDataURIs()
-        dataTimes = [
-            self._getDataTime(dataUri)
-            for dataUri in dataUris
-            if self.__notificationFilter.accept(dataUri)
-        ]
-        if dataTimes:
-            secondTry = False
-            while True:
-                try:
-                    data = self.getData(self.__request, dataTimes)
-                    break
-                except ThriftRequestException:
-                    if secondTry:
-                        try:
-                            self.close()
-                        except Exception:
-                            pass
-                        raise
-                    else:
-                        secondTry = True
-                        time.sleep(5)
-            try:
-                self.__callback(data)
-            except Exception as e:
-                # don't want callback to blow up the notifier itself.
-                traceback.print_exc()
-        # TODO: This utterly fails for derived requests
+    @abc.abstractmethod
+    def messageReceived(self, msg):
+        """Called when a message is received from QpidSubscriber.
+
+        This method must call self.callback once for each request made for data
+        """
+        pass
 
     @abc.abstractmethod
     def getData(self, request, dataTimes):
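messageReceived is now an abstract hook: each subscriber decides how to turn a batch of dataURIs into calls to self.callback, while PyNotification keeps the QPID plumbing. A hedged sketch of a custom subclass (the class itself is hypothetical; the attributes and methods it uses are the ones exposed by the refactored base class above):

import traceback

import dynamicserialize
from awips.dataaccess.PyNotification import PyNotification

class LoggingGridNotification(PyNotification):
    """Example subscriber: prints accepted dataURIs and forwards data to the callback."""

    def messageReceived(self, msg):
        dataUriMsg = dynamicserialize.deserialize(msg)
        for dataUri in dataUriMsg.getDataURIs():
            if not self.notificationFilter.accept(dataUri):
                continue
            print("accepted", dataUri)
            try:
                data = self.getData(self.request, [self.getDataTime(dataUri)])
                self.callback(data)
            except Exception:
                # Keep the notifier alive even if the callback fails.
                traceback.print_exc()

    def getData(self, request, dataTimes):
        return self.DAL.getGridData(request, dataTimes)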
@@ -39,7 +39,8 @@
 #                                               getRequiredIdentifiers() and
 #                                               getOptionalIdentifiers()
 #    08/01/16        2416          tgurney       Add getNotificationFilter()
-#    11/10/16        5900          bsteffen      Correct grid shape
+#    10/13/16        5916          bsteffen      Correct grid shape, allow lazy grid lat/lon
+#    10/26/16        5919          njensen       Speed up geometry creation in getGeometryData()
 #
 
 
@@ -51,6 +52,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import G
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGeometryDataRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridDataRequest
+from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridLatLonRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableParametersRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLevelsRequest
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetRequiredIdentifiersRequest

@@ -64,10 +66,39 @@ from awips.dataaccess import PyGeometryData
 from awips.dataaccess import PyGridData
 
 
+class LazyGridLatLon(object):
+
+    def __init__(self, client, nx, ny, envelope, crsWkt):
+        self._latLonGrid = None
+        self._client = client
+        self._request = GetGridLatLonRequest()
+        self._request.setNx(nx)
+        self._request.setNy(ny)
+        self._request.setEnvelope(envelope)
+        self._request.setCrsWkt(crsWkt)
+
+    def __call__(self):
+        # Its important that the data is cached internally so that if multiple
+        # GridData are sharing the same delegate then they can also share a
+        # single request for the LatLon information.
+        if self._latLonGrid is None:
+            response = self._client.sendRequest(self._request)
+            nx = response.getNx()
+            ny = response.getNy()
+            latData = numpy.reshape(numpy.array(response.getLats()), (ny, nx))
+            lonData = numpy.reshape(numpy.array(response.getLons()), (ny, nx))
+            self._latLonGrid = (lonData, latData)
+        return self._latLonGrid
+
+
 class ThriftClientRouter(object):
 
     def __init__(self, host='localhost'):
         self._client = ThriftClient.ThriftClient(host)
+        self._lazyLoadGridLatLon = False
+
+    def setLazyLoadGridLatLon(self, lazyLoadGridLatLon):
+        self._lazyLoadGridLatLon = lazyLoadGridLatLon
 
     def getAvailableTimes(self, request, refTimeOnly):
         timesRequest = GetAvailableTimesRequest()

@@ -78,6 +109,7 @@ class ThriftClientRouter(object):
 
     def getGridData(self, request, times):
         gridDataRequest = GetGridDataRequest()
+        gridDataRequest.setIncludeLatLonData(not self._lazyLoadGridLatLon)
         gridDataRequest.setRequestParameters(request)
         # if we have an iterable times instance, then the user must have asked
         # for grid data with the List of DataTime objects

@@ -95,15 +127,28 @@ class ThriftClientRouter(object):
         for location in locNames:
             nx = response.getSiteNxValues()[location]
             ny = response.getSiteNyValues()[location]
-            latData = numpy.reshape(numpy.array(response.getSiteLatGrids()[location]), (ny, nx))
-            lonData = numpy.reshape(numpy.array(response.getSiteLonGrids()[location]), (ny, nx))
-            locSpecificData[location] = (nx, ny, (lonData, latData))
+            if self._lazyLoadGridLatLon:
+                envelope = response.getSiteEnvelopes()[location]
+                crsWkt = response.getSiteCrsWkt()[location]
+                delegate = LazyGridLatLon(
+                    self._client, nx, ny, envelope, crsWkt)
+                locSpecificData[location] = (nx, ny, delegate)
+            else:
+                latData = numpy.reshape(numpy.array(
+                    response.getSiteLatGrids()[location]), (ny, nx))
+                lonData = numpy.reshape(numpy.array(
+                    response.getSiteLonGrids()[location]), (ny, nx))
+                locSpecificData[location] = (nx, ny, (lonData, latData))
         retVal = []
         for gridDataRecord in response.getGridData():
             locationName = gridDataRecord.getLocationName()
             locData = locSpecificData[locationName]
-            retVal.append(PyGridData.PyGridData(gridDataRecord, locData[0], locData[1], locData[2]))
+            if self._lazyLoadGridLatLon:
+                retVal.append(PyGridData.PyGridData(gridDataRecord, locData[
+                    0], locData[1], latLonDelegate=locData[2]))
+            else:
+                retVal.append(PyGridData.PyGridData(
+                    gridDataRecord, locData[0], locData[1], locData[2]))
         return retVal
 
     def getGeometryData(self, request, times):
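When lazy loading is enabled, every location gets one LazyGridLatLon delegate that is handed to all PyGridData objects for that location, and the GetGridLatLonRequest is sent only the first time any of them asks for coordinates. A small sketch of that sharing behavior with a fake Thrift client (the awips.ThriftClientRouter import path is assumed, the fake classes are stand-ins, and python-awips with its dynamicserialize types must be importable):

import numpy
from awips.ThriftClientRouter import LazyGridLatLon  # module path assumed

class FakeLatLonResponse(object):
    # Mimics only the pieces of GetGridLatLonResponse that LazyGridLatLon reads.
    def getNx(self): return 2
    def getNy(self): return 2
    def getLats(self): return [40.0, 40.0, 41.0, 41.0]
    def getLons(self): return [-105.0, -104.0, -105.0, -104.0]

class FakeClient(object):
    def __init__(self):
        self.requests = 0
    def sendRequest(self, request):
        self.requests += 1
        return FakeLatLonResponse()

client = FakeClient()
delegate = LazyGridLatLon(client, 2, 2, envelope=None, crsWkt=None)
lons1, lats1 = delegate()
lons2, lats2 = delegate()
assert client.requests == 1   # second call is served from the cached grid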
@@ -121,10 +166,9 @@ class ThriftClientRouter(object):
         response = self._client.sendRequest(geoDataRequest)
         geometries = []
         for wkb in response.getGeometryWKBs():
-            # convert the wkb to a bytearray with only positive values
-            byteArrWKB = bytearray(map(lambda x: x % 256,wkb.tolist()))
-            # convert the bytearray to a byte string and load it.
-            geometries.append(shapely.wkb.loads(str(byteArrWKB)))
+            # the wkb is a numpy.ndarray of dtype int8
+            # convert the bytearray to a byte string and load it
+            geometries.append(shapely.wkb.loads(wkb.tostring()))
 
         retVal = []
         for geoDataRecord in response.getGeoData():

@@ -175,7 +219,7 @@ class ThriftClientRouter(object):
         response = self._client.sendRequest(idValReq)
         return response
 
-    def newDataRequest(self, datatype, parameters=[], levels=[], locationNames = [], envelope=None, **kwargs):
+    def newDataRequest(self, datatype, parameters=[], levels=[], locationNames=[], envelope=None, **kwargs):
         req = DefaultDataRequest()
         if datatype:
             req.setDatatype(datatype)
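The geometry path now hands shapely the raw WKB bytes straight from the int8 ndarray instead of round-tripping through a positive-valued bytearray, which is the speedup noted in the njensen 5919 history entry. A small sketch showing the two byte forms decode to the same geometry (requires numpy and shapely; the sample point is illustrative, and tobytes() is the modern spelling of the tostring() call used in the committed code):

import numpy
import shapely.wkb
from shapely.geometry import Point

raw = numpy.frombuffer(Point(-104.0, 40.0).wkb, dtype=numpy.int8)

# Old approach: force every byte positive, then build a bytearray.
old_bytes = bytearray(map(lambda x: x % 256, raw.tolist()))
# New approach: let numpy hand back the original byte string directly.
new_bytes = raw.tobytes()

assert bytes(old_bytes) == new_bytes
print(shapely.wkb.loads(new_bytes))  # POINT (-104 40)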
|
awips/localization/LocalizationFileManager.py (new file, 470 lines)
@@ -0,0 +1,470 @@
|
||||||
|
##
|
||||||
|
# This software was developed and / or modified by Raytheon Company,
|
||||||
|
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||||
|
#
|
||||||
|
# U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||||
|
# This software product contains export-restricted data whose
|
||||||
|
# export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||||
|
# to non-U.S. persons whether in the United States or abroad requires
|
||||||
|
# an export license or other authorization.
|
||||||
|
#
|
||||||
|
# Contractor Name: Raytheon Company
|
||||||
|
# Contractor Address: 6825 Pine Street, Suite 340
|
||||||
|
# Mail Stop B8
|
||||||
|
# Omaha, NE 68106
|
||||||
|
# 402.291.0100
|
||||||
|
#
|
||||||
|
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||||
|
# further licensing information.
|
||||||
|
##
|
||||||
|
|
||||||
|
#
|
||||||
|
# Library for accessing localization files from python.
|
||||||
|
#
|
||||||
|
# SOFTWARE HISTORY
|
||||||
|
#
|
||||||
|
# Date Ticket# Engineer Description
|
||||||
|
# --------- -------- --------- --------------------------
|
||||||
|
# 08/09/17 5731 bsteffen Initial Creation.
|
||||||
|
|
||||||
|
|
||||||
|
import urllib2
|
||||||
|
from json import load as loadjson
|
||||||
|
from xml.etree.ElementTree import parse as parseXml
|
||||||
|
from base64 import b64encode
|
||||||
|
from StringIO import StringIO
|
||||||
|
from getpass import getuser
|
||||||
|
import dateutil.parser
|
||||||
|
import contextlib
|
||||||
|
import os
|
||||||
|
from urlparse import urlunparse, urljoin
|
||||||
|
|
||||||
|
NON_EXISTENT_CHECKSUM = 'NON_EXISTENT_CHECKSUM'
|
||||||
|
DIRECTORY_CHECKSUM = 'DIRECTORY_CHECKSUM'
|
||||||
|
|
||||||
|
class LocalizationFileVersionConflictException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class LocalizationFileDoesNotExistException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class LocalizationFileIsNotDirectoryException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class LocalizationContext(object):
|
||||||
|
"""A localization context defines the scope of a localization file.
|
||||||
|
|
||||||
|
For example the base localization context includes all the default files
|
||||||
|
installed with EDEX, while a particular user context has custom files for
|
||||||
|
that user.
|
||||||
|
|
||||||
|
A localization context consists of a level and name. The level defines what
|
||||||
|
kind of entity this context is valid for, such as 'base', 'site', or 'user'.
|
||||||
|
The name identifies the specific entity, for example the name of a 'user'
|
||||||
|
level context is usually the username. The 'base' level does not have a name
|
||||||
|
because there cannot be only one 'base' context.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
level: the localization level
|
||||||
|
name: the context name
|
||||||
|
"""
|
||||||
|
def __init__(self, level="base", name=None, type="common_static"):
|
||||||
|
if level != "base":
|
||||||
|
assert name is not None
|
||||||
|
self.level = level
|
||||||
|
self.name = name
|
||||||
|
self.type = type
|
||||||
|
def isBase(self):
|
||||||
|
return self.level == "base"
|
||||||
|
def _getUrlComponent(self):
|
||||||
|
if self.isBase():
|
||||||
|
return self.type + '/' + "base/"
|
||||||
|
else:
|
||||||
|
return self.type + '/' + self.level + '/' + self.name + '/'
|
||||||
|
def __str__(self):
|
||||||
|
if self.isBase():
|
||||||
|
return self.type + ".base"
|
||||||
|
else:
|
||||||
|
return self.type + "." + self.level + "." + self.name
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self.level == other.level and \
|
||||||
|
self.name == other.name and \
|
||||||
|
self.type == other.type
|
||||||
|
def __hash__(self):
|
||||||
|
return hash((self.level, self.name, self.type))
|
||||||
|
|
||||||
|
class _LocalizationOutput(StringIO):
|
||||||
|
"""A file-like object for writing a localization file.
|
||||||
|
|
||||||
|
The contents being written are stored in memory and written to a
|
||||||
|
localization server only when the writing is finished.
|
||||||
|
|
||||||
|
This object should be used as a context manager, a save operation will be
|
||||||
|
executed if the context exits with no errors. If errors occur the partial
|
||||||
|
contents are abandoned and the server is unchanged.
|
||||||
|
|
||||||
|
It is also possible to save the contents to the server with the save()
|
||||||
|
method.
|
||||||
|
"""
|
||||||
|
def __init__(self, manager, file):
|
||||||
|
StringIO.__init__(self)
|
||||||
|
self._manager = manager
|
||||||
|
self._file = file
|
||||||
|
def save(self):
|
||||||
|
"""Send the currently written contents to the server."""
|
||||||
|
request = self._manager._buildRequest(self._file.context, self._file.path, method="PUT")
|
||||||
|
|
||||||
|
request.add_data(self.getvalue())
|
||||||
|
request.add_header("If-Match", self._file.checksum)
|
||||||
|
try:
|
||||||
|
urllib2.urlopen(request)
|
||||||
|
except urllib2.HTTPError as e:
|
||||||
|
if e.code == 409:
|
||||||
|
raise LocalizationFileVersionConflictException, e.read()
|
||||||
|
else:
|
||||||
|
raise e
|
||||||
|
def __enter__(self):
|
||||||
|
return self
|
||||||
|
def __exit__(self, exc_type, exc_value, traceback):
|
||||||
|
if exc_type is None:
|
||||||
|
self.save()
|
||||||
|
def __str__(self):
|
||||||
|
return '<' + self.__class__.__name__ + " for " + str(self._file) + '>'
|
||||||
|
|
||||||
|
class LocalizationFile(object):
|
||||||
|
"""A specific file stored in localization.
|
||||||
|
|
||||||
|
A localization file is uniquely defined by the context and path. There can
|
||||||
|
only be one valid file for that path and localization at a time. To access
|
||||||
|
the contents of the file use the open method.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
context: A LocalizationContext
|
||||||
|
path: A path to this file
|
||||||
|
checksum: A string representation of a checksum generated from the file contents.
|
||||||
|
timnestamp: A datetime.datetime object indicating when the file was last modified.
|
||||||
|
"""
|
||||||
|
def __init__(self, manager, context, path, checksum, timestamp):
|
||||||
|
"""Initialize a LocalizationFile with the given manager and attributes.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
manager: A LocalizationFileManager to assist with server communication
|
||||||
|
context: A LocalizationContext
|
||||||
|
path: A path to this file
|
||||||
|
checksum: A string representation of a checksum generated from the file contents.
|
||||||
|
timnestamp: A datetime.datetime object indicating when the file was last modified.
|
||||||
|
"""
|
||||||
|
self._manager = manager
|
||||||
|
self.context = context
|
||||||
|
self.path = path
|
||||||
|
self.checksum = checksum
|
||||||
|
self.timestamp = timestamp
|
||||||
|
def open(self, mode='r'):
|
||||||
|
"""Open the file.
|
||||||
|
|
||||||
|
This should always be called as as part of a with statement. When
|
||||||
|
writing the content is not saved on the server until leaving the with
|
||||||
|
statement normally, if an error occurs the server is left unchanged.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
with locFile.open('w') as output:
|
||||||
|
output.write('some content')
|
||||||
|
|
||||||
|
Args:
|
||||||
|
mode: 'r' for reading the file, 'w' for writing
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A file like object that can be used for reads or writes.
|
||||||
|
"""
|
||||||
|
if mode == 'r':
|
||||||
|
request = self._manager._buildRequest(self.context, self.path)
|
||||||
|
response = urllib2.urlopen(request)
|
||||||
|
# Not the recommended way of reading directories.
|
||||||
|
if not(self.isDirectory()):
|
||||||
|
checksum = response.headers["Content-MD5"]
|
||||||
|
if self.checksum != checksum:
|
||||||
|
raise RuntimeError, "Localization checksum mismatch " + self.checksum + " " + checksum
|
||||||
|
return contextlib.closing(response)
|
||||||
|
elif mode == 'w':
|
||||||
|
return _LocalizationOutput(self._manager, self)
|
||||||
|
else:
|
||||||
|
raise ValueError, "mode string must be 'r' or 'w' not " + str(r)
|
||||||
|
def delete(self):
|
||||||
|
"""Delete this file from the server"""
|
||||||
|
request = self._manager._buildRequest(self.context, self.path, method='DELETE')
|
||||||
|
request.add_header("If-Match", self.checksum)
|
||||||
|
try:
|
||||||
|
urllib2.urlopen(request)
|
||||||
|
except urllib2.HTTPError as e:
|
||||||
|
if e.code == 409:
|
||||||
|
raise LocalizationFileVersionConflictException, e.read()
|
||||||
|
else:
|
||||||
|
raise e
|
||||||
|
def exists(self):
|
||||||
|
"""Check if this file actually exists.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
boolean indicating existence of this file
|
||||||
|
"""
|
||||||
|
return self.checksum != NON_EXISTENT_CHECKSUM
|
||||||
|
def isDirectory(self):
|
||||||
|
"""Check if this file is a directory.
|
||||||
|
|
||||||
|
A file must exist to be considered a directory.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
boolean indicating directorocity of this file
|
||||||
|
"""
|
||||||
|
return self.checksum == DIRECTORY_CHECKSUM
|
||||||
|
def getCheckSum(self):
|
||||||
|
return self.checksum
|
||||||
|
def getContext(self):
|
||||||
|
return self.context
|
||||||
|
def getPath(self):
|
||||||
|
return self.path
|
||||||
|
def getTimeStamp(self):
|
||||||
|
return self.timestamp
|
||||||
|
def __str__(self):
|
||||||
|
return str(self.context) + "/" + self.path
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self.context == other.context and \
|
||||||
|
self.path == other.path and \
|
||||||
|
self.checksum == other.checksum \
|
||||||
|
and self.timestamp == other.timestamp
|
||||||
|
def __hash__(self):
|
||||||
|
return hash((self.context, self.path, self.checksum, self.timestamp))
|
||||||
|
|
||||||
|
def _getHost():
|
||||||
|
import subprocess
|
||||||
|
host = subprocess.check_output(
|
||||||
|
"source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST",
|
||||||
|
shell=True).strip()
|
||||||
|
if host:
|
||||||
|
return host
|
||||||
|
return 'localhost'
|
||||||
|
|
||||||
|
def _getSiteFromServer(host):
|
||||||
|
try:
|
||||||
|
from ufpy import ThriftClient
|
||||||
|
from dynamicserialize.dstypes.com.raytheon.uf.common.site.requests import GetPrimarySiteRequest
|
||||||
|
client = ThriftClient.ThriftClient(host)
|
||||||
|
return client.sendRequest(GetPrimarySiteRequest())
|
||||||
|
except:
|
||||||
|
# Servers that don't have GFE installed will not return a site
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _getSiteFromEnv():
|
||||||
|
site = os.environ.get('FXA_LOCAL_SITE')
|
||||||
|
if site is None:
|
||||||
|
site = os.environ.get('SITE_IDENTIFIER');
|
||||||
|
return site
|
||||||
|
|
||||||
|
def _getSite(host):
|
||||||
|
site = _getSiteFromEnv()
|
||||||
|
if not(site):
|
||||||
|
site = _getSiteFromServer(host)
|
||||||
|
return site
|
||||||
|
|
||||||
|
def _parseJsonList(manager, response, context, path):
|
||||||
|
fileList = []
|
||||||
|
jsonResponse = loadjson(response)
|
||||||
|
for name, jsonData in jsonResponse.items():
|
||||||
|
checksum = jsonData["checksum"]
|
||||||
|
timestampString = jsonData["timestamp"]
|
||||||
|
timestamp = dateutil.parser.parse(timestampString)
|
||||||
|
newpath = urljoin(path, name)
|
||||||
|
fileList.append(LocalizationFile(manager, context, newpath, checksum, timestamp))
|
||||||
|
return fileList
|
||||||
|
|
||||||
|
def _parseXmlList(manager, response, context, path):
|
||||||
|
fileList = []
|
||||||
|
for xmlData in parseXml(response).getroot().findall('file'):
|
||||||
|
name = xmlData.get("name")
|
||||||
|
checksum = xmlData.get("checksum")
|
||||||
|
timestampString = xmlData.get("timestamp")
|
||||||
|
timestamp = dateutil.parser.parse(timestampString)
|
||||||
|
newpath = urljoin(path, name)
|
||||||
|
fileList.append(LocalizationFile(manager, context, newpath, checksum, timestamp))
|
||||||
|
return fileList
|
||||||
|
|
||||||
|
class LocalizationFileManager(object):
|
||||||
|
"""Connects to a server and retrieves LocalizationFiles."""
|
||||||
|
def __init__(self, host=None, port=9581, path="/services/localization/", contexts=None, site=None, type="common_static"):
|
||||||
|
"""Initializes a LocalizationFileManager with connection parameters and context information
|
||||||
|
|
||||||
|
All arguments are optional and will use defaults or attempt to figure out appropriate values form the environment.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
host: A hostname of the localization server, such as 'ec'.
|
||||||
|
port: A port to use to connect to the localization server, usually 9581.
|
||||||
|
path: A path to reach the localization file service on the server.
|
||||||
|
contexts: A list of contexts to check for files, the order of the contexts will be used
|
||||||
|
for the order of incremental results and the priority of absolute results.
|
||||||
|
site: A site identifier to use for site specific contexts. This is only used if the contexts arg is None.
|
||||||
|
type: A localization type for contexts. This is only used if the contexts arg is None.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if host is None:
|
||||||
|
host = _getHost()
|
||||||
|
if contexts is None:
|
||||||
|
if site is None :
|
||||||
|
site = _getSite(host)
|
||||||
|
contexts = [LocalizationContext("base", None, type)]
|
||||||
|
if site:
|
||||||
|
contexts.append(LocalizationContext("configured", site, type))
|
||||||
|
contexts.append(LocalizationContext("site", site, type))
|
||||||
|
contexts.append(LocalizationContext("user", getuser(), type))
|
||||||
|
netloc = host + ':' + str(port)
|
||||||
|
self._baseUrl = urlunparse(('http', netloc, path, None, None, None))
|
||||||
|
self._contexts = contexts
|
||||||
|
def _buildRequest(self, context, path, method='GET'):
|
||||||
|
url = urljoin(self._baseUrl, context._getUrlComponent())
|
||||||
|
url = urljoin(url, path)
|
||||||
|
request = urllib2.Request(url)
|
||||||
|
username = getuser()
|
||||||
|
# Currently password is ignored in the server
|
||||||
|
# this is the defacto standard for not providing one to this service.
|
||||||
|
password = username
|
||||||
|
base64string = b64encode('%s:%s' % (username, password))
|
||||||
|
request.add_header("Authorization", "Basic %s" % base64string)
|
||||||
|
if method != 'GET':
|
||||||
|
request.get_method = lambda: method
|
||||||
|
return request
|
||||||
|
def _normalizePath(self, path):
|
||||||
|
if path == '' or path == '/':
|
||||||
|
path = '.'
|
||||||
|
if path[0] == '/':
|
||||||
|
path = path[1:]
|
||||||
|
return path
|
||||||
|
def _list(self, path):
|
||||||
|
path = self._normalizePath(path)
|
||||||
|
if path[-1] != '/':
|
||||||
|
path += '/'
|
||||||
|
fileList = []
|
||||||
|
exists = False
|
||||||
|
for context in self._contexts:
|
||||||
|
try:
|
||||||
|
request = self._buildRequest(context, path)
|
||||||
|
request.add_header("Accept", "application/json, application/xml")
|
||||||
|
response = urllib2.urlopen(request)
|
||||||
|
exists = True
|
||||||
|
if not(response.geturl().endswith("/")):
|
||||||
|
# For ordinary files the server sends a redirect to remove the slash.
|
||||||
|
raise LocalizationFileIsNotDirectoryException, "Not a directory: " + path
|
||||||
|
elif response.headers["Content-Type"] == "application/xml":
|
||||||
|
fileList += _parseXmlList(self, response, context, path)
|
||||||
|
else:
|
||||||
|
fileList += _parseJsonList(self, response, context, path)
|
||||||
|
except urllib2.HTTPError as e:
|
||||||
|
if e.code != 404:
|
||||||
|
raise e
|
||||||
|
if not(exists):
|
||||||
|
raise LocalizationFileDoesNotExistException, "No such file or directory: " + path
|
||||||
|
return fileList
|
||||||
|
def _get(self, context, path):
|
||||||
|
path = self._normalizePath(path)
|
||||||
|
try:
|
||||||
|
request = self._buildRequest(context, path, method='HEAD')
|
||||||
|
resp = urllib2.urlopen(request)
|
||||||
|
if (resp.geturl().endswith("/")):
|
||||||
|
checksum = DIRECTORY_CHECKSUM;
|
||||||
|
else:
|
||||||
|
if "Content-MD5" not in resp.headers:
|
||||||
|
raise RuntimeError, "Missing Content-MD5 header in response from " + resp.geturl()
|
||||||
|
checksum = resp.headers["Content-MD5"]
|
||||||
|
if "Last-Modified" not in resp.headers:
|
||||||
|
raise RuntimeError, "Missing Last-Modified header in response from " + resp.geturl()
|
||||||
|
timestamp = dateutil.parser.parse(resp.headers["Last-Modified"])
|
||||||
|
return LocalizationFile(self, context, path, checksum, timestamp)
|
||||||
|
except urllib2.HTTPError as e:
|
||||||
|
if e.code != 404:
|
||||||
|
raise e
|
||||||
|
else:
|
||||||
|
return LocalizationFile(self, context, path, NON_EXISTENT_CHECKSUM, None)
|
||||||
|
def listAbsolute(self, path):
|
||||||
|
"""List the files in a localization directory, only a single file is returned for each unique path.
|
||||||
|
|
||||||
|
If a file exists in more than one context then the highest level(furthest from base) is used.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: A path to a directory that should be the root of the listing
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A list of LocalizationFiles
|
||||||
|
"""
|
||||||
|
merged = dict()
|
||||||
|
for file in self._list(path):
|
||||||
|
merged[file.path] = file
|
||||||
|
return sorted(merged.values(), key=lambda file: file.path)
|
||||||
|
def listIncremental(self, path):
|
||||||
|
"""List the files in a localization directory, this includes all files for all contexts.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: A path to a directory that should be the root of the listing
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A list of tuples, each tuple will contain one or more files for the
|
||||||
|
same paths but different contexts. Each tuple will be ordered the
|
||||||
|
same as the contexts in this manager, generally with 'base' first
|
||||||
|
and 'user' last.
|
||||||
|
"""
|
||||||
|
merged = dict()
|
||||||
|
for file in self._list(path):
|
||||||
|
if file.path in merged:
|
||||||
|
merged[file.path] += (file,)
|
||||||
|
else:
|
||||||
|
merged[file.path] = (file, )
|
||||||
|
return sorted(merged.values(), key=lambda t: t[0].path)
|
||||||
|
def getAbsolute(self, path):
|
||||||
|
"""Get a single localization file from the highest level context where it exists.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: A path to a localization file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A Localization File with the specified path or None if the file does not exist in any context.
|
||||||
|
|
||||||
|
"""
|
||||||
|
for context in reversed(self._contexts):
|
||||||
|
f = self._get(context, path)
|
||||||
|
if f.exists():
|
||||||
|
return f
|
||||||
|
def getIncremental(self, path):
|
||||||
|
"""Get all the localization files that exist in any context for the provided path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: A path to a localization file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A tuple containing all the files that exist for this path in any context. The tuple
|
||||||
|
will be ordered the same as the contexts in this manager, generally with 'base' first
|
||||||
|
and 'user' last.
|
||||||
|
"""
|
||||||
|
result = ()
|
||||||
|
for context in self._contexts:
|
||||||
|
f = self._get(context, path)
|
||||||
|
if f.exists():
|
||||||
|
result += (f,)
|
||||||
|
return result
|
||||||
|
def getSpecific(self, level, path):
|
||||||
|
"""Get a specific localization file at a given level, the file may not exist.
|
||||||
|
|
||||||
|
The file is returned for whichever context is valid for the provided level in this manager.
|
||||||
|
|
||||||
|
For writing new files this is the only way to get access to a file that
|
||||||
|
does not exist in order to create it.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
level: the name of a localization level, such as "base", "site", "user"
|
||||||
|
path: A path to a localization file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A Localization File with the specified path and a context for the specified level.
|
||||||
|
"""
|
||||||
|
for context in self._contexts:
|
||||||
|
if context.level == level:
|
||||||
|
return self._get(context, path)
|
||||||
|
raise ValueError, "No context defined for level " + level
|
||||||
|
def __str__(self):
|
||||||
|
contextsStr = '[' + ' '.join((str(c) for c in self._contexts)) + ']'
|
||||||
|
return '<' + self.__class__.__name__ + " for " + self._baseUrl + ' ' + contextsStr + '>'
|
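The new LocalizationFileManager wraps the EDEX localization REST service: it builds a context list (base, configured, site, user), lists files absolutely or incrementally, and exposes read/write access through LocalizationFile.open(). A hedged usage sketch based on the methods defined in the file above (the host, site, and localization paths are illustrative):

from awips.localization.LocalizationFileManager import LocalizationFileManager

# Defaults try to discover the EDEX host and site; both can be given explicitly.
lfm = LocalizationFileManager(host="edex-host", site="OAX")

# One entry per unique path, taken from the highest-priority context.
for lf in lfm.listAbsolute("gfe/userPython/"):
    print(lf, lf.checksum)

# Read a single file from whichever context wins.
lf = lfm.getAbsolute("gfe/userPython/utilities/MyUtil.py")
if lf is not None and not lf.isDirectory():
    with lf.open('r') as incoming:
        text = incoming.read()

# Writing goes through a level-specific file; the save happens when the
# with-block exits without an error.
target = lfm.getSpecific("user", "gfe/userPython/utilities/MyUtil.py")
with target.open('w') as out:
    out.write("# created through the localization REST service\n")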
awips/localization/__init__.py (new file, 32 lines)
@@ -0,0 +1,32 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# __init__.py for ufpy.localization package
+#
+#
+# SOFTWARE HISTORY
+#
+# Date      Ticket#  Engineer  Description
+# --------- -------- --------- --------------------------
+# 08/10/17  5731     bsteffen  Initial Creation.
+
+__all__ = [
+]
@ -59,9 +59,13 @@
|
||||||
# ....
|
# ....
|
||||||
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
|
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
|
||||||
# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569
|
# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569
|
||||||
|
# 02/16/2017 DR 6084 bsteffen Support ssl connections
|
||||||
#
|
#
|
||||||
#===============================================================================
|
#===============================================================================
|
||||||
|
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
|
||||||
import qpid
|
import qpid
|
||||||
from qpid.util import connect
|
from qpid.util import connect
|
||||||
from qpid.connection import Connection
|
from qpid.connection import Connection
|
||||||
|

@@ -71,17 +75,31 @@ QPID_USERNAME = 'guest'
 QPID_PASSWORD = 'guest'
 
 class IngestViaQPID:
-    def __init__(self, host='localhost', port=5672):
+    def __init__(self, host='localhost', port=5672, ssl=None):
         '''
         Connect to QPID and make bindings to route message to external.dropbox queue
         @param host: string hostname of computer running EDEX and QPID (default localhost)
         @param port: integer port used to connect to QPID (default 5672)
+        @param ssl: boolean to determine whether ssl is used; the default value of None will use ssl only if a client certificate is found.
         '''
 
         try:
             #
-            self.socket = connect(host, port)
-            self.connection = Connection(sock=self.socket, username=QPID_USERNAME, password=QPID_PASSWORD)
+            socket = connect(host, port)
+            if "QPID_SSL_CERT_DB" in os.environ:
+                certdb = os.environ["QPID_SSL_CERT_DB"]
+            else:
+                certdb = os.path.expanduser("~/.qpid/")
+            if "QPID_SSL_CERT_NAME" in os.environ:
+                certname = os.environ["QPID_SSL_CERT_NAME"]
+            else:
+                certname = QPID_USERNAME
+            certfile = os.path.join(certdb, certname + ".crt")
+            if ssl or (ssl is None and os.path.exists(certfile)):
+                keyfile = os.path.join(certdb, certname + ".key")
+                trustfile = os.path.join(certdb, "root.crt")
+                socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
+            self.connection = Connection(sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD)
             self.connection.start()
             self.session = self.connection.session(str(uuid4()))
             self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox')
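
For orientation, a minimal usage sketch of the updated constructor (the broker hostname is hypothetical and the import path is an assumption; the certificate locations in the comment are simply the defaults the code above falls back to when QPID_SSL_CERT_DB and QPID_SSL_CERT_NAME are unset):

    # Sketch only: connect to a hypothetical EDEX/QPID broker. With ssl=None the
    # constructor enables SSL only if the client certificate (by default
    # ~/.qpid/guest.crt) actually exists on disk.
    from awips.qpidingest import IngestViaQPID   # module path assumed

    ingest = IngestViaQPID(host='edex-broker.example.com', port=5672, ssl=None)
    # ingest.connection / ingest.session are now bound to the external.dropbox queue.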

@@ -45,4 +45,3 @@ import Record
 avh = AlertVizHandler.AlertVizHandler(host=os.getenv("BROKER_ADDR","localhost"), port=9581, category='LOCAL', source='ANNOUNCER', level=logging.NOTSET)
 record = Record.Record(10)
 avh.emit(record)
-

@@ -19,8 +19,11 @@
 ##
 
 from awips.dataaccess import DataAccessLayer as DAL
+from shapely.geometry import box
 
 import baseDafTestCase
+import params
+import unittest
 
 #
 # Base TestCase for BufrMos* tests.

@@ -31,7 +34,8 @@ import baseDafTestCase
 # ------------ ---------- ----------- --------------------------
 # 01/19/16     4795       mapeters    Initial Creation.
 # 04/11/16     5548       tgurney     Cleanup
-#
+# 12/07/16     5981       tgurney     Parameterize
+# 12/15/16     5981       tgurney     Add envelope test
 #
 #
 

@@ -39,6 +43,8 @@ import baseDafTestCase
 class BufrMosTestCase(baseDafTestCase.DafTestCase):
     """Base class for testing DAF support of bufrmos data"""
 
+    data_params = "temperature", "dewpoint"
+
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)
         self.runParametersTest(req)

@@ -49,11 +55,19 @@ class BufrMosTestCase(baseDafTestCase.DafTestCase):
 
     def testGetAvailableTimes(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         self.runTimesTest(req)
 
     def testGetGeometryData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
-        req.setParameters("temperature", "dewpoint")
+        req.setLocationNames(params.OBS_STATION)
+        req.setParameters(*self.data_params)
         self.runGeometryDataTest(req)
+
+    def testGetGeometryDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.setParameters(*self.data_params)
+        req.setEnvelope(params.ENVELOPE)
+        data = self.runGeometryDataTest(req)
+        for item in data:
+            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
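
With datatype and data_params factored out, a concrete BufrMos test module reduces to a couple of class attributes. A minimal sketch (the bufrmosXYZ datatype name is hypothetical; the real subclasses updated later in this commit follow the same pattern):

    import baseBufrMosTestCase

    class BufrMosXyzTestCase(baseBufrMosTestCase.BufrMosTestCase):
        """Test DAF support for a hypothetical bufrmosXYZ datatype."""
        datatype = "bufrmosXYZ"
        # Override only when temperature/dewpoint are not valid parameters
        data_params = "forecastHr", "maxTemp24Hour"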

@@ -50,6 +50,8 @@ import unittest
 # 10/05/16     5926       dgilling    Better checks in runGeometryDataTest.
 # 11/08/16     5985       tgurney     Do not check data times on
 #                                     time-agnostic data
+# 03/13/17     5981       tgurney     Do not check valid period on
+#                                     data time
 #
 #

@@ -166,10 +168,13 @@ class DafTestCase(unittest.TestCase):
         self.assertIsNotNone(geomData)
         if times:
             self.assertNotEqual(len(geomData), 0)
+        if not geomData:
+            raise unittest.SkipTest("No data available")
         print("Number of geometry records: " + str(len(geomData)))
         print("Sample geometry data:")
         for record in geomData[:self.sampleDataLimit]:
-            if checkDataTimes and times:
+            if (checkDataTimes and times and
+                    "PERIOD_USED" not in record.getDataTime().getUtilityFlags()):
                 self.assertIn(record.getDataTime(), times[:self.numTimesToLimit])
             print("geometry=" + str(record.getGeometry()), end="")
             for p in req.getParameters():

@@ -184,6 +189,8 @@ class DafTestCase(unittest.TestCase):
         """
         geomData = DAL.getGeometryData(req, timeRange)
         self.assertIsNotNone(geomData)
+        if not geomData:
+            raise unittest.SkipTest("No data available")
         print("Number of geometry records: " + str(len(geomData)))
         print("Sample geometry data:")
         for record in geomData[:self.sampleDataLimit]:

@@ -207,6 +214,8 @@ class DafTestCase(unittest.TestCase):
         times = DafTestCase.getTimesIfSupported(req)
         gridData = DAL.getGridData(req, times[:self.numTimesToLimit])
         self.assertIsNotNone(gridData)
+        if not gridData:
+            raise unittest.SkipTest("No data available")
         print("Number of grid records: " + str(len(gridData)))
         if len(gridData) > 0:
             print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n")
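
As background, raising unittest.SkipTest from inside a running test marks it as skipped rather than failed, which is what these new guards rely on when the target EDEX server simply has no matching data. A standalone illustration of the pattern:

    import unittest

    class SkipPatternExample(unittest.TestCase):
        def testSomething(self):
            data = []   # stand-in for an empty DAL.getGeometryData(...) result
            if not data:
                raise unittest.SkipTest("No data available")
            self.assertNotEqual(len(data), 0)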

awips/test/dafTests/baseRadarTestCase.py  (new file, 194 lines)
@@ -0,0 +1,194 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+from __future__ import print_function
+from shapely.geometry import box
+from ufpy.dataaccess import DataAccessLayer as DAL
+from ufpy.ThriftClient import ThriftRequestException
+
+import baseDafTestCase
+import params
+import unittest
+
+#
+# Tests common to all radar factories
+#
+# SOFTWARE HISTORY
+#
+# Date           Ticket#  Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# 01/19/16     4795       mapeters    Initial Creation.
+# 04/11/16     5548       tgurney     Cleanup
+# 04/18/16     5548       tgurney     More cleanup
+# 04/26/16     5587       tgurney     Move identifier values tests
+#                                     out of base class
+# 06/01/16     5587       tgurney     Update testGetIdentifierValues
+# 06/08/16     5574       mapeters    Add advanced query tests
+# 06/13/16     5574       tgurney     Fix checks for None
+# 06/14/16     5548       tgurney     Undo previous change (broke
+#                                     test)
+# 06/30/16     5725       tgurney     Add test for NOT IN
+# 08/25/16     2671       tgurney     Rename to baseRadarTestCase
+#                                     and move factory-specific
+#                                     tests
+# 12/07/16     5981       tgurney     Parameterize
+#
+#
+
+
+class BaseRadarTestCase(baseDafTestCase.DafTestCase):
+    """Tests common to all radar factories"""
+
+    # datatype is specified by subclass
+    datatype = None
+
+    radarLoc = params.RADAR.lower()
+
+    def testGetAvailableParameters(self):
+        req = DAL.newDataRequest(self.datatype)
+        self.runParametersTest(req)
+
+    def testGetAvailableLocations(self):
+        req = DAL.newDataRequest(self.datatype)
+        self.runLocationsTest(req)
+
+    def testGetAvailableLevels(self):
+        req = DAL.newDataRequest(self.datatype)
+        self.runLevelsTest(req)
+
+    def testGetAvailableLevelsWithInvalidLevelIdentifierThrowsException(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('level.one.field', 'invalidLevelField')
+        with self.assertRaises(ThriftRequestException) as cm:
+            self.runLevelsTest(req)
+        self.assertIn('IncompatibleRequestException', str(cm.exception))
+
+    def testGetAvailableTimes(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.setEnvelope(params.ENVELOPE)
+        self.runTimesTest(req)
+
+    def testGetIdentifierValues(self):
+        req = DAL.newDataRequest(self.datatype)
+        optionalIds = set(DAL.getOptionalIdentifiers(req))
+        requiredIds = set(DAL.getRequiredIdentifiers(req))
+        self.runGetIdValuesTest(optionalIds | requiredIds)
+
+    def testGetInvalidIdentifierValuesThrowsException(self):
+        self.runInvalidIdValuesTest()
+
+    def testGetNonexistentIdentifierValuesThrowsException(self):
+        self.runNonexistentIdValuesTest()
+
+    def runConstraintTest(self, key, operator, value):
+        raise NotImplementedError
+
+    def testGetDataWithEqualsString(self):
+        gridData = self.runConstraintTest('icao', '=', self.radarLoc)
+        for record in gridData:
+            self.assertEqual(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithEqualsUnicode(self):
+        gridData = self.runConstraintTest('icao', '=', unicode(self.radarLoc))
+        for record in gridData:
+            self.assertEqual(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithEqualsInt(self):
+        gridData = self.runConstraintTest('icao', '=', 1000)
+        for record in gridData:
+            self.assertEqual(record.getAttribute('icao'), 1000)
+
+    def testGetDataWithEqualsLong(self):
+        gridData = self.runConstraintTest('icao', '=', 1000L)
+        for record in gridData:
+            self.assertEqual(record.getAttribute('icao'), 1000)
+
+    def testGetDataWithEqualsFloat(self):
+        gridData = self.runConstraintTest('icao', '=', 1.0)
+        for record in gridData:
+            self.assertEqual(round(record.getAttribute('icao'), 1), 1.0)
+
+    def testGetDataWithEqualsNone(self):
+        gridData = self.runConstraintTest('icao', '=', None)
+        for record in gridData:
+            self.assertIsNone(record.getAttribute('icao'))
+
+    def testGetDataWithNotEquals(self):
+        gridData = self.runConstraintTest('icao', '!=', self.radarLoc)
+        for record in gridData:
+            self.assertNotEqual(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithNotEqualsNone(self):
+        gridData = self.runConstraintTest('icao', '!=', None)
+        for record in gridData:
+            self.assertIsNotNone(record.getAttribute('icao'))
+
+    def testGetDataWithGreaterThan(self):
+        gridData = self.runConstraintTest('icao', '>', self.radarLoc)
+        for record in gridData:
+            self.assertGreater(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithLessThan(self):
+        gridData = self.runConstraintTest('icao', '<', self.radarLoc)
+        for record in gridData:
+            self.assertLess(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithGreaterThanEquals(self):
+        gridData = self.runConstraintTest('icao', '>=', self.radarLoc)
+        for record in gridData:
+            self.assertGreaterEqual(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithLessThanEquals(self):
+        gridData = self.runConstraintTest('icao', '<=', self.radarLoc)
+        for record in gridData:
+            self.assertLessEqual(record.getAttribute('icao'), self.radarLoc)
+
+    def testGetDataWithInTuple(self):
+        gridData = self.runConstraintTest('icao', 'in', (self.radarLoc, 'tpbi'))
+        for record in gridData:
+            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))
+
+    def testGetDataWithInList(self):
+        gridData = self.runConstraintTest('icao', 'in', [self.radarLoc, 'tpbi'])
+        for record in gridData:
+            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))
+
+    def testGetDataWithInGenerator(self):
+        generator = (item for item in (self.radarLoc, 'tpbi'))
+        gridData = self.runConstraintTest('icao', 'in', generator)
+        for record in gridData:
+            self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi'))
+
+    def testGetDataWithNotInList(self):
+        gridData = self.runConstraintTest('icao', 'not in', ['zzzz', self.radarLoc])
+        for record in gridData:
+            self.assertNotIn(record.getAttribute('icao'), ('zzzz', self.radarLoc))
+
+    def testGetDataWithInvalidConstraintTypeThrowsException(self):
+        with self.assertRaises(ValueError):
+            self.runConstraintTest('icao', 'junk', self.radarLoc)
+
+    def testGetDataWithInvalidConstraintValueThrowsException(self):
+        with self.assertRaises(TypeError):
+            self.runConstraintTest('icao', '=', {})
+
+    def testGetDataWithEmptyInConstraintThrowsException(self):
+        with self.assertRaises(ValueError):
+            self.runConstraintTest('icao', 'in', [])

awips/test/dafTests/params.py  (new file, 43 lines)
@@ -0,0 +1,43 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+
+#
+# Site-specific parameters for DAF tests
+#
+# SOFTWARE HISTORY
+#
+# Date           Ticket#  Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# 12/07/16     5981       tgurney     Initial creation
+# 12/15/16     5981       tgurney     Add ENVELOPE
+#
+#
+
+from shapely.geometry import box
+
+AIRPORT = 'OMA'
+OBS_STATION = 'KOMA'
+SITE_ID = 'OAX'
+STATION_ID = '72558'
+RADAR = 'KOAX'
+SAMPLE_AREA = (-97.0, 41.0, -96.0, 42.0)
+
+ENVELOPE = box(*SAMPLE_AREA)
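
These constants are now the single place where site-specific identifiers live, so pointing the whole DAF test suite at another office is just a matter of editing this module. For example, a sketch of a hypothetical Seattle-area configuration (the identifier values below are illustrative only, not part of the commit):

    # Hypothetical alternative site configuration for params.py
    AIRPORT = 'SEA'
    OBS_STATION = 'KSEA'
    SITE_ID = 'SEW'
    STATION_ID = '72797'
    RADAR = 'KATX'
    SAMPLE_AREA = (-123.0, 47.0, -122.0, 48.0)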

@@ -22,6 +22,7 @@ from __future__ import print_function
 from awips.dataaccess import DataAccessLayer as DAL
 
 import baseBufrMosTestCase
+import params
 import unittest
 
 #

@@ -34,6 +35,8 @@ import unittest
 # 01/19/16     4795       mapeters    Initial Creation.
 # 04/11/16     5548       tgurney     Cleanup
 # 04/18/16     5548       tgurney     More cleanup
+# 12/07/16     5981       tgurney     Parameterize
+# 12/20/16     5981       tgurney     Inherit all tests
 #
 #

@@ -42,11 +45,6 @@ class BufrMosHpcTestCase(baseBufrMosTestCase.BufrMosTestCase):
     """Test DAF support for bufrmosHPC data"""
 
     datatype = "bufrmosHPC"
+    data_params = "forecastHr", "maxTemp24Hour"
 
-    # Most tests inherited from superclass
-
-    def testGetGeometryData(self):
-        req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
-        req.setParameters("forecastHr", "maxTemp24Hour")
-        self.runGeometryDataTest(req)
+    # All tests inherited from superclass

@@ -22,6 +22,7 @@ from __future__ import print_function
 from awips.dataaccess import DataAccessLayer as DAL
 
 import baseBufrMosTestCase
+import params
 import unittest
 
 #
|
||||||
# 01/19/16 4795 mapeters Initial Creation.
|
# 01/19/16 4795 mapeters Initial Creation.
|
||||||
# 04/11/16 5548 tgurney Cleanup
|
# 04/11/16 5548 tgurney Cleanup
|
||||||
# 04/18/16 5548 tgurney More cleanup
|
# 04/18/16 5548 tgurney More cleanup
|
||||||
|
# 12/07/16 5981 tgurney Parameterize
|
||||||
|
# 12/20/16 5981 tgurney Inherit all tests
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
|
||||||
|

@@ -42,11 +45,6 @@ class BufrMosMrfTestCase(baseBufrMosTestCase.BufrMosTestCase):
     """Test DAF support for bufrmosMRF data"""
 
     datatype = "bufrmosMRF"
+    data_params = "forecastHr", "maxTempDay"
 
-    # Most tests inherited from superclass
-
-    def testGetGeometryData(self):
-        req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
-        req.setParameters("forecastHr", "maxTempDay")
-        self.runGeometryDataTest(req)
+    # All tests inherited from superclass

@@ -23,6 +23,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 import baseDafTestCase
+import params
 import unittest
 
 #

@@ -38,6 +39,8 @@ import unittest
 # 06/09/16     5587       bsteffen    Add getIdentifierValues tests
 # 06/13/16     5574       tgurney     Add advanced query tests
 # 06/30/16     5725       tgurney     Add test for NOT IN
+# 12/07/16     5981       tgurney     Parameterize
+# 12/15/16     5981       tgurney     Add envelope test
 #
 #

@@ -47,8 +50,7 @@ class BufrUaTestCase(baseDafTestCase.DafTestCase):
 
     datatype = "bufrua"
 
-    location = "72558"
-    """stationid corresponding to KOAX"""
+    location = params.STATION_ID
 
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)

@@ -91,6 +93,14 @@ class BufrUaTestCase(baseDafTestCase.DafTestCase):
 
         print("getGeometryData() complete\n\n")
 
+    def testGetGeometryDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.setParameters("staName", "rptType")
+        req.setEnvelope(params.ENVELOPE)
+        data = self.runGeometryDataTest(req)
+        for item in data:
+            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
+
     def testGetIdentifierValues(self):
         req = DAL.newDataRequest(self.datatype)
         optionalIds = set(DAL.getOptionalIdentifiers(req))

@@ -133,7 +143,6 @@ class BufrUaTestCase(baseDafTestCase.DafTestCase):
 
     # No float test because no float identifiers are available
 
-
     def testGetDataWithEqualsNone(self):
         geometryData = self._runConstraintTest('reportType', '=', None)
         for record in geometryData:

@@ -26,6 +26,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 from awips.ThriftClient import ThriftRequestException
 
 import baseDafTestCase
+import params
 import unittest
 
 #

@@ -44,6 +45,9 @@ import unittest
 # 06/21/16     5548       tgurney     Skip tests that cause errors
 # 06/30/16     5725       tgurney     Add test for NOT IN
 # 10/06/16     5926       dgilling    Add additional time and location tests.
+# 12/07/16     5981       tgurney     Parameterize
+# 12/20/16     5981       tgurney     Add envelope test
+# 08/16/17     6388       tgurney     Test for duplicate data
 #
 #

@@ -52,6 +56,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
     """Test DAF support for climate data"""
 
     datatype = 'climate'
+    obsStation = params.OBS_STATION
 
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)

@@ -104,7 +109,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         """
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier('table', 'public.cli_asos_monthly')
-        req.setLocationNames('KOMA', 'KABR', 'KDMO')
+        req.setLocationNames(self.obsStation, 'KABR', 'KDMO')
         req.setParameters('maxtemp_mon', 'min_sea_press')
         self.runTimesTest(req)
 
@@ -115,7 +120,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         """
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier('table', 'public.cli_asos_daily')
-        req.setLocationNames('KOMA', 'KABR', 'KDMO')
+        req.setLocationNames(self.obsStation, 'KABR', 'KDMO')
         req.setParameters('maxtemp_cal', 'min_press')
         self.runTimesTest(req)
 
@@ -126,7 +131,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         """
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier('table', 'public.cli_mon_season_yr')
-        req.setLocationNames('KOMA', 'KABR', 'KDMO')
+        req.setLocationNames(self.obsStation, 'KABR', 'KDMO')
         req.setParameters('max_temp', 'precip_total')
         self.runTimesTest(req)
 
@@ -137,7 +142,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         """
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier('table', 'public.daily_climate')
-        req.setLocationNames('KOMA', 'KABR', 'KDMO')
+        req.setLocationNames(self.obsStation, 'KABR', 'KDMO')
         req.setParameters('max_temp', 'precip', 'avg_wind_speed')
         self.runTimesTest(req)

@@ -155,6 +160,15 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         req.setParameters('maxtemp_mon', 'min_sea_press')
         self.runGeometryDataTest(req)
 
+    def testGetGeometryDataWithEnvelopeThrowsException(self):
+        # Envelope is not used
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('table', 'public.cli_asos_monthly')
+        req.setParameters('maxtemp_mon', 'min_sea_press')
+        req.setEnvelope(params.ENVELOPE)
+        with self.assertRaises(Exception):
+            data = self.runGeometryDataTest(req)
+
     def testGetGeometryDataForYearAndDayOfYearTable(self):
         """
         Test retrieval of data for a climo table that uses year and

@@ -243,14 +257,14 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         return self.runGeometryDataTest(req)
 
     def testGetDataWithEqualsString(self):
-        geometryData = self._runConstraintTest('station_code', '=', 'KOMA')
+        geometryData = self._runConstraintTest('station_code', '=', self.obsStation)
         for record in geometryData:
-            self.assertEqual(record.getString('station_code'), 'KOMA')
+            self.assertEqual(record.getString('station_code'), self.obsStation)
 
     def testGetDataWithEqualsUnicode(self):
-        geometryData = self._runConstraintTest('station_code', '=', u'KOMA')
+        geometryData = self._runConstraintTest('station_code', '=', unicode(self.obsStation))
         for record in geometryData:
-            self.assertEqual(record.getString('station_code'), 'KOMA')
+            self.assertEqual(record.getString('station_code'), self.obsStation)
 
     def testGetDataWithEqualsInt(self):
         geometryData = self._runConstraintTest('avg_daily_max', '=', 70)

@@ -272,9 +286,9 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         self.assertEqual(len(geometryData), 0)
 
     def testGetDataWithNotEquals(self):
-        geometryData = self._runConstraintTest('station_code', '!=', 'KOMA')
+        geometryData = self._runConstraintTest('station_code', '!=', self.obsStation)
         for record in geometryData:
-            self.assertNotEqual(record.getString('station_code'), 'KOMA')
+            self.assertNotEqual(record.getString('station_code'), self.obsStation)
 
     def testGetDataWithNotEqualsNone(self):
         geometryData = self._runConstraintTest('station_code', '!=', None)

@@ -302,19 +316,19 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         self.assertLessEqual(record.getNumber('avg_daily_max'), 70)
 
     def testGetDataWithInTuple(self):
-        collection = ('KOMA', 'KABR')
+        collection = (self.obsStation, 'KABR')
         geometryData = self._runConstraintTest('station_code', 'in', collection)
         for record in geometryData:
             self.assertIn(record.getString('station_code'), collection)
 
     def testGetDataWithInList(self):
-        collection = ['KOMA', 'KABR']
+        collection = [self.obsStation, 'KABR']
         geometryData = self._runConstraintTest('station_code', 'in', collection)
         for record in geometryData:
             self.assertIn(record.getString('station_code'), collection)
 
     def testGetDataWithInGenerator(self):
-        collection = ('KOMA', 'KABR')
+        collection = (self.obsStation, 'KABR')
         generator = (item for item in collection)
         geometryData = self._runConstraintTest('station_code', 'in', generator)
         for record in geometryData:

@@ -328,7 +342,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
 
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
-            self._runConstraintTest('station_code', 'junk', 'KOMA')
+            self._runConstraintTest('station_code', 'junk', self.obsStation)
 
     def testGetDataWithInvalidConstraintValueThrowsException(self):
         with self.assertRaises(TypeError):

@@ -418,3 +432,13 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
         tr = TimeRange(startTime, endTime)
         self.runGeometryDataTestWithTimeRange(req, tr)
 
+    def testNoDuplicateData(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('table', 'public.cli_asos_monthly')
+        req.setLocationNames('KOMA')
+        req.setParameters('maxtemp_day1')
+        rows = DAL.getGeometryData(req, DAL.getAvailableTimes(req)[0:5])
+        for i in range(len(rows)):
+            for j in range(len(rows)):
+                if i != j:
+                    self.assertNotEqual(rows[i].__dict__, rows[j].__dict__)
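
The duplicate check above compares every ordered pair of returned rows. An equivalent formulation over unordered pairs only, should one prefer it, is sketched here (not part of the commit):

    import itertools

    def assert_no_duplicates(rows):
        # Fail if any two records carry identical attribute dictionaries.
        for row_a, row_b in itertools.combinations(rows, 2):
            assert row_a.__dict__ != row_b.__dict__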

@@ -49,7 +49,7 @@ class CombinedTimeQueryTestCase(unittest.TestCase):
 
     def testSuccessfulQuery(self):
         req = DAL.newDataRequest('grid')
-        req.setLocationNames('RAP13')
+        req.setLocationNames('RUC130')
         req.setParameters('T','GH')
         req.setLevels('300MB', '500MB','700MB')
         times = CTQ.getAvailableTimes(req)

@@ -60,7 +60,7 @@ class CombinedTimeQueryTestCase(unittest.TestCase):
         Test that when a parameter is only available on one of the levels that no times are returned.
         """
         req = DAL.newDataRequest('grid')
-        req.setLocationNames('RAP13')
+        req.setLocationNames('RUC130')
         req.setParameters('T','GH', 'LgSP1hr')
         req.setLevels('300MB', '500MB','700MB','0.0SFC')
         times = CTQ.getAvailableTimes(req)

@@ -24,6 +24,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 import baseDafTestCase
+import params
 import unittest
 
 #

@@ -42,6 +43,8 @@ import unittest
 # 06/13/16     5574       tgurney     Add advanced query tests
 # 06/21/16     5548       tgurney     Skip tests that cause errors
 # 06/30/16     5725       tgurney     Add test for NOT IN
+# 12/07/16     5981       tgurney     Parameterize
+# 01/06/17     5981       tgurney     Do not check data times
 #
 #

@@ -50,9 +53,6 @@ class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
 
     datatype = "common_obs_spatial"
 
-    envelope = box(-97.0, 41.0, -96.0, 42.0)
-    """Default request area (box around KOAX)"""
-
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)
         self.runParametersTest(req)

@@ -65,19 +65,11 @@ class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
     def testGetIdentifierValues(self):
         self.runGetIdValuesTest(['country'])
 
-    @unittest.skip('avoid EDEX error')
-    def testGetInvalidIdentifierValuesThrowsException(self):
-        self.runInvalidIdValuesTest()
-
-    @unittest.skip('avoid EDEX error')
-    def testGetNonexistentIdentifierValuesThrowsException(self):
-        self.runNonexistentIdValuesTest()
-
     def testGetGeometryData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setEnvelope(self.envelope)
+        req.setEnvelope(params.ENVELOPE)
         req.setParameters("name", "stationid")
-        self.runGeometryDataTest(req)
+        self.runGeometryDataTest(req, checkDataTimes=False)
 
     def testRequestingTimesThrowsTimeAgnosticDataException(self):
         req = DAL.newDataRequest(self.datatype)

@@ -88,7 +80,7 @@ class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
         constraint = RequestConstraint.new(operator, value)
         req.addIdentifier(key, constraint)
         req.setParameters('catalogtype', 'elevation', 'state')
-        return self.runGeometryDataTest(req)
+        return self.runGeometryDataTest(req, checkDataTimes=False)
 
     def testGetDataWithEqualsString(self):
         geometryData = self._runConstraintTest('state', '=', 'NE')

@@ -23,6 +23,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 from awips.dataaccess import DataAccessLayer as DAL
 
 import baseDafTestCase
+import params
 import unittest
 
 #

@@ -46,6 +47,8 @@ import unittest
 #                                     PRTM parameter since it isn't
 #                                     configured for ec-oma
 # 11/08/16     5985       tgurney     Do not check data times
+# 12/07/16     5981       tgurney     Parameterize
+# 12/20/16     5981       tgurney     Do not check data times
 #
 #

@@ -54,10 +57,11 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
     """Test DAF support for ffmp data"""
 
     datatype = 'ffmp'
+    location = params.RADAR.lower()
 
     @staticmethod
     def addIdentifiers(req):
-        req.addIdentifier('wfo', 'OAX')
+        req.addIdentifier('wfo', params.SITE_ID)
         req.addIdentifier('siteKey', 'hpe')
         req.addIdentifier('dataKey', 'hpe')
         req.addIdentifier('huc', 'ALL')

@@ -99,8 +103,8 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         if id == 'accumHrs':
             req.setParameters('ARI6H2YR')
-        req.addIdentifier('wfo', 'OAX')
-        req.addIdentifier('siteKey', 'koax')
+        req.addIdentifier('wfo', params.SITE_ID)
+        req.addIdentifier('siteKey', self.location)
         req.addIdentifier('huc', 'ALL')
         idValues = DAL.getIdentifierValues(req, id)
         self.assertTrue(hasattr(idValues, '__iter__'))

@@ -116,20 +120,20 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         constraint = RequestConstraint.new(operator, value)
         req.addIdentifier(key, constraint)
-        req.addIdentifier('wfo', 'OAX')
+        req.addIdentifier('wfo', params.SITE_ID)
         req.addIdentifier('huc', 'ALL')
         req.setParameters('QPFSCAN')
         return self.runGeometryDataTest(req, checkDataTimes=False)
 
     def testGetDataWithEqualsString(self):
-        geometryData = self._runConstraintTest('siteKey', '=', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '=', self.location)
         for record in geometryData:
-            self.assertEqual(record.getAttribute('siteKey'), 'koax')
+            self.assertEqual(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithEqualsUnicode(self):
-        geometryData = self._runConstraintTest('siteKey', '=', u'koax')
+        geometryData = self._runConstraintTest('siteKey', '=', unicode(self.location))
         for record in geometryData:
-            self.assertEqual(record.getAttribute('siteKey'), 'koax')
+            self.assertEqual(record.getAttribute('siteKey'), self.location)
 
     # No numeric tests since no numeric identifiers are available that support
     # RequestConstraints.

@@ -140,9 +144,9 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
         self.assertIsNone(record.getAttribute('siteKey'))
 
     def testGetDataWithNotEquals(self):
-        geometryData = self._runConstraintTest('siteKey', '!=', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '!=', self.location)
         for record in geometryData:
-            self.assertNotEqual(record.getAttribute('siteKey'), 'koax')
+            self.assertNotEqual(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithNotEqualsNone(self):
         geometryData = self._runConstraintTest('siteKey', '!=', None)

@@ -150,40 +154,40 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
         self.assertIsNotNone(record.getAttribute('siteKey'))
 
     def testGetDataWithGreaterThan(self):
-        geometryData = self._runConstraintTest('siteKey', '>', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '>', self.location)
         for record in geometryData:
-            self.assertGreater(record.getAttribute('siteKey'), 'koax')
+            self.assertGreater(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithLessThan(self):
-        geometryData = self._runConstraintTest('siteKey', '<', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '<', self.location)
         for record in geometryData:
-            self.assertLess(record.getAttribute('siteKey'), 'koax')
+            self.assertLess(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithGreaterThanEquals(self):
-        geometryData = self._runConstraintTest('siteKey', '>=', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '>=', self.location)
         for record in geometryData:
-            self.assertGreaterEqual(record.getAttribute('siteKey'), 'koax')
+            self.assertGreaterEqual(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithLessThanEquals(self):
-        geometryData = self._runConstraintTest('siteKey', '<=', 'koax')
+        geometryData = self._runConstraintTest('siteKey', '<=', self.location)
         for record in geometryData:
-            self.assertLessEqual(record.getAttribute('siteKey'), 'koax')
+            self.assertLessEqual(record.getAttribute('siteKey'), self.location)
 
     def testGetDataWithInList(self):
-        collection = ['koax', 'kuex']
+        collection = [self.location, 'kuex']
         geometryData = self._runConstraintTest('siteKey', 'in', collection)
         for record in geometryData:
             self.assertIn(record.getAttribute('siteKey'), collection)
 
     def testGetDataWithNotInList(self):
-        collection = ['koax', 'kuex']
+        collection = [self.location, 'kuex']
         geometryData = self._runConstraintTest('siteKey', 'not in', collection)
         for record in geometryData:
             self.assertNotIn(record.getAttribute('siteKey'), collection)
 
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
-            self._runConstraintTest('siteKey', 'junk', 'koax')
+            self._runConstraintTest('siteKey', 'junk', self.location)
 
     def testGetDataWithInvalidConstraintValueThrowsException(self):
         with self.assertRaises(TypeError):

@@ -194,11 +198,11 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
         self._runConstraintTest('siteKey', 'in', [])
 
     def testGetDataWithSiteKeyAndDataKeyConstraints(self):
-        siteKeys = ['koax', 'hpe']
+        siteKeys = [self.location, 'hpe']
         dataKeys = ['kuex', 'kdmx']
 
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('wfo', 'OAX')
+        req.addIdentifier('wfo', params.SITE_ID)
         req.addIdentifier('huc', 'ALL')
 
         siteKeysConstraint = RequestConstraint.new('in', siteKeys)

@@ -217,8 +221,8 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
     def testGetGuidanceDataWithoutAccumHrsIdentifierSet(self):
         # Test that accumHrs identifier is not required for guidance data
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('wfo', 'OAX')
-        req.addIdentifier('siteKey', 'koax')
+        req.addIdentifier('wfo', params.SITE_ID)
+        req.addIdentifier('siteKey', self.location)
         req.addIdentifier('huc', 'ALL')
         req.setParameters('FFG0124hr')
         self.runGeometryDataTest(req, checkDataTimes=False)

@@ -21,8 +21,10 @@
 from __future__ import print_function
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 from awips.dataaccess import DataAccessLayer as DAL
+from shapely.geometry import box, Point
 
 import baseDafTestCase
+import params
 import unittest
 
 #

@@ -41,6 +43,12 @@ import unittest
 # 06/17/16     5574       mapeters    Add advanced query tests
 # 06/30/16     5725       tgurney     Add test for NOT IN
 # 11/07/16     5991       bsteffen    Improve vector tests
+# 12/07/16     5981       tgurney     Parameterize
+# 12/15/16     6040       tgurney     Add testGetGridDataWithDbType
+# 12/20/16     5981       tgurney     Add envelope test
+# 10/19/17     6491       tgurney     Add test for dbtype identifier
+# 11/10/17     6491       tgurney     Replace modelName with
+#                                     parmId.dbId.modelName
 #
 #

@@ -56,26 +64,49 @@ class GfeTestCase(baseDafTestCase.DafTestCase):
 
     def testGetAvailableLocations(self):
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('modelName', 'Fcst')
+        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
         self.runLocationsTest(req)
 
     def testGetAvailableTimes(self):
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('modelName', 'Fcst')
-        req.addIdentifier('siteId', 'OAX')
+        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
+        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
         self.runTimesTest(req)
 
     def testGetGridData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('modelName', 'Fcst')
-        req.addIdentifier('siteId', 'OAX')
+        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
+        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
         req.setParameters('T')
-        self.runGridDataTest(req)
+        gridDatas = self.runGridDataTest(req)
+        for gridData in gridDatas:
+            self.assertEqual(gridData.getAttribute('parmId.dbId.dbType'), '')
+
+    def testGetGridDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
+        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
+        req.setParameters('T')
+        req.setEnvelope(params.ENVELOPE)
+        gridData = self.runGridDataTest(req)
+        if not gridData:
+            raise unittest.SkipTest('no data available')
+        lons, lats = gridData[0].getLatLonCoords()
+        lons = lons.reshape(-1)
+        lats = lats.reshape(-1)
+
+        # Ensure all points are within one degree of the original box
+        # to allow slight margin of error for reprojection distortion.
+        testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1,
+                      params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1)
+
+        for i in range(len(lons)):
+            self.assertTrue(testEnv.contains(Point(lons[i], lats[i])))
 
     def testGetVectorGridData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('modelName', 'Fcst')
-        req.addIdentifier('siteId', 'OAX')
+        req.addIdentifier('parmId.dbId.modelName', 'Fcst')
+        req.addIdentifier('parmId.dbId.siteId', params.SITE_ID)
         req.setParameters('Wind')
         times = DAL.getAvailableTimes(req)
         if not(times):
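
A note on the envelope check above: getLatLonCoords() returns two 2-D numpy arrays (longitudes and latitudes for every grid cell), and reshape(-1) simply flattens each to 1-D so the points can be tested individually; the one-degree padding allows for edge cells being pulled slightly outside the exact box when the requested envelope is reprojected onto the GFE grid. A tiny standalone illustration of the flattening step (values made up):

    import numpy as np

    lons = np.array([[-97.0, -96.5], [-97.0, -96.5]])
    lats = np.array([[41.0, 41.0], [41.5, 41.5]])
    print(lons.reshape(-1))   # [-97.  -96.5 -97.  -96.5]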

@@ -114,90 +145,76 @@ class GfeTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         constraint = RequestConstraint.new(operator, value)
         req.addIdentifier(key, constraint)
-        req.setLocationNames('OAX')
+        req.setLocationNames(params.SITE_ID)
         req.setParameters('T')
         return self.runGridDataTest(req)
 
-    def testGetDataWithEqualsString(self):
-        geometryData = self._runConstraintTest('modelName', '=', 'Fcst')
-        for record in geometryData:
-            self.assertEqual(record.getAttribute('modelName'), 'Fcst')
+    def testGetDataWithModelNameEqualsString(self):
+        gridData = self._runConstraintTest('parmId.dbId.modelName', '=', 'Fcst')
+        for record in gridData:
+            self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst')
+
+    def testGetDataWithDbTypeEqualsString(self):
+        gridData = self._runConstraintTest('parmId.dbId.dbType', '=', 'Prac')
+        for record in gridData:
+            self.assertEqual(record.getAttribute('parmId.dbId.dbType'), 'Prac')
 
     def testGetDataWithEqualsUnicode(self):
-        geometryData = self._runConstraintTest('modelName', '=', u'Fcst')
-        for record in geometryData:
-            self.assertEqual(record.getAttribute('modelName'), 'Fcst')
+        gridData = self._runConstraintTest('parmId.dbId.modelName', '=', u'Fcst')
+        for record in gridData:
+            self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst')
 
     # No numeric tests since no numeric identifiers are available.
 
     def testGetDataWithEqualsNone(self):
-        geometryData = self._runConstraintTest('modelName', '=', None)
-        for record in geometryData:
-            self.assertIsNone(record.getAttribute('modelName'))
+        gridData = self._runConstraintTest('parmId.dbId.modelName', '=', None)
+        for record in gridData:
+            self.assertIsNone(record.getAttribute('parmId.dbId.modelName'))
 
     def testGetDataWithNotEquals(self):
-        geometryData = self._runConstraintTest('modelName', '!=', 'Fcst')
-        for record in geometryData:
-            self.assertNotEqual(record.getAttribute('modelName'), 'Fcst')
+        gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', 'Fcst')
+        for record in gridData:
+            self.assertNotEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst')
 
     def testGetDataWithNotEqualsNone(self):
-        geometryData = self._runConstraintTest('modelName', '!=', None)
-        for record in geometryData:
-            self.assertIsNotNone(record.getAttribute('modelName'))
-
-    def testGetDataWithGreaterThan(self):
-        geometryData = self._runConstraintTest('modelName', '>', 'Fcst')
-        for record in geometryData:
-            self.assertGreater(record.getAttribute('modelName'), 'Fcst')
-
-    def testGetDataWithLessThan(self):
-        geometryData = self._runConstraintTest('modelName', '<', 'Fcst')
-        for record in geometryData:
-            self.assertLess(record.getAttribute('modelName'), 'Fcst')
-
-    def testGetDataWithGreaterThanEquals(self):
-        geometryData = self._runConstraintTest('modelName', '>=', 'Fcst')
-        for record in geometryData:
-            self.assertGreaterEqual(record.getAttribute('modelName'), 'Fcst')
-
-    def testGetDataWithLessThanEquals(self):
-        geometryData = self._runConstraintTest('modelName', '<=', 'Fcst')
-        for record in geometryData:
-            self.assertLessEqual(record.getAttribute('modelName'), 'Fcst')
+        gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', None)
+        for record in gridData:
+            self.assertIsNotNone(record.getAttribute('parmId.dbId.modelName'))
 
     def testGetDataWithInTuple(self):
         collection = ('Fcst', 'SAT')
-        geometryData = self._runConstraintTest('modelName', 'in', collection)
-        for record in geometryData:
-            self.assertIn(record.getAttribute('modelName'), collection)
+        gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection)
+        for record in gridData:
+            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)
 
     def testGetDataWithInList(self):
         collection = ['Fcst', 'SAT']
-        geometryData = self._runConstraintTest('modelName', 'in', collection)
-        for record in geometryData:
-            self.assertIn(record.getAttribute('modelName'), collection)
+        gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection)
+        for record in gridData:
+            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)
 
     def testGetDataWithInGenerator(self):
         collection = ('Fcst', 'SAT')
         generator = (item for item in collection)
-        geometryData = self._runConstraintTest('modelName', 'in', generator)
-        for record in geometryData:
-            self.assertIn(record.getAttribute('modelName'), collection)
+        gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', generator)
+        for record in gridData:
+            self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection)
 
     def testGetDataWithNotInList(self):
         collection = ('Fcst', 'SAT')
-        geometryData = self._runConstraintTest('modelName', 'not in', collection)
-        for record in geometryData:
-            self.assertNotIn(record.getAttribute('modelName'), collection)
+        gridData = self._runConstraintTest('parmId.dbId.modelName', 'not in', collection)
+        for record in gridData:
+            self.assertNotIn(record.getAttribute('parmId.dbId.modelName'), collection)
 
     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
-            self._runConstraintTest('modelName', 'junk', 'Fcst')
+            self._runConstraintTest('parmId.dbId.modelName', 'junk', 'Fcst')
 
     def testGetDataWithInvalidConstraintValueThrowsException(self):
         with self.assertRaises(TypeError):
-            self._runConstraintTest('modelName', '=', {})
+            self._runConstraintTest('parmId.dbId.modelName', '=', {})
def testGetDataWithEmptyInConstraintThrowsException(self):
|
def testGetDataWithEmptyInConstraintThrowsException(self):
|
||||||
with self.assertRaises(ValueError):
|
with self.assertRaises(ValueError):
|
||||||
self._runConstraintTest('modelName', 'in', [])
|
self._runConstraintTest('parmId.dbId.modelName', 'in', [])
|
||||||
|
|
||||||
|
|
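The parameterized tests above (and the ones that follow) import a shared params module for site-specific values such as params.SITE_ID, params.OBS_STATION, params.AIRPORT, and params.ENVELOPE. That module is not shown in this diff; the following is only a hypothetical sketch of it, with values inferred from the hard-coded literals being replaced ('OAX', 'KOMA', 'OMA', and the box around KOAX):

# Hypothetical sketch of the shared params module assumed by these tests;
# the actual file is not part of this diff. Values are inferred from the
# literals the parameterized tests replace.
from shapely.geometry import box

SITE_ID = 'OAX'          # CWA / GFE site identifier used by the site-based tests
OBS_STATION = 'KOMA'     # METAR station used by obs and model sounding tests
AIRPORT = 'OMA'          # airport identifier used by the pirep tests
ENVELOPE = box(-97.0, 41.0, -96.0, 42.0)  # default request area (box around KOAX)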
awips/test/dafTests/testGfeEditArea.py (new file, 220 lines)
@@ -0,0 +1,220 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

from __future__ import print_function
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from ufpy.dataaccess import DataAccessLayer as DAL
from ufpy.ThriftClient import ThriftRequestException

import baseDafTestCase
import params

#
# Test DAF support for GFE edit area data
#
# SOFTWARE HISTORY
#
# Date          Ticket#  Engineer    Description
# ------------  -------- ----------- --------------------------
# 06/08/17      6298     mapeters    Initial Creation.
# 09/27/17      6463     tgurney     Remove GID site identifier
#
#


class GfeEditAreaTestCase(baseDafTestCase.DafTestCase):
    """Test DAF support for GFE edit area data"""

    datatype = 'gfeEditArea'

    siteIdKey = 'siteId'

    editAreaNames = ['ISC_NHA', 'SDZ066', 'StormSurgeWW_EditArea']

    groupKey = 'group'

    groups = ['ISC', 'WFOs', 'FIPS_' + params.SITE_ID]

    def testGetAvailableParameters(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        with self.assertRaises(ThriftRequestException):
            self.runParametersTest(req)

    def testGetAvailableLocations(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        self.runLocationsTest(req)

    def testGetAvailableTimes(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        with self.assertRaises(ThriftRequestException):
            self.runTimesTest(req)

    def testGetGeometryDataWithoutSiteIdThrowsException(self):
        req = DAL.newDataRequest(self.datatype)
        with self.assertRaises(ThriftRequestException):
            self.runGeometryDataTest(req)

    def testGetGeometryData(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        data = self.runGeometryDataTest(req)
        for item in data:
            self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey))

    def testGetGeometryDataWithLocNames(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        req.setLocationNames(*self.editAreaNames)
        data = self.runGeometryDataTest(req)
        for item in data:
            self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey))
            self.assertIn(item.getLocationName(), self.editAreaNames)

    def testGetGeometryDataWithGroups(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        req.addIdentifier(self.groupKey, RequestConstraint.new('in', self.groups))
        data = self.runGeometryDataTest(req)
        for item in data:
            self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey))
            self.assertIn(item.getAttribute(self.groupKey), self.groups)

    def testGetGeometryDataWithLocNamesAndGroupsThrowException(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        req.setLocationNames(*self.editAreaNames)
        req.addIdentifier(self.groupKey, RequestConstraint.new('in', self.groups))
        with self.assertRaises(ThriftRequestException):
            self.runGeometryDataTest(req)

    def testGetGeometryDataWithEnvelope(self):
        req = DAL.newDataRequest(self.datatype)
        req.addIdentifier(self.siteIdKey, params.SITE_ID)
        req.setEnvelope(params.ENVELOPE)
        data = self.runGeometryDataTest(req)
        for item in data:
            self.assertEqual(params.SITE_ID, item.getAttribute(self.siteIdKey))
            self.assertTrue(params.ENVELOPE.intersects(item.getGeometry()))

    def testGetIdentifierValues(self):
        req = DAL.newDataRequest(self.datatype)
        optionalIds = set(DAL.getOptionalIdentifiers(req))
        requiredIds = set(DAL.getRequiredIdentifiers(req))
        self.runGetIdValuesTest(optionalIds | requiredIds)

    def testGetInvalidIdentifierValuesThrowsException(self):
        self.runInvalidIdValuesTest()

    def testGetNonexistentIdentifierValuesThrowsException(self):
        self.runNonexistentIdValuesTest()

    def _runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setLocationNames(*self.editAreaNames)
        return self.runGeometryDataTest(req)

    def testGetDataWithEqualsString(self):
        geomData = self._runConstraintTest(self.siteIdKey, '=', params.SITE_ID)
        for record in geomData:
            self.assertEqual(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithEqualsUnicode(self):
        geomData = self._runConstraintTest(self.siteIdKey, '=', params.SITE_ID.decode('unicode-escape'))
        for record in geomData:
            self.assertEqual(record.getAttribute(self.siteIdKey), params.SITE_ID)

    # No numeric tests since no numeric identifiers are available.

    def testGetDataWithEqualsNone(self):
        geomData = self._runConstraintTest(self.siteIdKey, '=', None)
        for record in geomData:
            self.assertIsNone(record.getAttribute(self.siteIdKey))

    def testGetDataWithNotEquals(self):
        geomData = self._runConstraintTest(self.siteIdKey, '!=', params.SITE_ID)
        for record in geomData:
            self.assertNotEqual(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithNotEqualsNone(self):
        geomData = self._runConstraintTest(self.siteIdKey, '!=', None)
        for record in geomData:
            self.assertIsNotNone(record.getAttribute(self.siteIdKey))

    def testGetDataWithGreaterThan(self):
        geomData = self._runConstraintTest(self.siteIdKey, '>', params.SITE_ID)
        for record in geomData:
            self.assertGreater(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithLessThan(self):
        geomData = self._runConstraintTest(self.siteIdKey, '<', params.SITE_ID)
        for record in geomData:
            self.assertLess(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithGreaterThanEquals(self):
        geomData = self._runConstraintTest(self.siteIdKey, '>=', params.SITE_ID)
        for record in geomData:
            self.assertGreaterEqual(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithLessThanEquals(self):
        geomData = self._runConstraintTest(self.siteIdKey, '<=', params.SITE_ID)
        for record in geomData:
            self.assertLessEqual(record.getAttribute(self.siteIdKey), params.SITE_ID)

    def testGetDataWithInTuple(self):
        collection = (params.SITE_ID,)
        geomData = self._runConstraintTest(self.siteIdKey, 'in', collection)
        for record in geomData:
            self.assertIn(record.getAttribute(self.siteIdKey), collection)

    def testGetDataWithInList(self):
        collection = [params.SITE_ID,]
        geomData = self._runConstraintTest(self.siteIdKey, 'in', collection)
        for record in geomData:
            self.assertIn(record.getAttribute(self.siteIdKey), collection)

    def testGetDataWithInGenerator(self):
        collection = (params.SITE_ID,)
        generator = (item for item in collection)
        geomData = self._runConstraintTest(self.siteIdKey, 'in', generator)
        for record in geomData:
            self.assertIn(record.getAttribute(self.siteIdKey), collection)

    def testGetDataWithNotInList(self):
        collection = [params.SITE_ID,]
        geomData = self._runConstraintTest(self.siteIdKey, 'not in', collection)
        for record in geomData:
            self.assertNotIn(record.getAttribute(self.siteIdKey), collection)

    def testGetDataWithInvalidConstraintTypeThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest(self.siteIdKey, 'junk', params.SITE_ID)

    def testGetDataWithInvalidConstraintValueThrowsException(self):
        with self.assertRaises(TypeError):
            self._runConstraintTest(self.siteIdKey, '=', {})

    def testGetDataWithEmptyInConstraintThrowsException(self):
        with self.assertRaises(ValueError):
            self._runConstraintTest(self.siteIdKey, 'in', [])
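Each test class above defines a _runConstraintTest helper that follows the same Data Access Framework pattern: build a request for the datatype, attach a RequestConstraint as an identifier, and fetch the geometry (or grid) data. A minimal standalone sketch of that pattern follows; the host name and identifier values are illustrative only, and it assumes a reachable EDEX server.

# Minimal sketch of the constraint-request pattern these tests exercise.
from awips.dataaccess import DataAccessLayer as DAL
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

DAL.changeEDEXHost("edex-cloud.unidata.ucar.edu")  # illustrative host

req = DAL.newDataRequest('gfeEditArea')
# Constrain one identifier with an operator/value pair, exactly as
# _runConstraintTest does with its (key, operator, value) arguments.
req.addIdentifier('siteId', RequestConstraint.new('=', 'OAX'))
req.setLocationNames('ISC_NHA')

for geom in DAL.getGeometryData(req):
    print(geom.getLocationName(), geom.getAttribute('siteId'))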
@@ -25,6 +25,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 from awips.ThriftClient import ThriftRequestException

 import baseDafTestCase
+import params
 import unittest

 #
@@ -44,6 +45,9 @@ import unittest
 # 10/13/16     5942     bsteffen    Test envelopes
 # 11/08/16     5985     tgurney     Skip certain tests when no
 #                                   data is available
+# 12/07/16     5981     tgurney     Parameterize
+# 01/06/17     5981     tgurney     Skip envelope test when no
+#                                   data is available
 #
@@ -54,8 +58,6 @@ class GridTestCase(baseDafTestCase.DafTestCase):
     model = 'GFS160'

-    envelope = box(-97.0, 41.0, -96.0, 42.0)
-
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier('info.datasetId', self.model)
@@ -110,18 +112,18 @@ class GridTestCase(baseDafTestCase.DafTestCase):
         req.addIdentifier('info.datasetId', self.model)
         req.setLevels('2FHAG')
         req.setParameters('T')
-        req.setEnvelope(self.envelope)
+        req.setEnvelope(params.ENVELOPE)
         gridData = self.runGridDataTest(req)
-        if not gridData:
-            raise unittest.SkipTest('no data available')
+        if len(gridData) == 0:
+            raise unittest.SkipTest("No data available")
         lons, lats = gridData[0].getLatLonCoords()
         lons = lons.reshape(-1)
         lats = lats.reshape(-1)

         # Ensure all points are within one degree of the original box
         # to allow slight margin of error for reprojection distortion.
-        testEnv = box(self.envelope.bounds[0] - 1, self.envelope.bounds[1] - 1,
-                      self.envelope.bounds[2] + 1, self.envelope.bounds[3] + 1 )
+        testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1,
+                      params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1 )

         for i in range(len(lons)):
             self.assertTrue(testEnv.contains(Point(lons[i], lats[i])))
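The envelope test above widens the requested box by one degree on every side before asserting that each returned point falls inside it, to allow for reprojection distortion. A small self-contained sketch of that tolerance check, with illustrative coordinates standing in for the flattened arrays from getLatLonCoords():

# Sketch of the one-degree tolerance check used in the envelope test.
from shapely.geometry import Point, box

envelope = box(-97.0, 41.0, -96.0, 42.0)
# Expand each bound by one degree to allow for reprojection distortion.
test_env = box(envelope.bounds[0] - 1, envelope.bounds[1] - 1,
               envelope.bounds[2] + 1, envelope.bounds[3] + 1)

lons = [-96.9, -96.5, -96.1]   # illustrative values only
lats = [41.1, 41.5, 41.9]

assert all(test_env.contains(Point(lon, lat)) for lon, lat in zip(lons, lats))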
@@ -40,6 +40,7 @@ import unittest
 # 06/13/16     5574     mapeters    Add advanced query tests
 # 06/21/16     5548     tgurney     Skip tests that cause errors
 # 06/30/16     5725     tgurney     Add test for NOT IN
+# 01/06/17     5981     tgurney     Do not check data times
 #
 #
@@ -71,7 +72,7 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
         req.setLocationNames('OAX')
         req.addIdentifier('cwa', 'OAX')
         req.setParameters('countyname', 'state', 'fips')
-        self.runGeometryDataTest(req)
+        self.runGeometryDataTest(req, checkDataTimes=False)

     def testRequestingTimesThrowsTimeAgnosticDataException(self):
         req = DAL.newDataRequest(self.datatype)
@@ -104,22 +105,6 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
         with self.assertRaises(ThriftRequestException):
             idValues = DAL.getIdentifierValues(req, 'state')

-    @unittest.skip('avoid EDEX error')
-    def testGetColumnIdValuesWithNonexistentTableThrowsException(self):
-        req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('table', 'mapdata.nonexistentjunk')
-        req.addIdentifier('geomField', 'the_geom')
-        with self.assertRaises(ThriftRequestException):
-            idValues = DAL.getIdentifierValues(req, 'state')
-
-    @unittest.skip('avoid EDEX error')
-    def testGetNonexistentColumnIdValuesThrowsException(self):
-        req = DAL.newDataRequest(self.datatype)
-        req.addIdentifier('table', 'mapdata.county')
-        req.addIdentifier('geomField', 'the_geom')
-        with self.assertRaises(ThriftRequestException):
-            idValues = DAL.getIdentifierValues(req, 'nonexistentjunk')
-
     def testGetInvalidIdentifierValuesThrowsException(self):
         self.runInvalidIdValuesTest()
@@ -134,7 +119,7 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
         constraint = RequestConstraint.new(operator, value)
         req.addIdentifier(key, constraint)
         req.setParameters('state', 'reservoir', 'area_sq_mi')
-        return self.runGeometryDataTest(req)
+        return self.runGeometryDataTest(req, checkDataTimes=False)

     def testGetDataWithEqualsString(self):
         geometryData = self._runConstraintTest('state', '=', 'NE')
@@ -23,6 +23,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

 import baseDafTestCase
+import params
 import unittest

 #
@@ -40,6 +41,9 @@ import unittest
 # 06/30/16     5725     tgurney     Add test for NOT IN
 # 11/10/16     5985     tgurney     Mark expected failures prior
 #                                   to 17.3.1
+# 12/07/16     5981     tgurney     Parameterize
+# 12/19/16     5981     tgurney     Remove pre-17.3 expected fails
+# 12/20/16     5981     tgurney     Add envelope test
 #
 #
@@ -51,31 +55,25 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
     def testGetAvailableParameters(self):
         req = DAL.newDataRequest(self.datatype)
         self.runParametersTest(req)

     def testGetAvailableLocations(self):
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier("reportType", "ETA")
         self.runLocationsTest(req)

     def testGetAvailableTimes(self):
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier("reportType", "ETA")
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         self.runTimesTest(req)

-    @unittest.expectedFailure
     def testGetGeometryData(self):
         req = DAL.newDataRequest(self.datatype)
         req.addIdentifier("reportType", "ETA")
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2")

         print("Testing getGeometryData()")

         geomData = DAL.getGeometryData(req)
         print("Number of geometry records: " + str(len(geomData)))
         print("Sample geometry data:")
@@ -84,18 +82,32 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
             # One dimensional parameters are reported on the 0.0UNKNOWN level.
             # 2D parameters are reported on MB levels from pressure.
             if record.getLevel() == "0.0UNKNOWN":
-                print(" sfcPress=" + record.getString("sfcPress") + record.getUnit("sfcPress"), end="")
-                print(" temp2=" + record.getString("temp2") + record.getUnit("temp2"), end="")
-                print(" q2=" + record.getString("q2") + record.getUnit("q2"), end="")
+                print(" sfcPress=" + record.getString("sfcPress") +
+                      record.getUnit("sfcPress"), end="")
+                print(" temp2=" + record.getString("temp2") +
+                      record.getUnit("temp2"), end="")
+                print(" q2=" + record.getString("q2") +
+                      record.getUnit("q2"), end="")
             else:
-                print(" pressure=" + record.getString("pressure") + record.getUnit("pressure"), end="")
-                print(" temperature=" + record.getString("temperature") + record.getUnit("temperature"), end="")
-                print(" specHum=" + record.getString("specHum") + record.getUnit("specHum"), end="")
+                print(" pressure=" + record.getString("pressure") +
+                      record.getUnit("pressure"), end="")
+                print(" temperature=" + record.getString("temperature") +
+                      record.getUnit("temperature"), end="")
+                print(" specHum=" + record.getString("specHum") +
+                      record.getUnit("specHum"), end="")
             print(" geometry=" + str(record.getGeometry()))

         print("getGeometryData() complete\n\n")

+    def testGetGeometryDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier("reportType", "ETA")
+        req.setEnvelope(params.ENVELOPE)
+        req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2")
+        print("Testing getGeometryData()")
+        data = DAL.getGeometryData(req)
+        for item in data:
+            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
+
     def testGetIdentifierValues(self):
         req = DAL.newDataRequest(self.datatype)
         optionalIds = set(DAL.getOptionalIdentifiers(req))
@@ -111,7 +123,7 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         constraint = RequestConstraint.new(operator, value)
         req.setParameters('dataURI')
-        req.setLocationNames('KOMA', 'KORD', 'KOFK', 'KLNK')
+        req.setLocationNames(params.OBS_STATION, 'KORD', 'KOFK', 'KLNK')
         req.addIdentifier(key, constraint)
         return self.runGeometryDataTest(req)
@@ -123,13 +135,11 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
     #
     # Can also eyeball the number of returned records.

-    @unittest.expectedFailure
     def testGetDataWithEqualsString(self):
         geometryData = self._runConstraintTest('reportType', '=', 'ETA')
         for record in geometryData:
             self.assertIn('/ETA/', record.getString('dataURI'))

-    @unittest.expectedFailure
     def testGetDataWithEqualsUnicode(self):
         geometryData = self._runConstraintTest('reportType', '=', u'ETA')
         for record in geometryData:
@@ -137,37 +147,29 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
     # No numeric tests since no numeric identifiers are available.

-    @unittest.expectedFailure
     def testGetDataWithEqualsNone(self):
         geometryData = self._runConstraintTest('reportType', '=', None)

-    @unittest.expectedFailure
     def testGetDataWithNotEquals(self):
         geometryData = self._runConstraintTest('reportType', '!=', 'ETA')
         for record in geometryData:
             self.assertNotIn('/ETA/', record.getString('dataURI'))

-    @unittest.expectedFailure
     def testGetDataWithNotEqualsNone(self):
         geometryData = self._runConstraintTest('reportType', '!=', None)

-    @unittest.expectedFailure
     def testGetDataWithGreaterThan(self):
         geometryData = self._runConstraintTest('reportType', '>', 'ETA')

-    @unittest.expectedFailure
     def testGetDataWithLessThan(self):
         geometryData = self._runConstraintTest('reportType', '<', 'ETA')

-    @unittest.expectedFailure
     def testGetDataWithGreaterThanEquals(self):
         geometryData = self._runConstraintTest('reportType', '>=', 'ETA')

-    @unittest.expectedFailure
     def testGetDataWithLessThanEquals(self):
         geometryData = self._runConstraintTest('reportType', '<=', 'ETA')

-    @unittest.expectedFailure
     def testGetDataWithInTuple(self):
         collection = ('ETA', 'GFS')
         geometryData = self._runConstraintTest('reportType', 'in', collection)
@@ -175,7 +177,6 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
             dataURI = record.getString('dataURI')
             self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI)

-    @unittest.expectedFailure
     def testGetDataWithInList(self):
         collection = ['ETA', 'GFS']
         geometryData = self._runConstraintTest('reportType', 'in', collection)
@@ -183,7 +184,6 @@ class ModelSoundingTestCase(baseDafTestCase.DafTestCase):
             dataURI = record.getString('dataURI')
             self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI)

-    @unittest.expectedFailure
     def testGetDataWithInGenerator(self):
         collection = ('ETA', 'GFS')
         generator = (item for item in collection)
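As the comments in testGetGeometryData note, one-dimensional parameters come back on the '0.0UNKNOWN' level while two-dimensional parameters come back on MB pressure levels, so callers typically branch on record.getLevel(). A hedged sketch of that bookkeeping for a list of returned records:

# Sketch of separating surface records from pressure-level records, following
# the level convention described above. 'geomData' stands in for the list
# returned by DAL.getGeometryData(req).
def split_by_level(geomData):
    surface, levels = [], {}
    for record in geomData:
        if record.getLevel() == "0.0UNKNOWN":
            # 1-D parameters (sfcPress, temp2, q2) live on this pseudo-level.
            surface.append(record)
        else:
            # 2-D parameters are keyed by their MB pressure level.
            levels.setdefault(record.getLevel(), []).append(record)
    return surface, levels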
@@ -23,6 +23,7 @@ from awips.dataaccess import DataAccessLayer as DAL
 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

 import baseDafTestCase
+import params
 import unittest

 #
@@ -38,6 +39,8 @@ import unittest
 # 06/09/16     5587     bsteffen    Add getIdentifierValues tests
 # 06/13/16     5574     tgurney     Add advanced query tests
 # 06/30/16     5725     tgurney     Add test for NOT IN
+# 12/07/16     5981     tgurney     Parameterize
+# 12/20/16     5981     tgurney     Add envelope test
 #
 #
@@ -57,14 +60,22 @@ class ObsTestCase(baseDafTestCase.DafTestCase):
     def testGetAvailableTimes(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         self.runTimesTest(req)

     def testGetGeometryData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         req.setParameters("temperature", "seaLevelPress", "dewpoint")
-        self.runGeometryDataTest(req)
+        data = self.runGeometryDataTest(req)
+
+    def testGetGeometryDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.setEnvelope(params.ENVELOPE)
+        req.setParameters("temperature", "seaLevelPress", "dewpoint")
+        data = self.runGeometryDataTest(req)
+        for item in data:
+            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))

     def testGetIdentifierValues(self):
         req = DAL.newDataRequest(self.datatype)
@@ -81,7 +92,7 @@ class ObsTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         constraint = RequestConstraint.new(operator, value)
         req.setParameters("temperature", "reportType")
-        req.setLocationNames("KOMA")
+        req.setLocationNames(params.OBS_STATION)
         req.addIdentifier(key, constraint)
         return self.runGeometryDataTest(req)
@@ -22,6 +22,7 @@ from __future__ import print_function
 from awips.dataaccess import DataAccessLayer as DAL

 import baseDafTestCase
+import params
 import unittest

 #
@@ -34,6 +35,8 @@ import unittest
 # 01/19/16     4795     mapeters    Initial Creation.
 # 04/11/16     5548     tgurney     Cleanup
 # 04/18/16     5548     tgurney     More cleanup
+# 12/07/16     5981     tgurney     Parameterize
+# 12/20/16     5981     tgurney     Add envelope test
 #
 #
@@ -53,16 +56,14 @@ class PirepTestCase(baseDafTestCase.DafTestCase):
     def testGetAvailableTimes(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames('OMA')
+        req.setLocationNames(params.AIRPORT)
         self.runTimesTest(req)

     def testGetGeometryData(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setLocationNames('OMA')
+        req.setLocationNames(params.AIRPORT)
         req.setParameters("temperature", "windSpeed", "hazardType", "turbType")

         print("Testing getGeometryData()")

         geomData = DAL.getGeometryData(req)
         self.assertIsNotNone(geomData)
         print("Number of geometry records: " + str(len(geomData)))
@@ -78,6 +79,13 @@ class PirepTestCase(baseDafTestCase.DafTestCase):
             print(" hazardType=" + record.getString("hazardType"), end="")
             print(" turbType=" + record.getString("turbType"), end="")
             print(" geometry=", record.getGeometry())

         print("getGeometryData() complete\n")

+    def testGetGeometryDataWithEnvelope(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.setParameters("temperature", "windSpeed", "hazardType", "turbType")
+        req.setEnvelope(params.ENVELOPE)
+        print("Testing getGeometryData()")
+        data = DAL.getGeometryData(req)
+        for item in data:
+            self.assertTrue(params.ENVELOPE.contains(item.getGeometry()))
awips/test/dafTests/testRadarGraphics.py (new file, 95 lines)
@@ -0,0 +1,95 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

import unittest

from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from ufpy.dataaccess import DataAccessLayer as DAL

import baseRadarTestCase
import params


#
# Test DAF support for radar graphics data
#
# SOFTWARE HISTORY
#
# Date          Ticket#  Engineer    Description
# ------------  -------- ----------- --------------------------
# 08/25/16      2671     tgurney     Initial creation.
# 08/31/16      2671     tgurney     Add mesocyclone
# 09/08/16      2671     tgurney     Add storm track
# 09/27/16      2671     tgurney     Add hail index
# 09/30/16      2671     tgurney     Add TVS
# 12/07/16      5981     tgurney     Parameterize
# 12/19/16      5981     tgurney     Do not check data times on
#                                    returned data
#
#
class RadarGraphicsTestCase(baseRadarTestCase.BaseRadarTestCase):
    """Test DAF support for radar data"""

    datatype = 'radar'

    def runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters('166')
        # TODO: Cannot check datatimes on the result because the times returned
        # by getAvailableTimes have level = -1.0, while the time on the actual
        # data has the correct level set (>= 0.0).
        return self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataMeltingLayer(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters('166')
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataMesocyclone(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters('141')
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataStormTrack(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters('58')
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataHailIndex(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters('59')
        self.runGeometryDataTest(req, checkDataTimes=False)

    def testGetGeometryDataTVS(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters('61')
        self.runGeometryDataTest(req, checkDataTimes=False)
awips/test/dafTests/testRadarGrid.py (new file, 61 lines)
@@ -0,0 +1,61 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

from ufpy.dataaccess import DataAccessLayer as DAL
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint

import baseRadarTestCase
import params
import unittest

#
# Test DAF support for radar grid data
#
# SOFTWARE HISTORY
#
# Date          Ticket#  Engineer    Description
# ------------  -------- ----------- --------------------------
# 08/25/16      2671     tgurney     Initial creation
#
#


class RadarTestCase(baseRadarTestCase.BaseRadarTestCase):
    """Test DAF support for radar data"""

    datatype = 'radar'

    parameterList = ['94']

    def runConstraintTest(self, key, operator, value):
        req = DAL.newDataRequest(self.datatype)
        constraint = RequestConstraint.new(operator, value)
        req.addIdentifier(key, constraint)
        req.setParameters(*self.parameterList)
        # Don't test shapes since they may differ.
        return self.runGridDataTest(req, testSameShape=False)

    def testGetGridData(self):
        req = DAL.newDataRequest(self.datatype)
        req.setEnvelope(params.ENVELOPE)
        req.setLocationNames(self.radarLoc)
        req.setParameters(*self.parameterList)
        # Don't test shapes since they may differ.
        self.runGridDataTest(req, testSameShape=False)
@@ -24,6 +24,7 @@ from awips.dataaccess import DataAccessLayer as DAL

 from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
 import baseDafTestCase
+import params
 import unittest

 #
@@ -41,6 +42,8 @@ import unittest
 #                                   superclass
 # 06/13/16     5574     tgurney     Add advanced query tests
 # 06/30/16     5725     tgurney     Add test for NOT IN
+# 12/07/16     5981     tgurney     Parameterize
+# 01/06/17     5981     tgurney     Do not check data times
 #
 #
@@ -50,14 +53,9 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
     datatype = "radar_spatial"

-    envelope = box(-97.0, 41.0, -96.0, 42.0)
-    """
-    Default request area (box around KOAX)
-    """
-
     def testGetAvailableLocations(self):
         req = DAL.newDataRequest(self.datatype)
-        req.setEnvelope(self.envelope)
+        req.setEnvelope(params.ENVELOPE)
         self.runLocationsTest(req)

     def testGetAvailableParameters(self):
@@ -71,7 +69,7 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
         req = DAL.newDataRequest(self.datatype)
         req.setLocationNames("TORD", "TMDW")
         req.setParameters("wfo_id", "name", "elevmeter")
-        self.runGeometryDataTest(req)
+        self.runGeometryDataTest(req, checkDataTimes=False)

     def testRequestingTimesThrowsTimeAgnosticDataException(self):
         req = DAL.newDataRequest(self.datatype)
@@ -82,17 +80,17 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
         constraint = RequestConstraint.new(operator, value)
         req.addIdentifier(key, constraint)
         req.setParameters('elevmeter', 'eqp_elv', 'wfo_id', 'immutablex')
-        return self.runGeometryDataTest(req)
+        return self.runGeometryDataTest(req, checkDataTimes=False)

     def testGetDataWithEqualsString(self):
-        geometryData = self._runConstraintTest('wfo_id', '=', 'OAX')
+        geometryData = self._runConstraintTest('wfo_id', '=', params.SITE_ID)
         for record in geometryData:
-            self.assertEqual(record.getString('wfo_id'), 'OAX')
+            self.assertEqual(record.getString('wfo_id'), params.SITE_ID)

     def testGetDataWithEqualsUnicode(self):
-        geometryData = self._runConstraintTest('wfo_id', '=', u'OAX')
+        geometryData = self._runConstraintTest('wfo_id', '=', unicode(params.SITE_ID))
         for record in geometryData:
-            self.assertEqual(record.getString('wfo_id'), 'OAX')
+            self.assertEqual(record.getString('wfo_id'), params.SITE_ID)

     def testGetDataWithEqualsInt(self):
         geometryData = self._runConstraintTest('immutablex', '=', 57)
@@ -115,9 +113,9 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
             self.assertEqual(record.getType('wfo_id'), 'NULL')

     def testGetDataWithNotEquals(self):
-        geometryData = self._runConstraintTest('wfo_id', '!=', 'OAX')
+        geometryData = self._runConstraintTest('wfo_id', '!=', params.SITE_ID)
         for record in geometryData:
-            self.assertNotEquals(record.getString('wfo_id'), 'OAX')
+            self.assertNotEquals(record.getString('wfo_id'), params.SITE_ID)

     def testGetDataWithNotEqualsNone(self):
         geometryData = self._runConstraintTest('wfo_id', '!=', None)
@@ -145,33 +143,33 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
             self.assertLessEqual(record.getNumber('eqp_elv'), 138)

     def testGetDataWithInTuple(self):
-        collection = ('OAX', 'GID')
+        collection = (params.SITE_ID, 'GID')
         geometryData = self._runConstraintTest('wfo_id', 'in', collection)
         for record in geometryData:
             self.assertIn(record.getString('wfo_id'), collection)

     def testGetDataWithInList(self):
-        collection = ['OAX', 'GID']
+        collection = [params.SITE_ID, 'GID']
         geometryData = self._runConstraintTest('wfo_id', 'in', collection)
         for record in geometryData:
             self.assertIn(record.getString('wfo_id'), collection)

     def testGetDataWithInGenerator(self):
-        collection = ('OAX', 'GID')
+        collection = (params.SITE_ID, 'GID')
         generator = (item for item in collection)
         geometryData = self._runConstraintTest('wfo_id', 'in', generator)
         for record in geometryData:
             self.assertIn(record.getString('wfo_id'), collection)

     def testGetDataWithNotInList(self):
-        collection = ['OAX', 'GID']
+        collection = [params.SITE_ID, 'GID']
         geometryData = self._runConstraintTest('wfo_id', 'not in', collection)
         for record in geometryData:
             self.assertNotIn(record.getString('wfo_id'), collection)

     def testGetDataWithInvalidConstraintTypeThrowsException(self):
         with self.assertRaises(ValueError):
-            self._runConstraintTest('wfo_id', 'junk', 'OAX')
+            self._runConstraintTest('wfo_id', 'junk', params.SITE_ID)

     def testGetDataWithInvalidConstraintValueThrowsException(self):
         with self.assertRaises(TypeError):
@@ -24,7 +24,6 @@ from awips.ThriftClient import ThriftRequestException

 import baseDafTestCase
 import shapely.geometry
-import unittest

 #
 # Test DAF support for topo data
@@ -39,7 +38,9 @@ import unittest
 # 05/26/16     5587     tgurney     Add test for
 #                                   getIdentifierValues()
 # 06/01/16     5587     tgurney     Update testGetIdentifierValues
-#
+# 07/18/17     6253     randerso    Removed referenced to GMTED
+# 02/20/18     7220     mapeters    Added tests for getting filtered
+#                                   group/dataset identifier values
 #
@@ -61,7 +62,7 @@ class TopoTestCase(baseDafTestCase.DafTestCase):
         print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n")
         print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n")

-        for topoFile in ["gmted2010", "gtopo30"]:
+        for topoFile in ["gtopo30"]:
             print("\n" + topoFile)
             req.addIdentifier("topoFile", topoFile)
             gridData = DAL.getGridData(req)
@@ -89,6 +90,18 @@ class TopoTestCase(baseDafTestCase.DafTestCase):
         requiredIds = set(DAL.getRequiredIdentifiers(req))
         self.runGetIdValuesTest(optionalIds | requiredIds)

+    def testGetFilteredDatasetValues(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('group', '/')
+        datasetVals = DAL.getIdentifierValues(req, 'dataset')
+        self.assertSequenceEqual(datasetVals, ['full'])
+
+    def testGetFilteredGroupValues(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('dataset', '1')
+        groupVals = DAL.getIdentifierValues(req, 'group')
+        self.assertSequenceEqual(groupVals, ['/interpolated'])
+
     def testGetInvalidIdentifierValuesThrowsException(self):
         self.runInvalidIdValuesTest()
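The new topo tests above show that DAL.getIdentifierValues() can be filtered: adding one identifier to the request (here 'group' or 'dataset') narrows the values returned for the other. A brief sketch of that call pattern, assuming a reachable EDEX server (the actual values depend on the server's topo datastore):

# Sketch of filtered identifier lookups as exercised by the new topo tests.
from awips.dataaccess import DataAccessLayer as DAL

req = DAL.newDataRequest('topo')
req.addIdentifier('group', '/')                     # filter on one identifier...
datasets = DAL.getIdentifierValues(req, 'dataset')  # ...then list values of another
print(datasets)  # the test above expects ['full'] for the root group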
@ -42,7 +42,9 @@ import unittest
|
||||||
# 06/13/16 5574 tgurney Fix checks for None
|
# 06/13/16 5574 tgurney Fix checks for None
|
||||||
# 06/21/16 5548 tgurney Skip tests that cause errors
|
# 06/21/16 5548 tgurney Skip tests that cause errors
|
||||||
# 06/30/16 5725 tgurney Add test for NOT IN
|
# 06/30/16 5725 tgurney Add test for NOT IN
|
||||||
#
|
# 12/12/16 5981 tgurney Improve test performance
|
||||||
|
# 02/20/18 7220 mapeters Added test for getting filtered
|
||||||
|
# column identifier values
|
||||||
#
|
#
|
||||||
|
|
||||||
|
|
||||||
|
@@ -81,22 +83,19 @@ class WarningTestCase(baseDafTestCase.DafTestCase):
         self.runGeometryDataTest(req)

     def testFilterOnLocationName(self):
-        allRecordsCount = len(self._getAllRecords())
         allLocationNames = self._getLocationNames()
-        if allRecordsCount == 0:
+        if len(allLocationNames) == 0:
             errmsg = "No {0} data exists on {1}. Try again with {0} data."
             raise unittest.SkipTest(errmsg.format(self.datatype, DAL.THRIFT_HOST))
-        if len(allLocationNames) != 1:
-            testCount = 3  # number of different location names to test
-            for locationName in allLocationNames[:testCount]:
-                req = DAL.newDataRequest()
-                req.setDatatype(self.datatype)
-                req.setParameters('id')
-                req.setLocationNames(locationName)
-                geomData = DAL.getGeometryData(req)
-                self.assertLess(len(geomData), allRecordsCount)
-                for geom in geomData:
-                    self.assertEqual(geom.getLocationName(), locationName)
+        testCount = 3  # number of different location names to test
+        for locationName in allLocationNames[:testCount]:
+            req = DAL.newDataRequest()
+            req.setDatatype(self.datatype)
+            req.setParameters('id')
+            req.setLocationNames(locationName)
+            geomData = DAL.getGeometryData(req)
+            for geom in geomData:
+                self.assertEqual(geom.getLocationName(), locationName)

     def testFilterOnNonexistentLocationReturnsEmpty(self):
         req = DAL.newDataRequest()
@@ -117,6 +116,13 @@ class WarningTestCase(baseDafTestCase.DafTestCase):
     def testGetColumnIdentifierValues(self):
         self.runGetIdValuesTest(['act'])

+    def testGetFilteredColumnIdentifierValues(self):
+        req = DAL.newDataRequest(self.datatype)
+        req.addIdentifier('sig', 'W')
+        phensigs = DAL.getIdentifierValues(req, 'phensig')
+        for phensig in phensigs:
+            self.assertTrue(phensig.endswith('.W'))
+
     @unittest.skip('avoid EDEX error')
     def testGetInvalidIdentifierValuesThrowsException(self):
         self.runInvalidIdValuesTest()
 32  awips/test/localization/__init__.py  Normal file
@@ -0,0 +1,32 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

#
# __init__.py for ufpy.test.localization package
#
#
# SOFTWARE HISTORY
#
# Date      Ticket#  Engineer  Description
# --------- -------- --------- --------------------------
# 08/07/17  5731     bsteffen  Initial Creation.

__all__ = []
 172  awips/test/localization/testLocalizationFileManager.py  Normal file
@@ -0,0 +1,172 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

#
# Tests for the LocalizationFileManager
#
# SOFTWARE HISTORY
#
# Date      Ticket#  Engineer  Description
# --------- -------- --------- --------------------------
# 08/09/17  5731     bsteffen  Initial Creation.

import unittest

from ufpy.localization.LocalizationFileManager import (LocalizationFileManager,
                                                        LocalizationFileVersionConflictException,
                                                        LocalizationContext,
                                                        LocalizationFileIsNotDirectoryException,
                                                        LocalizationFileDoesNotExistException)

testFile = "purge/defaultPurgeRules.xml"
testContent = "<purgeRuleSet><defaultRule><period>05-05:05:05</period></defaultRule></purgeRuleSet>"
testDir = "purge/"
testNewFile = "purge/testPurgeRules.xml"


class ContextTestCase(unittest.TestCase):
    def test_eq(self):
        c1 = LocalizationContext()
        c2 = LocalizationContext()
        self.assertEqual(c1,c2)
        c3 = LocalizationContext("site", "test")
        c4 = LocalizationContext("site", "test")
        self.assertEqual(c3,c4)
        self.assertNotEqual(c1,c3)

    def test_hash(self):
        c1 = LocalizationContext()
        c2 = LocalizationContext()
        self.assertEqual(hash(c1),hash(c2))
        c3 = LocalizationContext("site", "test")
        c4 = LocalizationContext("site", "test")
        self.assertEqual(hash(c3),hash(c4))


class LFMTestCase(unittest.TestCase):
    def setUp(self):
        self.manager = LocalizationFileManager()
        userFile = self.manager.getSpecific("user", testFile)
        if userFile.exists():
            userFile.delete()
        newFile = self.manager.getSpecific("user", testNewFile)
        if newFile.exists():
            newFile.delete()

    def test_gets(self):
        startingIncremental = self.manager.getIncremental(testFile)
        baseFile = self.manager.getSpecific("base", testFile)
        self.assertEqual(baseFile, startingIncremental[0])
        self.assertTrue(baseFile.exists())
        self.assertFalse(baseFile.isDirectory())
        userFile = self.manager.getSpecific("user", testFile)
        self.assertFalse(userFile.exists())
        with userFile.open("w") as stream:
            stream.write(testContent)
        userFile = self.manager.getSpecific("user", testFile)
        self.assertTrue(userFile.exists())
        with userFile.open('r') as stream:
            self.assertEqual(stream.read(), testContent)
        absFile = self.manager.getAbsolute(testFile)
        self.assertEqual(absFile, userFile)
        endingIncremental = self.manager.getIncremental(testFile)
        self.assertEqual(len(startingIncremental) + 1, len(endingIncremental))
        self.assertEqual(userFile, endingIncremental[-1])
        self.assertEqual(baseFile, endingIncremental[0])

        userFile.delete()
        userFile = self.manager.getSpecific("user", testFile)
        self.assertFalse(userFile.exists())

    def test_concurrent_edit(self):
        userFile1 = self.manager.getSpecific("user", testFile)
        userFile2 = self.manager.getSpecific("user", testFile)
        self.assertFalse(userFile1.exists())
        self.assertFalse(userFile2.exists())
        with self.assertRaises(LocalizationFileVersionConflictException):
            with userFile1.open("w") as stream1:
                stream1.write(testContent)
                with userFile2.open("w") as stream2:
                    stream2.write(testContent)

        userFile = self.manager.getSpecific("user", testFile)
        userFile.delete()

    def test_dir(self):
        dir = self.manager.getAbsolute(testDir)
        self.assertTrue(dir.isDirectory())
        with self.assertRaises(Exception):
            dir.delete()

    def test_list(self):
        abs1 = self.manager.listAbsolute(testDir)
        inc1 = self.manager.listIncremental(testDir)
        self.assertEqual(len(abs1), len(inc1))
        for i in range(len(abs1)):
            self.assertEquals(abs1[i], inc1[i][-1])

        userFile = self.manager.getSpecific("user", testNewFile)
        self.assertNotIn(userFile, abs1)

        with userFile.open("w") as stream:
            stream.write(testContent)
        userFile = self.manager.getSpecific("user", testNewFile)

        abs2 = self.manager.listAbsolute(testDir)
        inc2 = self.manager.listIncremental(testDir)
        self.assertEqual(len(abs2), len(inc2))
        for i in range(len(abs2)):
            self.assertEquals(abs2[i], inc2[i][-1])

        self.assertEquals(len(abs1) + 1, len(abs2))
        self.assertIn(userFile, abs2)

        userFile.delete()

    def test_list_file(self):
        with self.assertRaises(LocalizationFileIsNotDirectoryException):
            self.manager.listIncremental(testFile)

    def test_list_nonexistant(self):
        with self.assertRaises(LocalizationFileDoesNotExistException):
            self.manager.listIncremental('dontNameYourDirectoryThis')

    def test_root_variants(self):
        list1 = self.manager.listAbsolute(".")
        list2 = self.manager.listAbsolute("")
        list3 = self.manager.listAbsolute("/")
        self.assertEquals(list1,list2)
        self.assertEquals(list2,list3)

    def test_slashiness(self):
        raw = testDir
        if raw[0] == '/':
            raw = raw[1:]
        if raw[-1] == '/':
            raw = raw[:-1]
        list1 = self.manager.listAbsolute(raw)
        list2 = self.manager.listAbsolute(raw + "/")
        list3 = self.manager.listAbsolute("/" + raw)
        self.assertEquals(list1,list2)
        self.assertEquals(list2,list3)


if __name__ == '__main__':
    unittest.main()
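The tests above cover incremental/absolute retrieval and the optimistic version check that LocalizationFileManager applies on write. A small usage sketch of the same API outside the test harness follows; the retry-on-conflict loop is an assumption about how a client might respond to LocalizationFileVersionConflictException, not something the tests prescribe.

from ufpy.localization.LocalizationFileManager import (
    LocalizationFileManager, LocalizationFileVersionConflictException)

manager = LocalizationFileManager()

# Read the merged (absolute) view of a localization file.
lf = manager.getAbsolute("purge/defaultPurgeRules.xml")
with lf.open("r") as stream:
    contents = stream.read()

# Write a user-level override, retrying once if another writer saved first.
for attempt in range(2):
    userFile = manager.getSpecific("user", "purge/defaultPurgeRules.xml")
    try:
        with userFile.open("w") as stream:
            stream.write(contents)
        break
    except LocalizationFileVersionConflictException:
        # Re-fetching picks up the new version; the next attempt writes against it.
        continue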
 359  awips/test/localization/testLocalizationRest.py  Normal file
@@ -0,0 +1,359 @@
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name:        Raytheon Company
# Contractor Address:     6825 Pine Street, Suite 340
#                         Mail Stop B8
#                         Omaha, NE 68106
#                         402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##

import unittest
import urllib2

from HTMLParser import HTMLParser
from xml.etree.ElementTree import parse as parseXml
from json import load as loadjson
from urlparse import urljoin
from base64 import b64encode

#
# Test the localization REST service.
#
# SOFTWARE HISTORY
#
# Date      Ticket#  Engineer  Description
# --------- -------- --------- --------------------------
# 08/07/17  5731     bsteffen  Initial Creation.

baseURL = "http://localhost:9581/services/localization/"
testSite = "OAX"
testDir = "menus"
testFile = "test.xml"
username = "test"
password = username

base64string = b64encode('%s:%s' % (username, password))
authString = "Basic %s" % base64string


class ValidHTMLParser(HTMLParser):
    """Simple HTML parser that performs very minimal validation.

    This ensures that all start and end tags match, and also that there are
    some tags. It also accumulates the text of all links in the html file
    in the link_texts attribute, which can be used for further validation.
    """

    def __init__(self, testcase):
        HTMLParser.__init__(self)
        self._testcase = testcase
        self._tags = []
        self._any = False
        self.link_texts = []

    def handle_starttag(self, tag, attrs):
        self._tags.append(tag)
        self._any = True

    def handle_endtag(self, tag):
        self._testcase.assertNotEquals([], self._tags, "Unstarted end tag " + tag)
        self._testcase.assertEquals(tag, self._tags.pop())

    def handle_data(self, data):
        if self._tags[-1] == "a":
            self.link_texts.append(data)

    def close(self):
        HTMLParser.close(self)
        self._testcase.assertTrue(self._any)
        self._testcase.assertEquals([], self._tags)


class AbstractListingTestCase():
    """Base test case for testing listings, retrieves data as html, xml, and json.

    Sub classes should implement assertValidHtml, assertValidXml, and
    assertValidJson to ensure that the content returned matches what was
    expected.
    """

    def assertRequestGetsHtml(self, request):
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "text/html")
        body = response.read()
        parser = ValidHTMLParser(self)
        parser.feed(body)
        parser.close()
        self.assertValidHtml(parser)

    def assertValidHtml(self, parser):
        """Intended to be overriden by subclasses to validate HTML content.

        The argument is a populated instance of ValidHTMLParser.
        """
        pass

    def test_default(self):
        request = urllib2.Request(self.url)
        self.assertRequestGetsHtml(request)

    def test_last_slash(self):
        if self.url.endswith("/"):
            request = urllib2.Request(self.url[:-1])
        else:
            request = urllib2.Request(self.url + "/")
        self.assertRequestGetsHtml(request)

    def test_wild_mime(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "*/*")
        self.assertRequestGetsHtml(request)
        request.add_header("Accept", "text/*")
        self.assertRequestGetsHtml(request)

    def test_html(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "text/html")
        self.assertRequestGetsHtml(request)

    def test_json(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "application/json")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/json")
        jsonData = loadjson(response)
        self.assertValidJson(jsonData)

    def assertValidJson(self, jsonData):
        """Intended to be overriden by subclasses to validate JSON content.

        The argument is a python object as returned from json.load
        """
        pass

    def test_xml(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "application/xml")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/xml")
        xmlData = parseXml(response)
        self.assertValidXml(xmlData)

    def assertValidXml(self, xmlData):
        """Intended to be overriden by subclasses to validate XML content.

        The argument is an ElementTree
        """
        pass

    def test_delete(self):
        request = urllib2.Request(self.url)
        request.get_method = lambda: "DELETE"
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(405, cm.exception.code)

    def test_put(self):
        request = urllib2.Request(self.url)
        request.get_method = lambda: "PUT"
        request.add_data("Test Data")
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(405, cm.exception.code)

    def test_unacceptable(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "application/fakemimetype")
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(406, cm.exception.code)
        request.add_header("Accept", "fakemimetype/*")
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(406, cm.exception.code)

    def test_accept_quality_factor(self):
        request = urllib2.Request(self.url)
        request.add_header("Accept", "application/xml; q=0.8, application/json; q=0.2")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/xml")
        xmlData = parseXml(response)
        self.assertValidXml(xmlData)

        request.add_header("Accept", "application/xml; q=0.2, application/json; q=0.8")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/json")
        jsonData = loadjson(response)
        self.assertValidJson(jsonData)

        request.add_header("Accept", "application/xml, application/json; q=0.8")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/xml")
        xmlData = parseXml(response)
        self.assertValidXml(xmlData)

        request.add_header("Accept", "application/fakemimetype, application/json; q=0.8")
        response = urllib2.urlopen(request)
        self.assertEquals(response.headers["Content-Type"], "application/json")
        jsonData = loadjson(response)
        self.assertValidJson(jsonData)


class RootTestCase(AbstractListingTestCase, unittest.TestCase):
    """Test that the root of the localization service returns listing of localization types."""
    def setUp(self):
        self.url = baseURL
    def assertValidHtml(self, parser):
        self.assertIn("common_static/", parser.link_texts)
    def assertValidJson(self, jsonData):
        self.assertIn("common_static/", jsonData)
    def assertValidXml(self, xmlData):
        root = xmlData.getroot()
        self.assertEquals(root.tag, "entries")
        names = [e.text for e in root.findall("entry")]
        self.assertIn("common_static/", names)


class TypeTestCase(AbstractListingTestCase, unittest.TestCase):
    """Test that common_static will list context levels."""
    def setUp(self):
        self.url = urljoin(baseURL, "common_static/")
    def assertValidHtml(self, parser):
        self.assertIn("base/", parser.link_texts)
        self.assertIn("site/", parser.link_texts)
    def assertValidJson(self, jsonData):
        self.assertIn("base/", jsonData)
        self.assertIn("site/", jsonData)
    def assertValidXml(self, xmlData):
        root = xmlData.getroot()
        self.assertEquals(root.tag, "entries")
        names = [e.text for e in root.findall("entry")]
        self.assertIn("base/", names)
        self.assertIn("site/", names)


class LevelTestCase(AbstractListingTestCase, unittest.TestCase):
    """Test that common_static/site will list sites."""
    def setUp(self):
        self.url = urljoin(baseURL, "common_static/site/")
    def assertValidHtml(self, parser):
        self.assertIn(testSite +"/", parser.link_texts)
    def assertValidJson(self, jsonData):
        self.assertIn(testSite +"/", jsonData)
    def assertValidXml(self, xmlData):
        root = xmlData.getroot()
        self.assertEquals(root.tag, "entries")
        names = [e.text for e in root.findall("entry")]
        self.assertIn(testSite +"/", names)


class AbstractFileListingTestCase(AbstractListingTestCase):
    """Base test case for a file listing"""

    def assertValidHtml(self, parser):
        self.assertIn(testDir +"/", parser.link_texts)
        self.assertEquals(parser.link_texts, sorted(parser.link_texts))
    def assertValidJson(self, jsonData):
        self.assertIn(testDir +"/", jsonData)
    def assertValidXml(self, xmlData):
        root = xmlData.getroot()
        self.assertEquals(root.tag, "files")
        names = [e.get("name") for e in root.findall("file")]
        self.assertIn(testDir +"/", names)
        self.assertEquals(names, sorted(names))


class BaseFileListingTestCase(AbstractFileListingTestCase, unittest.TestCase):
    """Test that common_static/base lists files"""
    def setUp(self):
        self.url = urljoin(baseURL, "common_static/base/")


class SiteFileListingTestCase(AbstractFileListingTestCase, unittest.TestCase):
    """Test that common_static/site/<testSite>/ lists files"""
    def setUp(self):
        self.url = urljoin(baseURL, "common_static/site/" + testSite + "/")


class FileTestCase(unittest.TestCase):
    """Test retrieval, modification and deletion of an individual file."""
    def setUp(self):
        self.url = urljoin(baseURL, "common_static/user/" + username + "/" + testFile)
        # The file should not exist before the test, but if it does then delete it
        # This is some of the same functionality we are testing so if setup fails
        # then the test would probably fail anyway
        try:
            request = urllib2.Request(self.url)
            response = urllib2.urlopen(request)
            request = urllib2.Request(self.url)
            request.get_method = lambda: "DELETE"
            request.add_header("Authorization", authString)
            request.add_header("If-Match", response.headers["Content-MD5"])
            response = urllib2.urlopen(request)
        except urllib2.HTTPError as e:
            if e.code != 404:
                raise e
    def test_file_operations(self):
        """Run through a typical set of file interactions and verify everything works correctly."""
        request = urllib2.Request(self.url)
        request.get_method = lambda: "PUT"
        request.add_data("Test Data")
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(401, cm.exception.code)

        request.add_header("Authorization", authString)
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(409, cm.exception.code)

        request.add_header("If-Match", "NON_EXISTENT_CHECKSUM")
        response = urllib2.urlopen(request)

        request = urllib2.Request(self.url)
        response = urllib2.urlopen(request)
        self.assertEquals(response.read(), "Test Data")

        request = urllib2.Request(self.url + "/")
        response = urllib2.urlopen(request)
        self.assertEquals(response.read(), "Test Data")

        request = urllib2.Request(self.url)
        request.get_method = lambda: "PUT"
        request.add_data("Test Data2")
        request.add_header("If-Match", response.headers["Content-MD5"])
        request.add_header("Authorization", authString)
        response = urllib2.urlopen(request)
        checksum = response.headers["Content-MD5"]

        request = urllib2.Request(self.url)
        response = urllib2.urlopen(request)
        self.assertEquals(response.read(), "Test Data2")

        request = urllib2.Request(self.url)
        request.get_method = lambda: "DELETE"
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(401, cm.exception.code)

        request.add_header("Authorization", authString)
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(409, cm.exception.code)

        request.add_header("If-Match", checksum)
        response = urllib2.urlopen(request)

        request = urllib2.Request(self.url)
        with self.assertRaises(urllib2.HTTPError) as cm:
            response = urllib2.urlopen(request)
        self.assertEqual(404, cm.exception.code)


if __name__ == '__main__':
    unittest.main()
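FileTestCase above doubles as documentation of the REST service's write protocol: GET returns a Content-MD5 header, and PUT/DELETE require HTTP Basic auth plus an If-Match header carrying that checksum (401 without credentials, 409 without a matching checksum). A condensed client-side sketch of that cycle, using the same urllib2 calls the test exercises; the URL, credentials, and payload are placeholders for illustration.

import urllib2
from base64 import b64encode

url = "http://localhost:9581/services/localization/common_static/user/test/test.xml"
auth = "Basic " + b64encode("test:test")

# Read the current version and remember its checksum.
response = urllib2.urlopen(urllib2.Request(url))
checksum = response.headers["Content-MD5"]

# Write a new version; If-Match must name the version being replaced.
put = urllib2.Request(url)
put.get_method = lambda: "PUT"
put.add_data("Test Data")
put.add_header("Authorization", auth)
put.add_header("If-Match", checksum)
urllib2.urlopen(put)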
@@ -78,6 +78,7 @@ class ListenThread(threading.Thread):
             self.qs.close()


 def main():
     print "Starting up at", time.strftime('%H:%M:%S')

@@ -95,5 +96,9 @@ def main():
     finally:
         thread.stop()

 if __name__ == '__main__':
     main()
@@ -29,7 +29,7 @@
 #     match what they should be in the destination language.
 #
 #
 # SOFTWARE HISTORY
 #
 # Date            Ticket#       Engineer       Description
 # ------------    ----------    -----------    --------------------------
@@ -37,20 +37,27 @@
 # 06/12/13         #2099        dgilling       Implement readObject() and
 #                                              writeObject().
 # Apr 24, 2015     4425         nabowle        Add Double support
+# Oct 17, 2016     5919         njensen        Optimized for speed
 #
 #

 from thrift.Thrift import TType
-import inspect, sys, types
+import inspect
+import sys
+import types
+import time
 import dynamicserialize
 from dynamicserialize import dstypes, adapters
 import SelfDescribingBinaryProtocol
 import numpy

+DS_LEN = len('dynamicserialize.dstypes.')
+
 dsObjTypes = {}


 def buildObjMap(module):
-    if module.__dict__.has_key('__all__'):
+    if '__all__' in module.__dict__:
         for i in module.__all__:
             name = module.__name__ + '.' + i
             __import__(name)
@@ -59,7 +66,7 @@ def buildObjMap(module):
         clzName = module.__name__[module.__name__.rfind('.') + 1:]
         clz = module.__dict__[clzName]
         tname = module.__name__
-        tname = tname.replace('dynamicserialize.dstypes.', '')
+        tname = tname[DS_LEN:]
         dsObjTypes[tname] = clz

 buildObjMap(dstypes)
@@ -72,7 +79,7 @@ pythonToThriftMap = {
     types.DictionaryType: TType.MAP,
     type(set([])): TType.SET,
     types.FloatType: SelfDescribingBinaryProtocol.FLOAT,
-    #types.FloatType: TType.DOUBLE,
+    # types.FloatType: TType.DOUBLE,
     types.BooleanType: TType.BOOL,
     types.InstanceType: TType.STRUCT,
     types.NoneType: TType.VOID,
@@ -87,7 +94,9 @@ pythonToThriftMap = {
     numpy.int64: TType.I64
 }

-primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64, SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE)
+primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64,
+                    SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE)


 class ThriftSerializationContext(object):
@@ -95,52 +104,51 @@ class ThriftSerializationContext(object):
         self.serializationManager = serializationManager
         self.protocol = selfDescribingBinaryProtocol
         self.typeDeserializationMethod = {
             TType.STRING: self.protocol.readString,
             TType.I16: self.protocol.readI16,
             TType.I32: self.protocol.readI32,
             TType.LIST: self._deserializeArray,
             TType.MAP: self._deserializeMap,
             TType.SET: self._deserializeSet,
             SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat,
             TType.BYTE: self.protocol.readByte,
             TType.I64: self.protocol.readI64,
             TType.DOUBLE: self.protocol.readDouble,
             TType.BOOL: self.protocol.readBool,
             TType.STRUCT: self.deserializeMessage,
             TType.VOID: lambda: None
         }
         self.typeSerializationMethod = {
             TType.STRING: self.protocol.writeString,
             TType.I16: self.protocol.writeI16,
             TType.I32: self.protocol.writeI32,
             TType.LIST: self._serializeArray,
             TType.MAP: self._serializeMap,
             TType.SET: self._serializeSet,
             SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat,
             TType.BYTE: self.protocol.writeByte,
             TType.I64: self.protocol.writeI64,
             TType.DOUBLE: self.protocol.writeDouble,
             TType.BOOL: self.protocol.writeBool,
             TType.STRUCT: self.serializeMessage,
             TType.VOID: lambda x: None
         }
         self.listDeserializationMethod = {
             TType.BYTE: self.protocol.readI8List,
             TType.I16: self.protocol.readI16List,
             TType.I32: self.protocol.readI32List,
             TType.I64: self.protocol.readI64List,
             SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List,
             TType.DOUBLE: self.protocol.readF64List
         }
         self.listSerializationMethod = {
             TType.BYTE: self.protocol.writeI8List,
             TType.I16: self.protocol.writeI16List,
             TType.I32: self.protocol.writeI32List,
             TType.I64: self.protocol.writeI64List,
             SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List,
             TType.DOUBLE: self.protocol.writeF64List
         }

     def readMessageStart(self):
         msg = self.protocol.readMessageBegin()
@@ -151,17 +159,19 @@ class ThriftSerializationContext(object):
     def deserializeMessage(self):
         name = self.protocol.readStructBegin()
-        name = name.replace('_', '.')
         if name.isdigit():
             obj = self._deserializeType(int(name))
             return obj
-        elif adapters.classAdapterRegistry.has_key(name):
+        name = name.replace('_', '.')
+        if name in adapters.classAdapterRegistry:
             return adapters.classAdapterRegistry[name].deserialize(self)
-        elif name.find('$') > -1:
-            # it's an inner class, we're going to hope it's an enum, treat it special
+        elif '$' in name:
+            # it's an inner class, we're going to hope it's an enum, treat it
+            # special
             fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
             if fieldName != '__enumValue__':
-                raise dynamiceserialize.SerializationException("Expected to find enum payload. Found: " + fieldName)
+                raise dynamiceserialize.SerializationException(
+                    "Expected to find enum payload. Found: " + fieldName)
             obj = self.protocol.readString()
             self.protocol.readFieldEnd()
             return obj
@@ -176,37 +186,30 @@ class ThriftSerializationContext(object):
             return obj

     def _deserializeType(self, b):
-        if self.typeDeserializationMethod.has_key(b):
+        try:
             return self.typeDeserializationMethod[b]()
-        else:
-            raise dynamicserialize.SerializationException("Unsupported type value " + str(b))
+        except KeyError:
+            raise dynamicserialize.SerializationException(
+                "Unsupported type value " + str(b))

     def _deserializeField(self, structname, obj):
         fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
         if fieldType == TType.STOP:
             return False
         elif fieldType != TType.VOID:
-            # if adapters.fieldAdapterRegistry.has_key(structname) and adapters.fieldAdapterRegistry[structname].has_key(fieldName):
-            #     result = adapters.fieldAdapterRegistry[structname][fieldName].deserialize(self)
-            # else:
             result = self._deserializeType(fieldType)
             lookingFor = "set" + fieldName[0].upper() + fieldName[1:]

             try:
                 setMethod = getattr(obj, lookingFor)
-                if callable(setMethod):
-                    setMethod(result)
-                else:
-                    raise dynamicserialize.SerializationException("Couldn't find setter method " + lookingFor)
+                setMethod(result)
             except:
-                raise dynamicserialize.SerializationException("Couldn't find setter method " + lookingFor)
+                raise dynamicserialize.SerializationException(
+                    "Couldn't find setter method " + lookingFor)

         self.protocol.readFieldEnd()
         return True


     def _deserializeArray(self):
         listType, size = self.protocol.readListBegin()
         result = []
@@ -241,19 +244,20 @@ class ThriftSerializationContext(object):
     def _lookupType(self, obj):
         pyt = type(obj)
-        if pythonToThriftMap.has_key(pyt):
+        if pyt in pythonToThriftMap:
             return pythonToThriftMap[pyt]
-        elif pyt.__module__.startswith('dynamicserialize.dstypes'):
+        elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'):
             return pythonToThriftMap[types.InstanceType]
         else:
-            raise dynamicserialize.SerializationException("Don't know how to serialize object of type: " + str(pyt))
+            raise dynamicserialize.SerializationException(
+                "Don't know how to serialize object of type: " + str(pyt))

     def serializeMessage(self, obj):
         tt = self._lookupType(obj)

         if tt == TType.STRUCT:
-            fqn = obj.__module__.replace('dynamicserialize.dstypes.', '')
-            if adapters.classAdapterRegistry.has_key(fqn):
+            fqn = obj.__module__[DS_LEN:]
+            if fqn in adapters.classAdapterRegistry:
                 # get proper class name when writing class name to serialization stream
                 # in case we have a special inner-class case
                 m = sys.modules[adapters.classAdapterRegistry[fqn].__name__]
@@ -273,7 +277,7 @@ class ThriftSerializationContext(object):
             val = m[1]()
             ft = self._lookupType(val)
             if ft == TType.STRUCT:
-                fc = val.__module__.replace('dynamicserialize.dstypes.', '')
+                fc = val.__module__[DS_LEN:]
                 self._serializeField(fieldname, ft, fid, val)
             else:
                 self._serializeField(fieldname, ft, fid, val)
@@ -293,17 +297,18 @@ class ThriftSerializationContext(object):
         self.protocol.writeFieldEnd()

     def _serializeType(self, fieldValue, fieldType):
-        if self.typeSerializationMethod.has_key(fieldType):
+        if fieldType in self.typeSerializationMethod:
             return self.typeSerializationMethod[fieldType](fieldValue)
         else:
-            raise dynamicserialize.SerializationException("Unsupported type value " + str(fieldType))
+            raise dynamicserialize.SerializationException(
+                "Unsupported type value " + str(fieldType))

     def _serializeArray(self, obj):
         size = len(obj)
         if size:
             if type(obj) is numpy.ndarray:
                 t = pythonToThriftMap[obj.dtype.type]
                 size = obj.size
             else:
                 t = self._lookupType(obj[0])
         else:
@@ -331,7 +336,6 @@ class ThriftSerializationContext(object):
             self.listSerializationMethod[t](obj)
         self.protocol.writeListEnd()

-
     def _serializeMap(self, obj):
         size = len(obj)
         self.protocol.writeMapBegin(TType.VOID, TType.VOID, size)
@@ -22,55 +22,79 @@
 #
 # __init__.py for Dynamic Serialize adapters.
 #
+# Plugins can contribute to dynamicserialize.adapters by either including their
+# classes directly in pythonPackages/dynamicserialize/adapters/ within their
+# plugin. The plugin's adapter will automatically be added to __all__ at runtime
+# and registered.
+# Plugins should not include a custom __init__.py in
+# pythonPackages/dynamicserialize/adapters/ because it will overwrite this file.
+# If custom package initialization is needed, a subpackage should be created
+# with an __init__.py that includes the following:
+#
+#     __all__ = ['CustomAdapter1', 'CustomAdapter2']
+#     from dynamicserialize.adapters import registerAdapters
+#     registerAdapters(__name__, __all__)
+#
 #
 # SOFTWARE HISTORY
 #
 # Date            Ticket#       Engineer       Description
 # ------------    ----------    -----------    --------------------------
 # 08/31/10                      njensen        Initial Creation.
 # 03/20/13         #1774        randerso       Added TimeConstraintsAdapter
 # 04/22/13         #1949        rjpeter        Added LockTableAdapter
 # 02/06/14         #2672        bsteffen       Added JTSEnvelopeAdapter
 # 06/22/2015       #4573        randerso       Added JobProgressAdapter
 # 09/21/2015       #4486        rjpeter        Added FormattedDateAdapter
 # 06/23/2016       #5696        rjpeter        Added CommutativeTimestampAdapter
+# 10/17/2016       #5919        njensen        Added GeomDataRespAdapter
+# 01/09/2017       #5997        nabowle        Allow contribution from plugins.
 #

 __all__ = [
     'PointAdapter',
     'StackTraceElementAdapter',
-    'WsIdAdapter',
-    'CalendarAdapter',
-    'GregorianCalendarAdapter',
-    'ActiveTableModeAdapter',
-    'DateAdapter',
-    'FormattedDateAdapter',
-    'LocalizationLevelSerializationAdapter',
-    'LocalizationTypeSerializationAdapter',
-    'GeometryTypeAdapter',
-    'CoordAdapter',
-    'TimeRangeTypeAdapter',
-    'ParmIDAdapter',
-    'DatabaseIDAdapter',
-    'TimestampAdapter',
-    'CommutativeTimestampAdapter',
-    'EnumSetAdapter',
-    'FloatBufferAdapter',
-    'ByteBufferAdapter',
-    'TimeConstraintsAdapter',
-    'LockTableAdapter',
-    'JTSEnvelopeAdapter',
-    'JobProgressAdapter',
+    'CalendarAdapter',
+    'GregorianCalendarAdapter',
+    'DateAdapter',
+    'GeometryTypeAdapter',
+    'CoordAdapter',
+    'TimestampAdapter',
+    'EnumSetAdapter',
+    'FloatBufferAdapter',
+    'ByteBufferAdapter',
+    'JTSEnvelopeAdapter'
 ]

 classAdapterRegistry = {}


 def getAdapterRegistry():
+    import pkgutil
+
+    discoveredPackages = []
+    # allow other plugins to contribute to adapters by dropping their adapter or
+    # package into the dynamicserialize.adapters package
+    for _, modname, ispkg in pkgutil.iter_modules(__path__):
+        if ispkg:
+            discoveredPackages.append(modname)
+        else:
+            if modname not in __all__:
+                __all__.append(modname)
+
+    registerAdapters(__name__, __all__)
+
+    for pkg in discoveredPackages:
+        __import__(__name__ + '.' + pkg)
+
+
+def registerAdapters(package, modules):
     import sys
-    for x in __all__:
-        exec 'import ' + x
-        m = sys.modules['dynamicserialize.adapters.' + x]
+    if not package.endswith('.'):
+        package += '.'
+    for x in modules:
+        exec 'import ' + package + x
+        m = sys.modules[package + x]
         d = m.__dict__
         if d.has_key('ClassAdapter'):
             if isinstance(m.ClassAdapter, list):
@@ -80,9 +104,8 @@ def getAdapterRegistry():
                 clzName = m.ClassAdapter
                 classAdapterRegistry[clzName] = m
             else:
-                raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' + \
+                raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' +
                                   'and cannot be registered.')


 getAdapterRegistry()
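The rewritten getAdapterRegistry/registerAdapters pair scans the adapters package with pkgutil and registers any module that exposes a ClassAdapter attribute. A hedged sketch of what a contributed adapter module might look like; the type, field, and the read/write helper calls on the context are illustrative assumptions following the pattern of the bundled adapters, only the ClassAdapter attribute and the deserialize(context) entry point are confirmed by the code above.

# MyThingAdapter.py, dropped into dynamicserialize/adapters/ by a plugin.
# Hypothetical dstype imported purely for illustration.
from dynamicserialize.dstypes.com.example import MyThing

# Fully qualified Java class this adapter serializes (looked up via classAdapterRegistry).
ClassAdapter = 'com.example.MyThing'


def serialize(context, obj):
    # context is the ThriftSerializationContext; writeString is assumed here.
    context.writeString(obj.getName())


def deserialize(context):
    obj = MyThing()
    obj.setName(context.readString())
    return obj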
@@ -21,22 +21,7 @@
 # File auto-generated by PythonFileGenerator

 __all__ = [
-    'activetable',
-    'alertviz',
-    'auth',
-    'dataaccess',
-    'dataplugin',
-    'dataquery',
-    'datastorage',
-    'localization',
-    'management',
-    'message',
-    'plugin',
-    'pointdata',
-    'pypies',
-    'serialization',
-    'site',
-    'time'
+    'dataplugin'
 ]
@@ -21,13 +21,7 @@
 # File auto-generated by PythonFileGenerator

 __all__ = [
-    'events',
-    'gfe',
-    'grid',
-    'level',
-    'message',
-    'radar',
-    'text'
+    'events'
 ]
@@ -2,7 +2,7 @@
 # File auto-generated against equivalent DynamicSerialize Java class
 #
 # SOFTWARE HISTORY
+#
 # Date            Ticket#       Engineer       Description
 # ------------    ----------    -----------    --------------------------
 # May 05, 2016                  root           Generated