mirror of https://github.com/Unidata/python-awips.git
synced 2025-02-23 14:57:56 -05:00
parent 8c3145b63e
commit 5c51ad599e
10 changed files with 168 additions and 308 deletions
@@ -30,12 +30,9 @@
# 11/17/10 njensen Initial Creation.
# 08/15/13 2169 bkowal Optionally gzip decompress any data that is read.
# 08/04/16 2416 tgurney Add queueStarted property
# 02/16/17 6084 bsteffen Support ssl connections
#
#

import os
import os.path
import qpid
import zlib

@@ -44,24 +41,11 @@ from qpid.exceptions import Closed

class QpidSubscriber:

    def __init__(self, host='127.0.0.1', port=5672, decompress=False, ssl=None):
    def __init__(self, host='127.0.0.1', port=5672, decompress=False):
        self.host = host
        self.port = port
        self.decompress = decompress;
        socket = qpid.util.connect(host, port)
        if "QPID_SSL_CERT_DB" in os.environ:
            certdb = os.environ["QPID_SSL_CERT_DB"]
        else:
            certdb = os.path.expanduser("~/.qpid/")
        if "QPID_SSL_CERT_NAME" in os.environ:
            certname = os.environ["QPID_SSL_CERT_NAME"]
        else:
            certname = "guest"
        certfile = os.path.join(certdb, certname + ".crt")
        if ssl or (ssl is None and os.path.exists(certfile)):
            keyfile = os.path.join(certdb, certname + ".key")
            trustfile = os.path.join(certdb, "root.crt")
            socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
        self.__connection = qpid.connection.Connection(sock=socket, username='guest', password='guest')
        self.__connection.start()
        self.__session = self.__connection.session(str(qpid.datatypes.uuid4()))

@@ -119,4 +103,3 @@ class QpidSubscriber:
    @property
    def queueStarted(self):
        return self.__queueStarted
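As a usage sketch for the class above, assuming the variant of the constructor that still accepts the ssl keyword (the host name is illustrative, and the comment on queueStarted is an assumption based on the property name):

    from awips.QpidSubscriber import QpidSubscriber

    # With ssl=None the subscriber uses plain TCP unless a client certificate is
    # found under QPID_SSL_CERT_DB (default ~/.qpid/) named after QPID_SSL_CERT_NAME.
    sub = QpidSubscriber(host='edex-server', decompress=True, ssl=None)
    print(sub.queueStarted)   # presumably False until a queue subscription is running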
@@ -17,23 +17,13 @@
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------- -------- --------- ---------------------------------------------
# Feb 13, 2017 6092 randerso Added StoreTimeAction
#
##

import argparse
import sys
import time

from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID

TIME_FORMAT = "%Y%m%d_%H%M"

class UsageArgumentParser(argparse.ArgumentParser):
    """

@@ -66,16 +56,3 @@ class AppendParmNameAndLevelAction(argparse.Action):
        else:
            setattr(namespace, self.dest, [comp])

class StoreTimeAction(argparse.Action):
    """
    argparse.Action subclass to validate GFE formatted time strings
    and parse them to time.struct_time
    """
    def __call__(self, parser, namespace, values, option_string=None):
        try:
            timeStruct = time.strptime(values, TIME_FORMAT)
        except:
            parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")

        setattr(namespace, self.dest, timeStruct)
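A short sketch of how StoreTimeAction plugs into a parser built from UsageArgumentParser (the option name --startTime is illustrative, not taken from the source):

    parser = UsageArgumentParser()
    parser.add_argument("--startTime", action=StoreTimeAction, dest="startTime",
                        metavar="YYYYMMDD_hhmm")

    args = parser.parse_args(["--startTime", "20170213_1830"])
    # args.startTime is now the time.struct_time produced by
    # time.strptime("20170213_1830", TIME_FORMAT)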
@@ -40,8 +40,8 @@
# Jun 01, 2016 5587 tgurney Add new signatures for
# getRequiredIdentifiers() and
# getOptionalIdentifiers()
# Oct 07, 2016 ---- mjames@ucar Added getForecastRun
# Oct 18, 2016 5916 bsteffen Add setLazyLoadGridLatLon
# 10/07/16 ---- mjames@ucar Added getForecastRun
#
#
#

@@ -53,7 +53,6 @@ import warnings
THRIFT_HOST = "edex"
USING_NATIVE_THRIFT = False


if sys.modules.has_key('jep'):
    # intentionally do not catch if this fails to import, we want it to
    # be obvious that something is configured wrong when running from within

@@ -253,26 +252,3 @@ def changeEDEXHost(newHostName):
        router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST)
    else:
        raise TypeError("Cannot call changeEDEXHost when using JepRouter.")

def setLazyLoadGridLatLon(lazyLoadGridLatLon):
    """
    Provide a hint to the Data Access Framework indicating whether to load the
    lat/lon data for a grid immediately or wait until it is needed. This is
    provided as a performance tuning hint and should not affect the way the
    Data Access Framework is used. Depending on the internal implementation of
    the Data Access Framework this hint might be ignored. Examples of when this
    should be set to True are when the lat/lon information is not used or when
    it is used only if certain conditions within the data are met. It could be
    set to False if it is guaranteed that all lat/lon information is needed and
    it would be better to get any performance overhead for generating the
    lat/lon data out of the way during the initial request.

    Args:
        lazyLoadGridLatLon: Boolean value indicating whether to lazy load.
    """
    try:
        router.setLazyLoadGridLatLon(lazyLoadGridLatLon)
    except AttributeError:
        # The router is not required to support this capability.
        pass
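A sketch of the call pattern the docstring above describes, for the version of DataAccessLayer that exposes setLazyLoadGridLatLon (the datatype, parameter, and level strings are illustrative; newDataRequest, getAvailableTimes, getGridData, and getLatLonCoords are taken from the API shown elsewhere in this commit):

    from awips.dataaccess import DataAccessLayer

    # Performance hint only; the active router may ignore it.
    DataAccessLayer.setLazyLoadGridLatLon(True)

    req = DataAccessLayer.newDataRequest("grid", parameters=["T"], levels=["2.0FHAG"])
    times = DataAccessLayer.getAvailableTimes(req)
    grids = DataAccessLayer.getGridData(req, times[-1:])

    # With lazy loading, the lat/lon grid is only requested here, on first use.
    lons, lats = grids[0].getLatLonCoords()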
@@ -28,9 +28,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 06/03/13 #2023 dgilling Initial Creation.
# 10/13/16 #5916 bsteffen Correct grid shape, allow lat/lon
# 11/10/16 #5900 bsteffen Correct grid shape
# to be requested by a delegate
#
#

@@ -48,7 +46,7 @@ The ability to unit convert grid data is not currently available in this version

class PyGridData(IGridData, PyData.PyData):

    def __init__(self, gridDataRecord, nx, ny, latLonGrid = None, latLonDelegate = None):
    def __init__(self, gridDataRecord, nx, ny, latLonGrid):
        PyData.PyData.__init__(self, gridDataRecord)
        nx = nx
        ny = ny

@@ -56,8 +54,6 @@ class PyGridData(IGridData, PyData.PyData):
        self.__unit = gridDataRecord.getUnit()
        self.__gridData = numpy.reshape(numpy.array(gridDataRecord.getGridData()), (ny, nx))
        self.__latLonGrid = latLonGrid
        self.__latLonDelegate = latLonDelegate


    def getParameter(self):
        return self.__parameter

@@ -74,8 +70,4 @@ class PyGridData(IGridData, PyData.PyData):
        return self.__gridData

    def getLatLonCoords(self):
        if self.__latLonGrid is not None:
            return self.__latLonGrid
        elif self.__latLonDelegate is not None:
            return self.__latLonDelegate()
        return self.__latLonGrid
@@ -39,8 +39,7 @@
# getRequiredIdentifiers() and
# getOptionalIdentifiers()
# 08/01/16 2416 tgurney Add getNotificationFilter()
# 10/13/16 5916 bsteffen Correct grid shape, allow lazy grid lat/lon
# 10/26/16 5919 njensen Speed up geometry creation in getGeometryData()
# 11/10/16 5900 bsteffen Correct grid shape
#

@@ -52,7 +51,6 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import G
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGeometryDataRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridDataRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridLatLonRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableParametersRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLevelsRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetRequiredIdentifiersRequest

@@ -66,39 +64,10 @@ from awips.dataaccess import PyGeometryData
from awips.dataaccess import PyGridData


class LazyGridLatLon(object):

    def __init__(self, client, nx, ny, envelope, crsWkt):
        self._latLonGrid = None
        self._client = client
        self._request = GetGridLatLonRequest()
        self._request.setNx(nx)
        self._request.setNy(ny)
        self._request.setEnvelope(envelope)
        self._request.setCrsWkt(crsWkt)

    def __call__(self):
        # Its important that the data is cached internally so that if multiple
        # GridData are sharing the same delegate then they can also share a
        # single request for the LatLon information.
        if self._latLonGrid is None:
            response = self._client.sendRequest(self._request)
            nx = response.getNx()
            ny = response.getNy()
            latData = numpy.reshape(numpy.array(response.getLats()), (ny, nx))
            lonData = numpy.reshape(numpy.array(response.getLons()), (ny, nx))
            self._latLonGrid = (lonData, latData)
        return self._latLonGrid


class ThriftClientRouter(object):

    def __init__(self, host='localhost'):
        self._client = ThriftClient.ThriftClient(host)
        self._lazyLoadGridLatLon = False

    def setLazyLoadGridLatLon(self, lazyLoadGridLatLon):
        self._lazyLoadGridLatLon = lazyLoadGridLatLon

    def getAvailableTimes(self, request, refTimeOnly):
        timesRequest = GetAvailableTimesRequest()
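The caching noted in LazyGridLatLon.__call__ is what makes delegate sharing worthwhile: every grid built for the same location can hold the same delegate object, so only the first getLatLonCoords() call sends a GetGridLatLonRequest. A minimal sketch using the delegate-aware PyGridData constructor shown in this commit (client, record, and grid dimensions are placeholders):

    delegate = LazyGridLatLon(client, nx, ny, envelope, crsWkt)

    gridA = PyGridData.PyGridData(recordA, nx, ny, latLonDelegate=delegate)
    gridB = PyGridData.PyGridData(recordB, nx, ny, latLonDelegate=delegate)

    lonsA, latsA = gridA.getLatLonCoords()   # one request to EDEX, result cached
    lonsB, latsB = gridB.getLatLonCoords()   # answered from the delegate's cache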
@@ -109,7 +78,6 @@ class ThriftClientRouter(object):

    def getGridData(self, request, times):
        gridDataRequest = GetGridDataRequest()
        gridDataRequest.setIncludeLatLonData(not self._lazyLoadGridLatLon)
        gridDataRequest.setRequestParameters(request)
        # if we have an iterable times instance, then the user must have asked
        # for grid data with the List of DataTime objects

@@ -127,28 +95,15 @@ class ThriftClientRouter(object):
        for location in locNames:
            nx = response.getSiteNxValues()[location]
            ny = response.getSiteNyValues()[location]
            if self._lazyLoadGridLatLon:
                envelope = response.getSiteEnvelopes()[location]
                crsWkt = response.getSiteCrsWkt()[location]
                delegate = LazyGridLatLon(
                    self._client, nx, ny, envelope, crsWkt)
                locSpecificData[location] = (nx, ny, delegate)
            else:
                latData = numpy.reshape(numpy.array(
                    response.getSiteLatGrids()[location]), (ny, nx))
                lonData = numpy.reshape(numpy.array(
                    response.getSiteLonGrids()[location]), (ny, nx))
                locSpecificData[location] = (nx, ny, (lonData, latData))
            latData = numpy.reshape(numpy.array(response.getSiteLatGrids()[location]), (ny, nx))
            lonData = numpy.reshape(numpy.array(response.getSiteLonGrids()[location]), (ny, nx))
            locSpecificData[location] = (nx, ny, (lonData, latData))

        retVal = []
        for gridDataRecord in response.getGridData():
            locationName = gridDataRecord.getLocationName()
            locData = locSpecificData[locationName]
            if self._lazyLoadGridLatLon:
                retVal.append(PyGridData.PyGridData(gridDataRecord, locData[
                    0], locData[1], latLonDelegate=locData[2]))
            else:
                retVal.append(PyGridData.PyGridData(
                    gridDataRecord, locData[0], locData[1], locData[2]))
            retVal.append(PyGridData.PyGridData(gridDataRecord, locData[0], locData[1], locData[2]))
        return retVal

    def getGeometryData(self, request, times):
@@ -166,9 +121,10 @@ class ThriftClientRouter(object):
        response = self._client.sendRequest(geoDataRequest)
        geometries = []
        for wkb in response.getGeometryWKBs():
            # the wkb is a numpy.ndarray of dtype int8
            # convert the bytearray to a byte string and load it
            geometries.append(shapely.wkb.loads(wkb.tostring()))
            # convert the wkb to a bytearray with only positive values
            byteArrWKB = bytearray(map(lambda x: x % 256,wkb.tolist()))
            # convert the bytearray to a byte string and load it.
            geometries.append(shapely.wkb.loads(str(byteArrWKB)))

        retVal = []
        for geoDataRecord in response.getGeoData():
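Both variants of the WKB handling above address the same detail: the server returns the well-known-binary geometry as a numpy array of dtype int8, so the signed values have to be turned back into raw unsigned bytes before shapely.wkb.loads can parse them. A small illustration of why the two conversions produce the same byte string under the Python 2 semantics this module targets (the sample values are arbitrary):

    import numpy

    wkb = numpy.array([1, -32, 127, -1], dtype=numpy.int8)

    # tostring() dumps the underlying buffer, so -32 is already the byte 0xE0
    raw = wkb.tostring()

    # the modulo form maps each signed value onto 0..255 explicitly
    byteArrWKB = bytearray(map(lambda x: x % 256, wkb.tolist()))

    assert raw == str(byteArrWKB)   # identical byte strings on Python 2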
@@ -219,7 +175,7 @@ class ThriftClientRouter(object):
        response = self._client.sendRequest(idValReq)
        return response

    def newDataRequest(self, datatype, parameters=[], levels=[], locationNames=[], envelope=None, **kwargs):
    def newDataRequest(self, datatype, parameters=[], levels=[], locationNames = [], envelope=None, **kwargs):
        req = DefaultDataRequest()
        if datatype:
            req.setDatatype(datatype)
@@ -59,13 +59,9 @@
# ....
# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info
# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569
# 02/16/2017 DR 6084 bsteffen Support ssl connections
#
#===============================================================================

import os
import os.path

import qpid
from qpid.util import connect
from qpid.connection import Connection

@@ -75,31 +71,17 @@ QPID_USERNAME = 'guest'
QPID_PASSWORD = 'guest'

class IngestViaQPID:
    def __init__(self, host='localhost', port=5672, ssl=None):
    def __init__(self, host='localhost', port=5672):
        '''
        Connect to QPID and make bindings to route message to external.dropbox queue
        @param host: string hostname of computer running EDEX and QPID (default localhost)
        @param port: integer port used to connect to QPID (default 5672)
        @param ssl: boolean to determine whether ssl is used, default value of None will use ssl only if a client certificate is found.
        '''

        try:
            #
            socket = connect(host, port)
            if "QPID_SSL_CERT_DB" in os.environ:
                certdb = os.environ["QPID_SSL_CERT_DB"]
            else:
                certdb = os.path.expanduser("~/.qpid/")
            if "QPID_SSL_CERT_NAME" in os.environ:
                certname = os.environ["QPID_SSL_CERT_NAME"]
            else:
                certname = QPID_USERNAME
            certfile = os.path.join(certdb, certname + ".crt")
            if ssl or (ssl is None and os.path.exists(certfile)):
                keyfile = os.path.join(certdb, certname + ".key")
                trustfile = os.path.join(certdb, "root.crt")
                socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
            self.connection = Connection (sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD)
            self.socket = connect(host, port)
            self.connection = Connection (sock=self.socket, username=QPID_USERNAME, password=QPID_PASSWORD)
            self.connection.start()
            self.session = self.connection.session(str(uuid4()))
            self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox')
@@ -29,7 +29,7 @@
# match what they should be in the destination language.
#
#
# SOFTWARE HISTORY
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------

@@ -37,27 +37,20 @@
# 06/12/13 #2099 dgilling Implement readObject() and
# writeObject().
# Apr 24, 2015 4425 nabowle Add Double support
# Oct 17, 2016 5919 njensen Optimized for speed
#
#

from thrift.Thrift import TType
import inspect
import sys
import types
import time
import inspect, sys, types
import dynamicserialize
from dynamicserialize import dstypes, adapters
import SelfDescribingBinaryProtocol
import numpy

DS_LEN = len('dynamicserialize.dstypes.')

dsObjTypes = {}


def buildObjMap(module):
    if '__all__' in module.__dict__:
    if module.__dict__.has_key('__all__'):
        for i in module.__all__:
            name = module.__name__ + '.' + i
            __import__(name)
@@ -66,7 +59,7 @@ def buildObjMap(module):
        clzName = module.__name__[module.__name__.rfind('.') + 1:]
        clz = module.__dict__[clzName]
        tname = module.__name__
        tname = tname[DS_LEN:]
        tname = tname.replace('dynamicserialize.dstypes.', '')
        dsObjTypes[tname] = clz

buildObjMap(dstypes)
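The two versions of the tname assignment compute the same key for dsObjTypes: both strip the leading 'dynamicserialize.dstypes.' from the module path. A worked example with an illustrative module name:

    DS_LEN = len('dynamicserialize.dstypes.')   # 25 characters

    name = 'dynamicserialize.dstypes.com.raytheon.uf.common.time.TimeRange'
    assert name[DS_LEN:] == name.replace('dynamicserialize.dstypes.', '')
    # both yield 'com.raytheon.uf.common.time.TimeRange'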
@@ -79,7 +72,7 @@ pythonToThriftMap = {
    types.DictionaryType: TType.MAP,
    type(set([])): TType.SET,
    types.FloatType: SelfDescribingBinaryProtocol.FLOAT,
    # types.FloatType: TType.DOUBLE,
    #types.FloatType: TType.DOUBLE,
    types.BooleanType: TType.BOOL,
    types.InstanceType: TType.STRUCT,
    types.NoneType: TType.VOID,

@@ -94,9 +87,7 @@ pythonToThriftMap = {
    numpy.int64: TType.I64
}

primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64,
                    SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE)

primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64, SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE)


class ThriftSerializationContext(object):
@@ -104,51 +95,52 @@ class ThriftSerializationContext(object):
        self.serializationManager = serializationManager
        self.protocol = selfDescribingBinaryProtocol
        self.typeDeserializationMethod = {
            TType.STRING: self.protocol.readString,
            TType.I16: self.protocol.readI16,
            TType.I32: self.protocol.readI32,
            TType.LIST: self._deserializeArray,
            TType.MAP: self._deserializeMap,
            TType.SET: self._deserializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat,
            TType.BYTE: self.protocol.readByte,
            TType.I64: self.protocol.readI64,
            TType.DOUBLE: self.protocol.readDouble,
            TType.BOOL: self.protocol.readBool,
            TType.STRUCT: self.deserializeMessage,
            TType.VOID: lambda: None
        }
            TType.STRING: self.protocol.readString,
            TType.I16: self.protocol.readI16,
            TType.I32: self.protocol.readI32,
            TType.LIST: self._deserializeArray,
            TType.MAP: self._deserializeMap,
            TType.SET: self._deserializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat,
            TType.BYTE: self.protocol.readByte,
            TType.I64: self.protocol.readI64,
            TType.DOUBLE: self.protocol.readDouble,
            TType.BOOL: self.protocol.readBool,
            TType.STRUCT: self.deserializeMessage,
            TType.VOID: lambda: None
        }
        self.typeSerializationMethod = {
            TType.STRING: self.protocol.writeString,
            TType.I16: self.protocol.writeI16,
            TType.I32: self.protocol.writeI32,
            TType.LIST: self._serializeArray,
            TType.MAP: self._serializeMap,
            TType.SET: self._serializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat,
            TType.BYTE: self.protocol.writeByte,
            TType.I64: self.protocol.writeI64,
            TType.DOUBLE: self.protocol.writeDouble,
            TType.BOOL: self.protocol.writeBool,
            TType.STRUCT: self.serializeMessage,
            TType.VOID: lambda x: None
        }
            TType.STRING: self.protocol.writeString,
            TType.I16: self.protocol.writeI16,
            TType.I32: self.protocol.writeI32,
            TType.LIST: self._serializeArray,
            TType.MAP: self._serializeMap,
            TType.SET: self._serializeSet,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat,
            TType.BYTE: self.protocol.writeByte,
            TType.I64: self.protocol.writeI64,
            TType.DOUBLE: self.protocol.writeDouble,
            TType.BOOL: self.protocol.writeBool,
            TType.STRUCT: self.serializeMessage,
            TType.VOID: lambda x: None
        }
        self.listDeserializationMethod = {
            TType.BYTE: self.protocol.readI8List,
            TType.I16: self.protocol.readI16List,
            TType.I32: self.protocol.readI32List,
            TType.I64: self.protocol.readI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List,
            TType.DOUBLE: self.protocol.readF64List
        }
            TType.BYTE: self.protocol.readI8List,
            TType.I16: self.protocol.readI16List,
            TType.I32: self.protocol.readI32List,
            TType.I64: self.protocol.readI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List,
            TType.DOUBLE: self.protocol.readF64List
        }
        self.listSerializationMethod = {
            TType.BYTE: self.protocol.writeI8List,
            TType.I16: self.protocol.writeI16List,
            TType.I32: self.protocol.writeI32List,
            TType.I64: self.protocol.writeI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List,
            TType.DOUBLE: self.protocol.writeF64List
        }
            TType.BYTE: self.protocol.writeI8List,
            TType.I16: self.protocol.writeI16List,
            TType.I32: self.protocol.writeI32List,
            TType.I64: self.protocol.writeI64List,
            SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List,
            TType.DOUBLE: self.protocol.writeF64List
        }


    def readMessageStart(self):
        msg = self.protocol.readMessageBegin()
@@ -159,19 +151,17 @@ class ThriftSerializationContext(object):

    def deserializeMessage(self):
        name = self.protocol.readStructBegin()
        name = name.replace('_', '.')
        if name.isdigit():
            obj = self._deserializeType(int(name))
            return obj
        name = name.replace('_', '.')
        if name in adapters.classAdapterRegistry:
        elif adapters.classAdapterRegistry.has_key(name):
            return adapters.classAdapterRegistry[name].deserialize(self)
        elif '$' in name:
            # it's an inner class, we're going to hope it's an enum, treat it
            # special
        elif name.find('$') > -1:
            # it's an inner class, we're going to hope it's an enum, treat it special
            fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
            if fieldName != '__enumValue__':
                raise dynamiceserialize.SerializationException(
                    "Expected to find enum payload. Found: " + fieldName)
                raise dynamiceserialize.SerializationException("Expected to find enum payload. Found: " + fieldName)
            obj = self.protocol.readString()
            self.protocol.readFieldEnd()
            return obj

@@ -186,30 +176,37 @@ class ThriftSerializationContext(object):
        return obj

    def _deserializeType(self, b):
        try:
        if self.typeDeserializationMethod.has_key(b):
            return self.typeDeserializationMethod[b]()
        except KeyError:
            raise dynamicserialize.SerializationException(
                "Unsupported type value " + str(b))
        else:
            raise dynamicserialize.SerializationException("Unsupported type value " + str(b))


    def _deserializeField(self, structname, obj):
        fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
        if fieldType == TType.STOP:
            return False
        elif fieldType != TType.VOID:
            # if adapters.fieldAdapterRegistry.has_key(structname) and adapters.fieldAdapterRegistry[structname].has_key(fieldName):
            # result = adapters.fieldAdapterRegistry[structname][fieldName].deserialize(self)
            # else:
            result = self._deserializeType(fieldType)
            lookingFor = "set" + fieldName[0].upper() + fieldName[1:]

            try:
                setMethod = getattr(obj, lookingFor)
                setMethod(result)

                if callable(setMethod):
                    setMethod(result)
                else:
                    raise dynamicserialize.SerializationException("Couldn't find setter method " + lookingFor)
            except:
                raise dynamicserialize.SerializationException(
                    "Couldn't find setter method " + lookingFor)
                raise dynamicserialize.SerializationException("Couldn't find setter method " + lookingFor)

        self.protocol.readFieldEnd()
        return True


    def _deserializeArray(self):
        listType, size = self.protocol.readListBegin()
        result = []
@@ -244,20 +241,19 @@ class ThriftSerializationContext(object):

    def _lookupType(self, obj):
        pyt = type(obj)
        if pyt in pythonToThriftMap:
        if pythonToThriftMap.has_key(pyt):
            return pythonToThriftMap[pyt]
        elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'):
        elif pyt.__module__.startswith('dynamicserialize.dstypes'):
            return pythonToThriftMap[types.InstanceType]
        else:
            raise dynamicserialize.SerializationException(
                "Don't know how to serialize object of type: " + str(pyt))
            raise dynamicserialize.SerializationException("Don't know how to serialize object of type: " + str(pyt))

    def serializeMessage(self, obj):
        tt = self._lookupType(obj)

        if tt == TType.STRUCT:
            fqn = obj.__module__[DS_LEN:]
            if fqn in adapters.classAdapterRegistry:
            fqn = obj.__module__.replace('dynamicserialize.dstypes.', '')
            if adapters.classAdapterRegistry.has_key(fqn):
                # get proper class name when writing class name to serialization stream
                # in case we have a special inner-class case
                m = sys.modules[adapters.classAdapterRegistry[fqn].__name__]

@@ -277,7 +273,7 @@ class ThriftSerializationContext(object):
                val = m[1]()
                ft = self._lookupType(val)
                if ft == TType.STRUCT:
                    fc = val.__module__[DS_LEN:]
                    fc = val.__module__.replace('dynamicserialize.dstypes.', '')
                    self._serializeField(fieldname, ft, fid, val)
                else:
                    self._serializeField(fieldname, ft, fid, val)

@@ -297,18 +293,17 @@ class ThriftSerializationContext(object):
        self.protocol.writeFieldEnd()

    def _serializeType(self, fieldValue, fieldType):
        if fieldType in self.typeSerializationMethod:
        if self.typeSerializationMethod.has_key(fieldType):
            return self.typeSerializationMethod[fieldType](fieldValue)
        else:
            raise dynamicserialize.SerializationException(
                "Unsupported type value " + str(fieldType))
            raise dynamicserialize.SerializationException("Unsupported type value " + str(fieldType))

    def _serializeArray(self, obj):
        size = len(obj)
        if size:
            if type(obj) is numpy.ndarray:
                t = pythonToThriftMap[obj.dtype.type]
                size = obj.size
                t = pythonToThriftMap[obj.dtype.type]
                size = obj.size
            else:
                t = self._lookupType(obj[0])
        else:
@@ -336,6 +331,7 @@ class ThriftSerializationContext(object):
            self.listSerializationMethod[t](obj)
        self.protocol.writeListEnd()


    def _serializeMap(self, obj):
        size = len(obj)
        self.protocol.writeMapBegin(TType.VOID, TType.VOID, size)
@@ -22,79 +22,55 @@
#
# __init__.py for Dynamic Serialize adapters.
#
# Plugins can contribute to dynamicserialize.adapters by either including their
# classes directly in pythonPackages/dynamicserialize/adapters/ within their
# plugin. The plugin's adapter will automatically be added to __all__ at runtime
# and registered.
# Plugins should not include a custom __init__.py in
# pythonPackages/dynamicserialize/adapters/ because it will overwrite this file.
# If custom package initialization is needed, a subpackage should be created
# with an __init__.py that includes the following:
#
#     __all__ = ['CustomAdapter1', 'CustomAdapter2']
#     from dynamicserialize.adapters import registerAdapters
#     registerAdapters(__name__, __all__)
#
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 08/31/10 njensen Initial Creation.
# 03/20/13 #1774 randerso Added TimeConstraintsAdapter
# 04/22/13 #1949 rjpeter Added LockTableAdapter
# 02/06/14 #2672 bsteffen Added JTSEnvelopeAdapter
# 06/22/2015 #4573 randerso Added JobProgressAdapter
# 09/21/2015 #4486 rjpeter Added FormattedDateAdapter
# 06/23/2016 #5696 rjpeter Added CommutativeTimestampAdapter
# 10/17/2016 #5919 njensen Added GeomDataRespAdapter
# 01/09/2017 #5997 nabowle Allow contribution from plugins.
# 08/31/10 njensen Initial Creation.
# 03/20/13 #1774 randerso Added TimeConstraintsAdapter
# 04/22/13 #1949 rjpeter Added LockTableAdapter
# 02/06/14 #2672 bsteffen Added JTSEnvelopeAdapter
# 06/22/2015 #4573 randerso Added JobProgressAdapter
# 09/21/2015 #4486 rjpeter Added FormattedDateAdapter
# 06/23/2016 #5696 rjpeter Added CommutativeTimestampAdapter
#

__all__ = [
    'PointAdapter',
    'StackTraceElementAdapter',
    'CalendarAdapter',
    'GregorianCalendarAdapter',
    'DateAdapter',
    'GeometryTypeAdapter',
    'CoordAdapter',
    'TimestampAdapter',
    'EnumSetAdapter',
    'FloatBufferAdapter',
    'ByteBufferAdapter',
    'JTSEnvelopeAdapter'
]
    'PointAdapter',
    'StackTraceElementAdapter',
    'WsIdAdapter',
    'CalendarAdapter',
    'GregorianCalendarAdapter',
    'ActiveTableModeAdapter',
    'DateAdapter',
    'FormattedDateAdapter',
    'LocalizationLevelSerializationAdapter',
    'LocalizationTypeSerializationAdapter',
    'GeometryTypeAdapter',
    'CoordAdapter',
    'TimeRangeTypeAdapter',
    'ParmIDAdapter',
    'DatabaseIDAdapter',
    'TimestampAdapter',
    'CommutativeTimestampAdapter',
    'EnumSetAdapter',
    'FloatBufferAdapter',
    'ByteBufferAdapter',
    'TimeConstraintsAdapter',
    'LockTableAdapter',
    'JTSEnvelopeAdapter',
    'JobProgressAdapter',
]

classAdapterRegistry = {}


def getAdapterRegistry():
    import pkgutil

    discoveredPackages = []
    # allow other plugins to contribute to adapters by dropping their adapter or
    # package into the dynamicserialize.adapters package
    for _, modname, ispkg in pkgutil.iter_modules(__path__):
        if ispkg:
            discoveredPackages.append(modname)
        else:
            if modname not in __all__:
                __all__.append(modname)

    registerAdapters(__name__, __all__)

    for pkg in discoveredPackages:
        __import__(__name__ + '.' + pkg)


def registerAdapters(package, modules):
    import sys
    if not package.endswith('.'):
        package += '.'
    for x in modules:
        exec 'import ' + package + x
        m = sys.modules[package + x]
    for x in __all__:
        exec 'import ' + x
        m = sys.modules['dynamicserialize.adapters.' + x]
        d = m.__dict__
        if d.has_key('ClassAdapter'):
            if isinstance(m.ClassAdapter, list):
@@ -104,8 +80,9 @@ def registerAdapters(package, modules):
            clzName = m.ClassAdapter
            classAdapterRegistry[clzName] = m
        else:
            raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' +
            raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' + \
                              'and cannot be registered.')


getAdapterRegistry()
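To make the registration contract concrete, a minimal plugin-contributed adapter module might look like the sketch below. The target class name, file name, and function bodies are placeholders; the requirements visible in the code above are a module-level ClassAdapter attribute (used as the classAdapterRegistry key) and a deserialize(context) callable, which deserializeMessage() invokes. A serialize counterpart is assumed by analogy with the shipped adapters.

    # dynamicserialize/adapters/MyThingAdapter.py   (hypothetical file name)

    ClassAdapter = 'com.example.MyThing'   # fully qualified class handled by this adapter

    def serialize(context, obj):
        # write obj's fields through the ThriftSerializationContext
        pass

    def deserialize(context):
        # read the fields back and return the reconstructed Python object
        pass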
@@ -21,7 +21,22 @@
# File auto-generated by PythonFileGenerator

__all__ = [
    'dataplugin'
    'activetable',
    'alertviz',
    'auth',
    'dataaccess',
    'dataplugin',
    'dataquery',
    'datastorage',
    'localization',
    'management',
    'message',
    'plugin',
    'pointdata',
    'pypies',
    'serialization',
    'site',
    'time'
]
@@ -21,7 +21,13 @@
# File auto-generated by PythonFileGenerator

__all__ = [
    'events'
    'events',
    'gfe',
    'grid',
    'level',
    'message',
    'radar',
    'text'
]