mirror of https://github.com/Unidata/python-awips.git (synced 2025-02-23 14:57:56 -05:00)

Commit f172333430: python3 compliance notebook tested

parent edb5c276e8
commit f172333430

146 changed files with 619 additions and 471 deletions
@@ -18,7 +18,7 @@
 #

 import logging
-import NotificationMessage
+from . import NotificationMessage

 class AlertVizHandler(logging.Handler):

@@ -46,10 +46,10 @@ def convertToDateTime(timeArg):
         return datetime.datetime(*timeArg[:6])
     elif isinstance(timeArg, float):
         # seconds as float, should be avoided due to floating point errors
-        totalSecs = long(timeArg)
+        totalSecs = int(timeArg)
         micros = int((timeArg - totalSecs) * MICROS_IN_SECOND)
         return _convertSecsAndMicros(totalSecs, micros)
-    elif isinstance(timeArg, (int, long)):
+    elif isinstance(timeArg, (int, int)):
         # seconds as integer
         totalSecs = timeArg
         return _convertSecsAndMicros(totalSecs, 0)
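Python 3 folds the old long type into int, which is why every long(...) call above becomes int(...). The mechanical substitution also turns (int, long) into the redundant tuple (int, int); it still works, but a plain int check would say the same thing. A minimal sketch of the same conversion logic, independent of the AWIPS code (the helper name and the MICROS_IN_SECOND value are illustrative):

    MICROS_IN_SECOND = 1000000

    def split_seconds(time_arg):
        """Split a numeric timestamp into whole seconds and microseconds."""
        if isinstance(time_arg, float):
            total_secs = int(time_arg)              # Python 2 used long() here
            micros = int((time_arg - total_secs) * MICROS_IN_SECOND)
            return total_secs, micros
        elif isinstance(time_arg, int):             # covers Python 2's int and long
            return time_arg, 0
        raise TypeError("expected int or float, got %r" % type(time_arg))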
@@ -4,14 +4,14 @@
 from string import Template

 import ctypes
-import stomp
+from . import stomp
 import socket
 import sys
 import time
 import threading
 import xml.etree.ElementTree as ET

-import ThriftClient
+from . import ThriftClient
 from dynamicserialize.dstypes.com.raytheon.uf.common.alertviz import AlertVizRequest
 from dynamicserialize import DynamicSerializationManager

@@ -75,8 +75,8 @@ class NotificationMessage:
             priorityInt = int(5)

         if (priorityInt < 0 or priorityInt > 5):
-            print "Error occurred, supplied an invalid Priority value: " + str(priorityInt)
-            print "Priority values are 0, 1, 2, 3, 4 and 5."
+            print("Error occurred, supplied an invalid Priority value: " + str(priorityInt))
+            print("Priority values are 0, 1, 2, 3, 4 and 5.")
             sys.exit(1)

         if priorityInt is not None:
@@ -86,7 +86,7 @@ class NotificationMessage:

     def connection_timeout(self, connection):
         if (connection is not None and not connection.is_connected()):
-            print "Connection Retry Timeout"
+            print("Connection Retry Timeout")
             for tid, tobj in threading._active.items():
                 if tobj.name is "MainThread":
                     res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(SystemExit))
@@ -138,14 +138,14 @@ class NotificationMessage:
         serverResponse = None
         try:
             serverResponse = thriftClient.sendRequest(alertVizRequest)
-        except Exception, ex:
-            print "Caught exception submitting AlertVizRequest: ", str(ex)
+        except Exception as ex:
+            print("Caught exception submitting AlertVizRequest: ", str(ex))

         if (serverResponse != "None"):
-            print "Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse
+            print("Error occurred submitting Notification Message to AlertViz receiver: ", serverResponse)
             sys.exit(1)
         else:
-            print "Response: " + str(serverResponse)
+            print("Response: " + str(serverResponse))

 def createRequest(message, priority, source, category, audioFile, filters):
     obj = AlertVizRequest()
@@ -163,4 +163,4 @@ def createRequest(message, priority, source, category, audioFile, filters):
     return obj

 if __name__ == '__main__':
-    main()
+    main()
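Two of the Python 3 changes above recur throughout this commit: print is a function, and the comma in an except clause becomes "as". A standalone sketch of the pattern used in the send path (the client and request objects here are placeholders, not the awips API):

    import sys

    def submit(client, request):
        # Python 2's "except Exception, ex:" is a SyntaxError on Python 3.
        try:
            response = client.sendRequest(request)
        except Exception as ex:
            print("Caught exception submitting request:", ex)
            sys.exit(1)
        # print() with several arguments inserts spaces, matching the output of
        # the old comma-separated print statement.
        print("Response:", response)
        return response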
@@ -58,7 +58,7 @@ class QpidSubscriber:
         if (topicName == 'edex.alerts'):
             self.decompress = True

-        print "Establishing connection to broker on", self.host
+        print("Establishing connection to broker on", self.host)
         queueName = topicName + self.__session.name
         self.__session.queue_declare(queue=queueName, exclusive=True, auto_delete=True, arguments={'qpid.max_count':100, 'qpid.policy_type':'ring'})
         self.__session.exchange_bind(exchange='amq.topic', queue=queueName, binding_key=topicName)
@@ -69,7 +69,7 @@ class QpidSubscriber:
         queue = self.__session.incoming(local_queue_name)
         self.__session.message_subscribe(serverQueueName, destination=local_queue_name)
         queue.start()
-        print "Connection complete to broker on", self.host
+        print("Connection complete to broker on", self.host)
         self.__queueStarted = True

         while self.subscribed:
awips/RadarCommon.py  (new file, +150 lines)
@@ -0,0 +1,150 @@
##
##

#
# Common methods for the a2gtrad and a2advrad scripts.
#
#
#
# SOFTWARE HISTORY
#
# Date           Ticket#    Engineer    Description
# ------------   ---------- ----------- --------------------------
# 08/13/2014     3393       nabowle     Initial creation to contain common
#                                       code for a2*radStub scripts.
# 03/15/2015                mjames@ucar Edited/added to awips package as RadarCommon
#
#

import argparse
import sys

from datetime import datetime
from datetime import timedelta
from awips import ThriftClient

from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.radar.request import GetRadarDataRecordRequest


def get_datetime_str(record):
    """
    Get the datetime string for a record.

    :param record: the record to get data for.

    :returns: datetime string.
    """
    return str(record.getDataTime())[0:19].replace(" ","_") + ".0"


def get_data_type(azdat):
    """
    Get the radar file type (radial or raster).

    :param azdat: Boolean.

    :returns: Radial or raster.
    """
    if azdat:
        dattyp = "radial"
    else:
        dattyp = "raster"
    return dattyp


def get_hdf5_data(idra):
    rdat = []
    azdat = []
    depVals = []
    threshVals = []
    if len(idra) > 0:
        for ii in range(len(idra)):
            if idra[ii].getName() == b"Data":
                rdat = idra[ii]
            elif idra[ii].getName() == b"Angles":
                azdat = idra[ii]
                dattyp = "radial"
            elif idra[ii].getName() == b"DependentValues":
                depVals = idra[ii].getShortData()
            elif idra[ii].getName() == b"Thresholds":
                threshVals = idra[ii].getShortData()

    return rdat, azdat, depVals, threshVals


def get_header(record, format, xLen, yLen, azdat, description):
    # Encode dimensions, time, mapping, description, tilt, and VCP
    mytime = get_datetime_str(record)
    dattyp = get_data_type(azdat)

    if format:
        msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \
            dattyp + " " + str(record.getLatitude()) + " " + \
            str(record.getLongitude()) + " " + \
            str(record.getElevation()) + " " + \
            str(record.getElevationNumber()) + " " + \
            description + " " + str(record.getTrueElevationAngle()) + " " + \
            str(record.getVolumeCoveragePattern()) + "\n"
        #"%.1f"%
    else:
        msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \
            dattyp + " " + description + " " + \
            str(record.getTrueElevationAngle()) + " " + \
            str(record.getVolumeCoveragePattern()) + "\n"

    return msg


def encode_thresh_vals(threshVals):
    spec = [".", "TH", "ND", "RF", "BI", "GC", "IC", "GR", "WS", "DS",
            "RA", "HR", "BD", "HA", "UK"]
    nnn = len(threshVals)
    j = 0
    msg = ""
    while j < nnn:
        lo = threshVals[j] % 256
        hi = threshVals[j] / 256
        msg += " "
        j += 1
        if hi < 0:
            if lo > 14:
                msg += "."
            else:
                msg += spec[lo]
            continue
        if hi % 16 >= 8:
            msg += ">"
        elif hi % 8 >= 4:
            msg += "<"
        if hi % 4 >= 2:
            msg += "+"
        elif hi % 2 >= 1:
            msg += "-"
        if hi >= 64:
            msg += "%.2f" % (lo*0.01)
        elif hi % 64 >= 32:
            msg += "%.2f" % (lo*0.05)
        elif hi % 32 >= 16:
            msg += "%.1f" % (lo*0.1)
        else:
            msg += str(lo)
    msg += "\n"
    return msg


def encode_dep_vals(depVals):
    nnn = len(depVals)
    j = 0
    msg = []
    while j < nnn:
        msg.append(str(depVals[j]))
        j += 1
    return msg


def encode_radial(azVals):
    azValsLen = len(azVals)
    j = 0
    msg = []
    while j < azValsLen:
        msg.append(azVals[j])
        j += 1
    return msg
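One porting detail worth flagging in encode_thresh_vals: on Python 2 the expression threshVals[j] / 256 was floor division between two integers, while on Python 3 the / operator is true division and hi becomes a float. The branch logic above happens to tolerate the float, but a port that wants to keep integer semantics would use // or divmod, as in this illustrative snippet (not the code shipped in RadarCommon):

    def split_bytes(value):
        # hi = value // 256, lo = value % 256, both plain ints on Python 2 and 3
        hi, lo = divmod(value, 256)
        return hi, lo

    assert split_bytes(0x1234) == (0x12, 0x34)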
@@ -1,7 +1,10 @@
 ##
 ##

-import httplib
+try:
+    import http.client as httpcl
+except ImportError:
+    import httplib as httpcl
 from dynamicserialize import DynamicSerializationManager
 from dynamicserialize.dstypes.com.raytheon.uf.common.serialization.comm.response import ServerErrorResponse
 from dynamicserialize.dstypes.com.raytheon.uf.common.serialization import SerializableExceptionWrapper
@@ -38,12 +41,12 @@ class ThriftClient:
         if (len(hostParts) > 1):
             hostString = hostParts[0]
             self.__uri = "/" + hostParts[1]
-            self.__httpConn = httplib.HTTPConnection(hostString)
+            self.__httpConn = httpcl.HTTPConnection(hostString)
         else:
             if (port is None):
-                self.__httpConn = httplib.HTTPConnection(host)
+                self.__httpConn = httpcl.HTTPConnection(host)
             else:
-                self.__httpConn = httplib.HTTPConnection(host, port)
+                self.__httpConn = httpcl.HTTPConnection(host, port)

             self.__uri = uri

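The try/except ImportError shim is the standard way to keep one module importable on both interpreters: ask for the Python 3 name first and fall back to the Python 2 one under a common alias. The same pattern in isolation (the host and port below are placeholders, not EDEX defaults):

    try:                                   # Python 3
        import http.client as httpcl
    except ImportError:                    # Python 2 fallback
        import httplib as httpcl

    conn = httpcl.HTTPConnection("localhost", 8080)   # same API under either name
    conn.request("GET", "/")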
@@ -53,7 +53,6 @@ def determineDrtOffset(timeStr):
         times = timeStr.split(",")
         t1 = makeTime(times[0])
         t2 = makeTime(times[1])
-        #print "time offset", t1-t2, (t1-t2)/3600
         return t1-t2, launchStr
     # Check for synchronized mode
     synch = 0
@@ -61,22 +60,18 @@ def determineDrtOffset(timeStr):
         timeStr = timeStr[1:]
         synch = 1
     drt_t = makeTime(timeStr)
-    #print "input", year, month, day, hour, minute
    gm = time.gmtime()
    cur_t = time.mktime(gm)

    # Synchronize to most recent hour
    # i.e. "truncate" cur_t to most recent hour.
-    #print "gmtime", gm
    if synch:
        cur_t = time.mktime((gm[0], gm[1], gm[2], gm[3], 0, 0, 0, 0, 0))
-        curStr = '%4s%2s%2s_%2s00\n' % (`gm[0]`,`gm[1]`,`gm[2]`,`gm[3]`)
+        curStr = '%4s%2s%2s_%2s00\n' % (gm[0],gm[1],gm[2],gm[3])
        curStr = curStr.replace(' ','0')
        launchStr = timeStr + "," + curStr

-    #print "drt, cur", drt_t, cur_t
    offset = drt_t - cur_t
-    #print "offset", offset, offset/3600, launchStr
    return int(offset), launchStr

def makeTime(timeStr):
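Besides deleting the commented-out Python 2 print statements, this hunk drops the backtick syntax: in Python 2, `x` was shorthand for repr(x), and it was removed entirely in Python 3. The %s formatting already converts its arguments, so the plain tuple is equivalent:

    import time

    gm = time.gmtime()
    # Python 2 only: '%4s%2s%2s_%2s00\n' % (`gm[0]`, `gm[1]`, `gm[2]`, `gm[3]`)
    curStr = '%4s%2s%2s_%2s00\n' % (gm[0], gm[1], gm[2], gm[3])
    curStr = curStr.replace(' ', '0')   # zero-pad, e.g. '20250223_1400\n'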
@@ -80,4 +80,4 @@ def __cloneRequest(request):
                             levels = request.getLevels(),
                             locationNames = request.getLocationNames(),
                             envelope = request.getEnvelope(),
-                            **request.getIdentifiers())
+                            **request.getIdentifiers())
@@ -56,16 +56,13 @@ from awips.dataaccess.PyGeometryNotification import PyGeometryNotification
 from awips.dataaccess.PyGridNotification import PyGridNotification


-THRIFT_HOST = subprocess.check_output(
-                    "source /awips2/fxa/bin/setup.env; echo $DEFAULT_HOST",
-                    shell=True).strip()
-
+THRIFT_HOST = "edex"

 USING_NATIVE_THRIFT = False

 JMS_HOST_PATTERN=re.compile('tcp://([^:]+):([0-9]+)')

-if sys.modules.has_key('jep'):
+if 'jep' in sys.modules:
     # intentionally do not catch if this fails to import, we want it to
     # be obvious that something is configured wrong when running from within
     # Java instead of allowing false confidence and fallback behavior
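dict.has_key() no longer exists in Python 3; membership is always spelled with "in". The same test works for sys.modules (itself a dict keyed by module name) and for ordinary dictionaries:

    import sys

    if 'jep' in sys.modules:            # Python 2: sys.modules.has_key('jep')
        print("running inside the CAVE/EDEX JVM via jep")

    config = {'host': 'edex'}
    if 'host' in config:                # Python 2: config.has_key('host')
        print(config['host'])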
@@ -193,4 +193,4 @@ class DataQueue(object):
         return self

     def __exit__(self, *unused):
-        self.close()
+        self.close()
@@ -29,18 +29,18 @@ class PyGeometryData(IGeometryData, PyData.PyData):
         self.__geometry = geometry
         self.__dataMap = {}
         tempDataMap = geoDataRecord.getDataMap()
-        for key, value in tempDataMap.items():
+        for key, value in list(tempDataMap.items()):
             self.__dataMap[key] = (value[0], value[1], value[2])

     def getGeometry(self):
         return self.__geometry

     def getParameters(self):
-        return self.__dataMap.keys()
+        return list(self.__dataMap.keys())

     def getString(self, param):
         value = self.__dataMap[param][0]
-        return str(value)
+        return value

     def getNumber(self, param):
         value = self.__dataMap[param][0]
@@ -48,7 +48,7 @@ class PyGeometryData(IGeometryData, PyData.PyData):
         if t == 'INT' or t == 'SHORT':
             return int(value)
         elif t == 'LONG':
-            return long(value)
+            return int(value)
         elif t == 'FLOAT':
             return float(value)
         elif t == 'DOUBLE':
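getParameters() is wrapped in list() because Python 3's dict.keys() and dict.items() return live view objects rather than lists; callers that index the result, or mutate the dict while iterating, need an explicit snapshot. A quick illustration:

    data_map = {'temperature': (21.5, 'C', None)}

    params = list(data_map.keys())          # a real list: params[0] works
    for key, value in list(data_map.items()):
        # safe to add entries because we iterate over a copy of the items
        data_map[key + '_raw'] = value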
@@ -15,7 +15,7 @@
 # Sep 07, 2017 6175 tgurney Override messageReceived in subclasses
 #

-
+from six import with_metaclass
 import abc
 import time
 import traceback
@@ -28,14 +28,12 @@ from awips.ThriftClient import ThriftRequestException
 from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime


-class PyNotification(INotificationSubscriber):
+class PyNotification(with_metaclass(abc.ABCMeta, INotificationSubscriber)):
     """
     Receives notifications for new data and retrieves the data that meets
     specified filtering criteria.
     """

-    __metaclass__ = abc.ABCMeta
-
     def __init__(self, request, filter, host='localhost', port=5672, requestHost='localhost'):
         self.DAL = DataAccessLayer
         self.DAL.changeEDEXHost(requestHost)
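Python 3 ignores the class-body __metaclass__ attribute, so a class declared abstract that way silently stops being abstract. six.with_metaclass keeps one spelling that works on both interpreters (on Python 3 alone you would write class PyNotification(INotificationSubscriber, metaclass=abc.ABCMeta)). A minimal sketch:

    import abc
    from six import with_metaclass

    class Subscriber(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def notificationArrived(self, msg):
            """Handle one incoming message."""

    # Subscriber() now raises TypeError until notificationArrived is implemented.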
@@ -106,7 +106,7 @@ class ThriftClientRouter(object):
         response = self._client.sendRequest(gridDataRequest)

         locSpecificData = {}
-        locNames = response.getSiteNxValues().keys()
+        locNames = list(response.getSiteNxValues().keys())
         for location in locNames:
             nx = response.getSiteNxValues()[location]
             ny = response.getSiteNyValues()[location]
@@ -26,6 +26,7 @@ __all__ = [
           ]

 import abc
+from six import with_metaclass

 class IDataRequest(object):
     """
@@ -149,11 +150,10 @@ class IDataRequest(object):



-class IData(object):
+class IData(with_metaclass(abc.ABCMeta, object)):
     """
     An IData representing data returned from the DataAccessLayer.
     """
-    __metaclass__ = abc.ABCMeta

     @abc.abstractmethod
     def getAttribute(self, key):
@@ -337,12 +337,11 @@ class IGeometryData(IData):
         return


-class INotificationSubscriber(object):
+class INotificationSubscriber(with_metaclass(abc.ABCMeta, object)):
     """
     An INotificationSubscriber representing a notification filter returned from
     the DataNotificationLayer.
     """
-    __metaclass__ = abc.ABCMeta

     @abc.abstractmethod
     def subscribe(self, callback):
@@ -361,12 +360,11 @@ class INotificationSubscriber(object):
         """Closes the notification subscriber"""
         pass

-class INotificationFilter(object):
+class INotificationFilter(with_metaclass(abc.ABCMeta, object)):
     """
     Represents data required to filter a set of URIs and
     return a corresponding list of IDataRequest to retrieve data for.
     """
-    __metaclass__ = abc.ABCMeta
     @abc.abstractmethod
     def accept(dataUri):
         pass
@@ -15,7 +15,10 @@ import urllib2
 from json import load as loadjson
 from xml.etree.ElementTree import parse as parseXml
 from base64 import b64encode
-from StringIO import StringIO
+try:
+    from StringIO import StringIO
+except ImportError:
+    from io import StringIO
 from getpass import getuser
 import dateutil.parser
 import contextlib
@@ -103,7 +106,7 @@ class _LocalizationOutput(StringIO):
             urllib2.urlopen(request)
         except urllib2.HTTPError as e:
             if e.code == 409:
-                raise LocalizationFileVersionConflictException, e.read()
+                raise LocalizationFileVersionConflictException(e.read())
             else:
                 raise e
     def __enter__(self):
@@ -166,12 +169,12 @@ class LocalizationFile(object):
             if not(self.isDirectory()):
                 checksum = response.headers["Content-MD5"]
                 if self.checksum != checksum:
-                    raise RuntimeError, "Localization checksum mismatch " + self.checksum + " " + checksum
+                    raise RuntimeError("Localization checksum mismatch " + self.checksum + " " + checksum)
             return contextlib.closing(response)
         elif mode == 'w':
             return _LocalizationOutput(self._manager, self)
         else:
-            raise ValueError, "mode string must be 'r' or 'w' not " + str(r)
+            raise ValueError("mode string must be 'r' or 'w' not " + str(r))
     def delete(self):
         """Delete this file from the server"""
         request = self._manager._buildRequest(self.context, self.path, method='DELETE')
@@ -180,7 +183,7 @@ class LocalizationFile(object):
             urllib2.urlopen(request)
         except urllib2.HTTPError as e:
             if e.code == 409:
-                raise LocalizationFileVersionConflictException, e.read()
+                raise LocalizationFileVersionConflictException(e.read())
             else:
                 raise e
     def exists(self):
@@ -333,7 +336,7 @@ class LocalizationFileManager(object):
                 exists = True
                 if not(response.geturl().endswith("/")):
                     # For ordinary files the server sends a redirect to remove the slash.
-                    raise LocalizationFileIsNotDirectoryException, "Not a directory: " + path
+                    raise LocalizationFileIsNotDirectoryException("Not a directory: " + path)
                 elif response.headers["Content-Type"] == "application/xml":
                     fileList += _parseXmlList(self, response, context, path)
                 else:
@@ -342,7 +345,7 @@ class LocalizationFileManager(object):
             if e.code != 404:
                 raise e
         if not(exists):
-            raise LocalizationFileDoesNotExistException, "No such file or directory: " + path
+            raise LocalizationFileDoesNotExistException("No such file or directory: " + path)
         return fileList
     def _get(self, context, path):
         path = self._normalizePath(path)
@@ -353,10 +356,10 @@ class LocalizationFileManager(object):
                 checksum = DIRECTORY_CHECKSUM;
             else:
                 if "Content-MD5" not in resp.headers:
-                    raise RuntimeError, "Missing Content-MD5 header in response from " + resp.geturl()
+                    raise RuntimeError("Missing Content-MD5 header in response from " + resp.geturl())
                 checksum = resp.headers["Content-MD5"]
                 if "Last-Modified" not in resp.headers:
-                    raise RuntimeError, "Missing Last-Modified header in response from " + resp.geturl()
+                    raise RuntimeError("Missing Last-Modified header in response from " + resp.geturl())
                 timestamp = dateutil.parser.parse(resp.headers["Last-Modified"])
             return LocalizationFile(self, context, path, checksum, timestamp)
         except urllib2.HTTPError as e:
@@ -447,7 +450,7 @@ class LocalizationFileManager(object):
         for context in self._contexts:
             if context.level == level:
                 return self._get(context, path)
-        raise ValueError, "No context defined for level " + level
+        raise ValueError("No context defined for level " + level)
     def __str__(self):
         contextsStr = '[' + ' '.join((str(c) for c in self._contexts)) + ']'
         return '<' + self.__class__.__name__ + " for " + self._baseUrl + ' ' + contextsStr + '>'
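All of the Localization changes are the same edit: Python 3 dropped the "raise ExceptionClass, argument" statement form, so the exception is now constructed explicitly. In isolation:

    class LocalizationFileVersionConflictException(Exception):
        pass

    def check_status(code, body):
        if code == 409:
            # Python 2 only:  raise LocalizationFileVersionConflictException, body
            raise LocalizationFileVersionConflictException(body)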
@@ -103,9 +103,9 @@ class IngestViaQPID:
             self.connection.start()
             self.session = self.connection.session(str(uuid4()))
             self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox')
-            print 'Connected to Qpid'
+            print('Connected to Qpid')
         except:
-            print 'Unable to connect to Qpid'
+            print('Unable to connect to Qpid')

     def sendmessage(self, filepath, header):
         '''
@@ -126,4 +126,4 @@ class IngestViaQPID:
         there are no threads left open
         '''
         self.session.close(timeout=10)
-        print 'Connection to Qpid closed'
+        print('Connection to Qpid closed')
@@ -64,18 +64,26 @@

 """

+from __future__ import print_function
 import hashlib
 import math
 import random
 import re
 import socket
 import sys
-import thread
 import threading
 import time
 import types
 import xml.dom.minidom
-from cStringIO import StringIO
+try:
+    from StringIO import StringIO
+except ImportError:
+    from io import StringIO
+from functools import reduce
+try:
+    import _thread
+except ImportError:
+    import thread

 #
 # stomp.py version number
@@ -89,14 +97,14 @@ def _uuid( *args ):
     (http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/213761)
     """

-    t = long( time.time() * 1000 )
-    r = long( random.random() * 100000000000000000L )
+    t = int( time.time() * 1000 )
+    r = int( random.random() * 100000000000000000 )

    try:
        a = socket.gethostbyname( socket.gethostname() )
    except:
        # if we can't get a network address, just imagine one
-        a = random.random() * 100000000000000000L
+        a = random.random() * 100000000000000000
    data = str(t) + ' ' + str(r) + ' ' + str(a) + ' ' + str(args)
    md5 = hashlib.md5()
    md5.update(data)
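Two details in the stomp.py port are worth a note. First, Python 3 has no L literal suffix and no separate long type, so both the literal and the long() calls become plain int. Second, hashlib digests only accept bytes on Python 3, so the md5.update(data) call shown above would need the string encoded first in a fully ported module; the sketch below (not the committed code) shows both points:

    import hashlib, random, time

    t = int(time.time() * 1000)                     # Python 2: long(...), literal ...L
    r = int(random.random() * 100000000000000000)
    data = "%d %d" % (t, r)

    md5 = hashlib.md5()
    md5.update(data.encode('utf-8'))                # bytes are required on Python 3
    print(md5.hexdigest())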
@@ -109,7 +117,7 @@ class DevNullLogger(object):
     dummy logging class for environments without the logging module
     """
     def log(self, msg):
-        print msg
+        print(msg)

     def devnull(self, msg):
         pass
@@ -354,7 +362,7 @@ class Connection(object):
         """
         self.__running = True
         self.__attempt_connection()
-        thread.start_new_thread(self.__receiver_loop, ())
+        _thread.start_new_thread(self.__receiver_loop, ())

     def stop(self):
         """
@@ -417,7 +425,7 @@ class Connection(object):

     def begin(self, headers={}, **keyword_headers):
         use_headers = self.__merge_headers([headers, keyword_headers])
-        if not 'transaction' in use_headers.keys():
+        if not 'transaction' in list(use_headers.keys()):
             use_headers['transaction'] = _uuid()
         self.__send_frame_helper('BEGIN', '', use_headers, [ 'transaction' ])
         return use_headers['transaction']
@@ -429,7 +437,7 @@ class Connection(object):
         self.__send_frame_helper('COMMIT', '', self.__merge_headers([headers, keyword_headers]), [ 'transaction' ])

     def connect(self, headers={}, **keyword_headers):
-        if keyword_headers.has_key('wait') and keyword_headers['wait']:
+        if 'wait' in keyword_headers and keyword_headers['wait']:
             while not self.is_connected(): time.sleep(0.1)
             del keyword_headers['wait']
         self.__send_frame_helper('CONNECT', '', self.__merge_headers([self.__connect_headers, headers, keyword_headers]), [ ])
@@ -515,11 +523,11 @@ class Connection(object):
             if type(required_header_key) == tuple:
                 found_alternative = False
                 for alternative in required_header_key:
-                    if alternative in headers.keys():
+                    if alternative in list(headers.keys()):
                         found_alternative = True
                 if not found_alternative:
                     raise KeyError("Command %s requires one of the following headers: %s" % (command, str(required_header_key)))
-            elif not required_header_key in headers.keys():
+            elif not required_header_key in list(headers.keys()):
                 raise KeyError("Command %s requires header %r" % (command, required_header_key))
         self.__send_frame(command, headers, payload)

@@ -533,7 +541,7 @@ class Connection(object):

         if self.__socket is not None:
             frame = '%s\n%s\n%s\x00' % (command,
-                                        reduce(lambda accu, key: accu + ('%s:%s\n' % (key, headers[key])), headers.keys(), ''),
+                                        reduce(lambda accu, key: accu + ('%s:%s\n' % (key, list(headers[key]))), headers.keys(), ''),
                                         payload)
             self.__socket.sendall(frame)
             log.debug("Sent frame: type=%s, headers=%r, body=%r" % (command, headers, payload))
@@ -690,7 +698,7 @@ class Connection(object):
                     assert len(pair) == 2
                     entries[pair[0]] = pair[1]
                 return entries
-            except Exception, ex:
+            except Exception as ex:
                 # unable to parse message. return original
                 return body

@@ -745,7 +753,7 @@ class Connection(object):
                     break
                 except socket.error:
                     self.__socket = None
-                    if type(sys.exc_info()[1]) == types.TupleType:
+                    if type(sys.exc_info()[1]) == tuple:
                         exc = sys.exc_info()[1][1]
                     else:
                         exc = sys.exc_info()[1]
@@ -796,20 +804,20 @@ if __name__ == '__main__':
             self.c.start()

         def __print_async(self, frame_type, headers, body):
-            print "\r \r",
-            print frame_type
-            for header_key in headers.keys():
-                print '%s: %s' % (header_key, headers[header_key])
-            print
-            print body
-            print '> ',
+            print("\r \r",)
+            print(frame_type)
+            for header_key in list(headers.keys()):
+                print('%s: %s' % (header_key, headers[header_key]))
+            print("")
+            print(body)
+            print('> ',)
             sys.stdout.flush()

         def on_connecting(self, host_and_port):
             self.c.connect(wait=True)

         def on_disconnected(self):
-            print "lost connection"
+            print("lost connection")

         def on_message(self, headers, body):
             self.__print_async("MESSAGE", headers, body)
@@ -833,13 +841,13 @@ if __name__ == '__main__':
             self.c.abort(transaction=args[1])

         def begin(self, args):
-            print 'transaction id: %s' % self.c.begin()
+            print('transaction id: %s' % self.c.begin())

         def commit(self, args):
             if len(args) < 2:
-                print 'expecting: commit <transid>'
+                print('expecting: commit <transid>')
             else:
-                print 'committing %s' % args[1]
+                print('committing %s' % args[1])
                 self.c.commit(transaction=args[1])

         def disconnect(self, args):
@@ -850,35 +858,35 @@ if __name__ == '__main__':

         def send(self, args):
             if len(args) < 3:
-                print 'expecting: send <destination> <message>'
+                print('expecting: send <destination> <message>')
             else:
                 self.c.send(destination=args[1], message=' '.join(args[2:]))

         def sendtrans(self, args):
             if len(args) < 3:
-                print 'expecting: sendtrans <destination> <transid> <message>'
+                print('expecting: sendtrans <destination> <transid> <message>')
             else:
                 self.c.send(destination=args[1], message="%s\n" % ' '.join(args[3:]), transaction=args[2])

         def subscribe(self, args):
             if len(args) < 2:
-                print 'expecting: subscribe <destination> [ack]'
+                print('expecting: subscribe <destination> [ack]')
             elif len(args) > 2:
-                print 'subscribing to "%s" with acknowledge set to "%s"' % (args[1], args[2])
+                print('subscribing to "%s" with acknowledge set to "%s"' % (args[1], args[2]))
                 self.c.subscribe(destination=args[1], ack=args[2])
             else:
-                print 'subscribing to "%s" with auto acknowledge' % args[1]
+                print('subscribing to "%s" with auto acknowledge' % args[1])
                 self.c.subscribe(destination=args[1], ack='auto')

         def unsubscribe(self, args):
             if len(args) < 2:
-                print 'expecting: unsubscribe <destination>'
+                print('expecting: unsubscribe <destination>')
             else:
-                print 'unsubscribing from "%s"' % args[1]
+                print('unsubscribing from "%s"' % args[1])
                 self.c.unsubscribe(destination=args[1])

     if len(sys.argv) > 5:
-        print 'USAGE: stomp.py [host] [port] [user] [passcode]'
+        print('USAGE: stomp.py [host] [port] [user] [passcode]')
         sys.exit(1)

     if len(sys.argv) >= 2:
@@ -900,7 +908,7 @@ if __name__ == '__main__':
     st = StompTester(host, port, user, passcode)
     try:
         while True:
-            line = raw_input("\r> ")
+            line = input("\r> ")
             if not line or line.lstrip().rstrip() == '':
                 continue
             elif 'quit' in line or 'disconnect' in line:
@@ -910,7 +918,7 @@ if __name__ == '__main__':
                 if not command.startswith("on_") and hasattr(st, command):
                     getattr(st, command)(split)
                 else:
-                    print 'unrecognized command'
+                    print('unrecognized command')
     finally:
         st.disconnect(None)

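raw_input() was renamed to input() in Python 3 (Python 2's own input() evaluated the typed text as an expression, which is why the 2.x code avoided it). The interactive loop pattern, reduced to its essentials:

    def command_loop(dispatch):
        # dispatch: mapping of command name -> callable taking the argument list
        while True:
            line = input("> ")                      # Python 2: raw_input("> ")
            if not line.strip():
                continue
            parts = line.split()
            if parts[0] in ('quit', 'exit'):
                break
            handler = dispatch.get(parts[0])
            if handler is None:
                print('unrecognized command')
            else:
                handler(parts[1:])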
@@ -1965,7 +1965,7 @@ upgradeHazardsDict = {
 def checkForUpgrade(pPhen, pSig, cPhen, cSig):
     proposed = pPhen + "." + pSig
     current = cPhen + "." + cSig
-    if upgradeHazardsDict.has_key(proposed):
+    if proposed in upgradeHazardsDict:
         if current in upgradeHazardsDict[proposed]:
             return 1
         else:
@@ -2016,7 +2016,7 @@ downgradeHazardsDict = {
 def checkForDowngrade(pPhen, pSig, cPhen, cSig):
     proposed = pPhen + "." + pSig
     current = cPhen + "." + cSig
-    if downgradeHazardsDict.has_key(proposed):
+    if proposed in downgradeHazardsDict:
         if current in downgradeHazardsDict[proposed]:
             return 1
         else:
@@ -23,7 +23,7 @@
 import os
 import logging
 from awips import AlertVizHandler
-import Record
+from . import Record

 avh = AlertVizHandler.AlertVizHandler(host=os.getenv("BROKER_ADDR","localhost"), port=9581, category='LOCAL', source='ANNOUNCER', level=logging.NOTSET)
 record = Record.Record(10)
@ -99,7 +99,7 @@ class BaseRadarTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getAttribute('icao'), 1000)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
gridData = self.runConstraintTest('icao', '=', 1000L)
|
||||
gridData = self.runConstraintTest('icao', '=', 1000)
|
||||
for record in gridData:
|
||||
self.assertEqual(record.getAttribute('icao'), 1000)
|
||||
|
||||
|
|
|
@ -23,4 +23,4 @@ STATION_ID = '72558'
|
|||
RADAR = 'KOAX'
|
||||
SAMPLE_AREA = (-97.0, 41.0, -96.0, 42.0)
|
||||
|
||||
ENVELOPE = box(*SAMPLE_AREA)
|
||||
ENVELOPE = box(*SAMPLE_AREA)
|
||||
|
|
|
@ -103,7 +103,7 @@ class BinLightningTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getAttribute('source'), 1000)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geomData = self._runConstraintTest('source', '=', 1000L)
|
||||
geomData = self._runConstraintTest('source', '=', 1000)
|
||||
for record in geomData:
|
||||
self.assertEqual(record.getAttribute('source'), 1000)
|
||||
|
||||
|
|
|
@ -30,4 +30,4 @@ class BufrMosHpcTestCase(baseBufrMosTestCase.BufrMosTestCase):
|
|||
datatype = "bufrmosHPC"
|
||||
data_params = "forecastHr", "maxTemp24Hour"
|
||||
|
||||
# All tests inherited from superclass
|
||||
# All tests inherited from superclass
|
||||
|
|
|
@ -120,7 +120,7 @@ class BufrUaTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getString('rptType'), '2022')
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('reportType', '=', 2022L)
|
||||
geometryData = self._runConstraintTest('reportType', '=', 2022)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getString('rptType'), '2022')
|
||||
|
||||
|
|
|
@ -255,7 +255,7 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getNumber('avg_daily_max'), 70)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('avg_daily_max', '=', 70L)
|
||||
geometryData = self._runConstraintTest('avg_daily_max', '=', 70)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getNumber('avg_daily_max'), 70)
|
||||
|
||||
|
|
|
@ -81,7 +81,7 @@ class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getNumber('catalogtype'), 32)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('elevation', '=', 0L)
|
||||
geometryData = self._runConstraintTest('elevation', '=', 0)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getNumber('elevation'), 0)
|
||||
|
||||
|
|
|
@ -114,4 +114,4 @@ class DataTimeTestCase(unittest.TestCase):
|
|||
'2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]'
|
||||
]
|
||||
for time in times:
|
||||
self.assertEqual(DataTime(time), DataTime(str(DataTime(time))), time)
|
||||
self.assertEqual(DataTime(time), DataTime(str(DataTime(time))), time)
|
||||
|
|
|
@ -208,4 +208,4 @@ class FfmpTestCase(baseDafTestCase.DafTestCase):
|
|||
req.addIdentifier('siteKey', self.location)
|
||||
req.addIdentifier('huc', 'ALL')
|
||||
req.setParameters('FFG0124hr')
|
||||
self.runGeometryDataTest(req, checkDataTimes=False)
|
||||
self.runGeometryDataTest(req, checkDataTimes=False)
|
||||
|
|
|
@ -138,7 +138,7 @@ class GridTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000L)
|
||||
gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000)
|
||||
for record in gridData:
|
||||
self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000)
|
||||
|
||||
|
|
|
@ -152,9 +152,9 @@ class HydroTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getNumber('value'), 3)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('value', '=', 3L)
|
||||
geometryData = self._runConstraintTest('value', '=', 3)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getNumber('value'), 3L)
|
||||
self.assertEqual(record.getNumber('value'), 3)
|
||||
|
||||
def testGetDataWithEqualsFloat(self):
|
||||
geometryData = self._runConstraintTest('value', '=', 3.0)
|
||||
|
|
|
@ -120,7 +120,7 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getNumber('reservoir'), 1)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('reservoir', '=', 1L)
|
||||
geometryData = self._runConstraintTest('reservoir', '=', 1)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getNumber('reservoir'), 1)
|
||||
|
||||
|
|
|
@ -81,7 +81,7 @@ class RadarSpatialTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getNumber('immutablex'), 57)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('immutablex', '=', 57L)
|
||||
geometryData = self._runConstraintTest('immutablex', '=', 57)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getNumber('immutablex'), 57)
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
self.assertTrue(new('=', 3).evaluate(3))
|
||||
self.assertTrue(new('=', 3).evaluate('3'))
|
||||
self.assertTrue(new('=', '3').evaluate(3))
|
||||
self.assertTrue(new('=', 12345).evaluate(12345L))
|
||||
self.assertTrue(new('=', 12345).evaluate(12345))
|
||||
self.assertTrue(new('=', 'a').evaluate('a'))
|
||||
self.assertTrue(new('=', 'a').evaluate(u'a'))
|
||||
self.assertTrue(new('=', 1.0001).evaluate(2.0 - 0.999999))
|
||||
|
@ -51,7 +51,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
self.assertFalse(new('!=', 3).evaluate('3'))
|
||||
self.assertFalse(new('!=', '3').evaluate(3))
|
||||
self.assertFalse(new('!=', 3).evaluate(3))
|
||||
self.assertFalse(new('!=', 12345).evaluate(12345L))
|
||||
self.assertFalse(new('!=', 12345).evaluate(12345))
|
||||
self.assertFalse(new('!=', 'a').evaluate('a'))
|
||||
self.assertFalse(new('!=', 'a').evaluate(u'a'))
|
||||
self.assertFalse(new('!=', 1.0001).evaluate(2.0 - 0.9999))
|
||||
|
@ -62,7 +62,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
self.assertTrue(new('>', 'a').evaluate('b'))
|
||||
self.assertTrue(new('>', 3).evaluate(4))
|
||||
self.assertFalse(new('>', 20).evaluate(3))
|
||||
self.assertFalse(new('>', 12345).evaluate(12345L))
|
||||
self.assertFalse(new('>', 12345).evaluate(12345))
|
||||
self.assertFalse(new('>', 'a').evaluate('a'))
|
||||
self.assertFalse(new('>', 'z').evaluate('a'))
|
||||
self.assertFalse(new('>', 4).evaluate(3))
|
||||
|
@ -70,7 +70,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
def testEvaluateGreaterThanEquals(self):
|
||||
new = RequestConstraint.new
|
||||
self.assertTrue(new('>=', 3).evaluate(3))
|
||||
self.assertTrue(new('>=', 12345).evaluate(12345L))
|
||||
self.assertTrue(new('>=', 12345).evaluate(12345))
|
||||
self.assertTrue(new('>=', 'a').evaluate('a'))
|
||||
self.assertTrue(new('>=', 1.0001).evaluate(1.0002))
|
||||
self.assertTrue(new('>=', 'a').evaluate('b'))
|
||||
|
@ -84,7 +84,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
self.assertTrue(new('<', 'z').evaluate('a'))
|
||||
self.assertTrue(new('<', 30).evaluate(4))
|
||||
self.assertFalse(new('<', 3).evaluate(3))
|
||||
self.assertFalse(new('<', 12345).evaluate(12345L))
|
||||
self.assertFalse(new('<', 12345).evaluate(12345))
|
||||
self.assertFalse(new('<', 'a').evaluate('a'))
|
||||
self.assertFalse(new('<', 1.0001).evaluate(1.0002))
|
||||
self.assertFalse(new('<', 'a').evaluate('b'))
|
||||
|
@ -95,7 +95,7 @@ class RequestConstraintTestCase(unittest.TestCase):
|
|||
self.assertTrue(new('<=', 'z').evaluate('a'))
|
||||
self.assertTrue(new('<=', 20).evaluate(3))
|
||||
self.assertTrue(new('<=', 3).evaluate(3))
|
||||
self.assertTrue(new('<=', 12345).evaluate(12345L))
|
||||
self.assertTrue(new('<=', 12345).evaluate(12345))
|
||||
self.assertTrue(new('<=', 'a').evaluate('a'))
|
||||
self.assertFalse(new('<=', 1.0001).evaluate(1.0002))
|
||||
self.assertFalse(new('<=', 'a').evaluate('b'))
|
||||
|
|
|
@ -89,7 +89,7 @@ class SatelliteTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getAttribute('creatingEntity'), 1000)
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
gridData = self._runConstraintTest('creatingEntity', '=', 1000L)
|
||||
gridData = self._runConstraintTest('creatingEntity', '=', 1000)
|
||||
for record in gridData:
|
||||
self.assertEqual(record.getAttribute('creatingEntity'), 1000)
|
||||
|
||||
|
|
|
@ -91,7 +91,7 @@ class SfcObsTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getString('reportType'), '1004')
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('reportType', '=', 1004L)
|
||||
geometryData = self._runConstraintTest('reportType', '=', 1004)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getString('reportType'), '1004')
|
||||
|
||||
|
|
|
@ -129,7 +129,7 @@ class WarningTestCase(baseDafTestCase.DafTestCase):
|
|||
self.assertEqual(record.getString('etn'), '1000')
|
||||
|
||||
def testGetDataWithEqualsLong(self):
|
||||
geometryData = self._runConstraintTest('etn', '=', 1000L)
|
||||
geometryData = self._runConstraintTest('etn', '=', 1000)
|
||||
for record in geometryData:
|
||||
self.assertEqual(record.getString('etn'), '1000')
|
||||
|
||||
|
|
|
@ -152,4 +152,4 @@ class LFMTestCase(unittest.TestCase):
|
|||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
unittest.main()
|
||||
|
|
|
@ -40,7 +40,7 @@ class ListenThread(threading.Thread):
|
|||
self.qs.topicSubscribe(self.topicName, self.receivedMessage)
|
||||
|
||||
def receivedMessage(self, msg):
|
||||
print "Received message"
|
||||
print("Received message")
|
||||
self.nMessagesReceived += 1
|
||||
if self.waitSecond == 0:
|
||||
fmsg = open('/tmp/rawMessage', 'w')
|
||||
|
@ -49,21 +49,21 @@ class ListenThread(threading.Thread):
|
|||
|
||||
while self.waitSecond < TIME_TO_SLEEP and not self.stopped:
|
||||
if self.waitSecond % 60 == 0:
|
||||
print time.strftime('%H:%M:%S'), "Sleeping and stuck in not so infinite while loop"
|
||||
print(time.strftime('%H:%M:%S'), "Sleeping and stuck in not so infinite while loop")
|
||||
self.waitSecond += 1
|
||||
time.sleep(1)
|
||||
|
||||
print time.strftime('%H:%M:%S'), "Received", self.nMessagesReceived, "messages"
|
||||
print(time.strftime('%H:%M:%S'), "Received", self.nMessagesReceived, "messages")
|
||||
|
||||
def stop(self):
|
||||
print "Stopping"
|
||||
print("Stopping")
|
||||
self.stopped = True
|
||||
self.qs.close()
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
print "Starting up at", time.strftime('%H:%M:%S')
|
||||
print("Starting up at", time.strftime('%H:%M:%S'))
|
||||
|
||||
topic = 'edex.alerts'
|
||||
host = 'localhost'
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
#
|
||||
|
||||
from thrift.transport import TTransport
|
||||
import SelfDescribingBinaryProtocol, ThriftSerializationContext
|
||||
from . import SelfDescribingBinaryProtocol, ThriftSerializationContext
|
||||
|
||||
class DynamicSerializationManager:
|
||||
|
||||
|
@ -49,4 +49,4 @@ class DynamicSerializationManager:
|
|||
return self.transport.getvalue()
|
||||
|
||||
def _serialize(self, ctx, obj):
|
||||
ctx.serializeMessage(obj)
|
||||
ctx.serializeMessage(obj)
|
||||
|
|
|
@@ -21,6 +21,7 @@
 # writeObject().
 # Apr 24, 2015 4425 nabowle Add Double support
 # Oct 17, 2016 5919 njensen Optimized for speed
+# Sep 06, 2018 mjames@ucar Python3 compliance
 #
 #

@@ -29,10 +30,10 @@ import inspect
 import sys
 import types
 import time
-import numpy
 import dynamicserialize
 from dynamicserialize import dstypes, adapters
-import SelfDescribingBinaryProtocol
+import numpy
+from . import SelfDescribingBinaryProtocol

 DS_LEN = len('dynamicserialize.dstypes.')

@@ -55,17 +56,18 @@ def buildObjMap(module):
 buildObjMap(dstypes)

 pythonToThriftMap = {
-    types.StringType: TType.STRING,
-    types.IntType: TType.I32,
-    types.LongType: TType.I64,
-    types.ListType: TType.LIST,
-    types.DictionaryType: TType.MAP,
+    bytes: TType.STRING,
+    int: TType.I32,
+    int: TType.I64,
+    list: TType.LIST,
+    dict: TType.MAP,
     type(set([])): TType.SET,
-    types.FloatType: SelfDescribingBinaryProtocol.FLOAT,
+    float: SelfDescribingBinaryProtocol.FLOAT,
     # types.FloatType: TType.DOUBLE,
-    types.BooleanType: TType.BOOL,
-    types.InstanceType: TType.STRUCT,
-    types.NoneType: TType.VOID,
+    bool: TType.BOOL,
+    object: TType.STRUCT,
+    str: TType.STRING,
+    type(None): TType.VOID,
     numpy.float32: SelfDescribingBinaryProtocol.FLOAT,
     numpy.int32: TType.I32,
     numpy.ndarray: TType.LIST,
@@ -142,19 +144,19 @@ class ThriftSerializationContext(object):

     def deserializeMessage(self):
         name = self.protocol.readStructBegin()
+        name = name.decode('cp437')
+        name = name.replace('_', '.')
         if name.isdigit():
             obj = self._deserializeType(int(name))
             return obj
-        name = name.replace('_', '.')
         if name in adapters.classAdapterRegistry:
             return adapters.classAdapterRegistry[name].deserialize(self)
         elif '$' in name:
             # it's an inner class, we're going to hope it's an enum, treat it
             # special
             fieldName, fieldType, fieldId = self.protocol.readFieldBegin()
-            if fieldName != '__enumValue__':
-                raise dynamiceserialize.SerializationException(
-                    "Expected to find enum payload. Found: " + fieldName)
+            if fieldName.decode('utf8') != '__enumValue__':
+                raise dynamicserialize.SerializationException("Expected to find enum payload. Found: " + fieldName)
             obj = self.protocol.readString()
             self.protocol.readFieldEnd()
             return obj
@@ -181,7 +183,8 @@ class ThriftSerializationContext(object):
             return False
         elif fieldType != TType.VOID:
             result = self._deserializeType(fieldType)
-            lookingFor = "set" + fieldName[0].upper() + fieldName[1:]
+            fn_str = bytes.decode(fieldName)
+            lookingFor = "set" + fn_str[0].upper() + fn_str[1:]

             try:
                 setMethod = getattr(obj, lookingFor)
@@ -199,7 +202,7 @@ class ThriftSerializationContext(object):
         if size:
             if listType not in primitiveSupport:
                 m = self.typeDeserializationMethod[listType]
-                result = [m() for n in xrange(size)]
+                result = [m() for n in range(size)]
             else:
                 result = self.listDeserializationMethod[listType](size)
         self.protocol.readListEnd()
@@ -208,7 +211,7 @@ class ThriftSerializationContext(object):
     def _deserializeMap(self):
         keyType, valueType, size = self.protocol.readMapBegin()
         result = {}
-        for n in xrange(size):
+        for n in range(size):
             # can't go off the type, due to java generics limitations dynamic serialize is
             # serializing keys and values as void
             key = self.typeDeserializationMethod[TType.STRUCT]()
@@ -220,7 +223,7 @@ class ThriftSerializationContext(object):
     def _deserializeSet(self):
         setType, setSize = self.protocol.readSetBegin()
         result = set([])
-        for n in xrange(setSize):
+        for n in range(setSize):
             result.add(self.typeDeserializationMethod[TType.STRUCT]())
         self.protocol.readSetEnd()
         return result
@@ -230,7 +233,7 @@ class ThriftSerializationContext(object):
         if pyt in pythonToThriftMap:
             return pythonToThriftMap[pyt]
         elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'):
-            return pythonToThriftMap[types.InstanceType]
+            return pythonToThriftMap[object]
         else:
             raise dynamicserialize.SerializationException(
                 "Don't know how to serialize object of type: " + str(pyt))
@@ -253,11 +256,14 @@ class ThriftSerializationContext(object):
         self.protocol.writeStructBegin(fqn)
         methods = inspect.getmembers(obj, inspect.ismethod)
         fid = 1
+        #print(methods);
         for m in methods:
             methodName = m[0]
             if methodName.startswith('get'):
                 fieldname = methodName[3].lower() + methodName[4:]
                 val = m[1]()
+                #print(val);
+                dir(val);
                 ft = self._lookupType(val)
                 if ft == TType.STRUCT:
                     fc = val.__module__[DS_LEN:]
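One side effect of the type-map rewrite is easy to miss: types.IntType and types.LongType both map to the builtin int on Python 3, so the dict literal above now contains the key int twice. Python keeps the last occurrence, which means plain Python ints will be looked up as I64 rather than I32 (the numpy.int32 entry still gives callers an explicit 32-bit route). A tiny demonstration of the duplicate-key behavior (the numeric constants below are illustrative, not Thrift's):

    TTYPE_I32, TTYPE_I64 = 8, 10

    python_to_thrift = {
        int: TTYPE_I32,
        int: TTYPE_I64,      # duplicate key: the later value wins
    }

    assert len(python_to_thrift) == 1
    assert python_to_thrift[int] == TTYPE_I64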
@ -18,8 +18,8 @@
|
|||
__all__ = [
|
||||
]
|
||||
|
||||
import dstypes, adapters
|
||||
import DynamicSerializationManager
|
||||
from . import dstypes, adapters
|
||||
from . import DynamicSerializationManager
|
||||
|
||||
class SerializationException(Exception):
|
||||
|
||||
|
@ -38,4 +38,4 @@ def serialize(obj):
|
|||
|
||||
def deserialize(bytes):
|
||||
dsm = DynamicSerializationManager.DynamicSerializationManager()
|
||||
return dsm.deserializeBytes(bytes)
|
||||
return dsm.deserializeBytes(bytes)
|
||||
|
|
|
@ -26,4 +26,4 @@ def serialize(context, date):
|
|||
def deserialize(context):
|
||||
result = CommutativeTimestamp()
|
||||
result.setTime(context.readI64())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -24,4 +24,4 @@ def serialize(context, dbId):
|
|||
|
||||
def deserialize(context):
|
||||
result = DatabaseID(context.readString())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -25,4 +25,4 @@ def serialize(context, date):
|
|||
def deserialize(context):
|
||||
result = Date()
|
||||
result.setTime(context.readI64())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -35,6 +35,6 @@ def deserialize(context):
|
|||
setSize = context.readI32()
|
||||
enumClassName = context.readString()
|
||||
valList = []
|
||||
for i in xrange(setSize):
|
||||
for i in range(setSize):
|
||||
valList.append(context.readString())
|
||||
return EnumSet(enumClassName, valList)
|
||||
|
|
|
@ -26,4 +26,4 @@ def serialize(context, date):
|
|||
def deserialize(context):
|
||||
result = FormattedDate()
|
||||
result.setTime(context.readI64())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -59,13 +59,13 @@ def serialize(context, resp):
|
|||
def deserialize(context):
|
||||
size = context.readI32()
|
||||
wkbs = []
|
||||
for i in xrange(size):
|
||||
for i in range(size):
|
||||
wkb = context.readBinary()
|
||||
wkbs.append(wkb)
|
||||
|
||||
geoData = []
|
||||
size = context.readI32()
|
||||
for i in xrange(size):
|
||||
for i in range(size):
|
||||
data = GeometryResponseData()
|
||||
# wkb index
|
||||
wkbIndex = context.readI32()
|
||||
|
@ -83,7 +83,7 @@ def deserialize(context):
|
|||
# parameters
|
||||
paramSize = context.readI32()
|
||||
paramMap = {}
|
||||
for k in xrange(paramSize):
|
||||
for k in range(paramSize):
|
||||
paramName = context.readString()
|
||||
value = context.readObject()
|
||||
tName = context.readString()
|
||||
|
|
|
@ -27,4 +27,4 @@ def serialize(context, history):
|
|||
|
||||
def deserialize(context):
|
||||
result = GridDataHistory(context.readString())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -29,7 +29,7 @@ def serialize(context, lockTable):
|
|||
for lock in locks:
|
||||
wsIdString = str(lock.getWsId())
|
||||
|
||||
if wsIds.has_key(wsIdString):
|
||||
if wsIdString in wsIds:
|
||||
lockWsIdIndex.append(wsIds[wsIdString])
|
||||
else:
|
||||
lockWsIdIndex.append(index)
|
||||
|
@ -52,12 +52,12 @@ def deserialize(context):
|
|||
parmId = context.readObject()
|
||||
numWsIds = context.readI32()
|
||||
wsIds = []
|
||||
for x in xrange(numWsIds):
|
||||
for x in range(numWsIds):
|
||||
wsIds.append(context.readObject())
|
||||
|
||||
numLocks = context.readI32()
|
||||
locks = []
|
||||
for x in xrange(numLocks):
|
||||
for x in range(numLocks):
|
||||
startTime = context.readI64()
|
||||
endTime = context.readI64()
|
||||
wsId = wsIds[context.readI32()]
|
||||
|
@ -69,4 +69,4 @@ def deserialize(context):
|
|||
lockTable.setWsId(wsIds[0])
|
||||
lockTable.setLocks(locks)
|
||||
|
||||
return lockTable
|
||||
return lockTable
|
||||
|
|
|
@ -24,4 +24,4 @@ def serialize(context, parmId):
|
|||
|
||||
def deserialize(context):
|
||||
result = ParmID(context.readString())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -26,4 +26,4 @@ def serialize(context, timeConstraints):
|
|||
|
||||
def deserialize(context):
|
||||
result = TimeConstraints(context.readI32(), context.readI32(), context.readI32())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -24,4 +24,4 @@ def serialize(context, timestamp):
|
|||
|
||||
def deserialize(context):
|
||||
result = Timestamp(context.readI64())
|
||||
return result
|
||||
return result
|
||||
|
|
|
@ -35,7 +35,7 @@ def deserialize(context):
|
|||
wsId.setUserName(wsIdParts[1])
|
||||
wsId.setProgName(wsIdParts[2])
|
||||
wsId.setPid(wsIdParts[3])
|
||||
wsId.setThreadId(long(wsIdParts[4]))
|
||||
wsId.setThreadId(int(wsIdParts[4]))
|
||||
|
||||
return wsId
|
||||
|
||||
|
|
|
@@ -76,10 +76,10 @@ def registerAdapters(package, modules):
     if not package.endswith('.'):
         package += '.'
     for x in modules:
-        exec 'import ' + package + x
+        exec('import ' + package + x)
         m = sys.modules[package + x]
         d = m.__dict__
-        if d.has_key('ClassAdapter'):
+        if 'ClassAdapter' in d:
             if isinstance(m.ClassAdapter, list):
                 for clz in m.ClassAdapter:
                     classAdapterRegistry[clz] = m
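exec changed from a statement to a function in Python 3, so the string-built import has to be called. The registry lookup that follows relies on the module landing in sys.modules, which the exec-import guarantees; importlib.import_module would be an equivalent way to express the same dynamic import, though that is not what this commit does. A hedged sketch:

    import importlib
    import sys

    def load_adapter(package, module_name, registry):
        # Equivalent to: exec('import ' + package + '.' + module_name)
        m = importlib.import_module(package + '.' + module_name)
        assert m is sys.modules[package + '.' + module_name]
        if 'ClassAdapter' in m.__dict__:          # Python 2: d.has_key('ClassAdapter')
            classes = m.ClassAdapter if isinstance(m.ClassAdapter, list) else [m.ClassAdapter]
            for clz in classes:
                registry[clz] = m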
@ -11,12 +11,11 @@
|
|||
#
|
||||
##
|
||||
|
||||
import ActiveTableKey
|
||||
import abc
|
||||
from six import with_metaclass
|
||||
from . import ActiveTableKey
|
||||
|
||||
class ActiveTableRecord(object):
|
||||
__metaclass__ = abc.ABCMeta
|
||||
|
||||
class ActiveTableRecord(with_metaclass(abc.ABCMeta, object)):
|
||||
@abc.abstractmethod
|
||||
def __init__(self):
|
||||
self.key = ActiveTableKey.ActiveTableKey()
|
||||
|
|
|
@ -9,9 +9,9 @@
|
|||
#
|
||||
##
|
||||
|
||||
import ActiveTableRecord
|
||||
from . import ActiveTableRecord
|
||||
|
||||
class OperationalActiveTableRecord(ActiveTableRecord.ActiveTableRecord):
|
||||
class OperationalActiveTableRecord(ActiveTableRecord):
|
||||
|
||||
def __init__(self):
|
||||
super(OperationalActiveTableRecord, self).__init__()
|
||||
|
|
|
@ -9,9 +9,9 @@
|
|||
#
|
||||
##
|
||||
|
||||
import ActiveTableRecord
|
||||
from . import ActiveTableRecord
|
||||
|
||||
class PracticeActiveTableRecord(ActiveTableRecord.ActiveTableRecord):
|
||||
class PracticeActiveTableRecord(ActiveTableRecord):
|
||||
|
||||
def __init__(self):
|
||||
super(PracticeActiveTableRecord, self).__init__()
|
||||
|
|
|
@ -24,20 +24,20 @@ __all__ = [
|
|||
'VTECTableChangeNotification'
|
||||
]
|
||||
|
||||
from ActiveTableKey import ActiveTableKey
|
||||
from ActiveTableRecord import ActiveTableRecord
|
||||
from ActiveTableMode import ActiveTableMode
|
||||
from DumpActiveTableRequest import DumpActiveTableRequest
|
||||
from DumpActiveTableResponse import DumpActiveTableResponse
|
||||
from GetActiveTableDictRequest import GetActiveTableDictRequest
|
||||
from GetActiveTableDictResponse import GetActiveTableDictResponse
|
||||
from GetFourCharSitesRequest import GetFourCharSitesRequest
|
||||
from GetFourCharSitesResponse import GetFourCharSitesResponse
|
||||
from GetVtecAttributeRequest import GetVtecAttributeRequest
|
||||
from GetVtecAttributeResponse import GetVtecAttributeResponse
|
||||
from OperationalActiveTableRecord import OperationalActiveTableRecord
|
||||
from PracticeActiveTableRecord import PracticeActiveTableRecord
|
||||
from SendPracticeProductRequest import SendPracticeProductRequest
|
||||
from VTECChange import VTECChange
|
||||
from VTECTableChangeNotification import VTECTableChangeNotification
|
||||
from .ActiveTableKey import ActiveTableKey
|
||||
from .ActiveTableRecord import ActiveTableRecord
|
||||
from .ActiveTableMode import ActiveTableMode
|
||||
from .DumpActiveTableRequest import DumpActiveTableRequest
|
||||
from .DumpActiveTableResponse import DumpActiveTableResponse
|
||||
from .GetActiveTableDictRequest import GetActiveTableDictRequest
|
||||
from .GetActiveTableDictResponse import GetActiveTableDictResponse
|
||||
from .GetFourCharSitesRequest import GetFourCharSitesRequest
|
||||
from .GetFourCharSitesResponse import GetFourCharSitesResponse
|
||||
from .GetVtecAttributeRequest import GetVtecAttributeRequest
|
||||
from .GetVtecAttributeResponse import GetVtecAttributeResponse
|
||||
from .OperationalActiveTableRecord import OperationalActiveTableRecord
|
||||
from .PracticeActiveTableRecord import PracticeActiveTableRecord
|
||||
from .SendPracticeProductRequest import SendPracticeProductRequest
|
||||
from .VTECChange import VTECChange
|
||||
from .VTECTableChangeNotification import VTECTableChangeNotification
|
||||
|
||||
|
|
|
@ -10,8 +10,8 @@ __all__ = [
|
|||
'SendActiveTableRequest'
|
||||
]
|
||||
|
||||
from ClearPracticeVTECTableRequest import ClearPracticeVTECTableRequest
|
||||
from MergeActiveTableRequest import MergeActiveTableRequest
|
||||
from RetrieveRemoteActiveTableRequest import RetrieveRemoteActiveTableRequest
|
||||
from SendActiveTableRequest import SendActiveTableRequest
|
||||
from .ClearPracticeVTECTableRequest import ClearPracticeVTECTableRequest
|
||||
from .MergeActiveTableRequest import MergeActiveTableRequest
|
||||
from .RetrieveRemoteActiveTableRequest import RetrieveRemoteActiveTableRequest
|
||||
from .SendActiveTableRequest import SendActiveTableRequest
|
||||
|
||||
|
|
|
@ -7,5 +7,5 @@ __all__ = [
|
|||
'ActiveTableSharingResponse'
|
||||
]
|
||||
|
||||
from ActiveTableSharingResponse import ActiveTableSharingResponse
|
||||
from .ActiveTableSharingResponse import ActiveTableSharingResponse
|
||||
|
||||
|
|
|
@@ -57,7 +57,7 @@ class AlertVizRequest(object):
    def setFilters(self, filters):
        if filters is None:
            self.filters = {}
        elif not(filters.has_key(None) or filters.values().count(None)>0 or filters.has_key('') or filters.values().count('')>0):
        elif not(None in filters or filters.values().count(None)>0 or '' in filters or filters.values().count('')>0):
            self.filters = filters
        else:
            raise ValueError('Filters must not contain None or empty keys or values: %s' % filters)
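`dict.has_key()` was removed in Python 3, which is why the converted line switches to the `in` operator. One hedged caveat: `dict.values()` returns a view object on Python 3 that supports membership tests but has no `.count()` method, so a fully Python 3-safe form of this check could look like the sketch below (the method name and error message mirror the hunk; the exact rewrite is illustrative, not what the commit shipped):

```python
def setFilters(self, filters):
    """Illustrative Python 3-safe variant of AlertVizRequest.setFilters.

    Assumes filters is a plain dict of str -> str, as in the hunk above.
    """
    if filters is None:
        self.filters = {}
    elif (None in filters or '' in filters
          or None in filters.values() or '' in filters.values()):
        # Membership tests work on dict_values views; .count() does not
        # exist on them in Python 3.
        raise ValueError('Filters must not contain None or empty keys or values: %s' % filters)
    else:
        self.filters = filters
```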
@@ -7,5 +7,5 @@ __all__ = [
    'AlertVizRequest'
]

from AlertVizRequest import AlertVizRequest
from .AlertVizRequest import AlertVizRequest
@@ -3,12 +3,11 @@

# File auto-generated against equivalent DynamicSerialize Java class

from six import with_metaclass
import abc


class AbstractFailedResponse(object):
    __metaclass__ = abc.ABCMeta

class AbstractFailedResponse(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.request = None
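Several hunks in this commit drop the Python 2-only `__metaclass__ = abc.ABCMeta` attribute, which Python 3 silently ignores, in favor of `six.with_metaclass`, so the abstract base class actually gets its metaclass on both interpreters. A self-contained sketch of the same pattern, using a made-up `AbstractThing` class and assuming the `six` package is installed (the commit already relies on it):

```python
import abc

from six import with_metaclass


class AbstractThing(with_metaclass(abc.ABCMeta, object)):
    """Illustrative only: same shape as AbstractFailedResponse above."""

    @abc.abstractmethod
    def __init__(self):
        self.request = None


# Python 3-only spellings of the same thing (not usable while Python 2 is
# still supported):
#     class AbstractThing(metaclass=abc.ABCMeta): ...
#     class AbstractThing(abc.ABC): ...

try:
    AbstractThing()   # abstract __init__ means the class cannot be instantiated
except TypeError as err:
    print(err)
```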
@@ -10,8 +10,8 @@ __all__ = [
    'UserNotAuthorized'
]

from AbstractFailedResponse import AbstractFailedResponse
from AuthServerErrorResponse import AuthServerErrorResponse
from SuccessfulExecution import SuccessfulExecution
from UserNotAuthorized import UserNotAuthorized
from .AbstractFailedResponse import AbstractFailedResponse
from .AuthServerErrorResponse import AuthServerErrorResponse
from .SuccessfulExecution import SuccessfulExecution
from .UserNotAuthorized import UserNotAuthorized
@@ -8,5 +8,5 @@ __all__ = [
    'UserId'
]

from User import User
from UserId import UserId
from .User import User
from .UserId import UserId
@@ -40,10 +40,10 @@ class DefaultDataRequest(IDataRequest):
        del self.identifiers[key]

    def setParameters(self, *params):
        self.parameters = map(str, params)
        self.parameters = list(map(str, params))

    def setLevels(self, *levels):
        self.levels = map(self.__makeLevel, levels)
        self.levels = list(map(self.__makeLevel, levels))

    def __makeLevel(self, level):
        if type(level) is Level:

@@ -57,7 +57,7 @@ class DefaultDataRequest(IDataRequest):
        self.envelope = Envelope(env.envelope)

    def setLocationNames(self, *locationNames):
        self.locationNames = map(str, locationNames)
        self.locationNames = list(map(str, locationNames))

    def getDatatype(self):
        return self.datatype
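The `list(map(...))` wrappers above are needed because Python 3's `map()` returns a lazy, single-use iterator instead of a list; materializing it keeps `self.parameters`, `self.levels`, and `self.locationNames` as real lists that can be indexed, length-checked, and iterated more than once (for example during serialization). A short illustration with throwaway values:

```python
params = ('T', 'DpT', 'RH')         # arbitrary example parameter names

lazy = map(str, params)             # Python 3: a one-shot iterator, not a list
eager = list(map(str, params))      # what the converted setParameters() stores

print(eager)                        # ['T', 'DpT', 'RH']
print(list(lazy))                   # ['T', 'DpT', 'RH'] -- consumes the iterator
print(list(lazy))                   # [] -- a second pass yields nothing
print(len(eager), eager[0])         # len() and indexing need the list form
```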
@@ -8,5 +8,5 @@ __all__ = [
    'DefaultNotificationFilter'
]

from DefaultDataRequest import DefaultDataRequest
from DefaultNotificationFilter import DefaultNotificationFilter
from .DefaultDataRequest import DefaultDataRequest
from .DefaultNotificationFilter import DefaultNotificationFilter
@@ -12,12 +12,11 @@
#
#

from six import with_metaclass
import abc


class AbstractDataAccessRequest(object):
    __metaclass__ = abc.ABCMeta

class AbstractDataAccessRequest(with_metaclass(abc.ABCMeta, object)):
    def __init__(self):
        self.requestParameters = None
@@ -14,11 +14,10 @@
#
#

from six import with_metaclass
import abc

class AbstractIdentifierRequest(object):
    __metaclass__ = abc.ABCMeta

class AbstractIdentifierRequest(with_metaclass(abc.ABCMeta, object)):
    def __init__(self):
        self.request = None
@@ -41,4 +41,4 @@ class GetGridDataRequest(AbstractDataAccessRequest):
        return self.includeLatLonData

    def setIncludeLatLonData(self, includeLatLonData):
        self.includeLatLonData = includeLatLonData;
        self.includeLatLonData = includeLatLonData;
@@ -20,18 +20,18 @@ __all__ = [
    'GetSupportedDatatypesRequest'
]

from AbstractDataAccessRequest import AbstractDataAccessRequest
from AbstractIdentifierRequest import AbstractIdentifierRequest
from GetAvailableLevelsRequest import GetAvailableLevelsRequest
from GetAvailableLocationNamesRequest import GetAvailableLocationNamesRequest
from GetAvailableParametersRequest import GetAvailableParametersRequest
from GetAvailableTimesRequest import GetAvailableTimesRequest
from GetGeometryDataRequest import GetGeometryDataRequest
from GetGridDataRequest import GetGridDataRequest
from GetGridLatLonRequest import GetGridLatLonRequest
from GetIdentifierValuesRequest import GetIdentifierValuesRequest
from GetNotificationFilterRequest import GetNotificationFilterRequest
from GetOptionalIdentifiersRequest import GetOptionalIdentifiersRequest
from GetRequiredIdentifiersRequest import GetRequiredIdentifiersRequest
from GetSupportedDatatypesRequest import GetSupportedDatatypesRequest
from .AbstractDataAccessRequest import AbstractDataAccessRequest
from .AbstractIdentifierRequest import AbstractIdentifierRequest
from .GetAvailableLevelsRequest import GetAvailableLevelsRequest
from .GetAvailableLocationNamesRequest import GetAvailableLocationNamesRequest
from .GetAvailableParametersRequest import GetAvailableParametersRequest
from .GetAvailableTimesRequest import GetAvailableTimesRequest
from .GetGeometryDataRequest import GetGeometryDataRequest
from .GetGridDataRequest import GetGridDataRequest
from .GetGridLatLonRequest import GetGridLatLonRequest
from .GetIdentifierValuesRequest import GetIdentifierValuesRequest
from .GetNotificationFilterRequest import GetNotificationFilterRequest
from .GetOptionalIdentifiersRequest import GetOptionalIdentifiersRequest
from .GetRequiredIdentifiersRequest import GetRequiredIdentifiersRequest
from .GetSupportedDatatypesRequest import GetSupportedDatatypesRequest
@@ -4,11 +4,9 @@
# File auto-generated against equivalent DynamicSerialize Java class

import abc
from six import with_metaclass


class AbstractResponseData(object):
    __metaclass__ = abc.ABCMeta

class AbstractResponseData(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.time = None
@@ -13,11 +13,11 @@ __all__ = [
    'GridResponseData'
]

from AbstractResponseData import AbstractResponseData
from GeometryResponseData import GeometryResponseData
from GetGeometryDataResponse import GetGeometryDataResponse
from GetGridDataResponse import GetGridDataResponse
from GetGridLatLonResponse import GetGridLatLonResponse
from GetNotificationFilterResponse import GetNotificationFilterResponse
from GridResponseData import GridResponseData
from .AbstractResponseData import AbstractResponseData
from .GeometryResponseData import GeometryResponseData
from .GetGeometryDataResponse import GetGeometryDataResponse
from .GetGridDataResponse import GetGridDataResponse
from .GetGridLatLonResponse import GetGridLatLonResponse
from .GetNotificationFilterResponse import GetNotificationFilterResponse
from .GridResponseData import GridResponseData
@@ -7,5 +7,5 @@ __all__ = [
    'RegionLookupRequest'
]

from RegionLookupRequest import RegionLookupRequest
from .RegionLookupRequest import RegionLookupRequest
@@ -22,5 +22,5 @@ __all__ = [
    'GridDataHistory'
]

from GridDataHistory import GridDataHistory
from .GridDataHistory import GridDataHistory
@@ -7,5 +7,5 @@ __all__ = [
    'ProjectionData'
]

from ProjectionData import ProjectionData
from .ProjectionData import ProjectionData
@@ -12,10 +12,10 @@ __all__ = [
    'TimeConstraints'
]

from DatabaseID import DatabaseID
from GFERecord import GFERecord
from GridLocation import GridLocation
from GridParmInfo import GridParmInfo
from ParmID import ParmID
from TimeConstraints import TimeConstraints
from .DatabaseID import DatabaseID
from .GFERecord import GFERecord
from .GridLocation import GridLocation
from .GridParmInfo import GridParmInfo
from .ParmID import ParmID
from .TimeConstraints import TimeConstraints
@@ -7,5 +7,5 @@ __all__ = [
    'DiscreteKey'
]

from DiscreteKey import DiscreteKey
from .DiscreteKey import DiscreteKey
@@ -8,6 +8,6 @@ __all__ = [
    'Grid2DFloat'
]

from Grid2DByte import Grid2DByte
from Grid2DFloat import Grid2DFloat
from .Grid2DByte import Grid2DByte
from .Grid2DFloat import Grid2DFloat
@@ -4,11 +4,9 @@
# File auto-generated against equivalent DynamicSerialize Java class

import abc
from six import with_metaclass


class AbstractGfeRequest(object):
    __metaclass__ = abc.ABCMeta

class AbstractGfeRequest(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.siteID = None
@@ -4,13 +4,11 @@
# File auto-generated against equivalent DynamicSerialize Java class

import abc

from six import with_metaclass
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.request import GetGridRequest


class GetGridDataRequest(object):
    __metaclass__ = abc.ABCMeta

class GetGridDataRequest(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.requests = []
@@ -38,31 +38,31 @@ __all__ = [
    'SmartInitRequest'
]

from AbstractGfeRequest import AbstractGfeRequest
from CommitGridsRequest import CommitGridsRequest
from ConfigureTextProductsRequest import ConfigureTextProductsRequest
from ExecuteIfpNetCDFGridRequest import ExecuteIfpNetCDFGridRequest
from ExecuteIscMosaicRequest import ExecuteIscMosaicRequest
from ExportGridsRequest import ExportGridsRequest
from GetASCIIGridsRequest import GetASCIIGridsRequest
from GetGridDataRequest import GetGridDataRequest
from GetGridInventoryRequest import GetGridInventoryRequest
from GetLatestDbTimeRequest import GetLatestDbTimeRequest
from GetLatestModelDbIdRequest import GetLatestModelDbIdRequest
from GetLockTablesRequest import GetLockTablesRequest
from GetOfficialDbNameRequest import GetOfficialDbNameRequest
from GetParmListRequest import GetParmListRequest
from GetSelectTimeRangeRequest import GetSelectTimeRangeRequest
from GetSingletonDbIdsRequest import GetSingletonDbIdsRequest
from GetSiteTimeZoneInfoRequest import GetSiteTimeZoneInfoRequest
from GfeClientRequest import GfeClientRequest
from GridLocRequest import GridLocRequest
from IscDataRecRequest import IscDataRecRequest
from LockChangeRequest import LockChangeRequest
from ProcessReceivedConfRequest import ProcessReceivedConfRequest
from ProcessReceivedDigitalDataRequest import ProcessReceivedDigitalDataRequest
from PurgeGfeGridsRequest import PurgeGfeGridsRequest
from SaveASCIIGridsRequest import SaveASCIIGridsRequest
from SmartInitRequest import SmartInitRequest
from RsyncGridsToCWFRequest import RsyncGridsToCWFRequest
from .AbstractGfeRequest import AbstractGfeRequest
from .CommitGridsRequest import CommitGridsRequest
from .ConfigureTextProductsRequest import ConfigureTextProductsRequest
from .ExecuteIfpNetCDFGridRequest import ExecuteIfpNetCDFGridRequest
from .ExecuteIscMosaicRequest import ExecuteIscMosaicRequest
from .ExportGridsRequest import ExportGridsRequest
from .GetASCIIGridsRequest import GetASCIIGridsRequest
from .GetGridDataRequest import GetGridDataRequest
from .GetGridInventoryRequest import GetGridInventoryRequest
from .GetLatestDbTimeRequest import GetLatestDbTimeRequest
from .GetLatestModelDbIdRequest import GetLatestModelDbIdRequest
from .GetLockTablesRequest import GetLockTablesRequest
from .GetOfficialDbNameRequest import GetOfficialDbNameRequest
from .GetParmListRequest import GetParmListRequest
from .GetSelectTimeRangeRequest import GetSelectTimeRangeRequest
from .GetSingletonDbIdsRequest import GetSingletonDbIdsRequest
from .GetSiteTimeZoneInfoRequest import GetSiteTimeZoneInfoRequest
from .GfeClientRequest import GfeClientRequest
from .GridLocRequest import GridLocRequest
from .IscDataRecRequest import IscDataRecRequest
from .LockChangeRequest import LockChangeRequest
from .ProcessReceivedConfRequest import ProcessReceivedConfRequest
from .ProcessReceivedDigitalDataRequest import ProcessReceivedDigitalDataRequest
from .PurgeGfeGridsRequest import PurgeGfeGridsRequest
from .SaveASCIIGridsRequest import SaveASCIIGridsRequest
from .SmartInitRequest import SmartInitRequest
from .RsyncGridsToCWFRequest import RsyncGridsToCWFRequest
@@ -8,6 +8,6 @@ __all__ = [
    'LockTable'
]

from Lock import Lock
from LockTable import LockTable
from .Lock import Lock
from .LockTable import LockTable
@@ -8,6 +8,6 @@ __all__ = [
    'ServerResponse'
]

from ServerMsg import ServerMsg
from ServerResponse import ServerResponse
from .ServerMsg import ServerMsg
from .ServerResponse import ServerResponse
@@ -9,7 +9,7 @@
#
##

import GfeNotification
from . import GfeNotification

class CombinationsFileChangedNotification(GfeNotification.GfeNotification):
@@ -11,7 +11,7 @@
#
##

import GfeNotification
from . import GfeNotification

class DBInvChangeNotification(GfeNotification.GfeNotification):
@@ -8,10 +8,9 @@
#
##
import abc
from six import with_metaclass

class GfeNotification(object):
    __metaclass__ = abc.ABCMeta

class GfeNotification(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.siteID = None
@@ -9,9 +9,9 @@
#
##

import GfeNotification
from . import GfeNotification

class GridHistoryUpdateNotification(GfeNotification.GfeNotification):
class GridHistoryUpdateNotification(GfeNotification):

    def __init__(self):
        super(GridHistoryUpdateNotification, self).__init__()
@@ -10,9 +10,9 @@
#
##

import GfeNotification
from . import GfeNotification

class GridUpdateNotification(GfeNotification.GfeNotification):
class GridUpdateNotification(GfeNotification):

    def __init__(self):
        super(GridUpdateNotification, self).__init__()
@@ -10,9 +10,9 @@
#
##

import GfeNotification
from . import GfeNotification

class LockNotification(GfeNotification.GfeNotification):
class LockNotification(GfeNotification):

    def __init__(self):
        super(LockNotification, self).__init__()
@@ -9,9 +9,9 @@
#
##

import GfeNotification
from . import GfeNotification

class ServiceBackupJobStatusNotification(GfeNotification.GfeNotification):
class ServiceBackupJobStatusNotification(GfeNotification):

    def __init__(self):
        super(ServiceBackupJobStatusNotification, self).__init__()
@@ -9,9 +9,9 @@
#
##

import GfeNotification
from . import GfeNotification

class UserMessageNotification(GfeNotification.GfeNotification):
class UserMessageNotification(GfeNotification):

    def __init__(self):
        super(UserMessageNotification, self).__init__()
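Two changes interact in the notification hunks above: the import becomes `from . import GfeNotification`, which binds the submodule, while the base class is shortened from `GfeNotification.GfeNotification` to bare `GfeNotification`. With a module import the bare name refers to the module, and Python would raise a TypeError at class-definition time, so the shortened base class only works if the class itself is imported. A minimal sketch of the internally consistent form, using `LockNotification` as the example; the `from .GfeNotification import GfeNotification` line is an assumption about the intent, not what the diff shows:

```python
# Sketch only: assumes a package containing GfeNotification.py that defines
# class GfeNotification, as in dynamicserialize's gfe.server.notify package.

from .GfeNotification import GfeNotification   # assumed intent; the hunks above
                                               # show "from . import GfeNotification"


class LockNotification(GfeNotification):       # bare name now names the class

    def __init__(self):
        # super(Class, self) keeps Python 2 compatibility; Python 3-only code
        # could shorten this to super().__init__().
        super(LockNotification, self).__init__()
```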
@@ -10,8 +10,8 @@ __all__ = [
    'LockTableRequest'
]

from CommitGridRequest import CommitGridRequest
from GetGridRequest import GetGridRequest
from LockRequest import LockRequest
from LockTableRequest import LockTableRequest
from .CommitGridRequest import CommitGridRequest
from .GetGridRequest import GetGridRequest
from .LockRequest import LockRequest
from .LockTableRequest import LockTableRequest
@@ -2,11 +2,9 @@
##

import abc
from six import with_metaclass


class AbstractGridSlice(object):
    __metaclass__ = abc.ABCMeta

class AbstractGridSlice(with_metaclass(abc.ABCMeta, object)):
    @abc.abstractmethod
    def __init__(self):
        self.validTime = None

@@ -33,4 +31,4 @@ class AbstractGridSlice(object):
        return self.gridDataHistory

    def setGridDataHistory(self, gridDataHistory):
        self.gridDataHistory = gridDataHistory
        self.gridDataHistory = gridDataHistory
@@ -11,9 +11,9 @@ __all__ = [
    'WeatherGridSlice'
]

from AbstractGridSlice import AbstractGridSlice
from DiscreteGridSlice import DiscreteGridSlice
from ScalarGridSlice import ScalarGridSlice
from VectorGridSlice import VectorGridSlice
from WeatherGridSlice import WeatherGridSlice
from .AbstractGridSlice import AbstractGridSlice
from .DiscreteGridSlice import DiscreteGridSlice
from .ScalarGridSlice import ScalarGridSlice
from .VectorGridSlice import VectorGridSlice
from .WeatherGridSlice import WeatherGridSlice
@@ -13,4 +13,4 @@ __all__ = [
    'JobProgress'
]

from JobProgress import JobProgress
from .JobProgress import JobProgress
@@ -8,6 +8,6 @@ __all__ = [
    'WeatherSubKey'
]

from WeatherKey import WeatherKey
from WeatherSubKey import WeatherSubKey
from .WeatherKey import WeatherKey
from .WeatherSubKey import WeatherSubKey
@@ -7,5 +7,5 @@ __all__ = [
    'DeleteAllGridDataRequest'
]

from DeleteAllGridDataRequest import DeleteAllGridDataRequest
from .DeleteAllGridDataRequest import DeleteAllGridDataRequest
@@ -28,7 +28,7 @@ INVALID_VALUE = numpy.float64(-999999)
class Level(object):

    def __init__(self, levelString=None):
        self.id = 0L
        self.id = 0
        self.identifier = None
        self.masterLevel = None
        self.levelonevalue = INVALID_VALUE
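The `0L` literal above is Python 2's explicit `long`. Python 3 folds `long` into a single arbitrary-precision `int` and rejects the `L` suffix as a syntax error, so the plain `0` behaves the same on both interpreters. A tiny illustration:

```python
# Python 2: 0L is an explicit long, and ints auto-promote to long on overflow.
# Python 3: the L suffix is a SyntaxError; int is already arbitrary precision.

level_id = 0                      # identical meaning on Python 2 and Python 3
big = 2 ** 64                     # exact value, no separate long type needed
print(type(level_id).__name__)    # 'int'
print(big)                        # 18446744073709551616
```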
@@ -8,6 +8,6 @@ __all__ = [
    'MasterLevel'
]

from Level import Level
from MasterLevel import MasterLevel
from .Level import Level
from .MasterLevel import MasterLevel
Some files were not shown because too many files have changed in this diff.