cleanup, remove variable names that shadow built-ins, fix logic, etc.

Michael James 2018-10-15 21:05:01 -06:00
parent 61fc74a9ef
commit 18f9afa122
19 changed files with 79 additions and 100 deletions

View file

@ -13,16 +13,6 @@
#
#
import argparse
import sys
from datetime import datetime
from datetime import timedelta
from awips import ThriftClient
from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.radar.request import GetRadarDataRecordRequest
def get_datetime_str(record):
"""
@ -48,10 +38,8 @@ def get_data_type(azdat):
Radial or raster.
"""
if azdat:
dattyp = "radial"
else :
dattyp = "raster"
return dattyp
return "radial"
return "raster"
def get_hdf5_data(idra):
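
The hunk above replaces the dattyp temporary and the if/else block with early returns. A minimal sketch of the simplified helper, assuming azdat is the boolean azimuthal-data flag used elsewhere in the script:

    def get_data_type(azdat):
        """Return the radar product type string: radial or raster."""
        # early returns make the intermediate dattyp variable unnecessary
        if azdat:
            return "radial"
        return "raster"

    print(get_data_type(True))   # radial
    print(get_data_type(False))  # raster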

View file

@ -58,7 +58,7 @@ class StoreTimeAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
try:
timeStruct = time.strptime(values, TIME_FORMAT)
except:
except ValueError:
parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")
setattr(namespace, self.dest, timeStruct)
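
Narrowing the bare except: to except ValueError: keeps unrelated failures (KeyboardInterrupt, a typo inside the handler, and so on) from being silently converted into a parser error. A self-contained sketch of the pattern; the TIME_FORMAT value here is an assumption chosen to match the YYYYMMDD_hhmm message, not copied from the module:

    import argparse
    import time

    TIME_FORMAT = "%Y%m%d_%H%M"  # assumed format string matching YYYYMMDD_hhmm

    class StoreTimeAction(argparse.Action):
        """Parse a YYYYMMDD_hhmm argument into a time.struct_time."""
        def __call__(self, parser, namespace, values, option_string=None):
            try:
                timeStruct = time.strptime(values, TIME_FORMAT)
            except ValueError:
                # only malformed time strings reach parser.error()
                parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm")
            setattr(namespace, self.dest, timeStruct)

    parser = argparse.ArgumentParser()
    parser.add_argument("--time", action=StoreTimeAction)
    print(parser.parse_args(["--time", "20181015_2105"]).time)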

View file

@ -39,7 +39,8 @@ class IFPClient(object):
return self.__commitGrid([request])
elif self.__isHomogenousIterable(request, CommitGridRequest):
return self.__commitGrid([cgr for cgr in request])
raise TypeError("Invalid type: " + str(type(request)) + " specified to commitGrid(). Only accepts CommitGridRequest or lists of CommitGridRequest.")
raise TypeError("Invalid type: " + str(type(request)) +
" for commitGrid(). Only accepts CommitGridRequest or lists of CommitGridRequest.")
def __commitGrid(self, requests):
ssr = ServerResponse()
@ -49,13 +50,14 @@ class IFPClient(object):
ssr.setMessages(sr.getMessages())
return ssr
def getParmList(self, id):
argType = type(id)
def getParmList(self, pid):
argType = type(pid)
if argType is DatabaseID:
return self.__getParmList([id])
elif self.__isHomogenousIterable(id, DatabaseID):
return self.__getParmList([dbid for dbid in id])
raise TypeError("Invalid type: " + str(argType) + " specified to getParmList(). Only accepts DatabaseID or lists of DatabaseID.")
return self.__getParmList([pid])
elif self.__isHomogenousIterable(pid, DatabaseID):
return self.__getParmList([dbid for dbid in pid])
raise TypeError("Invalid type: " + str(argType) +
" for getParmList(). Only accepts DatabaseID or lists of DatabaseID.")
def __getParmList(self, ids):
ssr = ServerResponse()
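
Renaming the parameter from id to pid means the method no longer shadows the built-in id() while keeping the accept-one-or-many dispatch intact. A standalone illustration of that dispatch pattern; normalize_ids and its string ids are invented for the example and are not part of IFPClient:

    def normalize_ids(pid, id_type=str):
        """Return a list of id_type whether pid is one value or an iterable of them."""
        if isinstance(pid, id_type):
            return [pid]
        items = list(pid)  # assumes pid is otherwise an iterable of id_type
        if all(isinstance(dbid, id_type) for dbid in items):
            return items
        raise TypeError("Invalid type: " + str(type(pid)) +
                        ". Only accepts " + id_type.__name__ +
                        " or iterables of " + id_type.__name__ + ".")

    print(normalize_ids("OAX_GRID_Fcst"))                     # ['OAX_GRID_Fcst']
    print(normalize_ids(["OAX_GRID_Fcst", "DMX_GRID_Fcst"]))  # ['OAX_GRID_Fcst', 'DMX_GRID_Fcst']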

View file

@ -8,26 +8,25 @@ class Item(object):
setattr(self, key, kwargs[key])
class GenerateNexradTable(object):
def __init__(self, *initial_data, **kwargs):
import numpy as np
import json
# find this file locally and confirm location
filename = '~/awips2-builds/edexOsgi/com.raytheon.edex.plugin.radar/utility/common_static/base/radarInfoSimple.txt'
data = np.genfromtxt(
filename,
delimiter=",",
autostrip=True,
skip_header=0,
names=True,
dtype=None)
for x in data:
defn = Item(dict(zip(data.dtype.names, x)))
prod = dict(zip(data.dtype.names, x))
nexrad[defn.id] = prod
return json.dumps(nexrad, indent=1)
#class GenerateNexradTable(object):
# import numpy as np
# import json
# # find this file locally and confirm location
# filename = '~/awips2-builds/edexOsgi/com.raytheon.edex.plugin.radar/utility/common_static/base/radarInfoSimple.txt'
# data = np.genfromtxt(
# filename,
# delimiter=",",
# autostrip=True,
# skip_header=0,
# names=True,
# dtype=None)
#
# for x in data:
# defn = Item(dict(zip(data.dtype.names, x)))
# prod = dict(zip(data.dtype.names, x))
# nexrad[defn.id] = prod
#
# return json.dumps(nexrad, indent=1)
profiler = {
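
The generator body is commented out rather than deleted; it parsed radarInfoSimple.txt with numpy.genfromtxt and built a dict keyed by product id. A hedged sketch of that step as a plain function, assuming the file path quoted above and an 'id' column in its header (implied by the original defn.id lookup):

    import os
    import numpy as np

    # path quoted in the commented-out code; confirm the location in your checkout
    RADAR_INFO = '~/awips2-builds/edexOsgi/com.raytheon.edex.plugin.radar/utility/common_static/base/radarInfoSimple.txt'

    def generate_nexrad_table(path=RADAR_INFO):
        """Build a {product id: row dict} table from the comma-delimited radar info file."""
        data = np.genfromtxt(os.path.expanduser(path), delimiter=",", autostrip=True,
                             skip_header=0, names=True, dtype=None)
        nexrad = {}
        for row in data:
            prod = dict(zip(data.dtype.names, row))
            nexrad[prod['id']] = prod  # assumes an 'id' column, as defn.id implies
        return nexrad                  # the original returned json.dumps(nexrad, indent=1)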
@ -1534,7 +1533,7 @@ vtec = {
'CF.S' : {'phen': 'CF',
'sig': 'S',
'color': 'olivedrab',
'hdln': 'Coastal Flood Statement'},
'hdln': 'Coastal Flood Statement'},
'DS.W' : {'phen': 'DS',
'sig': 'W',
'color': 'bisque',
@ -1798,11 +1797,11 @@ vtec = {
'SU.Y' : {'phen': 'SU',
'sig': 'Y',
'color': 'mediumorchid',
'hdln': 'High Surf Advisory'},
'hdln': 'High Surf Advisory'},
'SV.A' : {'phen': 'SV',
'sig': 'A',
'color': 'palevioletred',
'hdln': 'Severe Thunderstorm Watch'},
'hdln': 'Severe Thunderstorm Watch'},
'SV.S' : {'phen': 'SV',
'sig': 'S',
'color': 'aqua',
@ -1902,14 +1901,14 @@ vtec = {
}
#
# Upgrade Hazards Dictionary - upgradeHazardsDict is a dictionary of
# phen/sig combinations defining upgrades. Each key is the proposed hazard.
# The associated list are the hazards which are upgraded by the
# Upgrade Hazards Dictionary - upgradeHazardsDict is a dictionary of
# phen/sig combinations defining upgrades. Each key is the proposed hazard.
# The associated list are the hazards which are upgraded by the
# proposed hazard.
#
upgradeHazardsDict = {
'WC.W': ['WC.A', 'WC.Y'],
'WC.W': ['WC.A', 'WC.Y'],
'WC.Y': ['WC.A'],
'BZ.W': ['WS.W', 'LE.W', 'ZR.Y', 'LE.Y', 'WW.Y',
'BZ.A', 'WS.A', 'LE.A'],
@ -1957,7 +1956,7 @@ upgradeHazardsDict = {
'AF.W': ['AF.Y'],
'MH.W': ['MH.Y'],
}
#
# When passed a phen/sig for both the current hazard and the proposed hazard,
# checkForUpgrade returns a 1 if the proposed hazard is an upgrade, otherwise 0
@ -1975,9 +1974,9 @@ def checkForUpgrade(pPhen, pSig, cPhen, cSig):
return 0
#
# Downgrade Hazards Dictionary - downgradeHazardsDict is a dictionary of
# phen/sig combinations defining downgrades. Each key is the proposed hazard.
# The associated list are the hazards which are downgraded by the
# Downgrade Hazards Dictionary - downgradeHazardsDict is a dictionary of
# phen/sig combinations defining downgrades. Each key is the proposed hazard.
# The associated list are the hazards which are downgraded by the
# proposed hazard.
#
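
Only the tail of checkForUpgrade is visible in the hunk above. Based on its description, a plausible standalone sketch is a lookup against upgradeHazardsDict; the logic here is inferred rather than copied from the file, and the dictionary is trimmed to the two entries shown earlier:

    # proposed phen/sig -> list of hazards it upgrades (subset of upgradeHazardsDict)
    upgradeHazardsDict = {
        'WC.W': ['WC.A', 'WC.Y'],
        'WC.Y': ['WC.A'],
    }

    def checkForUpgrade(pPhen, pSig, cPhen, cSig):
        """Return 1 if the proposed phen/sig upgrades the current phen/sig, else 0."""
        proposed = pPhen + '.' + pSig
        current = cPhen + '.' + cSig
        if current in upgradeHazardsDict.get(proposed, []):
            return 1
        return 0

    print(checkForUpgrade('WC', 'W', 'WC', 'A'))  # 1: the Warning upgrades the Watch
    print(checkForUpgrade('WC', 'A', 'WC', 'W'))  # 0: not an upgrade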

View file

@ -1,10 +1,3 @@
from awips.dataaccess import DataAccessLayer as DAL
from shapely.geometry import box
from awips.test.dafTests import baseDafTestCase
from awips.test.dafTests import params
import unittest
#
# Base TestCase for BufrMos* tests.
#
@ -19,6 +12,10 @@ import unittest
#
#
from awips.dataaccess import DataAccessLayer as DAL
from awips.test.dafTests import baseDafTestCase
from awips.test.dafTests import params
class BufrMosTestCase(baseDafTestCase.DafTestCase):
"""Base class for testing DAF support of bufrmos data"""

View file

@ -4,7 +4,6 @@ from awips.ThriftClient import ThriftRequestException
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from awips.test.dafTests import baseDafTestCase
import unittest
#
# Test DAF support for binlightning data

View file

@ -206,21 +206,21 @@ class ClimateTestCase(baseDafTestCase.DafTestCase):
def testGetColumnIdValuesWithoutTableThrowsException(self):
req = DAL.newDataRequest(self.datatype)
with self.assertRaises(ThriftRequestException):
idValues = DAL.getIdentifierValues(req, 'year')
DAL.getIdentifierValues(req, 'year')
@unittest.skip('avoid EDEX error')
def testGetColumnIdValuesWithNonexistentTableThrowsException(self):
req = DAL.newDataRequest(self.datatype)
req.addIdentifier('table', 'nonexistentjunk')
with self.assertRaises(ThriftRequestException):
idValues = DAL.getIdentifierValues(req, 'year')
DAL.getIdentifierValues(req, 'year')
@unittest.skip('avoid EDEX error')
def testGetNonexistentColumnIdValuesThrowsException(self):
req = DAL.newDataRequest(self.datatype)
req.addIdentifier('table', 'public.cli_asos_monthly')
with self.assertRaises(ThriftRequestException):
idValues = DAL.getIdentifierValues(req, 'nonexistentjunk')
DAL.getIdentifierValues(req, 'nonexistentjunk')
def testGetInvalidIdentifierValuesThrowsException(self):
self.runInvalidIdValuesTest()
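
Each change in this hunk drops an idValues binding that was never read: assertRaises only needs the call to execute, so the assignment just created an unused local. A minimal standalone version of the pattern; FakeDAL is invented for the example and merely mimics a call that raises:

    import unittest

    class FakeDAL(object):
        """Stand-in for DataAccessLayer, invented for this illustration."""
        @staticmethod
        def getIdentifierValues(req, name):
            raise ValueError("no such identifier: " + name)

    class IdValuesTest(unittest.TestCase):
        def testBadIdentifierRaises(self):
            # no "idValues = ..." assignment; raising is the whole assertion
            with self.assertRaises(ValueError):
                FakeDAL.getIdentifierValues(object(), 'nonexistentjunk')

    if __name__ == '__main__':
        unittest.main()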

View file

@ -83,7 +83,7 @@ class MapsTestCase(baseDafTestCase.DafTestCase):
req = DAL.newDataRequest(self.datatype)
req.addIdentifier('geomField', 'the_geom')
with self.assertRaises(ThriftRequestException):
idValues = DAL.getIdentifierValues(req, 'state')
DAL.getIdentifierValues(req, 'state')
def testGetInvalidIdentifierValuesThrowsException(self):
self.runInvalidIdValuesTest()

View file

@ -46,7 +46,7 @@ class ObsTestCase(baseDafTestCase.DafTestCase):
req = DAL.newDataRequest(self.datatype)
req.setLocationNames(params.OBS_STATION)
req.setParameters("temperature", "seaLevelPress", "dewpoint")
data = self.runGeometryDataTest(req)
self.runGeometryDataTest(req)
def testGetGeometryDataWithEnvelope(self):
req = DAL.newDataRequest(self.datatype)

View file

@ -1,10 +1,3 @@
from __future__ import print_function
from awips.dataaccess import DataAccessLayer as DAL
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from awips.test.dafTests import baseDafTestCase
import unittest
#
# Test DAF support for satellite data
#
@ -24,6 +17,11 @@ import unittest
#
#
from __future__ import print_function
from awips.dataaccess import DataAccessLayer as DAL
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from awips.test.dafTests import baseDafTestCase
class SatelliteTestCase(baseDafTestCase.DafTestCase):
"""Test DAF support for satellite data"""

View file

@ -3,7 +3,6 @@ from awips.dataaccess import DataAccessLayer as DAL
from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint
from awips.test.dafTests import baseDafTestCase
import unittest
#
# Test DAF support for sfcobs data

View file

@ -29,12 +29,12 @@ doubleList = numpy.dtype(numpy.float64).newbyteorder('>')
class SelfDescribingBinaryProtocol(TBinaryProtocol):
def readFieldBegin(self):
type = self.readByte()
if type == TType.STOP:
return (None, type, 0)
ftype = self.readByte()
if ftype == TType.STOP:
return (None, ftype, 0)
name = self.readString()
id = self.readI16()
return (name, type, id)
fid = self.readI16()
return (name, ftype, fid)
def readStructBegin(self):
return self.readString()
@ -42,10 +42,10 @@ class SelfDescribingBinaryProtocol(TBinaryProtocol):
def writeStructBegin(self, name):
self.writeString(name)
def writeFieldBegin(self, name, type, id):
self.writeByte(type)
def writeFieldBegin(self, name, ftype, fid):
self.writeByte(ftype)
self.writeString(name)
self.writeI16(id)
self.writeI16(fid)
def readFloat(self):
d = self.readI32()
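
Consolidated, the renamed field methods look roughly like the sketch below: ftype and fid replace the names type and id so the built-ins stay usable. The imports assume the stock Apache Thrift package; the AWIPS tree bundles its own copies, so treat the import paths as an assumption:

    from thrift.protocol.TBinaryProtocol import TBinaryProtocol
    from thrift.Thrift import TType

    class SelfDescribingBinaryProtocol(TBinaryProtocol):
        def readFieldBegin(self):
            ftype = self.readByte()          # was: type = self.readByte()
            if ftype == TType.STOP:
                return (None, ftype, 0)
            name = self.readString()
            fid = self.readI16()             # was: id = self.readI16()
            return (name, ftype, fid)

        def writeFieldBegin(self, name, ftype, fid):
            self.writeByte(ftype)
            self.writeString(name)
            self.writeI16(fid)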

View file

@ -12,7 +12,7 @@
ClassAdapter = ['java.nio.ByteBuffer', 'java.nio.HeapByteBuffer']
def serialize(context, set):
def serialize(context, bufferset):
raise NotImplementedError("Serialization of ByteBuffers is not supported.")

View file

@ -15,11 +15,11 @@ from dynamicserialize.dstypes.java.util import EnumSet
ClassAdapter = ['java.util.EnumSet', 'java.util.RegularEnumSet']
def serialize(context, set):
setSize = len(set)
def serialize(context, bufferset):
setSize = len(bufferset)
context.writeI32(setSize)
context.writeString(set.getEnumClass())
for val in set:
for val in bufferset:
context.writeString(val)
@ -27,6 +27,6 @@ def deserialize(context):
setSize = context.readI32()
enumClassName = context.readString()
valList = []
for i in range(setSize):
for __ in range(setSize):
valList.append(context.readString())
return EnumSet(enumClassName, valList)
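
Here the serializer argument is renamed from set (shadowing the built-in type) to bufferset, and the unused loop index becomes __. A consolidated sketch of both functions; note that the set.getEnumClass() call left unchanged in the hunk would need the same rename, so the sketch uses bufferset.getEnumClass():

    from dynamicserialize.dstypes.java.util import EnumSet

    def serialize(context, bufferset):
        setSize = len(bufferset)
        context.writeI32(setSize)
        context.writeString(bufferset.getEnumClass())  # renamed along with the parameter
        for val in bufferset:
            context.writeString(val)

    def deserialize(context):
        setSize = context.readI32()
        enumClassName = context.readString()
        valList = []
        for __ in range(setSize):  # index is unused, hence the __ placeholder
            valList.append(context.readString())
        return EnumSet(enumClassName, valList)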

View file

@ -12,7 +12,7 @@
ClassAdapter = ['java.nio.FloatBuffer', 'java.nio.HeapFloatBuffer']
def serialize(context, set):
def serialize(context, bufferset):
raise NotImplementedError("Serialization of FloatBuffers is not supported.")

View file

@ -80,6 +80,7 @@ def registerAdapters(package, modules):
if not package.endswith('.'):
package += '.'
for x in modules:
# TODO: use importlib
exec('import ' + package + x)
m = sys.modules[package + x]
d = m.__dict__
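
The new TODO points at importlib as the eventual replacement for the exec-based import. A hedged sketch of what that could look like; register_adapters_importlib is a hypothetical name, and only the import step is shown:

    import importlib

    def register_adapters_importlib(package, modules):
        """Import each adapter module without exec(); sketch of the TODO above."""
        if not package.endswith('.'):
            package += '.'
        for x in modules:
            m = importlib.import_module(package + x)  # replaces exec('import ' + package + x)
            d = m.__dict__
            # ...adapter registration would continue here as in registerAdapters()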

View file

@ -45,8 +45,9 @@ class Lock(object):
def __repr__(self):
t0 = time.gmtime(self.getStartTime() / 1000.0)
t1 = time.gmtime(self.getEndTime() / 1000.0)
format = '%b %d %y %H:%M:%S %Z'
msg = 'TR: (' + time.strftime(format, t0) + ', ' + time.strftime(format, t1)
timeformat = '%b %d %y %H:%M:%S %Z'
msg = 'TR: (' + time.strftime(timeformat, t0) \
+ ', ' + time.strftime(timeformat, t1)
msg += " WsId: " + str(self.wsId)
return msg
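
In Lock.__repr__ the local format is renamed to timeformat so it no longer shadows the built-in format(), and the long concatenation is wrapped across lines. A standalone sketch of the same formatting logic, with illustrative epoch-millisecond values and a placeholder WsId:

    import time

    def describe_time_range(start_ms, end_ms, ws_id="example-wsid"):
        """Render a lock time range the same way Lock.__repr__ does."""
        t0 = time.gmtime(start_ms / 1000.0)
        t1 = time.gmtime(end_ms / 1000.0)
        timeformat = '%b %d %y %H:%M:%S %Z'  # renamed from 'format'
        msg = 'TR: (' + time.strftime(timeformat, t0) \
            + ', ' + time.strftime(timeformat, t1)
        msg += " WsId: " + str(ws_id)
        return msg

    print(describe_time_range(1539640000000, 1539643600000))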

View file

@ -8,7 +8,6 @@
# Feb 06, 2017 5959 randerso Removed Java .toString() calls
#
import struct
import socket
import os
@ -20,7 +19,7 @@ except ImportError:
try:
import _thread
except ImportError:
import thread
import thread as _thread
class WsId(object):
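
The fallback import now aliases the Python 2 thread module to _thread, so the rest of the module can use the _thread name under either interpreter. The compatibility pattern in isolation:

    # Python 3 ships _thread; on Python 2 the equivalent module is named thread.
    try:
        import _thread
    except ImportError:
        import thread as _thread  # alias keeps later references version-agnostic

    print(_thread.get_ident())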

prep.sh (12 changed lines)
View file

@ -1,11 +1,7 @@
#!/bin/bash
#
# python-awips prep script
# author: mjames@ucar.edu
#
# This script is designed to
#
# should be /awips2/repo/python-awips or ~/python-awips
dir="$( cd "$(dirname "$0")" ; pwd -P )"
@ -24,14 +20,14 @@ find /awips2/repo/awips2-builds/edexOsgi/ -path '*/pythonPackages/dynamicseriali
echo "Updating dynamicserialize/dstypes"
# Update __all__ for every package under dstypes
for package in `find dynamicserialize/dstypes -name __init__.py -printf '%h '`
for package in $(find dynamicserialize/dstypes -name __init__.py -printf '%h ')
do
pushd $package > /dev/null
# find non-hidden packages
subpackages=(`find . -maxdepth 1 -type d ! -name ".*" -printf '%f\n' | sort`)
subpackages=$(find . -maxdepth 1 -type d ! -name ".*" -printf '%f\n' | sort)
# find non-hidden python modules
modules=(`find . -maxdepth 1 -type f \( -name "*.py" ! -name "__init__.py" ! -name ".*" \) -printf '%f\n' | sed 's/\.py//' | sort`)
modules=$(find . -maxdepth 1 -type f \( -name "*.py" ! -name "__init__.py" ! -name ".*" \) -printf '%f\n' | sed 's/\.py//' | sort)
# join subpackages and modules into a single list, modules first
all=("${subpackages[@]}" "${modules[@]}")
@ -39,7 +35,7 @@ do
#replace the current __all__ definition with the rebuilt __all__, which now includes all contributed packages and modules.
#-0777 allows us to match the multi-line __all__ definition
perl -0777 -p -i -e "s/__all__ = \[[^\]]*\]/__all__ = \[`echo \"${joined:1}\"`\n \]/g" __init__.py
perl -0777 -p -i -e "s/__all__ = \[[^\]]*\]/__all__ = \[$(echo \"${joined:1}\")\n \]/g" __init__.py
popd > /dev/null
done
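
The loop above rebuilds each dstypes package's __all__ from its non-hidden subpackages and modules, now using $(...) instead of backticks. For reference, a rough Python equivalent of the discovery step; build_all_list is a hypothetical helper, and the repository itself uses the find/perl pipeline shown:

    import os

    def build_all_list(package_dir):
        """Return __all__ entries for one package: subpackages first, then modules."""
        names = [n for n in os.listdir(package_dir) if not n.startswith('.')]
        subpackages = sorted(n for n in names
                             if os.path.isdir(os.path.join(package_dir, n)))
        modules = sorted(n[:-3] for n in names
                         if n.endswith('.py') and n != '__init__.py')
        return subpackages + modules

    # e.g. build_all_list('dynamicserialize/dstypes') -> ['com', 'gov', 'java', ...]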