From 67c5c0b579d83da27802de117335ba7c63e065b7 Mon Sep 17 00:00:00 2001 From: Shay Carter Date: Tue, 8 Jun 2021 15:41:53 -0600 Subject: [PATCH 1/3] add in the /awips and /dynamicsearlize directories - see if this satisfies the webpage building --- awips/DateTimeConverter.py | 88 + awips/QpidSubscriber.py | 103 + awips/RadarCommon.py | 141 ++ awips/ThriftClient.py | 80 + awips/TimeUtil.py | 87 + awips/UsageArgumentParser.py | 62 + awips/UsageOptionParser.py | 18 + awips/__init__.py | 3 + awips/dataaccess/CombinedTimeQuery.py | 84 + awips/dataaccess/DataAccessLayer.py | 391 ++++ awips/dataaccess/DataNotificationLayer.py | 135 ++ awips/dataaccess/DataQueue.py | 190 ++ awips/dataaccess/ModelSounding.py | 231 ++ awips/dataaccess/PyData.py | 44 + awips/dataaccess/PyGeometryData.py | 81 + awips/dataaccess/PyGeometryNotification.py | 35 + awips/dataaccess/PyGridData.py | 64 + awips/dataaccess/PyGridNotification.py | 40 + awips/dataaccess/PyNotification.py | 85 + awips/dataaccess/ThriftClientRouter.py | 257 +++ awips/dataaccess/__init__.py | 370 +++ awips/gempak/GridDataRetriever.py | 128 ++ awips/gempak/GridInfoRetriever.py | 145 ++ awips/gempak/GridNavRetriever.py | 301 +++ awips/gempak/StationDataRetriever.py | 144 ++ awips/gempak/StationRetriever.py | 93 + awips/gempak/TimeRetriever.py | 76 + awips/gempak/ncepGribTables.py | 99 + awips/gempak/scrape.py | 106 + awips/gfe/IFPClient.py | 152 ++ awips/gfe/__init__.py | 3 + awips/qpidingest.py | 131 ++ awips/tables.py | 2025 +++++++++++++++++ awips/test/Record.py | 12 + awips/test/__init__.py | 2 + awips/test/dafTests/__init__.py | 2 + awips/test/dafTests/baseBufrMosTestCase.py | 51 + awips/test/dafTests/baseDafTestCase.py | 192 ++ awips/test/dafTests/baseRadarTestCase.py | 167 ++ awips/test/dafTests/params.py | 23 + awips/test/dafTests/testAcars.py | 39 + awips/test/dafTests/testAirep.py | 146 ++ awips/test/dafTests/testBinLightning.py | 170 ++ awips/test/dafTests/testBufrMosGfs.py | 22 + awips/test/dafTests/testBufrUa.py | 195 ++ awips/test/dafTests/testCombinedTimeQuery.py | 49 + awips/test/dafTests/testCommonObsSpatial.py | 150 ++ awips/test/dafTests/testDataTime.py | 114 + awips/test/dafTests/testGfe.py | 194 ++ awips/test/dafTests/testGrid.py | 261 +++ awips/test/dafTests/testMaps.py | 193 ++ awips/test/dafTests/testModelSounding.py | 193 ++ awips/test/dafTests/testObs.py | 160 ++ awips/test/dafTests/testProfiler.py | 59 + awips/test/dafTests/testRadarGraphics.py | 74 + awips/test/dafTests/testRadarGrid.py | 40 + awips/test/dafTests/testRadarSpatial.py | 153 ++ awips/test/dafTests/testRequestConstraint.py | 158 ++ awips/test/dafTests/testSatellite.py | 166 ++ awips/test/dafTests/testSfcObs.py | 166 ++ awips/test/dafTests/testTopo.py | 74 + awips/test/dafTests/testWarning.py | 208 ++ .../DynamicSerializationManager.py | 45 + .../SelfDescribingBinaryProtocol.py | 114 + .../ThriftSerializationContext.py | 433 ++++ dynamicserialize/__init__.py | 35 + .../adapters/ByteBufferAdapter.py | 21 + dynamicserialize/adapters/CalendarAdapter.py | 25 + .../adapters/CommutativeTimestampAdapter.py | 25 + dynamicserialize/adapters/CoordAdapter.py | 28 + .../adapters/DatabaseIDAdapter.py | 23 + dynamicserialize/adapters/DateAdapter.py | 24 + dynamicserialize/adapters/EnumSetAdapter.py | 32 + .../adapters/FloatBufferAdapter.py | 21 + .../adapters/FormattedDateAdapter.py | 24 + .../adapters/GeomDataRespAdapter.py | 94 + .../adapters/GeometryTypeAdapter.py | 36 + .../adapters/GregorianCalendarAdapter.py | 25 + .../adapters/GridDataHistoryAdapter.py | 26 + 
.../adapters/JTSEnvelopeAdapter.py | 30 + .../LocalizationLevelSerializationAdapter.py | 31 + .../LocalizationTypeSerializationAdapter.py | 26 + dynamicserialize/adapters/ParmIDAdapter.py | 23 + dynamicserialize/adapters/PointAdapter.py | 28 + .../adapters/StackTraceElementAdapter.py | 28 + .../adapters/TimeConstraintsAdapter.py | 25 + .../adapters/TimeRangeTypeAdapter.py | 40 + dynamicserialize/adapters/TimestampAdapter.py | 23 + dynamicserialize/adapters/WsIdAdapter.py | 32 + dynamicserialize/adapters/__init__.py | 99 + dynamicserialize/dstypes/__init__.py | 6 + dynamicserialize/dstypes/com/__init__.py | 5 + .../dstypes/com/raytheon/__init__.py | 4 + .../dstypes/com/raytheon/uf/__init__.py | 4 + .../com/raytheon/uf/common/__init__.py | 17 + .../uf/common/alertviz/AlertVizRequest.py | 63 + .../raytheon/uf/common/alertviz/__init__.py | 7 + .../com/raytheon/uf/common/auth/__init__.py | 5 + .../auth/resp/AbstractFailedResponse.py | 14 + .../auth/resp/AuthServerErrorResponse.py | 10 + .../common/auth/resp/SuccessfulExecution.py | 19 + .../uf/common/auth/resp/UserNotAuthorized.py | 14 + .../raytheon/uf/common/auth/resp/__init__.py | 12 + .../com/raytheon/uf/common/auth/user/User.py | 23 + .../raytheon/uf/common/auth/user/UserId.py | 24 + .../raytheon/uf/common/auth/user/__init__.py | 8 + .../raytheon/uf/common/dataaccess/__init__.py | 6 + .../dataaccess/impl/DefaultDataRequest.py | 77 + .../impl/DefaultNotificationFilter.py | 38 + .../uf/common/dataaccess/impl/__init__.py | 8 + .../request/AbstractDataAccessRequest.py | 23 + .../request/AbstractIdentifierRequest.py | 25 + .../request/GetAvailableLevelsRequest.py | 16 + .../GetAvailableLocationNamesRequest.py | 16 + .../request/GetAvailableParametersRequest.py | 16 + .../request/GetAvailableTimesRequest.py | 24 + .../request/GetGeometryDataRequest.py | 30 + .../dataaccess/request/GetGridDataRequest.py | 38 + .../request/GetGridLatLonRequest.py | 40 + .../request/GetIdentifierValuesRequest.py | 23 + .../request/GetNotificationFilterRequest.py | 16 + .../request/GetOptionalIdentifiersRequest.py | 17 + .../request/GetRequiredIdentifiersRequest.py | 16 + .../request/GetSupportedDatatypesRequest.py | 14 + .../uf/common/dataaccess/request/__init__.py | 33 + .../response/AbstractResponseData.py | 52 + .../response/GeometryResponseData.py | 32 + .../response/GetGeometryDataResponse.py | 19 + .../response/GetGridDataResponse.py | 54 + .../response/GetGridLatLonResponse.py | 39 + .../response/GetNotificationFilterResponse.py | 19 + .../dataaccess/response/GridResponseData.py | 46 + .../uf/common/dataaccess/response/__init__.py | 19 + .../raytheon/uf/common/dataplugin/__init__.py | 10 + .../uf/common/dataplugin/events/__init__.py | 4 + .../dataplugin/events/hazards/__init__.py | 4 + .../hazards/requests/RegionLookupRequest.py | 25 + .../events/hazards/requests/__init__.py | 6 + .../common/dataplugin/gfe/GridDataHistory.py | 75 + .../uf/common/dataplugin/gfe/__init__.py | 22 + .../dataplugin/gfe/config/ProjectionData.py | 95 + .../common/dataplugin/gfe/config/__init__.py | 6 + .../uf/common/dataplugin/gfe/db/__init__.py | 4 + .../dataplugin/gfe/db/objects/DatabaseID.py | 194 ++ .../dataplugin/gfe/db/objects/GFERecord.py | 92 + .../dataplugin/gfe/db/objects/GridLocation.py | 114 + .../dataplugin/gfe/db/objects/GridParmInfo.py | 183 ++ .../dataplugin/gfe/db/objects/ParmID.py | 131 ++ .../gfe/db/objects/TimeConstraints.py | 89 + .../dataplugin/gfe/db/objects/__init__.py | 16 + .../dataplugin/gfe/discrete/DiscreteKey.py | 85 + 
.../dataplugin/gfe/discrete/__init__.py | 6 + .../common/dataplugin/gfe/grid/Grid2DByte.py | 30 + .../common/dataplugin/gfe/grid/Grid2DFloat.py | 30 + .../uf/common/dataplugin/gfe/grid/__init__.py | 8 + .../gfe/request/AbstractGfeRequest.py | 21 + .../gfe/request/CommitGridsRequest.py | 26 + .../request/ConfigureTextProductsRequest.py | 33 + .../request/ExecuteIfpNetCDFGridRequest.py | 157 ++ .../gfe/request/ExportGridsRequest.py | 56 + .../gfe/request/GetASCIIGridsRequest.py | 47 + .../gfe/request/GetGridDataRequest.py | 38 + .../gfe/request/GetGridInventoryRequest.py | 26 + .../gfe/request/GetLatestDbTimeRequest.py | 47 + .../gfe/request/GetLatestModelDbIdRequest.py | 40 + .../gfe/request/GetLockTablesRequest.py | 26 + .../gfe/request/GetOfficialDbNameRequest.py | 19 + .../gfe/request/GetParmListRequest.py | 26 + .../gfe/request/GetSelectTimeRangeRequest.py | 26 + .../gfe/request/GetSingletonDbIdsRequest.py | 19 + .../gfe/request/GetSiteTimeZoneInfoRequest.py | 23 + .../gfe/request/GfeClientRequest.py | 62 + .../dataplugin/gfe/request/GridLocRequest.py | 19 + .../gfe/request/LockChangeRequest.py | 26 + .../gfe/request/ProcessReceivedConfRequest.py | 26 + .../ProcessReceivedDigitalDataRequest.py | 26 + .../gfe/request/PurgeGfeGridsRequest.py | 21 + .../gfe/request/RsyncGridsToCWFRequest.py | 18 + .../gfe/request/SaveASCIIGridsRequest.py | 26 + .../gfe/request/SmartInitRequest.py | 40 + .../common/dataplugin/gfe/request/__init__.py | 61 + .../common/dataplugin/gfe/server/__init__.py | 7 + .../common/dataplugin/gfe/server/lock/Lock.py | 52 + .../dataplugin/gfe/server/lock/LockTable.py | 42 + .../dataplugin/gfe/server/lock/__init__.py | 8 + .../gfe/server/message/ServerMsg.py | 12 + .../gfe/server/message/ServerResponse.py | 45 + .../dataplugin/gfe/server/message/__init__.py | 8 + .../CombinationsFileChangedNotification.py | 34 + .../server/notify/DBInvChangeNotification.py | 36 + .../gfe/server/notify/GfeNotification.py | 29 + .../notify/GridHistoryUpdateNotification.py | 41 + .../server/notify/GridUpdateNotification.py | 50 + .../gfe/server/notify/LockNotification.py | 26 + .../server/notify/UserMessageNotification.py | 42 + .../dataplugin/gfe/server/notify/__init__.py | 18 + .../gfe/server/request/CommitGridRequest.py | 33 + .../gfe/server/request/GetGridRequest.py | 30 + .../gfe/server/request/LockRequest.py | 33 + .../gfe/server/request/LockTableRequest.py | 19 + .../dataplugin/gfe/server/request/__init__.py | 12 + .../dataplugin/gfe/slice/AbstractGridSlice.py | 32 + .../dataplugin/gfe/slice/DiscreteGridSlice.py | 24 + .../dataplugin/gfe/slice/ScalarGridSlice.py | 17 + .../dataplugin/gfe/slice/VectorGridSlice.py | 23 + .../dataplugin/gfe/slice/WeatherGridSlice.py | 24 + .../common/dataplugin/gfe/slice/__init__.py | 14 + .../dataplugin/gfe/weather/WeatherKey.py | 47 + .../dataplugin/gfe/weather/WeatherSubKey.py | 8 + .../common/dataplugin/gfe/weather/__init__.py | 8 + .../uf/common/dataplugin/grid/__init__.py | 4 + .../grid/request/DeleteAllGridDataRequest.py | 12 + .../dataplugin/grid/request/__init__.py | 6 + .../uf/common/dataplugin/level/Level.py | 178 ++ .../uf/common/dataplugin/level/MasterLevel.py | 99 + .../uf/common/dataplugin/level/__init__.py | 8 + .../message/DataURINotificationMessage.py | 19 + .../uf/common/dataplugin/message/__init__.py | 6 + .../uf/common/dataplugin/radar/__init__.py | 5 + .../request/GetRadarDataRecordRequest.py | 41 + .../dataplugin/radar/request/__init__.py | 6 + .../response/GetRadarDataRecordResponse.py | 18 + .../radar/response/RadarDataRecord.py | 
67 + .../dataplugin/radar/response/__init__.py | 8 + .../uf/common/dataplugin/text/__init__.py | 5 + .../dataplugin/text/dbsrv/TextDBRequest.py | 12 + .../common/dataplugin/text/dbsrv/__init__.py | 6 + .../dataplugin/text/subscription/__init__.py | 4 + .../request/SubscriptionRequest.py | 18 + .../text/subscription/request/__init__.py | 6 + .../raytheon/uf/common/dataquery/__init__.py | 4 + .../dataquery/requests/RequestConstraint.py | 279 +++ .../uf/common/dataquery/requests/__init__.py | 6 + .../raytheon/uf/common/datastorage/Request.py | 40 + .../common/datastorage/StorageProperties.py | 19 + .../uf/common/datastorage/StorageStatus.py | 19 + .../uf/common/datastorage/__init__.py | 21 + .../datastorage/records/AbstractDataRecord.py | 78 + .../datastorage/records/ByteDataRecord.py | 20 + .../datastorage/records/DoubleDataRecord.py | 20 + .../datastorage/records/FloatDataRecord.py | 20 + .../datastorage/records/IntegerDataRecord.py | 20 + .../datastorage/records/LongDataRecord.py | 20 + .../datastorage/records/ShortDataRecord.py | 20 + .../datastorage/records/StringDataRecord.py | 37 + .../uf/common/datastorage/records/__init__.py | 31 + .../localization/LocalizationContext.py | 35 + .../common/localization/LocalizationLevel.py | 42 + .../common/localization/LocalizationType.py | 18 + .../uf/common/localization/__init__.py | 12 + .../localization/msgs/DeleteUtilityCommand.py | 26 + .../msgs/DeleteUtilityResponse.py | 39 + .../localization/msgs/ListResponseEntry.py | 60 + .../localization/msgs/ListUtilityCommand.py | 40 + .../localization/msgs/ListUtilityResponse.py | 40 + .../msgs/PrivilegedUtilityRequestMessage.py | 20 + .../msgs/UtilityRequestMessage.py | 12 + .../msgs/UtilityResponseMessage.py | 12 + .../uf/common/localization/msgs/__init__.py | 20 + .../AbstractLocalizationStreamRequest.py | 39 + .../stream/LocalizationStreamGetRequest.py | 21 + .../stream/LocalizationStreamPutRequest.py | 44 + .../uf/common/localization/stream/__init__.py | 10 + .../raytheon/uf/common/management/__init__.py | 5 + .../request/ChangeContextRequest.py | 19 + .../management/request/PassThroughRequest.py | 26 + .../uf/common/management/request/__init__.py | 9 + .../diagnostic/GetClusterMembersRequest.py | 6 + .../request/diagnostic/GetContextsRequest.py | 12 + .../request/diagnostic/StatusRequest.py | 6 + .../management/request/diagnostic/__init__.py | 10 + .../uf/common/management/response/__init__.py | 4 + .../diagnostic/ClusterMembersResponse.py | 18 + .../response/diagnostic/ContextsResponse.py | 22 + .../response/diagnostic/StatusResponse.py | 29 + .../response/diagnostic/__init__.py | 10 + .../com/raytheon/uf/common/message/Body.py | 12 + .../com/raytheon/uf/common/message/Header.py | 21 + .../com/raytheon/uf/common/message/Message.py | 19 + .../raytheon/uf/common/message/Property.py | 19 + .../com/raytheon/uf/common/message/WsId.py | 86 + .../raytheon/uf/common/message/__init__.py | 27 + .../raytheon/uf/common/pointdata/__init__.py | 4 + .../requests/NewAdaptivePlotRequest.py | 33 + .../uf/common/pointdata/requests/__init__.py | 6 + .../raytheon/uf/common/pypies/PointTest.py | 12 + .../com/raytheon/uf/common/pypies/__init__.py | 9 + .../pypies/records/CompressedDataRecord.py | 147 ++ .../uf/common/pypies/records/__init__.py | 6 + .../uf/common/pypies/request/CopyRequest.py | 47 + .../pypies/request/CreateDatasetRequest.py | 19 + .../pypies/request/DatasetDataRequest.py | 26 + .../pypies/request/DatasetNamesRequest.py | 19 + .../pypies/request/DeleteFilesRequest.py | 18 + 
.../pypies/request/DeleteOrphansRequest.py | 26 + .../uf/common/pypies/request/DeleteRequest.py | 26 + .../uf/common/pypies/request/GroupsRequest.py | 26 + .../uf/common/pypies/request/RepackRequest.py | 19 + .../common/pypies/request/RetrieveRequest.py | 33 + .../uf/common/pypies/request/StoreRequest.py | 26 + .../uf/common/pypies/request/__init__.py | 26 + .../common/pypies/response/DeleteResponse.py | 12 + .../common/pypies/response/ErrorResponse.py | 12 + .../pypies/response/FileActionResponse.py | 19 + .../pypies/response/RetrieveResponse.py | 12 + .../common/pypies/response/StoreResponse.py | 26 + .../uf/common/pypies/response/__init__.py | 14 + .../SerializableExceptionWrapper.py | 55 + .../uf/common/serialization/__init__.py | 7 + .../uf/common/serialization/comm/__init__.py | 4 + .../comm/response/ServerErrorResponse.py | 12 + .../serialization/comm/response/__init__.py | 18 + .../com/raytheon/uf/common/site/__init__.py | 5 + .../notify/ClusterActivationNotification.py | 39 + .../site/notify/SiteActivationNotification.py | 69 + .../uf/common/site/notify/__init__.py | 8 + .../site/requests/ActivateSiteRequest.py | 26 + .../site/requests/DeactivateSiteRequest.py | 26 + .../site/requests/GetActiveSitesRequest.py | 6 + .../site/requests/GetPrimarySiteRequest.py | 6 + .../site/requests/ValidateConfigRequest.py | 26 + .../uf/common/site/requests/__init__.py | 14 + .../uf/common/time/CommutativeTimestamp.py | 15 + .../com/raytheon/uf/common/time/DataTime.py | 265 +++ .../raytheon/uf/common/time/FormattedDate.py | 17 + .../com/raytheon/uf/common/time/TimeRange.py | 140 ++ .../com/raytheon/uf/common/time/__init__.py | 12 + .../dstypes/com/vividsolutions/__init__.py | 4 + .../com/vividsolutions/jts/__init__.py | 4 + .../com/vividsolutions/jts/geom/Coordinate.py | 25 + .../com/vividsolutions/jts/geom/Envelope.py | 47 + .../com/vividsolutions/jts/geom/Geometry.py | 17 + .../com/vividsolutions/jts/geom/__init__.py | 10 + dynamicserialize/dstypes/gov/__init__.py | 4 + dynamicserialize/dstypes/gov/noaa/__init__.py | 4 + .../dstypes/gov/noaa/nws/__init__.py | 4 + .../dstypes/gov/noaa/nws/ncep/__init__.py | 4 + .../gov/noaa/nws/ncep/common/__init__.py | 4 + .../nws/ncep/common/dataplugin/__init__.py | 7 + .../ncep/common/dataplugin/atcf/__init__.py | 4 + .../atcf/request/RetrieveAtcfDeckRequest.py | 12 + .../dataplugin/atcf/request/__init__.py | 6 + .../ncep/common/dataplugin/gempak/__init__.py | 4 + .../gempak/request/GetGridDataRequest.py | 67 + .../gempak/request/GetGridInfoRequest.py | 39 + .../gempak/request/GetGridNavRequest.py | 25 + .../gempak/request/GetStationsRequest.py | 18 + .../gempak/request/GetTimesRequest.py | 25 + .../gempak/request/GetTimesResponse.py | 18 + .../dataplugin/gempak/request/Station.py | 62 + .../gempak/request/StationDataRequest.py | 46 + .../gempak/request/SurfaceDataRequest.py | 46 + .../gempak/request/UpperAirDataRequest.py | 46 + .../dataplugin/gempak/request/__init__.py | 24 + .../ncep/common/dataplugin/gpd/__init__.py | 4 + .../gpd/query/GenericPointDataReqMsg.py | 82 + .../common/dataplugin/gpd/query/__init__.py | 6 + .../common/dataplugin/pgen/ActivityInfo.py | 75 + .../common/dataplugin/pgen/DerivedProduct.py | 26 + .../pgen/ResponseMessageValidate.py | 40 + .../ncep/common/dataplugin/pgen/__init__.py | 12 + .../request/RetrieveActivityMapRequest.py | 12 + .../request/RetrieveAllProductsRequest.py | 12 + .../pgen/request/StoreActivityRequest.py | 19 + .../request/StoreDerivedProductRequest.py | 19 + .../dataplugin/pgen/request/__init__.py | 12 + 
.../pgen/response/ActivityMapData.py | 53 + .../response/RetrieveActivityMapResponse.py | 18 + .../dataplugin/pgen/response/__init__.py | 8 + dynamicserialize/dstypes/java/__init__.py | 7 + dynamicserialize/dstypes/java/awt/Point.py | 36 + dynamicserialize/dstypes/java/awt/__init__.py | 16 + .../dstypes/java/lang/StackTraceElement.py | 51 + .../dstypes/java/lang/__init__.py | 6 + .../dstypes/java/sql/Timestamp.py | 22 + dynamicserialize/dstypes/java/sql/__init__.py | 6 + .../dstypes/java/util/Calendar.py | 30 + dynamicserialize/dstypes/java/util/Date.py | 37 + dynamicserialize/dstypes/java/util/EnumSet.py | 48 + .../dstypes/java/util/GregorianCalendar.py | 29 + .../dstypes/java/util/__init__.py | 12 + 381 files changed, 19489 insertions(+) create mode 100644 awips/DateTimeConverter.py create mode 100644 awips/QpidSubscriber.py create mode 100644 awips/RadarCommon.py create mode 100644 awips/ThriftClient.py create mode 100644 awips/TimeUtil.py create mode 100644 awips/UsageArgumentParser.py create mode 100644 awips/UsageOptionParser.py create mode 100644 awips/__init__.py create mode 100644 awips/dataaccess/CombinedTimeQuery.py create mode 100644 awips/dataaccess/DataAccessLayer.py create mode 100644 awips/dataaccess/DataNotificationLayer.py create mode 100644 awips/dataaccess/DataQueue.py create mode 100644 awips/dataaccess/ModelSounding.py create mode 100644 awips/dataaccess/PyData.py create mode 100644 awips/dataaccess/PyGeometryData.py create mode 100644 awips/dataaccess/PyGeometryNotification.py create mode 100644 awips/dataaccess/PyGridData.py create mode 100644 awips/dataaccess/PyGridNotification.py create mode 100644 awips/dataaccess/PyNotification.py create mode 100644 awips/dataaccess/ThriftClientRouter.py create mode 100644 awips/dataaccess/__init__.py create mode 100644 awips/gempak/GridDataRetriever.py create mode 100644 awips/gempak/GridInfoRetriever.py create mode 100644 awips/gempak/GridNavRetriever.py create mode 100644 awips/gempak/StationDataRetriever.py create mode 100644 awips/gempak/StationRetriever.py create mode 100644 awips/gempak/TimeRetriever.py create mode 100755 awips/gempak/ncepGribTables.py create mode 100755 awips/gempak/scrape.py create mode 100644 awips/gfe/IFPClient.py create mode 100644 awips/gfe/__init__.py create mode 100644 awips/qpidingest.py create mode 100644 awips/tables.py create mode 100644 awips/test/Record.py create mode 100644 awips/test/__init__.py create mode 100644 awips/test/dafTests/__init__.py create mode 100644 awips/test/dafTests/baseBufrMosTestCase.py create mode 100644 awips/test/dafTests/baseDafTestCase.py create mode 100644 awips/test/dafTests/baseRadarTestCase.py create mode 100644 awips/test/dafTests/params.py create mode 100644 awips/test/dafTests/testAcars.py create mode 100644 awips/test/dafTests/testAirep.py create mode 100644 awips/test/dafTests/testBinLightning.py create mode 100644 awips/test/dafTests/testBufrMosGfs.py create mode 100644 awips/test/dafTests/testBufrUa.py create mode 100644 awips/test/dafTests/testCombinedTimeQuery.py create mode 100644 awips/test/dafTests/testCommonObsSpatial.py create mode 100644 awips/test/dafTests/testDataTime.py create mode 100644 awips/test/dafTests/testGfe.py create mode 100644 awips/test/dafTests/testGrid.py create mode 100644 awips/test/dafTests/testMaps.py create mode 100644 awips/test/dafTests/testModelSounding.py create mode 100644 awips/test/dafTests/testObs.py create mode 100644 awips/test/dafTests/testProfiler.py create mode 100644 awips/test/dafTests/testRadarGraphics.py 
create mode 100644 awips/test/dafTests/testRadarGrid.py create mode 100644 awips/test/dafTests/testRadarSpatial.py create mode 100644 awips/test/dafTests/testRequestConstraint.py create mode 100644 awips/test/dafTests/testSatellite.py create mode 100644 awips/test/dafTests/testSfcObs.py create mode 100644 awips/test/dafTests/testTopo.py create mode 100644 awips/test/dafTests/testWarning.py create mode 100644 dynamicserialize/DynamicSerializationManager.py create mode 100644 dynamicserialize/SelfDescribingBinaryProtocol.py create mode 100644 dynamicserialize/ThriftSerializationContext.py create mode 100644 dynamicserialize/__init__.py create mode 100644 dynamicserialize/adapters/ByteBufferAdapter.py create mode 100644 dynamicserialize/adapters/CalendarAdapter.py create mode 100644 dynamicserialize/adapters/CommutativeTimestampAdapter.py create mode 100644 dynamicserialize/adapters/CoordAdapter.py create mode 100644 dynamicserialize/adapters/DatabaseIDAdapter.py create mode 100644 dynamicserialize/adapters/DateAdapter.py create mode 100644 dynamicserialize/adapters/EnumSetAdapter.py create mode 100644 dynamicserialize/adapters/FloatBufferAdapter.py create mode 100644 dynamicserialize/adapters/FormattedDateAdapter.py create mode 100644 dynamicserialize/adapters/GeomDataRespAdapter.py create mode 100644 dynamicserialize/adapters/GeometryTypeAdapter.py create mode 100644 dynamicserialize/adapters/GregorianCalendarAdapter.py create mode 100644 dynamicserialize/adapters/GridDataHistoryAdapter.py create mode 100644 dynamicserialize/adapters/JTSEnvelopeAdapter.py create mode 100644 dynamicserialize/adapters/LocalizationLevelSerializationAdapter.py create mode 100644 dynamicserialize/adapters/LocalizationTypeSerializationAdapter.py create mode 100644 dynamicserialize/adapters/ParmIDAdapter.py create mode 100644 dynamicserialize/adapters/PointAdapter.py create mode 100644 dynamicserialize/adapters/StackTraceElementAdapter.py create mode 100644 dynamicserialize/adapters/TimeConstraintsAdapter.py create mode 100644 dynamicserialize/adapters/TimeRangeTypeAdapter.py create mode 100644 dynamicserialize/adapters/TimestampAdapter.py create mode 100644 dynamicserialize/adapters/WsIdAdapter.py create mode 100644 dynamicserialize/adapters/__init__.py create mode 100644 dynamicserialize/dstypes/__init__.py create mode 100644 dynamicserialize/dstypes/com/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py create mode 100755 dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AbstractFailedResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AuthServerErrorResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/SuccessfulExecution.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/UserNotAuthorized.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/User.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/UserId.py create mode 
100644 dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultNotificationFilter.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractDataAccessRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractIdentifierRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLevelsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLocationNamesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableParametersRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableTimesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGeometryDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridLatLonRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetIdentifierValuesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetNotificationFilterRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetOptionalIdentifiersRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetRequiredIdentifiersRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetSupportedDatatypesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/AbstractResponseData.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GeometryResponseData.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridDataResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridLatLonResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetNotificationFilterResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GridResponseData.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py create mode 100644 
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/GridDataHistory.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/ProjectionData.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GFERecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/ParmID.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/TimeConstraints.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/DiscreteKey.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DByte.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DFloat.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/AbstractGfeRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/CommitGridsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExecuteIfpNetCDFGridRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExportGridsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetASCIIGridsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridInventoryRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestDbTimeRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestModelDbIdRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLockTablesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetOfficialDbNameRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetParmListRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSelectTimeRangeRequest.py create mode 100644 
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSingletonDbIdsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSiteTimeZoneInfoRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GfeClientRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GridLocRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/LockChangeRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedConfRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedDigitalDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/PurgeGfeGridsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/RsyncGridsToCWFRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SaveASCIIGridsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SmartInitRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/Lock.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/LockTable.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerMsg.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/CombinationsFileChangedNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/DBInvChangeNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GfeNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridHistoryUpdateNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridUpdateNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/LockNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/UserMessageNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/CommitGridRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/GetGridRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockTableRequest.py create mode 100644 
dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/AbstractGridSlice.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/DiscreteGridSlice.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/ScalarGridSlice.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/VectorGridSlice.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/WeatherGridSlice.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherKey.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherSubKey.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/DeleteAllGridDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/Level.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/MasterLevel.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/DataURINotificationMessage.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/GetRadarDataRecordRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/GetRadarDataRecordResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/RadarDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/TextDBRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/SubscriptionRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/RequestConstraint.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/__init__.py create mode 100644 
dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/Request.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageProperties.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageStatus.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/AbstractDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/DoubleDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/FloatDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/IntegerDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/LongDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ShortDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/StringDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationContext.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationLevel.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationType.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityCommand.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListResponseEntry.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityCommand.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/PrivilegedUtilityRequestMessage.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityRequestMessage.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityResponseMessage.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/AbstractLocalizationStreamRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamGetRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamPutRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/ChangeContextRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/PassThroughRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/__init__.py 
create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetClusterMembersRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetContextsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/StatusRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/response/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ClusterMembersResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ContextsResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/StatusResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/Body.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/Header.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/Message.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/Property.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/WsId.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/message/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pointdata/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pointdata/requests/NewAdaptivePlotRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pointdata/requests/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/PointTest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/records/CompressedDataRecord.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/records/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CopyRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CreateDatasetRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetDataRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetNamesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteFilesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteOrphansRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/GroupsRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RepackRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RetrieveRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/StoreRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/DeleteResponse.py create mode 100644 
dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/ErrorResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/FileActionResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/RetrieveResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/StoreResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/serialization/SerializableExceptionWrapper.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/serialization/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/ServerErrorResponse.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/ClusterActivationNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/SiteActivationNotification.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ActivateSiteRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/DeactivateSiteRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetActiveSitesRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetPrimarySiteRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ValidateConfigRequest.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/__init__.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/time/CommutativeTimestamp.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/time/DataTime.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/time/FormattedDate.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/time/TimeRange.py create mode 100644 dynamicserialize/dstypes/com/raytheon/uf/common/time/__init__.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/__init__.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py create mode 100644 dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py create mode 100644 dynamicserialize/dstypes/gov/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py create mode 100644 
dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py create mode 100644 dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py create mode 100644 dynamicserialize/dstypes/java/__init__.py create mode 100644 dynamicserialize/dstypes/java/awt/Point.py create mode 100644 dynamicserialize/dstypes/java/awt/__init__.py create mode 100644 dynamicserialize/dstypes/java/lang/StackTraceElement.py create mode 100644 
dynamicserialize/dstypes/java/lang/__init__.py
 create mode 100644 dynamicserialize/dstypes/java/sql/Timestamp.py
 create mode 100644 dynamicserialize/dstypes/java/sql/__init__.py
 create mode 100644 dynamicserialize/dstypes/java/util/Calendar.py
 create mode 100644 dynamicserialize/dstypes/java/util/Date.py
 create mode 100644 dynamicserialize/dstypes/java/util/EnumSet.py
 create mode 100644 dynamicserialize/dstypes/java/util/GregorianCalendar.py
 create mode 100644 dynamicserialize/dstypes/java/util/__init__.py

diff --git a/awips/DateTimeConverter.py b/awips/DateTimeConverter.py
new file mode 100644
index 0000000..bab093b
--- /dev/null
+++ b/awips/DateTimeConverter.py
@@ -0,0 +1,88 @@
+#
+# Functions for converting the various "Java" dynamic serialize types
+# used by EDEX to the native Python datetime.
+#
+#
+# SOFTWARE HISTORY
+#
+# Date          Ticket#  Engineer  Description
+# ------------  -------- --------- --------------------------
+# 06/24/15      #4480    dgilling  Initial Creation.
+#
+
+import datetime
+import time
+
+from dynamicserialize.dstypes.java.util import Date
+from dynamicserialize.dstypes.java.sql import Timestamp
+from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange
+
+MAX_TIME = pow(2, 31) - 1
+MICROS_IN_SECOND = 1000000
+
+
+def convertToDateTime(timeArg):
+    """
+    Converts the given object to a python datetime object. Supports native
+    python representations like datetime and struct_time, but also
+    the dynamicserialize types like Date and Timestamp. Raises TypeError
+    if no conversion can be performed.
+
+    Args:
+        timeArg: a python object representing a date and time. Supported
+        types include datetime, struct_time, float, int, long and the
+        dynamicserialize types Date and Timestamp.
+
+    Returns:
+        A datetime that represents the same date/time as the passed in object.
+    """
+    if isinstance(timeArg, datetime.datetime):
+        return timeArg
+    elif isinstance(timeArg, time.struct_time):
+        return datetime.datetime(*timeArg[:6])
+    elif isinstance(timeArg, float):
+        # seconds as float, should be avoided due to floating point errors
+        totalSecs = int(timeArg)
+        micros = int((timeArg - totalSecs) * MICROS_IN_SECOND)
+        return _convertSecsAndMicros(totalSecs, micros)
+    elif isinstance(timeArg, int):
+        # seconds as integer
+        totalSecs = timeArg
+        return _convertSecsAndMicros(totalSecs, 0)
+    elif isinstance(timeArg, (Date, Timestamp)):
+        totalSecs = timeArg.getTime()
+        return _convertSecsAndMicros(totalSecs, 0)
+    else:
+        objType = str(type(timeArg))
+        raise TypeError("Cannot convert object of type " + objType + " to datetime.")
+
+
+def _convertSecsAndMicros(seconds, micros):
+    if seconds < MAX_TIME:
+        rval = datetime.datetime.utcfromtimestamp(seconds)
+    else:
+        extraTime = datetime.timedelta(seconds=(seconds - MAX_TIME))
+        rval = datetime.datetime.utcfromtimestamp(MAX_TIME) + extraTime
+    return rval.replace(microsecond=micros)
+
+
+def constructTimeRange(*args):
+    """
+    Builds a python dynamicserialize TimeRange object from the given
+    arguments.
+
+    Args:
+        *args: must be a TimeRange or a pair of objects that can be
+        converted to a datetime via convertToDateTime().
+
+    Returns:
+        A TimeRange.
+    """
+
+    if len(args) == 1 and isinstance(args[0], TimeRange):
+        return args[0]
+    if len(args) != 2:
+        raise TypeError("constructTimeRange takes exactly 2 arguments, " + str(len(args)) + " provided.")
+    startTime = convertToDateTime(args[0])
+    endTime = convertToDateTime(args[1])
+    return TimeRange(startTime, endTime)
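
A minimal usage sketch for the two public helpers added above; the epoch-seconds value and the six-hour offset are illustrative only, and the import path simply mirrors the new awips/DateTimeConverter.py module.

    import datetime
    from awips import DateTimeConverter

    # An int is treated as seconds since the epoch (illustrative value).
    start = DateTimeConverter.convertToDateTime(1433333333)

    # Any pair of objects convertible via convertToDateTime() yields a TimeRange.
    tr = DateTimeConverter.constructTimeRange(start, start + datetime.timedelta(hours=6))
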
diff --git a/awips/QpidSubscriber.py b/awips/QpidSubscriber.py
new file mode 100644
index 0000000..833511e
--- /dev/null
+++ b/awips/QpidSubscriber.py
@@ -0,0 +1,103 @@
+#
+# Provides a Python-based interface for subscribing to qpid queues and topics.
+#
+#
+#
+# SOFTWARE HISTORY
+#
+# Date          Ticket#  Engineer  Description
+# ------------  -------- --------- --------------------------
+# 11/17/10               njensen   Initial Creation.
+# 08/15/13      2169     bkowal    Optionally gzip decompress any data that is read.
+# 08/04/16      2416     tgurney   Add queueStarted property
+# 02/16/17      6084     bsteffen  Support ssl connections
+# 09/07/17      6175     tgurney   Remove "decompressing" log message
+#
+#
+
+import os
+import os.path
+import qpid
+import zlib
+
+from Queue import Empty
+from qpid.exceptions import Closed
+
+
+class QpidSubscriber:
+
+    def __init__(self, host='127.0.0.1', port=5672, decompress=False, ssl=None):
+        self.host = host
+        self.port = port
+        self.decompress = decompress
+        socket = qpid.util.connect(host, port)
+        if "QPID_SSL_CERT_DB" in os.environ:
+            certdb = os.environ["QPID_SSL_CERT_DB"]
+        else:
+            certdb = os.path.expanduser("~/.qpid/")
+        if "QPID_SSL_CERT_NAME" in os.environ:
+            certname = os.environ["QPID_SSL_CERT_NAME"]
+        else:
+            certname = "guest"
+        certfile = os.path.join(certdb, certname + ".crt")
+        if ssl or (ssl is None and os.path.exists(certfile)):
+            keyfile = os.path.join(certdb, certname + ".key")
+            trustfile = os.path.join(certdb, "root.crt")
+            socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile)
+        self.__connection = qpid.connection.Connection(sock=socket, username='guest', password='guest')
+        self.__connection.start()
+        self.__session = self.__connection.session(str(qpid.datatypes.uuid4()))
+        self.subscribed = True
+        self.__queueStarted = False
+
+    def topicSubscribe(self, topicName, callback):
+        # if the queue is edex.alerts, set decompress to true always for now to
+        # maintain compatibility with existing python scripts.
+        if topicName == 'edex.alerts':
+            self.decompress = True
+
+        print("Establishing connection to broker on", self.host)
+        queueName = topicName + self.__session.name
+        self.__session.queue_declare(queue=queueName, exclusive=True, auto_delete=True,
+                                     arguments={'qpid.max_count': 100, 'qpid.policy_type': 'ring'})
+        self.__session.exchange_bind(exchange='amq.topic', queue=queueName, binding_key=topicName)
+        self.__innerSubscribe(queueName, callback)
+
+    def __innerSubscribe(self, serverQueueName, callback):
+        local_queue_name = 'local_queue_' + serverQueueName
+        queue = self.__session.incoming(local_queue_name)
+        self.__session.message_subscribe(serverQueueName, destination=local_queue_name)
+        queue.start()
+        print("Connection complete to broker on", self.host)
+        self.__queueStarted = True
+
+        while self.subscribed:
+            try:
+                message = queue.get(timeout=10)
+                content = message.body
+                self.__session.message_accept(qpid.datatypes.RangedSet(message.id))
+                if self.decompress:
+                    try:
+                        # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk
+                        d = zlib.decompressobj(16+zlib.MAX_WBITS)
+                        content = d.decompress(content)
+                    except ValueError:
+                        # decompression failed, return the original content
+                        pass
+                callback(content)
+            except Empty:
+                pass
+            except Closed:
+                self.close()
+
+    def close(self):
+        self.__queueStarted = False
+        self.subscribed = False
+        try:
+            self.__session.close(timeout=10)
+        except ValueError:
+            pass
+
+    @property
+    def queueStarted(self):
+        return self.__queueStarted
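
A minimal subscription sketch, assuming a reachable EDEX/Qpid broker; the host name and the print-only callback are placeholders. topicSubscribe blocks in its receive loop until close() is called or the session is closed by the broker.

    from awips.QpidSubscriber import QpidSubscriber

    def on_message(content):
        # content is the raw (optionally gzip-decompressed) message body
        print(content)

    subscriber = QpidSubscriber(host="edex.example.com", port=5672, decompress=True)
    subscriber.topicSubscribe("edex.alerts", on_message)  # blocks while subscribed
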
+ if topicName == 'edex.alerts': + self.decompress = True + + print("Establishing connection to broker on", self.host) + queueName = topicName + self.__session.name + self.__session.queue_declare(queue=queueName, exclusive=True, auto_delete=True, + arguments={'qpid.max_count': 100, 'qpid.policy_type': 'ring'}) + self.__session.exchange_bind(exchange='amq.topic', queue=queueName, binding_key=topicName) + self.__innerSubscribe(queueName, callback) + + def __innerSubscribe(self, serverQueueName, callback): + local_queue_name = 'local_queue_' + serverQueueName + queue = self.__session.incoming(local_queue_name) + self.__session.message_subscribe(serverQueueName, destination=local_queue_name) + queue.start() + print("Connection complete to broker on", self.host) + self.__queueStarted = True + + while self.subscribed: + try: + message = queue.get(timeout=10) + content = message.body + self.__session.message_accept(qpid.datatypes.RangedSet(message.id)) + if self.decompress: + try: + # http://stackoverflow.com/questions/2423866/python-decompressing-gzip-chunk-by-chunk + d = zlib.decompressobj(16+zlib.MAX_WBITS) + content = d.decompress(content) + except ValueError: + # decompression failed, return the original content + pass + callback(content) + except Empty: + pass + except Closed: + self.close() + + def close(self): + self.__queueStarted = False + self.subscribed = False + try: + self.__session.close(timeout=10) + except ValueError: + pass + + @property + def queueStarted(self): + return self.__queueStarted diff --git a/awips/RadarCommon.py b/awips/RadarCommon.py new file mode 100644 index 0000000..8c66c99 --- /dev/null +++ b/awips/RadarCommon.py @@ -0,0 +1,141 @@ +# +# Common methods for the a2gtrad and a2advrad scripts. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/13/2014 3393 nabowle Initial creation to contain common +# code for a2*radStub scripts. +# 03/15/2015 mjames@ucar Edited/added to awips package as RadarCommon +# +# + + +def get_datetime_str(record): + """ + Get the datetime string for a record. + + Args: + record: the record to get data for. + + Returns: + datetime string. + """ + return str(record.getDataTime())[0:19].replace(" ", "_") + ".0" + + +def get_data_type(azdat): + """ + Get the radar file type (radial or raster). + + Args: + azdat: Boolean. + + Returns: + Radial or raster. 
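+
+    Example (illustrative):
+        get_data_type(True)   # -> "radial"
+        get_data_type(False)  # -> "raster"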
+ """ + if azdat: + return "radial" + return "raster" + + +def get_hdf5_data(idra): + rdat = [] + azdat = [] + depVals = [] + threshVals = [] + if idra: + for item in idra: + if item.getName() == "Data": + rdat = item + elif item.getName() == "Angles": + azdat = item + # dattyp = "radial" + elif item.getName() == "DependentValues": + depVals = item.getShortData() + elif item.getName() == "Thresholds": + threshVals = item.getShortData() + + return rdat, azdat, depVals, threshVals + + +def get_header(record, headerFormat, xLen, yLen, azdat, description): + # Encode dimensions, time, mapping, description, tilt, and VCP + mytime = get_datetime_str(record) + dattyp = get_data_type(azdat) + + if headerFormat: + msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \ + dattyp + " " + str(record.getLatitude()) + " " + \ + str(record.getLongitude()) + " " + \ + str(record.getElevation()) + " " + \ + str(record.getElevationNumber()) + " " + \ + description + " " + str(record.getTrueElevationAngle()) + " " + \ + str(record.getVolumeCoveragePattern()) + "\n" + else: + msg = str(xLen) + " " + str(yLen) + " " + mytime + " " + \ + dattyp + " " + description + " " + \ + str(record.getTrueElevationAngle()) + " " + \ + str(record.getVolumeCoveragePattern()) + "\n" + + return msg + + +def encode_thresh_vals(threshVals): + spec = [".", "TH", "ND", "RF", "BI", "GC", "IC", "GR", "WS", "DS", + "RA", "HR", "BD", "HA", "UK"] + nnn = len(threshVals) + j = 0 + msg = "" + while j < nnn: + lo = threshVals[j] % 256 + hi = threshVals[j] / 256 + msg += " " + j += 1 + if hi < 0: + if lo > 14: + msg += "." + else: + msg += spec[lo] + continue + if hi % 16 >= 8: + msg += ">" + elif hi % 8 >= 4: + msg += "<" + if hi % 4 >= 2: + msg += "+" + elif hi % 2 >= 1: + msg += "-" + if hi >= 64: + msg += "%.2f" % (lo*0.01) + elif hi % 64 >= 32: + msg += "%.2f" % (lo*0.05) + elif hi % 32 >= 16: + msg += "%.1f" % (lo*0.1) + else: + msg += str(lo) + msg += "\n" + return msg + + +def encode_dep_vals(depVals): + nnn = len(depVals) + j = 0 + msg = [] + while j < nnn: + msg.append(str(depVals[j])) + j += 1 + return msg + + +def encode_radial(azVals): + azValsLen = len(azVals) + j = 0 + msg = [] + while j < azValsLen: + msg.append(azVals[j]) + j += 1 + return msg diff --git a/awips/ThriftClient.py b/awips/ThriftClient.py new file mode 100644 index 0000000..34c0e5c --- /dev/null +++ b/awips/ThriftClient.py @@ -0,0 +1,80 @@ +# +# Provides a Python-based interface for executing Thrift requests. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/20/10 dgilling Initial Creation. +# +# + +try: + import http.client as httpcl +except ImportError: + import httplib as httpcl +from dynamicserialize import DynamicSerializationManager + + +class ThriftClient: + + # How to call this constructor: + # 1. Pass in all arguments separately (e.g., + # ThriftClient.ThriftClient("localhost", 9581, "/services")) + # will return a Thrift client pointed at http://localhost:9581/services. + # 2. Pass in all arguments through the host string (e.g., + # ThriftClient.ThriftClient("localhost:9581/services")) + # will return a Thrift client pointed at http://localhost:9581/services. + # 3. Pass in host/port arguments through the host string (e.g., + # ThriftClient.ThriftClient("localhost:9581", "/services")) + # will return a Thrift client pointed at http://localhost:9581/services. 
+ def __init__(self, host, port=9581, uri="/services"): + hostParts = host.split("/", 1) + if len(hostParts) > 1: + hostString = hostParts[0] + self.__uri = "/" + hostParts[1] + self.__httpConn = httpcl.HTTPConnection(hostString) + else: + if port is None: + self.__httpConn = httpcl.HTTPConnection(host) + else: + self.__httpConn = httpcl.HTTPConnection(host, port) + + self.__uri = uri + + self.__dsm = DynamicSerializationManager.DynamicSerializationManager() + + def sendRequest(self, request, uri="/thrift"): + message = self.__dsm.serializeObject(request) + + self.__httpConn.connect() + self.__httpConn.request("POST", self.__uri + uri, message) + + response = self.__httpConn.getresponse() + if response.status != 200: + raise ThriftRequestException("Unable to post request to server") + + rval = self.__dsm.deserializeBytes(response.read()) + self.__httpConn.close() + + # let's verify we have an instance of ServerErrorResponse + # IF we do, through an exception up to the caller along + # with the original Java stack trace + # ELSE: we have a valid response and pass it back + try: + forceError = rval.getException() + raise ThriftRequestException(forceError) + except AttributeError: + pass + + return rval + + +class ThriftRequestException(Exception): + def __init__(self, value): + self.parameter = value + + def __str__(self): + return repr(self.parameter) diff --git a/awips/TimeUtil.py b/awips/TimeUtil.py new file mode 100644 index 0000000..6543bea --- /dev/null +++ b/awips/TimeUtil.py @@ -0,0 +1,87 @@ +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# offsetTime.py +# Handles Displaced Real Time for various applications +# +# Author: hansen/romberg +# ---------------------------------------------------------------------------- + +import string +import time + +# Given the timeStr, return the offset (in seconds) +# from the current time. +# Also return the launchStr i.e. Programs launched from this +# offset application will use the launchStr as the -z argument. +# The offset will be positive for time in the future, +# negative for time in the past. +# +# May still want it to be normalized to the most recent midnight. +# +# NOTES about synchronizing: +# --With synchronizing on, the "current time" for all processes started +# within a given hour will be the same. +# This guarantees that GFE's have the same current time and ISC grid +# time stamps are syncrhonized and can be exchanged. +# Formatters launched from the GFE in this mode will be synchronized as +# well by setting the launchStr to use the time difference format +# (YYYYMMDD_HHMM,YYYYMMDD_HHMM). +# --This does not solve the problem in the general case. +# For example, if someone starts the GFE at 12:59 and someone +# else starts it at 1:01, they will have different offsets and +# current times. +# --With synchronizing off, when the process starts, the current time +# matches the drtTime in the command line. However, with synchronizing +# on, the current time will be offset by the fraction of the hour at +# which the process was started. 
Examples: +# Actual Starting time: 20040617_1230 +# drtTime 20040616_0000 +# Synchronizing off: +# GFE Spatial Editor at StartUp: 20040616_0000 +# Synchronizing on: +# GFE Spatial Editor at StartUp: 20040616_0030 +# + + +def determineDrtOffset(timeStr): + launchStr = timeStr + # Check for time difference + if timeStr.find(",") >= 0: + times = timeStr.split(",") + t1 = makeTime(times[0]) + t2 = makeTime(times[1]) + return t1-t2, launchStr + # Check for synchronized mode + synch = 0 + if timeStr[0] == "S": + timeStr = timeStr[1:] + synch = 1 + drt_t = makeTime(timeStr) + gm = time.gmtime() + cur_t = time.mktime(gm) + + # Synchronize to most recent hour + # i.e. "truncate" cur_t to most recent hour. + if synch: + cur_t = time.mktime((gm[0], gm[1], gm[2], gm[3], 0, 0, 0, 0, 0)) + curStr = '%4s%2s%2s_%2s00\n' % (repr(gm[0]), repr(gm[1]), + repr(gm[2]), repr(gm[3])) + curStr = curStr.replace(' ', '0') + launchStr = timeStr + "," + curStr + + offset = drt_t - cur_t + return int(offset), launchStr + + +def makeTime(timeStr): + year = string.atoi(timeStr[0:4]) + month = string.atoi(timeStr[4:6]) + day = string.atoi(timeStr[6:8]) + hour = string.atoi(timeStr[9:11]) + minute = string.atoi(timeStr[11:13]) + # Do not use daylight savings because gmtime is not in daylight + # savings time. + return time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) diff --git a/awips/UsageArgumentParser.py b/awips/UsageArgumentParser.py new file mode 100644 index 0000000..66823fc --- /dev/null +++ b/awips/UsageArgumentParser.py @@ -0,0 +1,62 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 13, 2017 6092 randerso Added StoreTimeAction +# + +import argparse +import sys +import time + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + +TIME_FORMAT = "%Y%m%d_%H%M" + + +class UsageArgumentParser(argparse.ArgumentParser): + """ + A subclass of ArgumentParser that overrides error() to print the + whole help text, rather than just the usage string. 
+ """ + def error(self, message): + sys.stderr.write('%s: error: %s\n' % (self.prog, message)) + self.print_help() + sys.exit(2) + + +# Custom actions for ArgumentParser objects +class StoreDatabaseIDAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + did = DatabaseID(values) + if did.isValid(): + setattr(namespace, self.dest, did) + else: + parser.error("DatabaseID [" + values + "] not a valid identifier") + + +class AppendParmNameAndLevelAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + tx = ParmID.parmNameAndLevel(values) + comp = tx[0] + '_' + tx[1] + if (hasattr(namespace, self.dest)) and (getattr(namespace, self.dest) is not None): + currentValues = getattr(namespace, self.dest) + currentValues.append(comp) + setattr(namespace, self.dest, currentValues) + else: + setattr(namespace, self.dest, [comp]) + + +class StoreTimeAction(argparse.Action): + """ + argparse.Action subclass to validate GFE formatted time strings + and parse them to time.struct_time + """ + def __call__(self, parser, namespace, values, option_string=None): + try: + timeStruct = time.strptime(values, TIME_FORMAT) + setattr(namespace, self.dest, timeStruct) + except ValueError: + parser.error(str(values) + " is not a valid time string of the format YYYYMMDD_hhmm") diff --git a/awips/UsageOptionParser.py b/awips/UsageOptionParser.py new file mode 100644 index 0000000..2f3ae77 --- /dev/null +++ b/awips/UsageOptionParser.py @@ -0,0 +1,18 @@ +import sys +from optparse import OptionParser + + +class UsageOptionParser(OptionParser): + """ + A subclass of OptionParser that prints that overrides error() to print the + whole help text, rather than just the usage string. + """ + def error(self, msg): + """ + Print the help text and exit. + """ + self.print_help(sys.stderr) + sys.stderr.write("\n") + sys.stderr.write(msg) + sys.stderr.write("\n") + sys.exit(2) diff --git a/awips/__init__.py b/awips/__init__.py new file mode 100644 index 0000000..b6e95a6 --- /dev/null +++ b/awips/__init__.py @@ -0,0 +1,3 @@ + +__all__ = [ + ] diff --git a/awips/dataaccess/CombinedTimeQuery.py b/awips/dataaccess/CombinedTimeQuery.py new file mode 100644 index 0000000..06651ee --- /dev/null +++ b/awips/dataaccess/CombinedTimeQuery.py @@ -0,0 +1,84 @@ +# +# Method for performing a DAF time query where all parameter/level/location +# combinations must be available at the same time. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/22/16 #5591 bsteffen Initial Creation. 
+# + +from awips.dataaccess import DataAccessLayer + + +def getAvailableTimes(request, refTimeOnly=False): + return __getAvailableTimesForEachParameter(request, refTimeOnly) + + +def __getAvailableTimesForEachParameter(request, refTimeOnly=False): + parameters = request.getParameters() + if parameters: + times = None + for parameter in parameters: + specificRequest = __cloneRequest(request) + specificRequest.setParameters(parameter) + specificTimes = __getAvailableTimesForEachLevel(specificRequest, refTimeOnly) + if times is None: + times = specificTimes + else: + times.intersection_update(specificTimes) + if not times: + break + return times + else: + return __getAvailableTimesForEachLevel(request, refTimeOnly) + + +def __getAvailableTimesForEachLevel(request, refTimeOnly=False): + levels = request.getLevels() + if levels: + times = None + for level in levels: + specificRequest = __cloneRequest(request) + specificRequest.setLevels(level) + specificTimes = __getAvailableTimesForEachLocation(specificRequest, refTimeOnly) + if times is None: + times = specificTimes + else: + times.intersection_update(specificTimes) + if not times: + break + return times + else: + return __getAvailableTimesForEachLocation(request, refTimeOnly) + + +def __getAvailableTimesForEachLocation(request, refTimeOnly=False): + locations = request.getLocationNames() + if locations: + times = None + for location in locations: + specificRequest = __cloneRequest(request) + specificRequest.setLocationNames(location) + specificTimes = DataAccessLayer.getAvailableTimes(specificRequest, refTimeOnly) + if times is None: + times = set(specificTimes) + else: + times.intersection_update(specificTimes) + if not times: + break + return times + else: + return DataAccessLayer.getAvailableTimes(request, refTimeOnly) + + +def __cloneRequest(request): + return DataAccessLayer.newDataRequest(datatype=request.getDatatype(), + parameters=request.getParameters(), + levels=request.getLevels(), + locationNames=request.getLocationNames(), + envelope=request.getEnvelope(), + **request.getIdentifiers()) diff --git a/awips/dataaccess/DataAccessLayer.py b/awips/dataaccess/DataAccessLayer.py new file mode 100644 index 0000000..58943ce --- /dev/null +++ b/awips/dataaccess/DataAccessLayer.py @@ -0,0 +1,391 @@ +# +# Published interface for awips.dataaccess package +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ------- ---------- ------------------------- +# 12/10/12 njensen Initial Creation. +# Feb 14, 2013 1614 bsteffen refactor data access framework to use single request. +# 04/10/13 1871 mnash move getLatLonCoords to JGridData and add default args +# 05/29/13 2023 dgilling Hook up ThriftClientRouter. +# 03/03/14 2673 bsteffen Add ability to query only ref times. 
+# 07/22/14 3185 njensen Added optional/default args to newDataRequest +# 07/30/14 3185 njensen Renamed valid identifiers to optional +# Apr 26, 2015 4259 njensen Updated for new JEP API +# Apr 13, 2016 5379 tgurney Add getIdentifierValues(), getRequiredIdentifiers(), +# and getOptionalIdentifiers() +# Oct 07, 2016 ---- mjames@ucar Added getForecastRun +# Oct 18, 2016 5916 bsteffen Add setLazyLoadGridLatLon +# Oct 11, 2018 ---- mjames@ucar Added getMetarObs() getSynopticObs() +# + +import sys +import warnings + +THRIFT_HOST = "edex" + +USING_NATIVE_THRIFT = False + +if 'jep' in sys.modules: + # intentionally do not catch if this fails to import, we want it to + # be obvious that something is configured wrong when running from within + # Java instead of allowing false confidence and fallback behavior + import JepRouter + router = JepRouter +else: + from awips.dataaccess import ThriftClientRouter + router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) + USING_NATIVE_THRIFT = True + + +def getRadarProductIDs(availableParms): + """ + Get only the numeric idetifiers for NEXRAD3 products. + + Args: + availableParms: Full list of radar parameters + + Returns: + List of filtered parameters + """ + productIDs = [] + for p in list(availableParms): + try: + if isinstance(int(p), int): + productIDs.append(str(p)) + except ValueError: + pass + + return productIDs + + +def getRadarProductNames(availableParms): + """ + Get only the named idetifiers for NEXRAD3 products. + + Args: + availableParms: Full list of radar parameters + + Returns: + List of filtered parameters + """ + productNames = [] + for p in list(availableParms): + if len(p) > 3: + productNames.append(p) + + return productNames + + +def getMetarObs(response): + """ + Processes a DataAccessLayer "obs" response into a dictionary, + with special consideration for multi-value parameters + "presWeather", "skyCover", and "skyLayerBase". + + Args: + response: DAL getGeometry() list + + Returns: + A dictionary of METAR obs + """ + from datetime import datetime + single_val_params = ["timeObs", "stationName", "longitude", "latitude", + "temperature", "dewpoint", "windDir", + "windSpeed", "seaLevelPress"] + multi_val_params = ["presWeather", "skyCover", "skyLayerBase"] + params = single_val_params + multi_val_params + station_names, pres_weather, sky_cov, sky_layer_base = [], [], [], [] + obs = dict({params: [] for params in params}) + for ob in response: + avail_params = ob.getParameters() + if "presWeather" in avail_params: + pres_weather.append(ob.getString("presWeather")) + elif "skyCover" in avail_params and "skyLayerBase" in avail_params: + sky_cov.append(ob.getString("skyCover")) + sky_layer_base.append(ob.getNumber("skyLayerBase")) + else: + # If we already have a record for this stationName, skip + if ob.getString('stationName') not in station_names: + station_names.append(ob.getString('stationName')) + for param in single_val_params: + if param in avail_params: + if param == 'timeObs': + obs[param].append(datetime.fromtimestamp(ob.getNumber(param) / 1000.0)) + else: + try: + obs[param].append(ob.getNumber(param)) + except TypeError: + obs[param].append(ob.getString(param)) + else: + obs[param].append(None) + + obs['presWeather'].append(pres_weather) + obs['skyCover'].append(sky_cov) + obs['skyLayerBase'].append(sky_layer_base) + pres_weather = [] + sky_cov = [] + sky_layer_base = [] + return obs + + +def getSynopticObs(response): + """ + Processes a DataAccessLayer "sfcobs" response into a dictionary + of available parameters. 
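+
+    Duplicate stationId records are skipped, and 'timeObs' values are
+    converted from epoch milliseconds to datetime objects.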
+ + Args: + response: DAL getGeometry() list + + Returns: + A dictionary of synop obs + """ + from datetime import datetime + station_names = [] + params = response[0].getParameters() + sfcobs = dict({params: [] for params in params}) + for sfcob in response: + # If we already have a record for this stationId, skip + if sfcob.getString('stationId') not in station_names: + station_names.append(sfcob.getString('stationId')) + for param in params: + if param == 'timeObs': + sfcobs[param].append(datetime.fromtimestamp(sfcob.getNumber(param) / 1000.0)) + else: + try: + sfcobs[param].append(sfcob.getNumber(param)) + except TypeError: + sfcobs[param].append(sfcob.getString(param)) + + return sfcobs + + +def getForecastRun(cycle, times): + """ + Get the latest forecast run (list of objects) from all + all cycles and times returned from DataAccessLayer "grid" + response. + + Args: + cycle: Forecast cycle reference time + times: All available times/cycles + + Returns: + DataTime array for a single forecast run + """ + fcstRun = [] + for t in times: + if str(t)[:19] == str(cycle): + fcstRun.append(t) + return fcstRun + + +def getAvailableTimes(request, refTimeOnly=False): + """ + Get the times of available data to the request. + + Args: + request: the IDataRequest to get data for + refTimeOnly: optional, use True if only unique refTimes should be + returned (without a forecastHr) + + Returns: + a list of DataTimes + """ + return router.getAvailableTimes(request, refTimeOnly) + + +def getGridData(request, times=[]): + """ + Gets the grid data that matches the request at the specified times. Each + combination of parameter, level, and dataTime will be returned as a + separate IGridData. + + Args: + request: the IDataRequest to get data for + times: a list of DataTimes, a TimeRange, or None if the data is time + agnostic + + Returns: + a list of IGridData + """ + return router.getGridData(request, times) + + +def getGeometryData(request, times=[]): + """ + Gets the geometry data that matches the request at the specified times. + Each combination of geometry, level, and dataTime will be returned as a + separate IGeometryData. + + Args: + request: the IDataRequest to get data for + times: a list of DataTimes, a TimeRange, or None if the data is time + agnostic + + Returns: + a list of IGeometryData + """ + return router.getGeometryData(request, times) + + +def getAvailableLocationNames(request): + """ + Gets the available location names that match the request without actually + requesting the data. + + Args: + request: the request to find matching location names for + + Returns: + a list of strings of available location names. + """ + return router.getAvailableLocationNames(request) + + +def getAvailableParameters(request): + """ + Gets the available parameters names that match the request without actually + requesting the data. + + Args: + request: the request to find matching parameter names for + + Returns: + a list of strings of available parameter names. + """ + return router.getAvailableParameters(request) + + +def getAvailableLevels(request): + """ + Gets the available levels that match the request without actually + requesting the data. + + Args: + request: the request to find matching levels for + + Returns: + a list of strings of available levels. + """ + return router.getAvailableLevels(request) + + +def getRequiredIdentifiers(request): + """ + Gets the required identifiers for this request. These identifiers + must be set on a request for the request of this datatype to succeed. 
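+
+    Passing a datatype name string instead of an IDataRequest is deprecated
+    but still supported.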
+ + Args: + request: the request to find required identifiers for + + Returns: + a list of strings of required identifiers + """ + if str(request) == request: + warnings.warn("Use getRequiredIdentifiers(IDataRequest) instead", + DeprecationWarning) + return router.getRequiredIdentifiers(request) + + +def getOptionalIdentifiers(request): + """ + Gets the optional identifiers for this request. + + Args: + request: the request to find optional identifiers for + + Returns: + a list of strings of optional identifiers + """ + if str(request) == request: + warnings.warn("Use getOptionalIdentifiers(IDataRequest) instead", + DeprecationWarning) + return router.getOptionalIdentifiers(request) + + +def getIdentifierValues(request, identifierKey): + """ + Gets the allowed values for a particular identifier on this datatype. + + Args: + request: the request to find identifier values for + identifierKey: the identifier to find values for + + Returns: + a list of strings of allowed values for the specified identifier + """ + return router.getIdentifierValues(request, identifierKey) + + +def newDataRequest(datatype=None, **kwargs): + """ + Creates a new instance of IDataRequest suitable for the runtime environment. + All args are optional and exist solely for convenience. + + Args: + datatype: the datatype to create a request for + parameters: a list of parameters to set on the request + levels: a list of levels to set on the request + locationNames: a list of locationNames to set on the request + envelope: an envelope to limit the request + kwargs: any leftover kwargs will be set as identifiers + + Returns: + a new IDataRequest + """ + return router.newDataRequest(datatype, **kwargs) + + +def getSupportedDatatypes(): + """ + Gets the datatypes that are supported by the framework + + Returns: + a list of strings of supported datatypes + """ + return router.getSupportedDatatypes() + + +def changeEDEXHost(newHostName): + """ + Changes the EDEX host the Data Access Framework is communicating with. Only + works if using the native Python client implementation, otherwise, this + method will throw a TypeError. + + Args: + newHostName: the EDEX host to connect to + """ + if USING_NATIVE_THRIFT: + global THRIFT_HOST + THRIFT_HOST = newHostName + global router + router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) + else: + raise TypeError("Cannot call changeEDEXHost when using JepRouter.") + + +def setLazyLoadGridLatLon(lazyLoadGridLatLon): + """ + Provide a hint to the Data Access Framework indicating whether to load the + lat/lon data for a grid immediately or wait until it is needed. This is + provided as a performance tuning hint and should not affect the way the + Data Access Framework is used. Depending on the internal implementation of + the Data Access Framework this hint might be ignored. Examples of when this + should be set to True are when the lat/lon information is not used or when + it is used only if certain conditions within the data are met. It could be + set to False if it is guaranteed that all lat/lon information is needed and + it would be better to get any performance overhead for generating the + lat/lon data out of the way during the initial request. + + + Args: + lazyLoadGridLatLon: Boolean value indicating whether to lazy load. + """ + try: + router.setLazyLoadGridLatLon(lazyLoadGridLatLon) + except AttributeError: + # The router is not required to support this capability. 
+ pass diff --git a/awips/dataaccess/DataNotificationLayer.py b/awips/dataaccess/DataNotificationLayer.py new file mode 100644 index 0000000..d1297fd --- /dev/null +++ b/awips/dataaccess/DataNotificationLayer.py @@ -0,0 +1,135 @@ +# +# Published interface for retrieving data updates via awips.dataaccess package +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# May 26, 2016 2416 rjpeter Initial Creation. +# Aug 1, 2016 2416 tgurney Finish implementation +# +# + +""" +Interface for the DAF's data notification feature, which allows continuous +retrieval of new data as it is coming into the system. + +There are two ways to access this feature: + +1. The DataQueue module (awips.dataaccess.DataQueue) offers a collection that +automatically fills up with new data as it receives notifications. See that +module for more information. + +2. Depending on the type of data you want, use either getGridDataUpdates() or +getGeometryDataUpdates() in this module. Either one will give you back an +object that will retrieve new data for you and will call a function you specify +each time new data is received. + +Example code follows. This example prints temperature as observed from KOMA +each time a METAR is received from there. + + from awips.dataaccess import DataAccessLayer as DAL + from awips.dataaccess import DataNotificationLayer as DNL + + def process_obs(list_of_data): + for item in list_of_data: + print(item.getNumber('temperature')) + + request = DAL.newDataRequest('obs') + request.setParameters('temperature') + request.setLocationNames('KOMA') + + notifier = DNL.getGeometryDataUpdates(request) + notifier.subscribe(process_obs) + # process_obs will called with a list of data each time new data comes in + +""" + +import re +import sys +from awips.dataaccess.PyGeometryNotification import PyGeometryNotification +from awips.dataaccess.PyGridNotification import PyGridNotification + + +THRIFT_HOST = "edex" + +USING_NATIVE_THRIFT = False + +JMS_HOST_PATTERN = re.compile('tcp://([^:]+):([0-9]+)') + +if 'jep' in sys.modules: + # intentionally do not catch if this fails to import, we want it to + # be obvious that something is configured wrong when running from within + # Java instead of allowing false confidence and fallback behavior + import JepRouter + router = JepRouter +else: + from awips.dataaccess import ThriftClientRouter + router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) + USING_NATIVE_THRIFT = True + + +def _getJmsConnectionInfo(notifFilterResponse): + serverString = notifFilterResponse.getJmsConnectionInfo() + try: + host, port = JMS_HOST_PATTERN.match(serverString).groups() + except AttributeError: + raise RuntimeError('Got bad JMS connection info from server: ' + serverString) + return {'host': host, 'port': port} + + +def getGridDataUpdates(request): + """ + Get a notification object that receives updates to grid data. + + Args: + request: the IDataRequest specifying the data you want to receive + + Returns: + an update request object that you can listen for updates to by + calling its subscribe() method + """ + response = router.getNotificationFilter(request) + notificationFilter = response.getNotificationFilter() + jmsInfo = _getJmsConnectionInfo(response) + notifier = PyGridNotification(request, notificationFilter, + requestHost=THRIFT_HOST, **jmsInfo) + return notifier + + +def getGeometryDataUpdates(request): + """ + Get a notification object that receives updates to geometry data. 
+ + Args: + request: the IDataRequest specifying the data you want to receive + + Returns: + an update request object that you can listen for updates to by + calling its subscribe() method + """ + response = router.getNotificationFilter(request) + notificationFilter = response.getNotificationFilter() + jmsInfo = _getJmsConnectionInfo(response) + notifier = PyGeometryNotification(request, notificationFilter, + requestHost=THRIFT_HOST, **jmsInfo) + return notifier + + +def changeEDEXHost(newHostName): + """ + Changes the EDEX host the Data Access Framework is communicating with. Only + works if using the native Python client implementation, otherwise, this + method will throw a TypeError. + + Args: + newHostName: the EDEX host to connect to + """ + if USING_NATIVE_THRIFT: + global THRIFT_HOST + THRIFT_HOST = newHostName + global router + router = ThriftClientRouter.ThriftClientRouter(THRIFT_HOST) + else: + raise TypeError("Cannot call changeEDEXHost when using JepRouter.") diff --git a/awips/dataaccess/DataQueue.py b/awips/dataaccess/DataQueue.py new file mode 100644 index 0000000..2ea2d71 --- /dev/null +++ b/awips/dataaccess/DataQueue.py @@ -0,0 +1,190 @@ +# +# Convenience class for using the DAF's notifications feature. This is a +# collection that, once connected to EDEX by calling start(), fills with +# data as notifications come in. Runs on a separate thread to allow +# non-blocking data retrieval. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/29/16 2416 tgurney Initial creation +# + +from awips.dataaccess import DataNotificationLayer as DNL + +import time +from threading import Thread +import sys + + +if sys.version_info.major == 2: + from Queue import Queue, Empty +else: # Python 3 module renamed to 'queue' + from queue import Queue, Empty + +# Used to indicate a DataQueue that will produce geometry data. +GEOMETRY = object() + +# Used to indicate a DataQueue that will produce grid data. +GRID = object() + +# Default maximum queue size. +_DEFAULT_MAXSIZE = 100 + + +class Closed(Exception): + """Raised when attempting to get data from a closed queue.""" + pass + + +class DataQueue(object): + + """ + Convenience class for using the DAF's notifications feature. This is a + collection that, once connected to EDEX by calling start(), fills with + data as notifications come in. + + Example for getting obs data: + + from DataQueue import DataQueue, GEOMETRY + request = DataAccessLayer.newDataRequest('obs') + request.setParameters('temperature') + request.setLocationNames('KOMA') + q = DataQueue(GEOMETRY, request) + q.start() + for item in q: + print(item.getNumber('temperature')) + """ + + def __init__(self, dtype, request, maxsize=_DEFAULT_MAXSIZE): + """ + Create a new DataQueue. + + Args: + dtype: Either GRID or GEOMETRY; must match the type of data + requested. + request: IDataRequest describing the data you want. It must at + least have datatype set. All data produced will satisfy the + constraints you specify. + maxsize: Maximum number of data objects the queue can hold at + one time. If the limit is reached, any data coming in after + that will not appear until one or more items are removed using + DataQueue.get(). 
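+
+        Example (illustrative; "request" is an existing IDataRequest):
+            q = DataQueue(GRID, request, maxsize=50)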
+ """ + assert maxsize > 0 + assert dtype in (GEOMETRY, GRID) + self._maxsize = maxsize + self._queue = Queue(maxsize=maxsize) + self._thread = None + if dtype is GEOMETRY: + self._notifier = DNL.getGeometryDataUpdates(request) + elif dtype is GRID: + self._notifier = DNL.getGridDataUpdates(request) + + def start(self): + """Start listening for notifications and requesting data.""" + if self._thread is not None: + # Already started + return + kwargs = {'callback': self._data_received} + self._thread = Thread(target=self._notifier.subscribe, kwargs=kwargs) + self._thread.daemon = True + self._thread.start() + timer = 0 + while not self._notifier.subscribed: + time.sleep(0.1) + timer += 1 + if timer >= 100: # ten seconds + raise RuntimeError('timed out when attempting to subscribe') + + def _data_received(self, data): + for d in data: + if not isinstance(d, list): + d = [d] + for item in d: + self._queue.put(item) + + def get(self, block=True, timeout=None): + """ + Get and return the next available data object. By default, if there is + no data yet available, this method will not return until data becomes + available. + + Args: + block: Specifies behavior when the queue is empty. If True, wait + until an item is available before returning (the default). If + False, return None immediately if the queue is empty. + timeout: If block is True, wait this many seconds, and return None + if data is not received in that time. + Returns: + IData + """ + if self.closed: + raise Closed + try: + return self._queue.get(block, timeout) + except Empty: + return None + + def get_all(self): + """ + Get all data waiting for processing, in a single list. Always returns + immediately. Returns an empty list if no data has arrived yet. + + Returns: + List of IData + """ + data = [] + for _ in range(self._maxsize): + next_item = self.get(False) + if next_item is None: + break + data.append(next_item) + return data + + def close(self): + """Close the queue. May not be re-opened after closing.""" + if not self.closed: + self._notifier.close() + self._thread.join() + + def qsize(self): + """Return number of items in the queue.""" + return self._queue.qsize() + + def empty(self): + """Return True if the queue is empty.""" + return self._queue.empty() + + def full(self): + """Return True if the queue is full.""" + return self._queue.full() + + @property + def closed(self): + """True if the queue has been closed.""" + return not self._notifier.subscribed + + @property + def maxsize(self): + """ + Maximum number of data objects the queue can hold at one time. + If this limit is reached, any data coming in after that will not appear + until one or more items are removed using get(). + """ + return self._maxsize + + def __iter__(self): + if self._thread is not None: + while not self.closed: + yield self.get() + + def __enter__(self): + self.start() + return self + + def __exit__(self, *unused): + self.close() diff --git a/awips/dataaccess/ModelSounding.py b/awips/dataaccess/ModelSounding.py new file mode 100644 index 0000000..3c5904f --- /dev/null +++ b/awips/dataaccess/ModelSounding.py @@ -0,0 +1,231 @@ +# +# Classes for retrieving soundings based on gridded data from the Data Access +# Framework +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/24/15 #4480 dgilling Initial Creation. 
+# + +from awips.dataaccess import DataAccessLayer +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level +from shapely.geometry import Point + + +def getSounding(modelName, weatherElements, levels, samplePoint, timeRange=None): + """ + Performs a series of Data Access Framework requests to retrieve a sounding object + based on the specified request parameters. + + Args: + modelName: the grid model datasetid to use as the basis of the sounding. + weatherElements: a list of parameters to return in the sounding. + levels: a list of levels to sample the given weather elements at + samplePoint: a lat/lon pair to perform the sampling of data at. + timeRange: (optional) a list of times, or a TimeRange to specify + which forecast hours to use. If not specified, will default to all forecast hours. + + Returns: + A _SoundingCube instance, which acts a 3-tiered dictionary, keyed + by DataTime, then by level and finally by weather element. If no + data is available for the given request parameters, None is returned. + + """ + + (locationNames, parameters, levels, envelope, timeRange) = \ + __sanitizeInputs(modelName, weatherElements, levels, samplePoint, timeRange) + + requestArgs = {'datatype': 'grid', 'locationNames': locationNames, + 'parameters': parameters, 'levels': levels, 'envelope': envelope} + + req = DataAccessLayer.newDataRequest(**requestArgs) + response = DataAccessLayer.getGeometryData(req, timeRange) + soundingObject = _SoundingCube(response) + + return soundingObject + + +def changeEDEXHost(host): + """ + Changes the EDEX host the Data Access Framework is communicating with. + + Args: + host: the EDEX host to connect to + """ + + if host: + DataAccessLayer.changeEDEXHost(str(host)) + + +def __sanitizeInputs(modelName, weatherElements, levels, samplePoint, timeRange): + locationNames = [str(modelName)] + parameters = __buildStringList(weatherElements) + levels = __buildStringList(levels) + envelope = Point(samplePoint) + return locationNames, parameters, levels, envelope, timeRange + + +def __buildStringList(param): + if __notStringIter(param): + return [str(item) for item in param] + else: + return [str(param)] + + +def __notStringIter(iterable): + if not isinstance(iterable, str): + try: + iter(iterable) + return True + except TypeError: + return False + + +class _SoundingCube(object): + """ + The top-level sounding object returned when calling ModelSounding.getSounding. + + This object acts as a 3-tiered dict which is keyed by time then level + then parameter name. Calling times() will return all valid keys into this + object. + """ + + def __init__(self, geometryDataObjects): + self._dataDict = {} + self._sortedTimes = [] + if geometryDataObjects: + for geometryData in geometryDataObjects: + dataTime = geometryData.getDataTime() + level = geometryData.getLevel() + for parameter in geometryData.getParameters(): + self.__addItem(parameter, dataTime, level, geometryData.getNumber(parameter)) + + def __addItem(self, parameter, dataTime, level, value): + timeLayer = self._dataDict.get(dataTime, _SoundingTimeLayer(dataTime)) + self._dataDict[dataTime] = timeLayer + timeLayer._addItem(parameter, level, value) + if dataTime not in self._sortedTimes: + self._sortedTimes.append(dataTime) + self._sortedTimes.sort() + + def __getitem__(self, key): + return self._dataDict[key] + + def __len__(self): + return len(self._dataDict) + + def times(self): + """ + Returns the valid times for this sounding. 
+ + Returns: + A list containing the valid DataTimes for this sounding in order. + """ + return self._sortedTimes + + +class _SoundingTimeLayer(object): + """ + The second-level sounding object returned when calling ModelSounding.getSounding. + + This object acts as a 2-tiered dict which is keyed by level then parameter + name. Calling levels() will return all valid keys into this + object. Calling time() will return the DataTime for this particular layer. + """ + + def __init__(self, dataTime): + self._dataTime = dataTime + self._dataDict = {} + + def _addItem(self, parameter, level, value): + asString = str(level) + levelLayer = self._dataDict.get(asString, _SoundingTimeAndLevelLayer(self._dataTime, asString)) + levelLayer._addItem(parameter, value) + self._dataDict[asString] = levelLayer + + def __getitem__(self, key): + asString = str(key) + if asString in self._dataDict: + return self._dataDict[asString] + else: + raise KeyError("Level " + str(key) + " is not a valid level for this sounding.") + + def __len__(self): + return len(self._dataDict) + + def time(self): + """ + Returns the DataTime for this sounding cube layer. + + Returns: + The DataTime for this sounding layer. + """ + return self._dataTime + + def levels(self): + """ + Returns the valid levels for this sounding. + + Returns: + A list containing the valid levels for this sounding in order of + closest to surface to highest from surface. + """ + sortedLevels = [Level(level) for level in list(self._dataDict.keys())] + sortedLevels.sort() + return [str(level) for level in sortedLevels] + + +class _SoundingTimeAndLevelLayer(object): + """ + The bottom-level sounding object returned when calling ModelSounding.getSounding. + + This object acts as a dict which is keyed by parameter name. Calling + parameters() will return all valid keys into this object. Calling time() + will return the DataTime for this particular layer. Calling level() will + return the level for this layer. + """ + + def __init__(self, time, level): + self._time = time + self._level = level + self._parameters = {} + + def _addItem(self, parameter, value): + self._parameters[parameter] = value + + def __getitem__(self, key): + return self._parameters[key] + + def __len__(self): + return len(self._parameters) + + def level(self): + """ + Returns the level for this sounding cube layer. + + Returns: + The level for this sounding layer. + """ + return self._level + + def parameters(self): + """ + Returns the valid parameters for this sounding. + + Returns: + A list containing the valid parameter names. + """ + return list(self._parameters.keys()) + + def time(self): + """ + Returns the DataTime for this sounding cube layer. + + Returns: + The DataTime for this sounding layer. + """ + return self._time diff --git a/awips/dataaccess/PyData.py b/awips/dataaccess/PyData.py new file mode 100644 index 0000000..01e3150 --- /dev/null +++ b/awips/dataaccess/PyData.py @@ -0,0 +1,44 @@ +# +# Implements IData for use by native Python clients to the Data Access +# Framework. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/03/13 dgilling Initial Creation. +# 10/05/18 mjames@ucar Encode/decode attribute names. 
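+#
+# Note: PyData is the common base class for PyGridData and PyGeometryData,
+# which are the objects actually handed back to callers by the Data Access
+# routers; it is not normally instantiated directly.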
+# +# + +from awips.dataaccess import IData +import six + + +class PyData(IData): + + def __init__(self, dataRecord): + self.__time = dataRecord.getTime() + self.__level = dataRecord.getLevel() + self.__locationName = dataRecord.getLocationName() + self.__attributes = dataRecord.getAttributes() + + def getAttribute(self, key): + return self.__attributes[key] + + def getAttributes(self): + return self.__attributes.keys() + + def getDataTime(self): + return self.__time + + def getLevel(self): + if six.PY2: + return self.__level + if not isinstance(self.__level, str): + return self.__level.decode('utf-8') + return self.__level + + def getLocationName(self): + return self.__locationName diff --git a/awips/dataaccess/PyGeometryData.py b/awips/dataaccess/PyGeometryData.py new file mode 100644 index 0000000..cc09f2c --- /dev/null +++ b/awips/dataaccess/PyGeometryData.py @@ -0,0 +1,81 @@ +# +# Implements IGeometryData for use by native Python clients to the Data Access +# Framework. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/03/13 dgilling Initial Creation. +# 01/06/14 2537 bsteffen Share geometry WKT. +# 03/19/14 2882 dgilling Raise an exception when getNumber() +# is called for data that is not a +# numeric Type. +# 06/09/16 5574 mapeters Handle 'SHORT' type in getNumber(). +# 10/05/18 mjames@ucar Encode/decode string, number val, and type +# +# + +from awips.dataaccess import IGeometryData +from awips.dataaccess import PyData +import six + + +class PyGeometryData(IGeometryData, PyData.PyData): + + def __init__(self, geoDataRecord, geometry): + PyData.PyData.__init__(self, geoDataRecord) + self.__geometry = geometry + self.__dataMap = {} + tempDataMap = geoDataRecord.getDataMap() + for key, value in list(tempDataMap.items()): + self.__dataMap[key] = (value[0], value[1], value[2]) + + def getGeometry(self): + return self.__geometry + + def getParameters(self): + if six.PY2: + return list(self.__dataMap.keys()) + else: + return [x.decode('utf-8') for x in list(self.__dataMap.keys())] + + def getString(self, param): + if six.PY2: + return self.__dataMap[param][0] + value = self.__dataMap[param.encode('utf-8')][0] + if isinstance(value, bytes): + return str(value.decode('utf-8')) + return str(value) + + def getNumber(self, param): + t = self.getType(param) + if six.PY2: + value = self.__dataMap[param][0] + else: + value = self.__dataMap[param.encode('utf-8')][0] + if t == 'INT' or t == 'SHORT' or t == 'LONG': + return int(value) + elif t == 'FLOAT': + return float(value) + elif t == 'DOUBLE': + return float(value) + else: + raise TypeError("Data for parameter " + param + " is not a numeric type.") + + def getUnit(self, param): + if six.PY2: + return self.__dataMap[param][2] + unit = self.__dataMap[param.encode('utf-8')][2] + if unit is not None: + return unit.decode('utf-8') + return unit + + def getType(self, param): + if six.PY2: + return self.__dataMap[param][1] + datatype = self.__dataMap[param.encode('utf-8')][1] + if datatype is not None: + return datatype.decode('utf-8') + return datatype diff --git a/awips/dataaccess/PyGeometryNotification.py b/awips/dataaccess/PyGeometryNotification.py new file mode 100644 index 0000000..8884d43 --- /dev/null +++ b/awips/dataaccess/PyGeometryNotification.py @@ -0,0 +1,35 @@ +# +# Notification object that produces geometry data +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 
07/22/16 2416 tgurney Initial creation +# 09/07/17 6175 tgurney Override messageReceived +# + +import traceback +import dynamicserialize +from awips.dataaccess.PyNotification import PyNotification + + +class PyGeometryNotification(PyNotification): + + def messageReceived(self, msg): + dataUriMsg = dynamicserialize.deserialize(msg) + dataUris = dataUriMsg.getDataURIs() + dataTimes = set() + for dataUri in dataUris: + if self.notificationFilter.accept(dataUri): + dataTimes.add(self.getDataTime(dataUri)) + if dataTimes: + try: + data = self.getData(self.request, list(dataTimes)) + self.callback(data) + except ValueError: + traceback.print_exc() + + def getData(self, request, dataTimes): + return self.DAL.getGeometryData(request, dataTimes) diff --git a/awips/dataaccess/PyGridData.py b/awips/dataaccess/PyGridData.py new file mode 100644 index 0000000..4dece5b --- /dev/null +++ b/awips/dataaccess/PyGridData.py @@ -0,0 +1,64 @@ +# +# Implements IGridData for use by native Python clients to the Data Access +# Framework. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/03/13 #2023 dgilling Initial Creation. +# 10/13/16 #5916 bsteffen Correct grid shape, allow lat/lon +# 11/10/16 #5900 bsteffen Correct grid shape +# to be requested by a delegate +# +# + +import numpy +import warnings +import six + +from awips.dataaccess import IGridData +from awips.dataaccess import PyData + +NO_UNIT_CONVERT_WARNING = """ +The ability to unit convert grid data is not currently available in this version of the Data Access Framework. +""" + + +class PyGridData(IGridData, PyData.PyData): + + def __init__(self, gridDataRecord, nx, ny, latLonGrid=None, latLonDelegate=None): + PyData.PyData.__init__(self, gridDataRecord) + nx = nx + ny = ny + self.__parameter = gridDataRecord.getParameter() + self.__unit = gridDataRecord.getUnit() + self.__gridData = numpy.reshape(numpy.array(gridDataRecord.getGridData()), (ny, nx)) + self.__latLonGrid = latLonGrid + self.__latLonDelegate = latLonDelegate + + def getParameter(self): + return self.__parameter + + def getUnit(self): + if six.PY2: + return self.__unit + if self.__unit is not None and not isinstance(self.__unit, str): + return self.__unit.decode('utf-8') + return self.__unit + + def getRawData(self, unit=None): + # TODO: Find a proper python library that deals will with numpy and + # javax.measure style unit strings and hook it in to this method to + # allow end-users to perform unit conversion for grid data. + if unit is not None: + warnings.warn(NO_UNIT_CONVERT_WARNING, stacklevel=2) + return self.__gridData + + def getLatLonCoords(self): + if self.__latLonGrid is not None: + return self.__latLonGrid + elif self.__latLonDelegate is not None: + return self.__latLonDelegate() + return self.__latLonGrid diff --git a/awips/dataaccess/PyGridNotification.py b/awips/dataaccess/PyGridNotification.py new file mode 100644 index 0000000..340fb65 --- /dev/null +++ b/awips/dataaccess/PyGridNotification.py @@ -0,0 +1,40 @@ +# +# Notification object that produces grid data +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/03/16 2416 rjpeter Initial Creation. 
+# 09/06/17 6175 tgurney Override messageReceived +# + +import dynamicserialize +import traceback +from awips.dataaccess.PyNotification import PyNotification + + +class PyGridNotification(PyNotification): + + def messageReceived(self, msg): + dataUriMsg = dynamicserialize.deserialize(msg) + dataUris = dataUriMsg.getDataURIs() + for dataUri in dataUris: + if not self.notificationFilter.accept(dataUri): + continue + try: + # This improves performance over requesting by datatime since it requests only the + # parameter that the notification was received for (instead of this and all previous + # parameters for the same forecast hour) + # TODO: This utterly fails for derived requests + newReq = self.DAL.newDataRequest(self.request.getDatatype()) + newReq.addIdentifier("dataURI", dataUri) + newReq.setParameters(self.request.getParameters()) + data = self.getData(newReq, []) + self.callback(data) + except ValueError: + traceback.print_exc() + + def getData(self, request, dataTimes): + return self.DAL.getGridData(request, dataTimes) diff --git a/awips/dataaccess/PyNotification.py b/awips/dataaccess/PyNotification.py new file mode 100644 index 0000000..f24851c --- /dev/null +++ b/awips/dataaccess/PyNotification.py @@ -0,0 +1,85 @@ +# +# Implements IData for use by native Python clients to the Data Access +# Framework. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jun 22, 2016 2416 rjpeter Initial creation +# Jul 22, 2016 2416 tgurney Finish implementation +# Sep 07, 2017 6175 tgurney Override messageReceived in subclasses +# + +from six import with_metaclass +import abc + +from awips.dataaccess import DataAccessLayer +from awips.dataaccess import INotificationSubscriber +from awips.QpidSubscriber import QpidSubscriber +from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime + + +class PyNotification(with_metaclass(abc.ABCMeta, INotificationSubscriber)): + """ + Receives notifications for new data and retrieves the data that meets + specified filtering criteria. + """ + + def __init__(self, request, notificationFilter, host='localhost', + port=5672, requestHost='localhost'): + self.DAL = DataAccessLayer + self.DAL.changeEDEXHost(requestHost) + self.request = request + self.notificationFilter = notificationFilter + self.__topicSubscriber = QpidSubscriber(host, port, decompress=True) + self.__topicName = "edex.alerts" + self.callback = None + + def subscribe(self, callback): + """ + Start listening for notifications. + + Args: + callback: Function to call with a list of received data objects. + Will be called once for each request made for data. + """ + assert hasattr(callback, '__call__'), 'callback arg must be callable' + self.callback = callback + self.__topicSubscriber.topicSubscribe(self.__topicName, self.messageReceived) + # Blocks here + + def close(self): + if self.__topicSubscriber.subscribed: + self.__topicSubscriber.close() + + def getDataTime(self, dataURI): + dataTimeStr = dataURI.split('/')[2] + return DataTime(dataTimeStr) + + @abc.abstractmethod + def messageReceived(self, msg): + """Called when a message is received from QpidSubscriber. 
+ + This method must call self.callback once for each request made for data + """ + pass + + @abc.abstractmethod + def getData(self, request, dataTimes): + """ + Retrieve and return data + + Args: + request: IDataRequest to send to the server + dataTimes: list of data times + Returns: + list of IData + """ + pass + + @property + def subscribed(self): + """True if currently subscribed to notifications.""" + return self.__topicSubscriber.queueStarted diff --git a/awips/dataaccess/ThriftClientRouter.py b/awips/dataaccess/ThriftClientRouter.py new file mode 100644 index 0000000..7637bc8 --- /dev/null +++ b/awips/dataaccess/ThriftClientRouter.py @@ -0,0 +1,257 @@ +# +# Routes requests to the Data Access Framework through Python Thrift. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/21/13 2023 dgilling Initial Creation. +# 01/06/14 2537 bsteffen Share geometry WKT. +# 03/03/14 2673 bsteffen Add ability to query only ref times. +# 07/22/14 3185 njensen Added optional/default args to newDataRequest +# 07/23/14 3185 njensen Added new methods +# 07/30/14 3185 njensen Renamed valid identifiers to optional +# 06/30/15 4569 nabowle Use hex WKB for geometries. +# 04/13/15 5379 tgurney Add getIdentifierValues() +# 06/01/16 5587 tgurney Add new signatures for +# getRequiredIdentifiers() and +# getOptionalIdentifiers() +# 08/01/16 2416 tgurney Add getNotificationFilter() +# 10/13/16 5916 bsteffen Correct grid shape, allow lazy grid lat/lon +# 10/26/16 5919 njensen Speed up geometry creation in getGeometryData() +# + +import numpy +import six +import shapely.wkb + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultDataRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLocationNamesRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGeometryDataRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridDataRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetGridLatLonRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableParametersRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableLevelsRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetRequiredIdentifiersRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetOptionalIdentifiersRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetIdentifierValuesRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetSupportedDatatypesRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetNotificationFilterRequest + +from awips import ThriftClient +from awips.dataaccess import PyGeometryData +from awips.dataaccess import PyGridData + + +class LazyGridLatLon(object): + + def __init__(self, client, nx, ny, envelope, crsWkt): + self._latLonGrid = None + self._client = client + self._request = GetGridLatLonRequest() + self._request.setNx(nx) + self._request.setNy(ny) + self._request.setEnvelope(envelope) + self._request.setCrsWkt(crsWkt) + + def __call__(self): + # Its important that the data is cached internally so that 
if multiple + # GridData are sharing the same delegate then they can also share a + # single request for the LatLon information. + if self._latLonGrid is None: + response = self._client.sendRequest(self._request) + nx = response.getNx() + ny = response.getNy() + latData = numpy.reshape(numpy.array(response.getLats()), (ny, nx)) + lonData = numpy.reshape(numpy.array(response.getLons()), (ny, nx)) + self._latLonGrid = (lonData, latData) + return self._latLonGrid + + +class ThriftClientRouter(object): + + def __init__(self, host='localhost'): + self._client = ThriftClient.ThriftClient(host) + self._lazyLoadGridLatLon = False + + def setLazyLoadGridLatLon(self, lazyLoadGridLatLon): + self._lazyLoadGridLatLon = lazyLoadGridLatLon + + def getAvailableTimes(self, request, refTimeOnly): + timesRequest = GetAvailableTimesRequest() + timesRequest.setRequestParameters(request) + timesRequest.setRefTimeOnly(refTimeOnly) + response = self._client.sendRequest(timesRequest) + return response + + def getGridData(self, request, times): + gridDataRequest = GetGridDataRequest() + gridDataRequest.setIncludeLatLonData(not self._lazyLoadGridLatLon) + gridDataRequest.setRequestParameters(request) + # if we have an iterable times instance, then the user must have asked + # for grid data with the List of DataTime objects + # else, we assume it was a single TimeRange that was meant for the + # request + try: + iter(times) + gridDataRequest.setRequestedTimes(times) + except TypeError: + gridDataRequest.setRequestedPeriod(times) + response = self._client.sendRequest(gridDataRequest) + + locSpecificData = {} + locNames = list(response.getSiteNxValues().keys()) + for location in locNames: + nx = response.getSiteNxValues()[location] + ny = response.getSiteNyValues()[location] + if self._lazyLoadGridLatLon: + envelope = response.getSiteEnvelopes()[location] + crsWkt = response.getSiteCrsWkt()[location] + delegate = LazyGridLatLon( + self._client, nx, ny, envelope, crsWkt) + locSpecificData[location] = (nx, ny, delegate) + else: + latData = numpy.reshape(numpy.array( + response.getSiteLatGrids()[location]), (ny, nx)) + lonData = numpy.reshape(numpy.array( + response.getSiteLonGrids()[location]), (ny, nx)) + locSpecificData[location] = (nx, ny, (lonData, latData)) + retVal = [] + for gridDataRecord in response.getGridData(): + locationName = gridDataRecord.getLocationName() + if locationName is not None: + if six.PY2: + locData = locSpecificData[locationName] + else: + locData = locSpecificData[locationName.encode('utf-8')] + else: + locData = locSpecificData[locationName] + if self._lazyLoadGridLatLon: + retVal.append(PyGridData.PyGridData(gridDataRecord, locData[ + 0], locData[1], latLonDelegate=locData[2])) + else: + retVal.append(PyGridData.PyGridData( + gridDataRecord, locData[0], locData[1], locData[2])) + return retVal + + def getGeometryData(self, request, times): + geoDataRequest = GetGeometryDataRequest() + geoDataRequest.setRequestParameters(request) + # if we have an iterable times instance, then the user must have asked + # for geometry data with the List of DataTime objects + # else, we assume it was a single TimeRange that was meant for the + # request + try: + iter(times) + geoDataRequest.setRequestedTimes(times) + except TypeError: + geoDataRequest.setRequestedPeriod(times) + response = self._client.sendRequest(geoDataRequest) + geometries = [] + for wkb in response.getGeometryWKBs(): + # the wkb is a numpy.ndarray of dtype int8 + # convert the bytearray to a byte string and load it + 
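# note: ndarray.tostring() is a legacy alias for tobytes(); either one
+            # returns the raw WKB bytes that shapely.wkb.loads() expects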
geometries.append(shapely.wkb.loads(wkb.tostring())) + + retVal = [] + for geoDataRecord in response.getGeoData(): + geom = geometries[geoDataRecord.getGeometryWKBindex()] + retVal.append(PyGeometryData.PyGeometryData(geoDataRecord, geom)) + return retVal + + def getAvailableLocationNames(self, request): + locNamesRequest = GetAvailableLocationNamesRequest() + locNamesRequest.setRequestParameters(request) + response = self._client.sendRequest(locNamesRequest) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def getAvailableParameters(self, request): + paramReq = GetAvailableParametersRequest() + paramReq.setRequestParameters(request) + response = self._client.sendRequest(paramReq) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def getAvailableLevels(self, request): + levelReq = GetAvailableLevelsRequest() + levelReq.setRequestParameters(request) + response = self._client.sendRequest(levelReq) + return response + + def getRequiredIdentifiers(self, request): + if str(request) == request: + # Handle old version getRequiredIdentifiers(str) + request = self.newDataRequest(request) + idReq = GetRequiredIdentifiersRequest() + idReq.setRequest(request) + response = self._client.sendRequest(idReq) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def getOptionalIdentifiers(self, request): + if str(request) == request: + # Handle old version getOptionalIdentifiers(str) + request = self.newDataRequest(request) + idReq = GetOptionalIdentifiersRequest() + idReq.setRequest(request) + response = self._client.sendRequest(idReq) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def getIdentifierValues(self, request, identifierKey): + idValReq = GetIdentifierValuesRequest() + idValReq.setIdentifierKey(identifierKey) + idValReq.setRequestParameters(request) + response = self._client.sendRequest(idValReq) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def newDataRequest(self, datatype, parameters=[], levels=[], locationNames=[], + envelope=None, **kwargs): + req = DefaultDataRequest() + if datatype: + req.setDatatype(datatype) + if parameters: + req.setParameters(*parameters) + if levels: + req.setLevels(*levels) + if locationNames: + req.setLocationNames(*locationNames) + if envelope: + req.setEnvelope(envelope) + if kwargs: + # any args leftover are assumed to be identifiers + req.identifiers = kwargs + return req + + def getSupportedDatatypes(self): + response = self._client.sendRequest(GetSupportedDatatypesRequest()) + if six.PY2: + return response + if response is not None: + return [x.decode('utf-8') for x in response] + return response + + def getNotificationFilter(self, request): + notifReq = GetNotificationFilterRequest() + notifReq.setRequestParameters(request) + response = self._client.sendRequest(notifReq) + return response diff --git a/awips/dataaccess/__init__.py b/awips/dataaccess/__init__.py new file mode 100644 index 0000000..14a7fbf --- /dev/null +++ b/awips/dataaccess/__init__.py @@ -0,0 +1,370 @@ +# +# __init__.py for awips.dataaccess package +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 12/10/12 njensen Initial 
Creation. +# Feb 14, 2013 1614 bsteffen refactor data access framework +# to use single request. +# Apr 09, 2013 1871 njensen Add doc strings +# Jun 03, 2013 2023 dgilling Add getAttributes to IData, add +# getLatLonGrids() to IGridData. +# Aug 01, 2016 2416 tgurney Add INotificationSubscriber +# and INotificationFilter +# +# + +__all__ = [ + 'IData', + 'IDataRequest', + 'IGeometryData', + 'IGridData', + 'IGeometryData', + 'INotificationFilter', + 'INotificationSubscriber' +] + +import abc +from six import with_metaclass + + +class IDataRequest(with_metaclass(abc.ABCMeta, object)): + """ + An IDataRequest to be submitted to the DataAccessLayer to retrieve data. + """ + + @abc.abstractmethod + def setDatatype(self, datatype): + """ + Sets the datatype of the request. + + Args: + datatype: A string of the datatype, such as "grid", "radar", "gfe", "obs" + """ + return + + @abc.abstractmethod + def addIdentifier(self, key, value): + """ + Adds an identifier to the request. Identifiers are specific to the + datatype being requested. + + Args: + key: the string key of the identifier + value: the value of the identifier + """ + return + + @abc.abstractmethod + def setParameters(self, params): + """ + Sets the parameters of data to request. + + Args: + params: a list of strings of parameters to request + """ + return + + @abc.abstractmethod + def setLevels(self, levels): + """ + Sets the levels of data to request. Not all datatypes support levels. + + Args: + levels: a list of strings of level abbreviations to request + """ + return + + @abc.abstractmethod + def setEnvelope(self, env): + """ + Sets the envelope of the request. If supported by the datatype factory, + the data returned for the request will be constrained to only the data + within the envelope. + + Args: + env: a shapely geometry + """ + return + + @abc.abstractmethod + def setLocationNames(self, locationNames): + """ + Sets the location names of the request. + + Args: + locationNames: a list of strings of location names to request + """ + return + + @abc.abstractmethod + def getDatatype(self): + """ + Gets the datatype of the request + + Returns: + the datatype set on the request + """ + return + + @abc.abstractmethod + def getIdentifiers(self): + """ + Gets the identifiers on the request + + Returns: + a dictionary of the identifiers + """ + return + + @abc.abstractmethod + def getLevels(self): + """ + Gets the levels on the request + + Returns: + a list of strings of the levels + """ + return + + @abc.abstractmethod + def getLocationNames(self): + """ + Gets the location names on the request + + Returns: + a list of strings of the location names + """ + return + + @abc.abstractmethod + def getEnvelope(self): + """ + Gets the envelope on the request + + Returns: + a rectangular shapely geometry + """ + return + + +class IData(with_metaclass(abc.ABCMeta, object)): + """ + An IData representing data returned from the DataAccessLayer. + """ + + @abc.abstractmethod + def getAttribute(self, key): + """ + Gets an attribute of the data. + + Args: + key: the key of the attribute + + Returns: + the value of the attribute + """ + return + + @abc.abstractmethod + def getAttributes(self): + """ + Gets the valid attributes for the data. + + Returns: + a list of strings of the attribute names + """ + return + + @abc.abstractmethod + def getDataTime(self): + """ + Gets the data time of the data. 
+ + Returns: + the data time of the data, or None if no time is associated + """ + return + + @abc.abstractmethod + def getLevel(self): + """ + Gets the level of the data. + + Returns: + the level of the data, or None if no level is associated + """ + return + + @abc.abstractmethod + def getLocationName(self, param): + """ + Gets the location name of the data. + + Returns: + the location name of the data, or None if no location name is + associated + """ + return + + +class IGridData(IData): + """ + An IData representing grid data that is returned by the DataAccessLayer. + """ + + @abc.abstractmethod + def getParameter(self): + """ + Gets the parameter of the data. + + Returns: + the parameter of the data + """ + return + + @abc.abstractmethod + def getUnit(self): + """ + Gets the unit of the data. + + Returns: + the string abbreviation of the unit, or None if no unit is associated + """ + return + + @abc.abstractmethod + def getRawData(self): + """ + Gets the grid data as a numpy array. + + Returns: + a numpy array of the data + """ + return + + @abc.abstractmethod + def getLatLonCoords(self): + """ + Gets the lat/lon coordinates of the grid data. + + Returns: + a tuple where the first element is a numpy array of lons, and the + second element is a numpy array of lats + """ + return + + +class IGeometryData(IData): + """ + An IData representing geometry data that is returned by the DataAccessLayer. + """ + + @abc.abstractmethod + def getGeometry(self): + """ + Gets the geometry of the data. + + Returns: + a shapely geometry + """ + return + + @abc.abstractmethod + def getParameters(self): + """Gets the parameters of the data. + + Returns: + a list of strings of the parameter names + """ + return + + @abc.abstractmethod + def getString(self, param): + """ + Gets the string value of the specified param. + + Args: + param: the string name of the param + + Returns: + the string value of the param + """ + return + + @abc.abstractmethod + def getNumber(self, param): + """ + Gets the number value of the specified param. + + Args: + param: the string name of the param + + Returns: + the number value of the param + """ + return + + @abc.abstractmethod + def getUnit(self, param): + """ + Gets the unit of the specified param. + + Args: + param: the string name of the param + + Returns: + the string abbreviation of the unit of the param + """ + return + + @abc.abstractmethod + def getType(self, param): + """ + Gets the type of the param. + + Args: + param: the string name of the param + + Returns: + a string of the type of the parameter, such as + "STRING", "INT", "LONG", "FLOAT", or "DOUBLE" + """ + return + + +class INotificationSubscriber(with_metaclass(abc.ABCMeta, object)): + """ + An INotificationSubscriber representing a notification filter returned from + the DataNotificationLayer. + """ + + @abc.abstractmethod + def subscribe(self, callback): + """ + Subscribes to the requested data. Method will not return until close is + called in a separate thread. + + Args: + callback: the method to call with the IGridData/IGeometryData + + """ + pass + + @abc.abstractmethod + def close(self): + """Closes the notification subscriber""" + pass + + +class INotificationFilter(with_metaclass(abc.ABCMeta, object)): + """ + Represents data required to filter a set of URIs and + return a corresponding list of IDataRequest to retrieve data for. 
+ """ + @abc.abstractmethod + def accept(dataUri): + pass diff --git a/awips/gempak/GridDataRetriever.py b/awips/gempak/GridDataRetriever.py new file mode 100644 index 0000000..5525899 --- /dev/null +++ b/awips/gempak/GridDataRetriever.py @@ -0,0 +1,128 @@ +import os +import numpy +from datetime import datetime +from awips import ThriftClient +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridDataRequest + + +class GridDataRetriever: + + def __init__(self, server, pluginName, modelId, cycle, forecast, level1, level2, vcoord, param, nnx, nny): + self.pluginName = pluginName + self.modelId = modelId + self.cycle = cycle + self.forecast = forecast + self.level1 = level1 + self.level2 = level2 + self.vcoord = vcoord + self.param = param + self.nx = nnx + self.ny = nny + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getData(self): + """ Sends ThriftClient request and writes out received files.""" + req = GetGridDataRequest() + + req.setPluginName(self.pluginName) + req.setModelId(self.modelId) + + dt = datetime.strptime(self.cycle, '%y%m%d/%H%M') + ct = datetime.strftime(dt, '%Y-%m-%d %H:%M:%S') + req.setReftime(ct) + req.setFcstsec(self.forecast) + + if self.level1 == '-1': + f1 = -999999.0 + else: + f1 = float(self.level1) + + if self.level2 == '-1': + f2 = -999999.0 + else: + f2 = float(self.level2) + + vcoord = self.vcoord + if vcoord == 'SGMA': + if f1 >= 0.0: + f1 = f1 / 10000 + if f2 >= 0.0: + f2 = f2 / 10000 + elif vcoord == 'DPTH': + if f1 >= 0.0: + f1 = f1 / 100.0 + if f2 >= 0.0: + f2 = f2 / 100.0 + elif vcoord == 'POTV': + if f1 >= 0.0: + f1 = f1 / 1000.0 + if f2 >= 0.0: + f2 = f2 / 1000.0 + + req.setLevel1(str(f1)) + req.setLevel2(str(f2)) + req.setVcoord(vcoord) + + req.setParm(self.param) + + resp = self.client.sendRequest(req) + + # Get the dimensions of the grid + kx = int(self.nx) + ky = int(self.ny) + kxky = kx * ky + + # Put the data into a NUMPY array + grid = numpy.asarray(resp.getFloatData()) + + # All grids need to be flipped from a GEMPAK point of view + # Reshape the array into 2D + grid = numpy.reshape(grid, (ky, kx)) + # Flip the array in the up-down direction + grid = numpy.flipud(grid) + # Reshape the array back into 1D + grid = numpy.reshape(grid, kxky) + + return [replacemissing(x) for x in grid] + + +def getgriddata(server, table, model, cycle, forecast, level1, + level2, vcoord, param, nnx, nny): + gir = GridDataRetriever(server, table, model, cycle, forecast, + level1, level2, vcoord, param, nnx, nny) + return gir.getData() + + +def getheader(server, table, model, cycle, forecast, level1, + level2, vcoord, param, nnx, nny): + idata = [] + idata.append(0) + idata.append(0) + return idata + + +def replacemissing(x): + if x == -999999.0: + return -9999.0 + return x + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + # Run Test + srv = 'edex-cloud.unidata.ucar.edu' + tbl = 'grid' + mdl = 'GFS20' + cyc = '131227/0000' + fcs = '43200' + lv1 = '500' + lv2 = '-1' + vcd = 'PRES' + prm = 'HGHT' + nx = '720' + ny = '361' + + print(getheader(srv, tbl, mdl, cyc, fcs, lv1, lv2, vcd, prm, nx, ny)) + print(getgriddata(srv, tbl, mdl, cyc, fcs, lv1, lv2, vcd, prm, nx, ny)) diff --git a/awips/gempak/GridInfoRetriever.py b/awips/gempak/GridInfoRetriever.py new file mode 100644 index 0000000..8fc5f80 --- /dev/null +++ b/awips/gempak/GridInfoRetriever.py @@ -0,0 +1,145 @@ 
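+#
+# Retrieves GEMPAK-style grid inventory metadata from an EDEX server via a
+# Thrift GetGridInfoRequest and packs it into the flat integer list GEMPAK
+# expects (see getinfo() and the __main__ test at the bottom of this file).
+#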
+import os +import sys +from datetime import datetime +from operator import itemgetter +from awips import ThriftClient +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridInfoRequest + + +class GridInfoRetriever: + + def __init__(self, server, pluginName, modelId, cycle=None, forecast=None): + self.pluginName = pluginName + self.modelId = modelId + self.cycle = cycle + self.forecast = forecast + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getInfo(self): + import sys + """ Sends ThriftClient request and writes out received files.""" + req = GetGridInfoRequest() + req.setPluginName(self.pluginName) + req.setModelId(self.modelId) + + req.setReftime(self.cycle) + if len(self.cycle) > 2: + dt = datetime.strptime(self.cycle, '%y%m%d/%H%M') + ct = datetime.strftime(dt, '%Y-%m-%d %H:%M:%S') + req.setReftime(ct) + + req.setFcstsec(self.forecast) + resp = self.client.sendRequest(req) + + # Take care of bytestring encodings in python3 + for i, rec in enumerate(resp): + resp[i] = { + key.decode() if isinstance(key, bytes) else key: + val.decode() if isinstance(val, bytes) else val + for key, val in rec.items() + } + + sortresp = sorted(sorted(resp, key=itemgetter("reftime"), reverse=True), key=itemgetter("fcstsec")) + + grids = [] + + count = 0 + for record in sortresp: + s = '{:<12}'.format(record['param']) + + if sys.byteorder == 'little': + parm1 = (ord(s[3]) << 24) + (ord(s[2]) << 16) + (ord(s[1]) << 8) + ord(s[0]) + parm2 = (ord(s[7]) << 24) + (ord(s[6]) << 16) + (ord(s[5]) << 8) + ord(s[4]) + parm3 = (ord(s[11]) << 24) + (ord(s[10]) << 16) + (ord(s[9]) << 8) + ord(s[8]) + else: + parm1 = (ord(s[0]) << 24) + (ord(s[1]) << 16) + (ord(s[2]) << 8) + ord(s[3]) + parm2 = (ord(s[4]) << 24) + (ord(s[5]) << 16) + (ord(s[6]) << 8) + ord(s[7]) + parm3 = (ord(s[8]) << 24) + (ord(s[9]) << 16) + (ord(s[10]) << 8) + ord(s[11]) + + dt = datetime.strptime(record['reftime'], '%Y-%m-%d %H:%M:%S.%f') + dattim = dt.month * 100000000 + dt.day * 1000000 + (dt.year%100) * 10000 + dt.hour * 100 + dt.minute + fcsth = (int(record['fcstsec']) / 60) / 60 + fcstm = (int(record['fcstsec']) / 60) % 60 + fcst = 100000 + fcsth * 100 + fcstm + + lv1 = float(record['level1']) + if lv1 == -999999.0: + lv1 = -1.0 + lv2 = float(record['level2']) + if lv2 == -999999.0: + lv2 = -1.0 + + vcd = record['vcoord'] + if vcd == 'NONE': + ivcd = 0 + elif vcd == 'PRES': + ivcd = 1 + elif vcd == 'THTA': + ivcd = 2 + elif vcd == 'HGHT': + ivcd = 3 + elif vcd == 'SGMA': + ivcd = 4 + if lv1 >= 0.0: + lv1 = lv1 * 10000.0 + if lv2 >= 0.0: + lv2 = lv2 * 10000.0 + elif vcd == 'DPTH': + ivcd = 5 + if lv1 >= 0.0: + lv1 = lv1 * 100.0 + if lv2 >= 0.0: + lv2 = lv2 * 100.0 + elif vcd == 'HYBL': + ivcd = 6 + else: + v = '{:<4}'.format(vcd) + if sys.byteorder == 'little': + ivcd = (ord(v[3]) << 24) + (ord(v[2]) << 16) + (ord(v[1]) << 8) + ord(v[0]) + else: + ivcd = (ord(v[0]) << 24) + (ord(v[1]) << 16) + (ord(v[2]) << 8) + ord(v[3]) + if vcd == 'POTV': + if lv1 >= 0.0: + lv1 = lv1 * 1000.0 + if lv2 >= 0.0: + lv2 = lv2 * 1000.0 + grids.append(9999) + grids.append(dattim) + grids.append(fcst) + grids.append(0) + grids.append(0) + grids.append(int(lv1)) + grids.append(int(lv2)) + grids.append(ivcd) + grids.append(parm1) + grids.append(parm2) + grids.append(parm3) + count += 1 + if count > 29998: + break + + return grids + + +def getinfo(server, table, model, cycle, forecast): + gir = 
GridInfoRetriever(server, table, model, cycle, forecast) + return gir.getInfo() + + +def getrow(server, table, model, cycle, forecast): + idata = [] + idata.append(9999) + idata.append(1) + return idata + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + # Run Test + srv = 'edex-cloud.unidata.ucar.edu' + tbl = 'grid' + mdl = 'NAM40' + print(getrow(srv, tbl, mdl)) + print(getinfo(srv, tbl, mdl)) diff --git a/awips/gempak/GridNavRetriever.py b/awips/gempak/GridNavRetriever.py new file mode 100644 index 0000000..43c7f5a --- /dev/null +++ b/awips/gempak/GridNavRetriever.py @@ -0,0 +1,301 @@ +import os +import math +from awips import ThriftClient +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetGridNavRequest +from ctypes import * + +EARTH_RADIUS = 6371200.0 +DEG_TO_RAD = math.pi / 180.0 +RAD_TO_DEG = 180.0 / math.pi +TWOPI = math.pi * 2.0 +HALFPI = math.pi / 2.0 +PI4TH = math.pi / 4.0 +PI3RD = math.pi / 3.0 + + +def createPolar(nsflag, clon, lat1, lon1, dx, dy, unit, nx, ny): + clonr = clon * DEG_TO_RAD + latr = lat1 * DEG_TO_RAD + lonr = lon1 * DEG_TO_RAD + if nsflag == 'N': + x1 = EARTH_RADIUS * math.tan(PI4TH - latr/2.0) * math.sin(lonr-clonr) + y1 = -1 * EARTH_RADIUS * math.tan(PI4TH - latr/2.0) * math.cos(lonr-clonr) + else: + x1 = EARTH_RADIUS * math.tan(PI4TH + latr/2.0) * math.sin(lonr-clonr) + y1 = EARTH_RADIUS * math.tan(PI4TH + latr/2.0) * math.cos(lonr-clonr) + + if unit == 'm': + tdx = dx / (1 + math.sin(PI3RD)) + tdy = dy / (1 + math.sin(PI3RD)) + else: + tdx = (dx*1000.0) / (1 + math.sin(PI3RD)) + tdy = (dy*1000.0) / (1 + math.sin(PI3RD)) + + x2 = x1 + tdx * (nx-1) + y2 = y1 + tdy * (ny-1) + xll = min(x1, x2) + yll = min(y1, y2) + xur = max(x1, x2) + yur = max(y1, y2) + + if nsflag == 'N': + latll = (HALFPI - 2*math.atan2(math.hypot(xll, yll), EARTH_RADIUS)) * RAD_TO_DEG + rtemp = clonr + math.atan2(xll, -yll) + else: + latll = -1 * (HALFPI - 2*math.atan2(math.hypot(xll, yll), EARTH_RADIUS)) * RAD_TO_DEG + rtemp = clonr + math.atan2(xll, yll) + + if rtemp > math.pi: + lonll = (rtemp-TWOPI) * RAD_TO_DEG + elif rtemp < -math.pi: + lonll = (rtemp+TWOPI) * RAD_TO_DEG + else: + lonll = rtemp * RAD_TO_DEG + + if nsflag == 'N': + latur = (HALFPI - 2*math.atan2(math.hypot(xur, yur), EARTH_RADIUS)) * RAD_TO_DEG + rtemp = clonr + math.atan2(xur, -yur) + else: + latur = -1 * (HALFPI - 2*math.atan2(math.hypot(xur, yur), EARTH_RADIUS)) * RAD_TO_DEG + rtemp = clonr + math.atan2(xur, yur) + + if rtemp > math.pi: + lonur = (rtemp-TWOPI) * RAD_TO_DEG + elif rtemp < -math.pi: + lonur = (rtemp+TWOPI) * RAD_TO_DEG + else: + lonur = rtemp * RAD_TO_DEG + + return [latll, lonll, latur, lonur] + + +def createConic(nsflag, clon, lat1, lon1, dx, dy, unit, nx, ny, ang1, ang3): + clonr = clon * DEG_TO_RAD + latr = lat1 * DEG_TO_RAD + lonr = lon1 * DEG_TO_RAD + + angle1 = HALFPI - (math.fabs(ang1) * DEG_TO_RAD) + angle2 = HALFPI - (math.fabs(ang3) * DEG_TO_RAD) + + if ang1 == ang3: + cc = math.cos(angle1) + else: + cc = (math.log(math.sin(angle2)) - math.log(math.sin(angle1))) \ + / (math.log(math.tan(angle2/2.0)) - math.log(math.tan(angle1/2.0))) + + er = EARTH_RADIUS / cc + + if nsflag == 'N': + x1 = er * math.pow(math.tan((HALFPI-latr)/2.0), cc) * math.sin(cc*(lonr-clonr)) + y1 = -1.0 * er * math.pow(math.tan((HALFPI-latr)/2.0), cc) * math.cos(cc*(lonr-clonr)) + else: + x1 = er * math.pow(math.tan((HALFPI+latr)/2.0), cc) * math.sin(cc*(lonr-clonr)) + y1 = er * math.pow(math.tan((HALFPI+latr)/2.0), cc) * 
math.cos(cc*(lonr-clonr)) + + alpha = math.pow(math.tan(angle1/2.0), cc) / math.sin(angle1) + + if unit == 'm': + x2 = x1 + (nx-1) * alpha * dx + y2 = y1 + (ny-1) * alpha * dy + else: + x2 = x1 + (nx-1) * alpha * (dx*1000.0) + y2 = y1 + (ny-1) * alpha * (dy*1000.0) + + xll = min(x1, x2) + yll = min(y1, y2) + xur = max(x1, x2) + yur = max(y1, y2) + + if nsflag == 'N': + latll = (HALFPI - 2.0 * math.atan(math.pow(math.hypot(xll, yll)/er, (1/cc)))) * RAD_TO_DEG + rtemp = math.atan2(xll, -yll) * (1/cc) + clonr + else: + latll = (-1.0 * (HALFPI - 2.0 * math.atan(math.pow(math.hypot(xll, yll)/er, (1/cc))))) * RAD_TO_DEG + rtemp = math.atan2(xll, yll) * (1/cc) + clonr + + if rtemp > math.pi: + lonll = (rtemp-TWOPI) * RAD_TO_DEG + elif rtemp < -math.pi: + lonll = (rtemp+TWOPI) * RAD_TO_DEG + else: + lonll = rtemp * RAD_TO_DEG + + if nsflag == 'N': + latur = (HALFPI - 2.0 * math.atan(math.pow(math.hypot(xur, yur)/er, (1/cc)))) * RAD_TO_DEG + rtemp = math.atan2(xur, -yur) * (1/cc) + clonr + else: + latur = (-1.0 * (HALFPI - 2.0 * math.atan(math.pow(math.hypot(xur, yur)/er, (1/cc))))) * RAD_TO_DEG + rtemp = math.atan2(xur, yur) * (1/cc) + clonr + + if rtemp > math.pi: + lonur = (rtemp-TWOPI) * RAD_TO_DEG + elif rtemp < -math.pi: + lonur = (rtemp+TWOPI) * RAD_TO_DEG + else: + lonur = rtemp * RAD_TO_DEG + + return [latll, lonll, latur, lonur] + + +class StringConverter(Union): + _fields_ = [("char", c_char*4), ("int", c_int), ("float", c_float)] + + +class GridNavRetriever: + + def __init__(self, server, pluginName, modelId, arrayLen): + self.pluginName = pluginName + self.modelId = modelId + self.arrayLen = arrayLen + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getNavBlk(self): + """ Sends ThriftClient request and writes out received files.""" + req = GetGridNavRequest() + req.setPluginName(self.pluginName) + req.setModelId(self.modelId) + resp = self.client.sendRequest(req) + + for i, rec in enumerate(resp): + resp[i] = { + key.decode() if isinstance(key, bytes) else key: + val.decode() if isinstance(val, bytes) else val + for key, val in rec.items() + } + + nav = [] + + for record in resp: + unit = record['spacingunit'] + sk = record['spatialkey'] + skarr = sk.split('/') + + nx = float(skarr[1]) + ny = float(skarr[2]) + dx = float(skarr[3]) + dy = float(skarr[4]) + + sc = StringConverter() + if record['projtype'] == 'LatLon': + sc.char = 'CED ' + gemproj = 2.0 + ang1 = 0.0 + ang2 = 0.0 + ang3 = 0.0 + + lllat = float(record['lowerleftlat']) + lllon = float(record['lowerleftlon']) + urlat = lllat + (dy * (ny-1)) + urlon = lllon + (dx * (nx-1)) + if lllon > 180: + lllon -= 360.0 + if urlon > 180: + urlon -= 360.0 + + if record['projtype'] == 'Polar Stereographic': + sc.char = 'STR ' + gemproj = 2.0 + if float(record['standard_parallel_1']) < 0.0: + ang1 = -90.0 + nsflag = 'S' + else: + ang1 = 90.0 + nsflag = 'N' + ang2 = float(record['central_meridian']) + ang3 = 0.0 + + lat1 = float(record['lowerleftlat']) + lon1 = float(record['lowerleftlon']) + coords = createPolar(nsflag, ang2, lat1, lon1, dx, dy, unit, nx, ny) + lllat = coords[0] + lllon = coords[1] + urlat = coords[2] + urlon = coords[3] + + if record['projtype'] == 'Lambert Conformal': + sc.char = 'LCC ' + gemproj = 2.0 + + ang1 = float(skarr[7]) + ang2 = float(record['central_meridian']) + ang3 = float(skarr[8]) + if ang1 < 0.0: + nsflag = 'S' + else: + nsflag = 'N' + + lat1 = float(record['lowerleftlat']) + lon1 = 
float(record['lowerleftlon']) + coords = createConic(nsflag, ang2, lat1, lon1, dx, dy, unit, nx, ny, ang1, ang3) + lllat = coords[0] + lllon = coords[1] + urlat = coords[2] + urlon = coords[3] + + # Fill up the output array of floats + nav.append(gemproj) + nav.append(sc.float) + nav.append(1.0) + nav.append(1.0) + nav.append(nx) + nav.append(ny) + nav.append(lllat) + nav.append(lllon) + nav.append(urlat) + nav.append(urlon) + nav.append(ang1) + nav.append(ang2) + nav.append(ang3) + + for i in range(13, int(self.arrayLen)): + nav.append(0.0) + return nav + + def getAnlBlk(self): + anl = [] + # Type + anl.append(2.0) + # Delta + anl.append(1.0) + # Extend area + anl.append(0.0) + anl.append(0.0) + anl.append(0.0) + anl.append(0.0) + # Grid area + anl.append(-90.0) + anl.append(-180.0) + anl.append(90.0) + anl.append(180.0) + # Data area + anl.append(-90.0) + anl.append(-180.0) + anl.append(90.0) + anl.append(180.0) + for i in range(18, int(self.arrayLen)): + anl.append(0.0) + return anl + + +def getnavb(server, table, model, arrlen): + gnr = GridNavRetriever(server, table, model, arrlen) + return gnr.getNavBlk() + + +def getanlb(server, table, model, arrlen): + gnr = GridNavRetriever(server, table, model, arrlen) + return gnr.getAnlBlk() + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + # Run Test + srv = 'edex-cloud.unidata.ucar.edu' + tbl = 'grid_info' + mdl = 'NAM40' + navlen = '256' + print(getnavb(srv, tbl, mdl, navlen)) + anllen = '128' + print(getanlb(srv, tbl, mdl, anllen)) diff --git a/awips/gempak/StationDataRetriever.py b/awips/gempak/StationDataRetriever.py new file mode 100644 index 0000000..97b4d01 --- /dev/null +++ b/awips/gempak/StationDataRetriever.py @@ -0,0 +1,144 @@ +import os +from datetime import datetime +from awips import ThriftClient +from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime +from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import StationDataRequest + + +class StationDataRetriever: + """ Retrieves all data for a requested station and time """ + + def __init__(self, server, pluginName, stationId, refTime, parmList, partNumber): + self.pluginName = pluginName + self.stationId = stationId + self.refTime = refTime + self.parmList = parmList + self.partNumber = partNumber + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getStationData(self): + """ Sends ThriftClient request and writes out received files.""" + dtime = datetime.strptime(self.refTime, "%y%m%d/%H%M") + trange = TimeRange() + trange.setStart(dtime) + trange.setEnd(dtime) + dataTime = DataTime(refTime=dtime, validPeriod=trange) + req = StationDataRequest() + req.setPluginName(self.pluginName) + req.setStationId(self.stationId) + req.setRefTime(dataTime) + req.setParmList(self.parmList) + req.setPartNumber(self.partNumber) + resp = self.client.sendRequest(req) + + for i, rec in enumerate(resp): + resp[i] = { + key.decode() if isinstance(key, bytes) else key: + val.decode() if isinstance(val, bytes) else val + for key, val in rec.items() + } + + return resp + + +def getstationdata(server, table, stationId, refTime, parmList, partNumber): + sr = StationDataRetriever(server, table, stationId, refTime, parmList, partNumber) + lcldict = sr.getStationData() + + rdata = [] + + for substr in parmList.split(','): 
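+        # append values in parmList order, substituting the GEMPAK missing
+        # value (-9999.00) for any parameter not present in the response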
+ if substr in lcldict: + rdata.append(lcldict[substr]) + else: + rdata.append(-9999.00) + + return rdata + + +def getleveldata(server, table, stationId, refTime, parmList, partNumber): + sr = StationDataRetriever(server, table, stationId, refTime, parmList, partNumber) + lcldict = sr.getStationData() + + numset = [1] + for substr in parmList.split(','): + if substr in lcldict: + pnum = len(lcldict[substr]) - 1 + while pnum >= 0: + if lcldict[substr][pnum] != -9999.00: + break + pnum = pnum - 1 + numset.append(pnum) + + rdata = [] + + for jj in range(max(numset)): + for substr in parmList.split(','): + if substr in lcldict: + if lcldict[substr][jj] == -9999998.0: + rdata.append(-9999.0) + else: + rdata.append(lcldict[substr][jj]) + else: + rdata.append(-9999.0) + + return rdata + + +def getstationtext(server, table, stationId, refTime, parmList, partNumber): + sr = StationDataRetriever(server, table, stationId, refTime, parmList, partNumber) + lcldict = sr.getStationData() + + if parmList in lcldict: + return lcldict[parmList] + else: + return ' ' + + +def getheader(server, table, stationId, refTime, parmList, partNumber): + idata = [] + idata.append(0) + return idata + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + # Run Test + srv = 'edex-cloud.unidata.ucar.edu' + key = '-' + print('OBS - METAR') + tbl = 'obs' + stn = 'KLGA' + time = '130823/1700' + parm = 'seaLevelPress,temperature,dewpoint,windSpeed,windDir' + part = '0' + print(getheader(srv, tbl, stn, time, parm, part)) + print(getstationdata(srv, tbl, stn, time, parm, part)) + parm = 'rawMETAR' + print(getstationtext(srv, tbl, stn, time, parm, part)) + print('SFCOBS - SYNOP') + tbl = 'sfcobs' + stn = '72403' + time = '130823/1800' + parm = 'seaLevelPress,temperature,dewpoint,windSpeed,windDir' + part = '0' + print(getheader(srv, tbl, stn, time, parm, part)) + print(getstationdata(srv, tbl, stn, time, parm, part)) + parm = 'rawReport' + print(getstationtext(srv, tbl, stn, time, parm, part)) + print('UAIR') + tbl = 'bufrua' + stn = '72469' + time = '130823/1200' + parm = 'prMan,htMan,tpMan,tdMan,wdMan,wsMan' + part = '2020' + print(getleveldata(srv, tbl, stn, time, parm, part)) + parm = 'prSigT,tpSigT,tdSigT' + part = '2022' + print(getleveldata(srv, tbl, stn, time, parm, part)) + parm = 'htSigW,wsSigW,wdSigW' + part = '2021' + print(getleveldata(srv, tbl, stn, time, parm, part)) diff --git a/awips/gempak/StationRetriever.py b/awips/gempak/StationRetriever.py new file mode 100644 index 0000000..b18d1c2 --- /dev/null +++ b/awips/gempak/StationRetriever.py @@ -0,0 +1,93 @@ +import os +import sys +from awips import ThriftClient +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetStationsRequest + + +class StationRetriever: + """ Retrieves all requested stations """ + + def __init__(self, server, pluginName): + self.pluginName = pluginName + self.outdir = os.getcwd() + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getStations(self): + """ Sends ThriftClient request and writes out received files.""" + req = GetStationsRequest() + req.setPluginName(self.pluginName) + resp = self.client.sendRequest(req) + + for i, rec in enumerate(resp): + resp[i] = { + key.decode() if isinstance(key, bytes) else key: + val.decode() if isinstance(val, bytes) else val + for key, val in rec.items() + } + + stns = [] + for item in resp: + stationstr = 
'{:<8}'.format(item.getStationId()) + + if sys.byteorder == 'little': + stnid = (ord(stationstr[3]) << 24) + (ord(stationstr[2]) << 16) + \ + (ord(stationstr[1]) << 8) + ord(stationstr[0]) + stnid2 = (ord(stationstr[7]) << 24) + (ord(stationstr[6]) << 16) + \ + (ord(stationstr[5]) << 8) + ord(stationstr[4]) + else: + stnid = (ord(stationstr[0]) << 24) + (ord(stationstr[1]) << 16) + \ + (ord(stationstr[2]) << 8) + ord(stationstr[3]) + stnid2 = (ord(stationstr[4]) << 24) + (ord(stationstr[5]) << 16) + \ + (ord(stationstr[6]) << 8) + ord(stationstr[7]) + + if item.getState() is None: + stationstr = ' ' + else: + stationstr = '{:<4}'.format(item.getState()) + + if sys.byteorder == 'little': + state = (ord(stationstr[3]) << 24) + (ord(stationstr[2]) << 16) \ + + (ord(stationstr[1]) << 8) + ord(stationstr[0]) + else: + state = (ord(stationstr[0]) << 24) + (ord(stationstr[1]) << 16) \ + + (ord(stationstr[2]) << 8) + ord(stationstr[3]) + + stationstr = '{:<4}'.format(item.getCountry()) + if sys.byteorder == 'little': + cntry = (ord(stationstr[3]) << 24) + (ord(stationstr[2]) << 16) \ + + (ord(stationstr[1]) << 8) + ord(stationstr[0]) + else: + cntry = (ord(stationstr[0]) << 24) + (ord(stationstr[1]) << 16) \ + + (ord(stationstr[2]) << 8) + ord(stationstr[3]) + + stns.append(9999) + stns.append(stnid) + stns.append(item.getWmoIndex()) + stns.append(int(item.getLatitude()*100)) + stns.append(int(item.getLongitude()*100)) + stns.append(int(item.getElevation())) + stns.append(state) + stns.append(cntry) + stns.append(stnid2) + stns.append(0) + return stns + + +def getstations(server, table, key, dummy, dummy2): + sr = StationRetriever(server, table) + return sr.getStations() + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + # Run Test + srv = 'edex-cloud.unidata.ucar.edu' + key = '-' + print('OBS - METAR') + tbl = 'obs' + print(getstations(srv, tbl, key)) + print('SFCOBS - SYNOP') + tbl = 'sfcobs' + print(getstations(srv, tbl, key)) diff --git a/awips/gempak/TimeRetriever.py b/awips/gempak/TimeRetriever.py new file mode 100644 index 0000000..0df50cd --- /dev/null +++ b/awips/gempak/TimeRetriever.py @@ -0,0 +1,76 @@ +import os +from datetime import datetime +from awips import ThriftClient +from dynamicserialize.dstypes.java.util import GregorianCalendar +from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetTimesRequest + + +class TimeRetriever: + """ Retrieves all requested times""" + + def __init__(self, server, pluginName, timeField): + self.pluginName = pluginName + self.timeField = timeField + self.outdir = os.getcwd() + self.host = os.getenv("DEFAULT_HOST", server) + self.port = os.getenv("DEFAULT_PORT", "9581") + self.client = ThriftClient.ThriftClient(self.host, self.port) + + def getTimes(self): + """ Sends ThriftClient request and writes out received files.""" + req = GetTimesRequest() + req.setPluginName(self.pluginName) + req.setTimeField(self.timeField) + resp = self.client.sendRequest(req) + + for i, rec in enumerate(resp): + resp[i] = { + key.decode() if isinstance(key, bytes) else key: + val.decode() if isinstance(val, bytes) else val + for key, val in rec.items() + } + + timelist = [] + for item in resp.getTimes(): + if isinstance(item, GregorianCalendar): + tstamp = item.getTimeInMillis() + else: + tstamp = item.getTime() + time = datetime.utcfromtimestamp(tstamp/1000) + timelist.append(time) + + timelist.sort(reverse=True) + + times = [] + for time in timelist: + times.append(9999) + 
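# pack the date as a YYMMDD integer and the time as HHMM for GEMPAK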
times.append((time.year % 100) * 10000 + (time.month * 100) + time.day) + times.append((time.hour * 100) + time.minute) + + # GEMPAK can only handle up to 200 times, which is 600 elements + # in this array -- [9999, DATE, TIME] -- repeated + return times[0:600] + + +def gettimes(server, table, key, dummy, dummy2): + tr = TimeRetriever(server, table, key) + return tr.getTimes() + + +# This is the standard boilerplate that runs this script as a main +if __name__ == '__main__': + srv = 'edex-cloud.unidata.ucar.edu' + print('OBS - METAR') + tbl = 'obs' + key = 'refHour' + print(gettimes(srv, tbl, key)) + + print('SFCOBS - SYNOP') + tbl = 'sfcobs' + key = 'refHour' + print(gettimes(srv, tbl, key)) + + print('BUFRUA') + tbl = 'bufrua' + key = 'dataTime.refTime' + print(gettimes(srv, tbl, key)) diff --git a/awips/gempak/ncepGribTables.py b/awips/gempak/ncepGribTables.py new file mode 100755 index 0000000..9423513 --- /dev/null +++ b/awips/gempak/ncepGribTables.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# Parse html tables from a given URL and output CSV. +# Note: To install a missing python module foo do "easy_install foo" +# (or the new way is "pip install foo" but you might have to do +# "easy_install pip" first) + +from BeautifulSoup import BeautifulSoup +import scrape +import urllib.request, urllib.error, urllib.parse +import html.entities +import re +import sys +import unicodedata + + +# from http://stackoverflow.com/questions/1197981/convert-html-entities +def asciify2(s): + matches = re.findall("&#\d+;", s) + if len(matches) > 0: + hits = set(matches) + for hit in hits: + name = hit[2:-1] + try: + entnum = int(name) + s = s.replace(hit, chr(entnum)) + except ValueError: + pass + + matches = re.findall("&\w+;", s) + hits = set(matches) + amp = "&" + if amp in hits: + hits.remove(amp) + for hit in hits: + name = hit[1:-1] + if name in html.entities.name2codepoint: + s = s.replace(hit, "") + s = s.replace(amp, "&") + return s + + +def opensoup(url): + request = urllib.request.Request(url) + request.add_header("User-Agent", "Mozilla/5.0") + # To mimic a real browser's user-agent string more exactly, if necessary: + # Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.14) + # Gecko/20080418 Ubuntu/7.10 (gutsy) Firefox/2.0.0.14 + pagefile = urllib.request.urlopen(request) + soup = BeautifulSoup(pagefile) + pagefile.close() + return soup + + +def asciify(s): + return unicodedata.normalize('NFKD', s).encode('ascii', 'ignore') + + +# remove extra whitespace, including stripping leading and trailing whitespace. +def condense(s): + s = re.sub(r"\s+", " ", s, re.DOTALL) + return s.strip() + + +def stripurl(s): + s = re.sub(r"\]*\>[^\<]*\<\/span\>", "", s) + s = re.sub(r"\&\#160\;", " ", s) + return condense(re.sub(r"\<[^\>]*\>", " ", s)) + + +# this gets rid of tags and condenses whitespace +def striptags(s): + s = re.sub(r"\]*\>[^\<]*\<\/span\>", "", s) + s = re.sub(r"\&\#160\;", " ", s) + return condense(s) + + +def getUrlArgs(parseUrl): + return re.search('grib2_table4-2-(\d+)-(\d+).shtml', parseUrl).groups() + + +if len(sys.argv) == 1: + print("Usage: ", sys.argv[0], " url [n]") + print(" (where n indicates which html table to parse)") + exit(1) + +url = sys.argv[1] +soup = opensoup(url) +tables = soup.findAll("table") + +for table in tables: + for r in table.findAll('tr'): + rl = [] + for c in r.findAll(re.compile('td|th')): + rl.append(striptags(c.renderContents())) + if len(rl) > 1 and "href" in rl[1]: + print('! 
' + stripurl(rl[1])) + scrapeUrl = 'http://www.nco.ncep.noaa.gov/pmb/docs/grib2/grib2_table4-2-' + \ + getUrlArgs(rl[1])[0] + "-" + getUrlArgs(rl[1])[1] + '.shtml' + scrape.run(scrapeUrl) diff --git a/awips/gempak/scrape.py b/awips/gempak/scrape.py new file mode 100755 index 0000000..06989f4 --- /dev/null +++ b/awips/gempak/scrape.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# Parse html tables from a given URL and output CSV. +# Note: To install a missing python module foo do "easy_install foo" +# (or the new way is "pip install foo" but you might have to do +# "easy_install pip" first) + +from BeautifulSoup import BeautifulSoup +import urllib.request, urllib.error, urllib.parse +import html.entities +import re +import sys +import unicodedata + + +# from http://stackoverflow.com/questions/1197981/convert-html-entities +def asciify2(s): + matches = re.findall("&#\d+;", s) + if len(matches) > 0: + hits = set(matches) + for hit in hits: + name = hit[2:-1] + try: + entnum = int(name) + s = s.replace(hit, chr(entnum)) + except ValueError: + pass + + matches = re.findall("&\w+;", s) + hits = set(matches) + amp = "&" + if amp in hits: + hits.remove(amp) + for hit in hits: + name = hit[1:-1] + if name in html.entities.name2codepoint: + s = s.replace(hit, "") + s = s.replace(amp, "&") + return s + + +def opensoup(url): + request = urllib.request.Request(url) + request.add_header("User-Agent", "Mozilla/5.0") + # To mimic a real browser's user-agent string more exactly, if necessary: + # Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.14) + # Gecko/20080418 Ubuntu/7.10 (gutsy) Firefox/2.0.0.14 + pagefile = urllib.request.urlopen(request) + soup = BeautifulSoup(pagefile) + pagefile.close() + return soup + + +def asciify(s): + return unicodedata.normalize('NFKD', s).encode('ascii', 'ignore') + + +# remove extra whitespace, including stripping leading and trailing whitespace. +def condense(s): + s = re.sub(r"\s+", " ", s, re.DOTALL) + return s.strip() + + +# this gets rid of tags and condenses whitespace +def striptags(s): + s = re.sub(r"\]*\>[^\<]*\<\/span\>", "", s) + s = re.sub(r"\&\#160\;", " ", s) + return condense(re.sub(r"\<[^\>]*\>", " ", s)) + + +if len(sys.argv) == 1: # called with no arguments + print("Usage: ", sys.argv[0], " url [n]") + print(" (where n indicates which html table to parse)") + exit(1) + + +def getUrlArgs(parseUrl): + return re.search('grib2_table4-2-(\d+)-(\d+).shtml', parseUrl).groups() + + +def run(url): + soup = opensoup(url) + tables = soup.findAll("table") + for table in tables: + ct = 0 + for r in table.findAll('tr'): + rl = [] + for c in r.findAll(re.compile('td|th')): + rl.append(striptags(c.renderContents())) + if ct > 0: + rl[0] = getUrlArgs(url)[0].zfill(3) + " " + \ + getUrlArgs(url)[1].zfill(3) + " " + rl[0].zfill(3) + " 000" + if len(rl) > 1: + if "Reserved" in rl[1]: + rl[0] = '!' + rl[0] + if "See Table" in rl[2] or "Code table" in rl[2]: + rl[2] = "cat" + rl[1] = rl[1][:32].ljust(32) + rl[2] = rl[2].ljust(20) + rl[3] = rl[3].ljust(12) + " 0 -9999.00" + if ct: + print(" ".join(rl)) + ct += 1 + + +if __name__ == '__main__': + run(sys.argv[1]) diff --git a/awips/gfe/IFPClient.py b/awips/gfe/IFPClient.py new file mode 100644 index 0000000..6fbc1e6 --- /dev/null +++ b/awips/gfe/IFPClient.py @@ -0,0 +1,152 @@ +# +# Provides a Python-based interface for executing GFE requests. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/26/12 dgilling Initial Creation. 
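+#
+# A minimal, illustrative usage sketch -- the host, port, and user values
+# below are hypothetical, not part of this module:
+#
+#     from awips.gfe.IFPClient import IFPClient
+#     client = IFPClient('edex-hostname', 9581, 'gfe-user')
+#     sr = client.getSiteID()          # ServerResponse; payload is site list
+#     sr = client.getParmList(someDb)  # someDb: a DatabaseID instance
+#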
+# +# + +from awips import ThriftClient +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import CommitGridsRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetGridInventoryRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetParmListRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GetSelectTimeRangeRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.request import CommitGridRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId +from dynamicserialize.dstypes.com.raytheon.uf.common.site.requests import GetActiveSitesRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.message import ServerResponse + + +class IFPClient(object): + def __init__(self, host, port, user, site=None, progName=None): + self.__thrift = ThriftClient.ThriftClient(host, port) + self.__wsId = WsId(userName=user, progName=progName) + # retrieve default site + if site is None: + sr = self.getSiteID() + if len(sr.getPayload()) > 0: + site = sr.getPayload()[0] + self.__siteId = site + + def commitGrid(self, request): + if isinstance(request, CommitGridRequest): + return self.__commitGrid([request]) + elif self.__isHomogenousIterable(request, CommitGridRequest): + return self.__commitGrid([cgr for cgr in request]) + raise TypeError("Invalid type: " + str(type(request)) + + " for commitGrid(). Only accepts CommitGridRequest or lists of CommitGridRequest.") + + def __commitGrid(self, requests): + ssr = ServerResponse() + request = CommitGridsRequest() + request.setCommits(requests) + sr = self.__makeRequest(request) + ssr.setMessages(sr.getMessages()) + return ssr + + def getParmList(self, pid): + argType = type(pid) + if argType is DatabaseID: + return self.__getParmList([pid]) + elif self.__isHomogenousIterable(pid, DatabaseID): + return self.__getParmList([dbid for dbid in pid]) + raise TypeError("Invalid type: " + str(argType) + + " for getParmList(). Only accepts DatabaseID or lists of DatabaseID.") + + def __getParmList(self, ids): + ssr = ServerResponse() + request = GetParmListRequest() + request.setDbIds(ids) + sr = self.__makeRequest(request) + ssr.setMessages(sr.getMessages()) + parmlist = sr.getPayload() if sr.getPayload() is not None else [] + ssr.setPayload(parmlist) + return ssr + + def __isHomogenousIterable(self, iterable, classType): + try: + iterator = iter(iterable) + for item in iterator: + if not isinstance(item, classType): + return False + except TypeError: + return False + return True + + def getGridInventory(self, parmID): + if isinstance(parmID, ParmID): + sr = self.__getGridInventory([parmID]) + inventoryList = [] + try: + inventoryList = sr.getPayload()[parmID] + except KeyError: + # no-op, we've already default the TimeRange list to empty + pass + sr.setPayload(inventoryList) + return sr + elif self.__isHomogenousIterable(parmID, ParmID): + return self.__getGridInventory([pid for pid in parmID]) + raise TypeError("Invalid type: " + str(type(parmID)) + + " specified to getGridInventory(). 
Accepts ParmID or lists of ParmID.") + + def __getGridInventory(self, parmIDs): + ssr = ServerResponse() + request = GetGridInventoryRequest() + request.setParmIds(parmIDs) + sr = self.__makeRequest(request) + ssr.setMessages(sr.getMessages()) + trs = sr.getPayload() if sr.getPayload() is not None else {} + ssr.setPayload(trs) + return ssr + + def getSelectTR(self, name): + request = GetSelectTimeRangeRequest() + request.setName(name) + sr = self.__makeRequest(request) + ssr = ServerResponse() + ssr.setMessages(sr.getMessages()) + ssr.setPayload(sr.getPayload()) + return ssr + + def getSiteID(self): + ssr = ServerResponse() + request = GetActiveSitesRequest() + sr = self.__makeRequest(request) + ssr.setMessages(sr.getMessages()) + ids = sr.getPayload() if sr.getPayload() is not None else [] + sr.setPayload(ids) + return sr + + def __makeRequest(self, request): + try: + request.setSiteID(self.__siteId) + except AttributeError: + pass + try: + request.setWorkstationID(self.__wsId) + except AttributeError: + pass + + sr = ServerResponse() + response = None + try: + response = self.__thrift.sendRequest(request) + except ThriftClient.ThriftRequestException as e: + sr.setMessages([str(e)]) + try: + sr.setPayload(response.getPayload()) + except AttributeError: + sr.setPayload(response) + try: + sr.setMessages(response.getMessages()) + except AttributeError: + # not a server response, nothing else to do + pass + + return sr diff --git a/awips/gfe/__init__.py b/awips/gfe/__init__.py new file mode 100644 index 0000000..b6e95a6 --- /dev/null +++ b/awips/gfe/__init__.py @@ -0,0 +1,3 @@ + +__all__ = [ + ] diff --git a/awips/qpidingest.py b/awips/qpidingest.py new file mode 100644 index 0000000..bb5e07a --- /dev/null +++ b/awips/qpidingest.py @@ -0,0 +1,131 @@ +# =============================================================================== +# qpidingest.py +# +# @author: Aaron Anderson +# @organization: NOAA/WDTB OU/CIMMS +# @version: 1.0 02/19/2010 +# @requires: QPID Python Client available from http://qpid.apache.org/download.html +# The Python Client is located under Single Component Package/Client +# +# From the README.txt Installation Instructions +# = INSTALLATION = +# Extract the release archive into a directory of your choice and set +# your PYTHONPATH accordingly: +# +# tar -xzf qpid-python-.tar.gz -C +# export PYTHONPATH=/qpid-/python +# +# ***EDEX and QPID must be running for this module to work*** +# +# DESCRIPTION: +# This module is used to connect to QPID and send messages to the external.dropbox queue +# which tells EDEX to ingest a data file from a specified path. This avoids having to copy +# a data file into an endpoint. Each message also contains a header which is used to determine +# which plugin should be used to decode the file. Each plugin has an xml file located in +# $EDEX_HOME/data/utility/edex_static/base/distribution that contains regular expressions +# that the header is compared to. When the header matches one of these regular expressions +# the file is decoded with that plugin. If you make changes to one of these xml files you +# must restart EDEX for the changes to take effect. +# +# NOTE: If the message is being sent but you do not see it being ingested in the EDEX log +# check the xml files to make sure the header you are passing matches one of the regular +# expressions. Beware of spaces, some regular expressions require spaces while others use +# a wildcard character so a space is optional. 
It seems you are better off having the space +# as this will be matched to both patterns. For the file in the example below, +# 20100218_185755_SAUS46KLOX.metar, I use SAUS46 KLOX as the header to make sure it matches. +# +# +# EXAMPLE: +# Simple example program: +# +# ------------------------------------------------------------------------------ +# import qpidingest +# #Tell EDEX to ingest a metar file from data_store. The filepath is +# #/data_store/20100218/metar/00/standard/20100218_005920_SAUS46KSEW.metar +# +# conn=qpidingest.IngestViaQPID() #defaults to localhost port 5672 +# +# #If EDEX is not on the local machine you can make the connection as follows +# #conn=qpidingest.IngestViaQPID(host='',port=) +# +# conn.sendmessage('/data_store/20100218/metar/18/standard/20100218_185755_SAUS46KLOX.metar','SAUS46 KLOX') +# conn.close() +# ------------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# .... +# 06/13/2013 DR 16242 D. Friedman Add Qpid authentication info +# 03/06/2014 DR 17907 D. Friedman Workaround for issue QPID-5569 +# 02/16/2017 DR 6084 bsteffen Support ssl connections +# +# =============================================================================== + +import os +import os.path + +import qpid +from qpid.util import connect +from qpid.connection import Connection +from qpid.datatypes import Message, uuid4 + +QPID_USERNAME = 'guest' +QPID_PASSWORD = 'guest' + + +class IngestViaQPID: + def __init__(self, host='localhost', port=5672, ssl=None): + """ + Connect to QPID and make bindings to route message to external.dropbox queue + @param host: string hostname of computer running EDEX and QPID (default localhost) + @param port: integer port used to connect to QPID (default 5672) + @param ssl: boolean to determine whether ssl is used, default value of None will use + ssl only if a client certificate is found. + """ + + try: + # + socket = connect(host, port) + if "QPID_SSL_CERT_DB" in os.environ: + certdb = os.environ["QPID_SSL_CERT_DB"] + else: + certdb = os.path.expanduser("~/.qpid/") + if "QPID_SSL_CERT_NAME" in os.environ: + certname = os.environ["QPID_SSL_CERT_NAME"] + else: + certname = QPID_USERNAME + certfile = os.path.join(certdb, certname + ".crt") + if ssl or (ssl is None and os.path.exists(certfile)): + keyfile = os.path.join(certdb, certname + ".key") + trustfile = os.path.join(certdb, "root.crt") + socket = qpid.util.ssl(socket, keyfile=keyfile, certfile=certfile, ca_certs=trustfile) + self.connection = Connection(sock=socket, username=QPID_USERNAME, password=QPID_PASSWORD) + self.connection.start() + self.session = self.connection.session(str(uuid4())) + self.session.exchange_bind(exchange='amq.direct', queue='external.dropbox', binding_key='external.dropbox') + print('Connected to Qpid') + except ValueError: + print('Unable to connect to Qpid') + + def sendmessage(self, filepath, header): + """ + This function sends a message to the external.dropbox queue providing the path + to the file to be ingested and a header to determine the plugin to be used to + decode the file. 
+ @param filepath: string full path to file to be ingested + @param header: string header used to determine plugin decoder to use + """ + props = self.session.delivery_properties(routing_key='external.dropbox') + head = self.session.message_properties(application_headers={'header': header}, + user_id=QPID_USERNAME) + self.session.message_transfer(destination='amq.direct', message=Message(props, head, filepath)) + + def close(self): + """ + After all messages are sent call this function to close connection and make sure + there are no threads left open + """ + self.session.close(timeout=10) + print('Connection to Qpid closed') diff --git a/awips/tables.py b/awips/tables.py new file mode 100644 index 0000000..823f5c9 --- /dev/null +++ b/awips/tables.py @@ -0,0 +1,2025 @@ + +class Item(object): + def __init__(self, *initial_data, **kwargs): + for dictionary in initial_data: + for key in dictionary: + setattr(self, key, dictionary[key]) + for key in kwargs: + setattr(self, key, kwargs[key]) + + +#class GenerateNexradTable(object): +# import numpy as np +# import json +# # find this file locally and confirm location +# filename = '~/awips2-builds/edexOsgi/com.raytheon.edex.plugin.radar/utility/common_static/base/radarInfoSimple.txt' +# data = np.genfromtxt( +# filename, +# delimiter=",", +# autostrip=True, +# skip_header=0, +# names=True, +# dtype=None) +# +# for x in data: +# defn = Item(dict(zip(data.dtype.names, x))) +# prod = dict(zip(data.dtype.names, x)) +# nexrad[defn.id] = prod +# +# return json.dumps(nexrad, indent=1) + + +profiler = { + "74997" : { + "profilerId": "ASTC1", + "latitude": 46.16, + "longitude": -123.88, + "elevation": 3, + "profilerName": "Astoria OR"}, + "74996" : { + "profilerId": "BGDC1", + "latitude": 38.32, + "longitude": -123.07, + "elevation": 13, + "profilerName": "Bodega Bay CA"}, + "74995" : { + "profilerId": "FKSC1", + "latitude": 47.97, + "longitude": -124.40, + "elevation": 92, + "profilerName": "Forks WA"}, + "74998" : { + "profilerId": "ACVC1", + "latitude": 40.97, + "longitude": -124.11, + "elevation": 56, + "profilerName": "McKinleyville CA"}, + "74994" : { + "profilerId": "OTHC1", + "latitude": 43.42, + "longitude": -124.24, + "elevation": 5, + "profilerName": "North Bend OR"}, + "74993" : { + "profilerId": "PTSC1", + "latitude": 36.30, + "longitude": -121.89, + "elevation": 11, + "profilerName": "Point Sur CA"}, + "74992" : { + "profilerId": "SBAC1", + "latitude": 34.43, + "longitude": -119.85, + "elevation": 4, + "profilerName": "Santa Barbara CA"}, + "74991" : { + "profilerId": "PVLC1", + "latitude": 40.18, + "longitude": -104.73, + "elevation": 1503, + "profilerName": "Platteville CO"}, + "74990" : { + "profilerId": "TDEC1", + "latitude": 45.55, + "longitude": -122.39, + "elevation": 12, + "profilerName": "Troutdale OR"}, + "74989" : { + "profilerId": "CCOC1", + "latitude": 39.70, + "longitude": -121.91, + "elevation": 42, + "profilerName": "Chico CA"} +} + +nexrad = { + "2": { + "layer": 0, + "name": "NEXRAD Unit Status Msg", + "res": 0.0, + "mnemo": "GSM", + "range": 0, + "lvls": 0, + "id": 2, + "unit": "" + }, + "3": { + "layer": 0, + "name": "Product Request Resp", + "res": 0.0, + "mnemo": "PRR", + "range": 0, + "lvls": 0, + "id": 3, + "unit": "" + }, + "8": { + "layer": 0, + "name": "Product List", + "res": 0.0, + "mnemo": "PTL", + "range": 0, + "lvls": 0, + "id": 8, + "unit": "" + }, + "12": { + "layer": 0, + "name": "Command Parameter Msg", + "res": 0.0, + "mnemo": "CPM", + "range": 0, + "lvls": 0, + "id": 12, + "unit": "" + }, + "14": { + 
"layer": 0, + "name": "Command Control Msg", + "res": 0.0, + "mnemo": "CCM", + "range": 0, + "lvls": 0, + "id": 14, + "unit": "" + }, + "16": { + "layer": 0, + "name": "Reflectivity", + "res": 1.0, + "mnemo": "Z", + "range": 230, + "lvls": 8, + "id": 16, + "unit": "dBZ" + }, + "17": { + "layer": 0, + "name": "Reflectivity", + "res": 2.0, + "mnemo": "Z", + "range": 460, + "lvls": 8, + "id": 17, + "unit": "dBZ" + }, + "18": { + "layer": 0, + "name": "Reflectivity", + "res": 4.0, + "mnemo": "Z", + "range": 460, + "lvls": 8, + "id": 18, + "unit": "dBZ" + }, + "19": { + "layer": 0, + "name": "Reflectivity", + "res": 1.0, + "mnemo": "Z", + "range": 230, + "lvls": 16, + "id": 19, + "unit": "dBZ" + }, + "20": { + "layer": 0, + "name": "Reflectivity", + "res": 2.0, + "mnemo": "Z", + "range": 460, + "lvls": 16, + "id": 20, + "unit": "dBZ" + }, + "21": { + "layer": 0, + "name": "Reflectivity", + "res": 4.0, + "mnemo": "Z", + "range": 460, + "lvls": 16, + "id": 21, + "unit": "dBZ" + }, + "22": { + "layer": 0, + "name": "Velocity", + "res": 0.25, + "mnemo": "V", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "range": 60, + "lvls": 8, + "id": 22, + "unit": "kts" + }, + "23": { + "layer": 0, + "name": "Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 0.5, + "mnemo": "V", + "range": 115, + "lvls": 8, + "id": 23, + "unit": "kts" + }, + "24": { + "layer": 0, + "name": "Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 1.0, + "mnemo": "V", + "range": 230, + "lvls": 8, + "id": 24, + "unit": "kts" + }, + "25": { + "layer": 0, + "name": "Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 0.25, + "mnemo": "V", + "range": 60, + "lvls": 16, + "id": 25, + "unit": "kts" + }, + "26": { + "layer": 0, + "name": "Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 0.5, + "mnemo": "V", + "range": 115, + "lvls": 16, + "id": 26, + "unit": "kts" + }, + "27": { + "layer": 0, + "name": "Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 1.0, + "mnemo": "V", + "range": 230, + "lvls": 16, + "id": 27, + "unit": "kts" + }, + "28": { + "layer": 0, + "name": "Spectrum Width", + "res": 0.25, + "mnemo": "SW", + "range": 60, + "lvls": 8, + "id": 28, + "unit": "kts" + }, + "29": { + "layer": 0, + "name": "Spectrum Width", + "res": 0.5, + "mnemo": "SW", + "range": 115, + "lvls": 8, + "id": 29, + "unit": "kts" + }, + "30": { + "layer": 0, + "name": "Spectrum Width", + "res": 1.0, + "mnemo": "SW", + "range": 230, + "lvls": 8, + "id": 30, + "unit": "kts" + }, + "31": { + "layer": 0, + "name": "User Select Precip", + "res": 2.0, + "mnemo": "USP", + "range": 460, + "lvls": 16, + "id": 31, + "unit": "in" + }, + "32": { + "layer": 0, + "name": "Digital Hybrid Scan Refl", + "res": 1.0, + "mnemo": "DHR", + "ctable": ['NWSStormClearReflectivity', -20., 0.5], + "scale": [-32.0, 94.5], + "range": 230, + "lvls": 256, + "id": 32, + "unit": "dBZ/10" + }, + "33": { + "layer": 0, + "name": "Hybrid Scan Reflectivity", + "res": 1.0, + "mnemo": "HSR", + "range": 230, + "lvls": 16, + "id": 33, + "unit": "dBZ" + }, + "34": { + "layer": 0, + "name": "Clutter Filter Control", + "res": 1.0, + "mnemo": "CFC", + "range": 230, + "lvls": 8, + "id": 34, + "unit": "" + }, + "35": { + "layer": 0, + "name": "Composite Refl", + "res": 1.0, + "mnemo": "CZ", + "range": 230, + "lvls": 8, + "id": 35, + "unit": "dBZ" + }, + "36": { + "layer": 0, + "name": "Composite Refl", + "res": 4.0, + "mnemo": 
"CZ", + "range": 460, + "lvls": 8, + "id": 36, + "unit": "dBZ" + }, + "37": { + "layer": 0, + "name": "Composite Refl", + "res": 1.0, + "mnemo": "CZ", + "range": 230, + "lvls": 16, + "id": 37, + "unit": "dBZ" + }, + "38": { + "layer": 0, + "name": "Composite Refl", + "res": 4.0, + "mnemo": "CZ", + "range": 460, + "lvls": 16, + "id": 38, + "unit": "dBZ" + }, + "41": { + "layer": 0, + "name": "Echo Tops", + "res": 4.0, + "mnemo": "ET", + "range": 230, + "lvls": 16, + "id": 41, + "unit": "ft*1000" + }, + "43": { + "layer": 0, + "name": "Svr Wx Analysis - Ref", + "res": 1.0, + "mnemo": "SWR", + "range": 230, + "lvls": 16, + "id": 43, + "unit": "" + }, + "44": { + "layer": 0, + "name": "Svr Wx Analysis - Vel", + "res": 0.25, + "mnemo": "SWV", + "range": 230, + "lvls": 16, + "id": 44, + "unit": "" + }, + "45": { + "layer": 0, + "name": "Svr Wx Analysis - SW", + "res": 0.25, + "mnemo": "SWW", + "range": 230, + "lvls": 8, + "id": 45, + "unit": "" + }, + "46": { + "layer": 0, + "name": "Svr Wx Analysis - Shear", + "res": 0.5, + "mnemo": "SWS", + "range": 230, + "lvls": 16, + "id": 46, + "unit": "" + }, + "47": { + "layer": 0, + "name": "Severe Wx Prob", + "res": 4.0, + "mnemo": "SWP", + "range": 230, + "lvls": 0, + "id": 47, + "unit": "" + }, + "48": { + "layer": 0, + "name": "VAD Wind Profile", + "res": 0.0, + "mnemo": "VWP", + "range": 0, + "lvls": 8, + "id": 48, + "unit": "kts" + }, + "50": { + "layer": 0, + "name": "Ref X-Sect", + "res": 1.0, + "mnemo": "RCS", + "range": 230, + "lvls": 16, + "id": 50, + "unit": "dBZ" + }, + "51": { + "layer": 0, + "name": "Vel X-Sect", + "res": 0.5, + "mnemo": "VCS", + "range": 230, + "lvls": 16, + "id": 51, + "unit": "kts" + }, + "55": { + "layer": 0, + "name": "Storm Rel Vel Region", + "res": 0.5, + "mnemo": "SRR", + "range": 230, + "lvls": 16, + "id": 55, + "unit": "kts" + }, + "56": { + "layer": 0, + "name": "Storm Rel Velocity", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "res": 1.0, + "mnemo": "SRM", + "range": 230, + "lvls": 16, + "id": 56, + "unit": "kts" + }, + "57": { + "layer": 0, + "name": "Vert Integ Liq", + "res": 4.0, + "mnemo": "VIL", + "range": 230, + "lvls": 16, + "id": 57, + "unit": "kg/m\u00b2" + }, + "58": { + "layer": 0, + "name": "Storm Track", + "res": 0.0, + "mnemo": "STI", + "range": 345, + "lvls": 0, + "id": 58, + "unit": "" + }, + "59": { + "layer": 0, + "name": "Hail Index", + "res": 0.0, + "mnemo": "HI", + "range": 230, + "lvls": 0, + "id": 59, + "unit": "" + }, + "60": { + "layer": 0, + "name": "Legacy Meso", + "res": 0.0, + "mnemo": "M", + "range": 230, + "lvls": 0, + "id": 60, + "unit": "" + }, + "61": { + "layer": 0, + "name": "Tornadic Vortex Sig", + "res": 0.0, + "mnemo": "TVS", + "range": 230, + "lvls": 0, + "id": 61, + "unit": "" + }, + "62": { + "layer": 0, + "name": "Storm Structure", + "res": 0.0, + "mnemo": "SS", + "range": 460, + "lvls": 0, + "id": 62, + "unit": "" + }, + "65": { + "layer": 1, + "name": "Layer 1 Max Refl", + "res": 4.0, + "mnemo": "LRM", + "range": 460, + "lvls": 8, + "id": 65, + "unit": "dBZ" + }, + "66": { + "layer": 2, + "name": "Lyr 2 Comp Ref Max", + "res": 4.0, + "mnemo": "LRM", + "range": 460, + "lvls": 8, + "id": 66, + "unit": "dBZ" + }, + "67": { + "layer": 1, + "name": "Lyr 1 Comp Ref Max", + "res": 4.0, + "mnemo": "APR", + "range": 460, + "lvls": 8, + "id": 67, + "unit": "dBZ" + }, + "74": { + "layer": 0, + "name": "Radar Coded Message", + "res": 0.0, + "mnemo": "RCM", + "range": 460, + "lvls": 0, + "id": 74, + "unit": "" + }, + "75": { + "layer": 0, + "name": "Free Text 
Message", + "res": 0.0, + "mnemo": "FTM", + "range": 0, + "lvls": 0, + "id": 75, + "unit": "" + }, + "77": { + "layer": 0, + "name": "PUP Text Message", + "res": 0.0, + "mnemo": "PTM", + "range": 0, + "lvls": 0, + "id": 77, + "unit": "" + }, + "78": { + "layer": 0, + "name": "One Hour Precip", + "res": 2.0, + "mnemo": "OHP", + "range": 460, + "lvls": 16, + "id": 78, + "unit": "in" + }, + "79": { + "layer": 0, + "name": "Three Hour Precip", + "res": 2.0, + "mnemo": "THP", + "range": 460, + "lvls": 16, + "id": 79, + "unit": "in" + }, + "80": { + "layer": 0, + "name": "Storm Total Precip", + "res": 2.0, + "mnemo": "STP", + "range": 460, + "lvls": 16, + "id": 80, + "unit": "in" + }, + "81": { + "layer": 0, + "name": "Digital Precip Array", + "res": 4.0, + "mnemo": "DPA", + "range": 230, + "lvls": 256, + "id": 81, + "unit": "" + }, + "82": { + "layer": 0, + "name": "Supplemental Precip Data", + "res": 40.0, + "mnemo": "SPD", + "range": 230, + "lvls": 8, + "id": 82, + "unit": "" + }, + "83": { + "layer": 0, + "name": "Intermediate Radar Message", + "res": 0.0, + "mnemo": "IRM", + "range": 0, + "lvls": 0, + "id": 83, + "unit": "" + }, + "84": { + "layer": 0, + "name": "Velocity Azimuth Disp", + "res": 0.0, + "mnemo": "VAD", + "range": 0, + "lvls": 8, + "id": 84, + "unit": "dBZ" + }, + "85": { + "layer": 0, + "name": "Ref X-Sect", + "res": 1.0, + "mnemo": "RCS", + "range": 230, + "lvls": 8, + "id": 85, + "unit": "dBZ" + }, + "86": { + "layer": 0, + "name": "Vel X-Sect", + "res": 0.5, + "mnemo": "VCS", + "range": 230, + "lvls": 8, + "id": 86, + "unit": "kts" + }, + "87": { + "layer": 0, + "name": "Combined Shear", + "res": 2.0, + "mnemo": "CS", + "range": 230, + "lvls": 16, + "id": 87, + "unit": "" + }, + "90": { + "layer": 3, + "name": "Lyr 3 Comp Ref Max", + "res": 4.0, + "mnemo": "LRM", + "range": 460, + "lvls": 8, + "id": 90, + "unit": "dBZ" + }, + "93": { + "layer": 0, + "name": "ITWS Digital Velocity", + "res": 1.0, + "mnemo": "DBV", + "range": 115, + "lvls": 256, + "id": 93, + "unit": "(m/s)/10" + }, + "94": { + "layer": 0, + "name": "Reflectivity", + "res": 1.0, + "mnemo": "Z", + "range": 460, + "ctable": ['NWSStormClearReflectivity', -20., 0.5], + "scale": [-32.0, 94.5], + "lvls": 256, + "id": 94, + "unit": "dBZ/10" + }, + "95": { + "layer": 0, + "name": "Comp Refl Edited for AP", + "res": 1.0, + "mnemo": "CZE", + "range": 230, + "lvls": 8, + "id": 95, + "unit": "dBZ" + }, + "96": { + "layer": 0, + "name": "Comp Refl Edited for AP", + "res": 4.0, + "mnemo": "CZE", + "range": 460, + "lvls": 8, + "id": 96, + "unit": "dBZ" + }, + "97": { + "layer": 0, + "name": "Comp Refl Edited for AP", + "res": 1.0, + "mnemo": "CZE", + "range": 230, + "lvls": 16, + "id": 97, + "unit": "dBZ" + }, + "98": { + "layer": 0, + "name": "Comp Refl Edited for AP", + "res": 4.0, + "mnemo": "CZE", + "range": 460, + "lvls": 16, + "id": 98, + "unit": "dBZ" + }, + "99": { + "layer": 0, + "name": "Velocity", + "res": 0.25, + "mnemo": "V", + "range": 300, + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [-100, 100], + "lvls": 256, + "id": 99, + "unit": "(m/s)/10" + }, + "100": { + "layer": 0, + "name": "VAD Site Adapt Params", + "res": 0.0, + "mnemo": "VSDT", + "range": 0, + "lvls": 0, + "id": 100, + "unit": "" + }, + "101": { + "layer": 0, + "name": "Storm Track Alpha block", + "res": 0.0, + "mnemo": "STIT", + "range": 0, + "lvls": 0, + "id": 101, + "unit": "" + }, + "102": { + "layer": 0, + "name": "Hail Index Alpha block", + "res": 0.0, + "mnemo": "HIT", + "range": 0, + "lvls": 0, + "id": 102, + "unit": "" + }, + 
"103": { + "layer": 0, + "name": "Mesocyclone Alpha block", + "res": 0.0, + "mnemo": "MT", + "range": 0, + "lvls": 0, + "id": 103, + "unit": "" + }, + "104": { + "layer": 0, + "name": "TVS Alpha block", + "res": 0.0, + "mnemo": "TVST", + "range": 0, + "lvls": 0, + "id": 104, + "unit": "" + }, + "105": { + "layer": 0, + "name": "Combined Shear Params", + "res": 0.0, + "mnemo": "CST", + "range": 0, + "lvls": 0, + "id": 105, + "unit": "" + }, + "107": { + "layer": 0, + "name": "1hr Rainfall Params", + "res": 0.0, + "mnemo": "OHPT", + "range": 0, + "lvls": 0, + "id": 107, + "unit": "" + }, + "108": { + "layer": 0, + "name": "3hr Rainfall Params", + "res": 0.0, + "mnemo": "THPT", + "range": 0, + "lvls": 0, + "id": 108, + "unit": "" + }, + "109": { + "layer": 0, + "name": "Storm Total Params", + "res": 0.0, + "mnemo": "STPT", + "range": 0, + "lvls": 0, + "id": 109, + "unit": "" + }, + "132": { + "layer": 0, + "name": "Clutter Likelihood Refl", + "res": 1.0, + "mnemo": "CLR", + "range": 230, + "lvls": 16, + "id": 132, + "unit": "%" + }, + "133": { + "layer": 0, + "name": "Clutter Likelihood Dopp", + "res": 1.0, + "mnemo": "CLD", + "range": 230, + "lvls": 16, + "id": 133, + "unit": "%" + }, + "134": { + "layer": 0, + "name": "Digital Vert Integ Liq", + "res": 1.0, + "mnemo": "DVL", + "range": 460, + "lvls": 256, + "id": 134, + "unit": "kg/m\u00b2" + }, + "135": { + "layer": 0, + "name": "Enhanced Echo Tops", + "res": 1.0, + "mnemo": "EET", + "ctable": ['NWSEnhancedEchoTops', 2, 1], + "scale": [0, 255], + "range": 460, + "lvls": 256, + "id": 135, + "unit": "ft*1000" + }, + "136": { + "layer": 0, + "name": "SuperOb", + "res": 4.0, + "mnemo": "SO", + "range": 0, + "lvls": 256, + "id": 136, + "unit": "" + }, + "137": { + "layer": 9, + "name": "User Selectable Lyr Refl", + "res": 1.0, + "mnemo": "ULR", + "range": 230, + "lvls": 16, + "id": 137, + "unit": "dBZ" + }, + "138": { + "layer": 0, + "name": "Storm Total Precip", + "res": 2.0, + "mnemo": "STP", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 10], + "range": 460, + "lvls": 256, + "id": 138, + "unit": "in/100" + }, + "139": { + "layer": 0, + "name": "Rapid Update Mesocyclone", + "res": 0.0, + "mnemo": "MRU", + "range": 230, + "lvls": 0, + "id": 139, + "unit": "" + }, + "140": { + "layer": 0, + "name": "Gust Front MIGFA", + "res": 0.0, + "mnemo": "GFM", + "range": 230, + "lvls": 0, + "id": 140, + "unit": "" + }, + "141": { + "layer": 0, + "name": "Mesocyclone", + "res": 0.0, + "mnemo": "MD", + "range": 230, + "lvls": 0, + "id": 141, + "unit": "" + }, + "143": { + "layer": 0, + "name": "TVS Rapid Update", + "res": 0.0, + "mnemo": "TRU", + "range": 230, + "lvls": 0, + "id": 143, + "unit": "" + }, + "144": { + "layer": 0, + "name": "1-Hour Snow-Water Equiv", + "res": 1.0, + "mnemo": "OSW", + "range": 230, + "lvls": 16, + "id": 144, + "unit": "in" + }, + "145": { + "layer": 0, + "name": "1-Hour Snow Depth", + "res": 1.0, + "mnemo": "OSD", + "range": 230, + "lvls": 16, + "id": 145, + "unit": "in" + }, + "146": { + "layer": 0, + "name": "Storm Tot Snow-Water Equiv", + "res": 1.0, + "mnemo": "SSW", + "range": 230, + "lvls": 16, + "id": 146, + "unit": "in" + }, + "147": { + "layer": 0, + "name": "Storm Tot Snow Depth", + "res": 1.0, + "mnemo": "SSD", + "range": 230, + "lvls": 16, + "id": 147, + "unit": "in" + }, + "149": { + "layer": 0, + "name": "Digital Mesocyclone Data", + "res": 0.0, + "mnemo": "DMD", + "range": 230, + "lvls": 0, + "id": 149, + "unit": "" + }, + "150": { + "layer": 0, + "name": "User Sel Snow-Water Equiv", + 
"res": 1.0, + "mnemo": "USW", + "range": 230, + "lvls": 16, + "id": 150, + "unit": "in" + }, + "151": { + "layer": 0, + "name": "User Sel Snow Depth", + "res": 1.0, + "mnemo": "USD", + "range": 230, + "lvls": 16, + "id": 151, + "unit": "in" + }, + "152": { + "layer": 0, + "name": "RPG System Status", + "res": 0.0, + "mnemo": "RSS", + "range": 0, + "lvls": 0, + "id": 152, + "unit": "" + }, + "153": { + "layer": 0, + "name": "Reflectivity", + "res": 0.25, + "mnemo": "HZ", + "range": 460, + "lvls": 256, + "id": 153, + "unit": "dBZ/10" + }, + "154": { + "layer": 0, + "name": "Velocity", + "res": 0.25, + "mnemo": "HV", + "range": 300, + "lvls": 256, + "id": 154, + "unit": "(m/s)/10" + }, + "155": { + "layer": 0, + "name": "Spec Width", + "res": 0.25, + "mnemo": "HSW", + "range": 300, + "lvls": 256, + "id": 155, + "unit": "(m/s)/10" + }, + "158": { + "layer": 0, + "name": "Diff Reflectivity", + "res": 1.0, + "mnemo": "ZDR", + "range": 230, + "ctable": ['NWSStormClearReflectivity', -4.0, 5.0], + "lvls": 16, + "id": 158, + "unit": "dB" + }, + "159": { + "layer": 0, + "name": "Diff Reflectivity", + "res": 0.25, + "mnemo": "ZDR", + "range": 300, + "ctable": ['NWSStormClearReflectivity', -4.0, 5.0], + "lvls": 256, + "id": 159, + "unit": "dB" + }, + "160": { + "layer": 0, + "name": "Correlation Coeff", + "res": 1.0, + "mnemo": "CC", + "range": 230, + "lvls": 16, + "id": 160, + "unit": "" + }, + "161": { + "layer": 0, + "name": "Correlation Coeff", + "res": 0.25, + "mnemo": "CC", + "range": 300, + "lvls": 256, + "id": 161, + "unit": "" + }, + "162": { + "layer": 0, + "name": "Specific Diff Phase", + "res": 1.0, + "mnemo": "KDP", + "range": 230, + "lvls": 16, + "id": 162, + "unit": "deg/km" + }, + "163": { + "layer": 0, + "name": "Specific Diff Phase", + "res": 0.25, + "mnemo": "KDP", + "range": 300, + "lvls": 256, + "id": 163, + "unit": "deg/km" + }, + "164": { + "layer": 0, + "name": "Hydrometeor Class", + "res": 1.0, + "mnemo": "HC", + "range": 230, + "lvls": 16, + "id": 164, + "unit": "" + }, + "165": { + "layer": 0, + "name": "Hydrometeor Class", + "res": 0.25, + "mnemo": "HC", + "range": 300, + "lvls": 256, + "id": 165, + "unit": "/10" + }, + "166": { + "layer": 0, + "name": "Melting Layer", + "res": 0.0, + "mnemo": "ML", + "range": 230, + "lvls": 0, + "id": 166, + "unit": "" + }, + "167": { + "layer": 0, + "name": "Raw CC", + "res": 0.25, + "mnemo": "SDC", + "range": 300, + "lvls": 256, + "id": 167, + "unit": "" + }, + "168": { + "layer": 0, + "name": "Raw PHIDP", + "res": 0.25, + "mnemo": "SDP", + "range": 300, + "lvls": 256, + "id": 168, + "unit": "deg" + }, + "169": { + "layer": 0, + "name": "One Hour Accum", + "res": 2.0, + "mnemo": "OHA", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 10], + "range": 230, + "lvls": 16, + "id": 169, + "unit": "in" + }, + "170": { + "layer": 0, + "name": "One Hour Unbiased Accum", + "res": 0.25, + "mnemo": "DAA", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 10], + "range": 230, + "lvls": 256, + "id": 170, + "unit": "in/100" + }, + "171": { + "layer": 0, + "name": "Storm Total Accum", + "res": 2.0, + "mnemo": "STA", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 30], + "range": 230, + "lvls": 16, + "id": 171, + "unit": "in" + }, + "172": { + "layer": 0, + "name": "Storm Total Accum", + "res": 0.25, + "mnemo": "STA", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 30], + "range": 230, + "lvls": 256, + "id": 172, + "unit": "in/100" + }, + "173": { + "layer": 0, + "name": 
"User Select Accum", + "res": 0.25, + "mnemo": "DUA", + "ctable": ['NWSStormClearReflectivity', -100., 1.], + "scale": [0, 30], + "range": 230, + "lvls": 256, + "id": 173, + "unit": "in/100" + }, + "174": { + "layer": 0, + "name": "One Hour Diff", + "res": 0.25, + "mnemo": "DOD", + "range": 230, + "lvls": 256, + "id": 174, + "unit": "in/100" + }, + "175": { + "layer": 0, + "name": "Storm Total Diff", + "res": 0.25, + "mnemo": "DSD", + "range": 230, + "lvls": 256, + "id": 175, + "unit": "in/100" + }, + "176": { + "layer": 0, + "name": "Digital Inst Precip Rate", + "res": 0.25, + "mnemo": "DPR", + "ctable": ['NWS8bitVel', -100., 1.], + "scale": [0, 16], + "range": 230, + "lvls": 65536, + "id": 176, + "unit": "in/h" + }, + "177": { + "layer": 0, + "name": "Hybrid Hydrometeor Class", + "res": 0.25, + "mnemo": "HHC", + "range": 230, + "lvls": 256, + "id": 177, + "unit": "/10" + }, + "180": { + "layer": 0, + "name": "Reflectivity", + "res": 0.15, + "mnemo": "Z", + "range": 90, + "lvls": 256, + "id": 180, + "unit": "dBZ/10" + }, + "181": { + "layer": 0, + "name": "Reflectivity", + "res": 0.15, + "mnemo": "Z", + "range": 90, + "lvls": 16, + "id": 181, + "unit": "dBZ" + }, + "182": { + "layer": 0, + "name": "Velocity", + "res": 0.15, + "mnemo": "V", + "range": 90, + "lvls": 256, + "id": 182, + "unit": "(m/s)/10" + }, + "183": { + "layer": 0, + "name": "Velocity", + "res": 0.15, + "mnemo": "V", + "range": 90, + "lvls": 16, + "id": 183, + "unit": "kts" + }, + "184": { + "layer": 0, + "name": "Spec Width", + "res": 0.15, + "mnemo": "SW", + "range": 90, + "lvls": 256, + "id": 184, + "unit": "(m/s)/10" + }, + "185": { + "layer": 0, + "name": "Spectrum Width", + "res": 0.15, + "mnemo": "SW", + "range": 90, + "lvls": 8, + "id": 185, + "unit": "kts" + }, + "186": { + "layer": 0, + "name": "Long Range Refl", + "res": 0.3, + "mnemo": "Z", + "range": 460, + "lvls": 256, + "id": 186, + "unit": "dBZ/10" + }, + "187": { + "layer": 0, + "name": "Long Range Refl", + "res": 0.3, + "mnemo": "Z", + "range": 460, + "lvls": 16, + "id": 187, + "unit": "dBZ" + }, + "196": { + "layer": 0, + "name": "Microburst AMDA", + "res": 0.0, + "mnemo": "MBA", + "range": 50, + "lvls": 0, + "id": 196, + "unit": "" + }, + "202": { + "layer": 0, + "name": "Shift Change Checklist", + "res": 0.0, + "mnemo": "SCC", + "range": 0, + "lvls": 0, + "id": 202, + "unit": "" + }, + "550": { + "layer": 0, + "name": "Reflectivity", + "res": 0.926, + "mnemo": "Z", + "range": 111, + "lvls": 8, + "id": 550, + "unit": "dBZ" + }, + "300": { + "layer": 0, + "name": "Level II Reflectivity", + "res": 1.0, + "mnemo": "Z", + "range": 460, + "lvls": 256, + "id": 300, + "unit": "dBZ/10" + }, + "301": { + "layer": 0, + "name": "Level II Velocity", + "res": 0.25, + "mnemo": "V", + "range": 230, + "lvls": 256, + "id": 301, + "unit": "kts" + }, + "500": { + "layer": 0, + "name": "Reflectivity", + "res": 0.463, + "mnemo": "Z", + "range": 463, + "lvls": 8, + "id": 500, + "unit": "dBZ" + } +} + +# +# Dictionary that maps VTEC codes to headlines +# Modified from com.raytheon.uf.edex.activetable/utility/common_static/base/vtec/VTECTable.py +# Colornames from: +# 1) https://www.weather.gov/bro/mapcolors +# 2) https://digital.weather.gov/staticpages/definitions.php +# 3) https://www.w3schools.com/colors/colors_names.asp +# +vtec = { + 'AF.W' : {'phen': 'AF', + 'sig': 'W', + 'color': 'gray', + 'hdln': 'Ashfall Warning'}, + 'AF.Y' : {'phen': 'AF', + 'sig': 'Y', + 'color': 'gray', + 'hdln': 'Ashfall Advisory'}, + 'AQ.Y' : {'phen': 'AQ', + 'sig': 'Y', + 'color': 'gray', + 
'hdln': 'Air Quality Alert'}, + 'AS.O' : {'phen': 'AS', + 'sig': 'O', + 'color': 'gray', + 'hdln': 'Air Stagnation Outlook'}, + 'AS.Y' : {'phen': 'AS', + 'sig': 'Y', + 'color': 'gray', + 'hdln': 'Air Stagnation Advisory'}, + 'BH.S' : {'phen': 'BH', + 'sig': 'S', + 'color': 'turquoise', + 'hdln': 'Beach Hazards Statement'}, + 'BW.Y' : {'phen': 'BW', + 'sig': 'Y', + 'color': 'thistle', + 'hdln': 'Brisk Wind Advisory'}, + 'BZ.A' : {'phen' : 'BZ', + 'sig' : 'A', + 'color': 'greenyellow', + 'hdln' : 'Blizzard Watch'}, + 'BZ.W' : {'phen' : 'BZ', + 'sig' : 'W', + 'color': 'orangered', + 'hdln' : 'Blizzard Warning'}, + 'CF.A' : {'phen': 'CF', + 'sig': 'A', + 'color': 'mediumaquamarine', + 'hdln': 'Coastal Flood Watch'}, + 'CF.W' : {'phen': 'CF', + 'sig': 'W', + 'color': 'forestgreen', + 'hdln': 'Coastal Flood Warning'}, + 'CF.Y' : {'phen': 'CF', + 'sig': 'Y', + 'color': 'lawngreen', + 'hdln': 'Coastal Flood Advisory'}, + 'CF.S' : {'phen': 'CF', + 'sig': 'S', + 'color': 'olivedrab', + 'hdln': 'Coastal Flood Statement'}, + 'DS.W' : {'phen': 'DS', + 'sig': 'W', + 'color': 'bisque', + 'hdln': 'Dust Storm Warning'}, + 'DU.Y' : {'phen': 'DU', + 'sig': 'Y', + 'color': 'darkkhaki', + 'hdln': 'Blowing Dust Advisory'}, + 'EC.A' : {'phen': 'EC', + 'sig': 'A', + 'color': 'blue', + 'hdln': 'Extreme Cold Watch'}, + 'EC.W' : {'phen': 'EC', + 'sig': 'W', + 'color': 'blue', + 'hdln': 'Extreme Cold Warning'}, + 'EH.A' : {'phen': 'EH', + 'sig': 'A', + 'color': 'maroon', + 'hdln': 'Excessive Heat Watch'}, + 'EH.W' : {'phen': 'EH', + 'sig': 'W', + 'color': 'mediumvioletred', + 'hdln': 'Excessive Heat Warning'}, + 'EW.W' : {'phen': 'EW', + 'sig': 'W', + 'color': 'deeppink', + 'hdln': 'Excessive Wind Warning'}, + 'FA.A' : {'phen': 'FA', + 'sig': 'A', + 'color': 'seagreen', + 'hdln': 'Flood Watch'}, + 'FA.W' : {'phen': 'FA', + 'sig': 'W', + 'color': 'seagreen', + 'hdln': 'Areal Flood Warning'}, + 'FA.Y' : {'phen': 'FA', + 'sig': 'Y', + 'color': 'seagreen', + 'hdln': 'Areal Flood Advisory'}, + 'FF.A' : {'phen': 'FF', + 'sig': 'A', + 'color': 'limegreen', + 'hdln': 'Flash Flood Watch'}, + 'FF.S' : {'phen': 'FF', + 'sig': 'S', + 'color': 'yellowgreen', + 'hdln': 'Flash Flood Statement'}, + 'FF.W' : {'phen': 'FF', + 'sig': 'W', + 'color': 'darkred', + 'hdln': 'Flash Flood Warning'}, + 'FG.Y' : {'phen': 'FG', + 'sig': 'Y', + 'color': 'slategray', + 'hdln': 'Dense Fog Advisory'}, + 'FL.A' : {'phen': 'FL', + 'sig': 'A', + 'color': 'seagreen', + 'hdln': 'Flood Watch'}, + 'FL.W' : {'phen': 'FL', + 'sig': 'W', + 'color': 'green', + 'hdln': 'Flood Warning'}, + 'HY.S' : {'phen': 'FL', + 'sig': 'S', + 'color': '#00ff7f', + 'hdln': 'Hydrological Statement'}, + 'FL.S' : {'phen': 'FL', + 'sig': 'S', + 'color': '#00ff7f', + 'hdln': 'Flood Statement'}, + 'FL.Y' : {'phen': 'FL', + 'sig': 'Y', + 'color': '#00ff7f', + 'hdln': 'Flood Advisory'}, + 'FR.Y' : {'phen': 'FR', + 'sig': 'Y', + 'color': 'cornflowerblue', + 'hdln': 'Frost Advisory'}, + 'FW.A' : {'phen': 'FW', + 'sig': 'A', + 'color': 'navajowhite', + 'hdln': 'Fire Weather Watch'}, + 'FW.W' : {'phen': 'FW', + 'sig': 'W', + 'color': 'deeppink', + 'hdln': 'Red Flag Warning'}, + 'FZ.A' : {'phen': 'FZ', + 'sig': 'A', + 'color': 'royalblue', + 'hdln': 'Freeze Watch'}, + 'FZ.W' : {'phen': 'FZ', + 'sig': 'W', + 'color': 'cyan', + 'hdln': 'Freeze Warning'}, + 'GL.A' : {'phen': 'GL', + 'sig': 'A', + 'color': 'pink', + 'hdln': 'Gale Watch'}, + 'GL.W' : {'phen': 'GL', + 'sig': 'W', + 'color': '#dda0dd', + 'hdln': 'Gale Warning'}, + 'HF.A' : {'phen': 'HF', + 'sig': 'A', + 'color': 'darkorchid', + 
'hdln': 'Hurricane Force Wind Watch'}, + 'HF.W' : {'phen': 'HF', + 'sig': 'W', + 'color': '#cd5c5c', + 'hdln': 'Hurricane Force Wind Warning'}, + 'HT.Y' : {'phen': 'HT', + 'sig': 'Y', + 'color': '#ff7f4e', + 'hdln': 'Heat Advisory'}, + 'HU.A' : {'phen': 'HU', + 'sig': 'A', + 'color': '#ff00ff', + 'hdln': 'Hurricane Watch'}, + 'HU.S' : {'phen': 'HU', + 'sig': 'S', + 'color': 'yellow', + 'hdln': 'Hurricane Statement'}, + 'HU.W' : {'phen': 'HU', + 'sig': 'W', + 'color': '#dc143c', + 'hdln': 'Hurricane Warning'}, + 'HW.A' : {'phen': 'HW', + 'sig': 'A', + 'color': 'darkgoldenrod', + 'hdln': 'High Wind Watch'}, + 'HW.W' : {'phen': 'HW', + 'sig': 'W', + 'color': 'goldenrod', + 'hdln': 'High Wind Warning'}, + 'HZ.A' : {'phen': 'HZ', + 'sig': 'A', + 'color': 'royalblue', + 'hdln': 'Hard Freeze Watch'}, + 'HZ.W' : {'phen': 'HZ', + 'sig': 'W', + 'color': 'blue', + 'hdln': 'Hard Freeze Warning'}, + 'IS.W' : {'phen': 'IS', + 'sig': 'W', + 'color': 'darkmagenta', + 'hdln': 'Ice Storm Warning'}, + 'LE.A' : {'phen': 'LE', + 'sig': 'A', + 'color': 'LightSkyBlue', + 'hdln': 'Lake Effect Snow Watch'}, + 'LE.W' : {'phen': 'LE', + 'sig': 'W', + 'color': 'DarkCyan', + 'hdln': 'Lake Effect Snow Warning'}, + 'LE.Y' : {'phen': 'LE', + 'sig': 'Y', + 'color': 'MediumTurquoise', + 'hdln': 'Lake Effect Snow Advisory'}, + 'LO.Y' : {'phen': 'LO', + 'sig': 'Y', + 'color': 'maroon', + 'hdln': 'Low Water Advisory'}, + 'LS.A' : {'phen': 'LS', + 'sig': 'A', + 'color': 'MediumAquaMarine', + 'hdln': 'Lakeshore Flood Watch'}, + 'LS.S' : {'phen': 'LS', + 'sig': 'S', + 'color': 'lawngreen', + 'hdln': 'Lakeshore Flood Statement'}, + 'LS.W' : {'phen': 'LS', + 'sig': 'W', + 'color': 'forestgreen', + 'hdln': 'Lakeshore Flood Warning'}, + 'LS.Y' : {'phen': 'LS', + 'sig': 'Y', + 'color': 'lawngreen', + 'hdln': 'Lakeshore Flood Advisory'}, + 'LW.Y' : {'phen': 'LW', + 'sig': 'Y', + 'color': 'tan', + 'hdln': 'Lake Wind Advisory'}, + 'MA.A' : {'phen': 'MA', + 'sig': 'A', + 'color': 'peachpuff', + 'hdln': 'Special Marine Advisory'}, + 'MA.S' : {'phen': 'MA', + 'sig': 'S', + 'color': 'peachpuff', + 'hdln': 'Special Marine Statement'}, + 'MA.W' : {'phen': 'MA', + 'sig': 'W', + 'color': 'palevioletred', + 'hdln': 'Special Marine Warning'}, + 'MF.Y' : {'phen': 'MF', + 'sig': 'Y', + 'color': 'slategray', + 'hdln': 'Dense Fog Advisory'}, # Marine Fog + 'MH.W' : {'phen': 'MH', + 'sig': 'W', + 'color': 'darkgray', + 'hdln': 'Ashfall Warning'}, # Marine Ashfall + 'MH.Y' : {'phen': 'MH', + 'sig': 'Y', + 'color': 'dimgray', + 'hdln': 'Ashfall Advisory'}, # Marine Ashfall + 'MS.Y' : {'phen': 'MS', + 'sig': 'Y', + 'color': 'khaki', + 'hdln': 'Dense Smoke Advisory'}, # Marine Smoke + 'RB.Y' : {'phen': 'RB', + 'sig': 'Y', + 'color': 'thistle', + 'hdln': 'Small Craft Advisory for rough bar'}, + 'RP.S' : {'phen': 'RP', + 'sig' : 'S', + 'color': 'turquoise', + 'hdln': 'High Rip Current Risk'}, + 'SC.Y' : {'phen': 'SC', + 'sig': 'Y', + 'color': 'thistle', + 'hdln': 'Small Craft Advisory'}, + 'SE.A' : {'phen': 'SE', + 'sig': 'A', + 'color': 'darkslateblue', + 'hdln': 'Hazardous Seas Watch'}, + 'SE.W' : {'phen': 'SE', + 'sig': 'W', + 'color': 'thistle', + 'hdln': 'Hazardous Seas Warning'}, + 'SI.Y' : {'phen': 'SI', + 'sig': 'Y', + 'color': 'thistle', + 'hdln': 'Small Craft Advisory for winds'}, + 'SM.Y' : {'phen': 'SM', + 'sig': 'Y', + 'color': 'khaki', + 'hdln': 'Dense Smoke Advisory'}, + 'SR.A' : {'phen': 'SR', + 'sig': 'A', + 'color': 'violet', + 'hdln': 'Storm Watch'}, + 'SR.W' : {'phen': 'SR', + 'sig': 'W', + 'color': 'darkorchid', + 'hdln': 'Storm 
Warning'}, + 'SS.A' : {'phen': 'SS', + 'sig': 'A', + 'color': '#DB7FF7', + 'hdln': 'Storm Surge Watch'}, + 'SS.W' : {'phen': 'SS', + 'sig': 'W', + 'color': '#B524F7', + 'hdln': 'Storm Surge Warning'}, + 'SU.W' : {'phen': 'SU', + 'sig': 'W', + 'color': 'forestgreen', + 'hdln': 'High Surf Warning'}, + 'SU.Y' : {'phen': 'SU', + 'sig': 'Y', + 'color': 'mediumorchid', + 'hdln': 'High Surf Advisory'}, + 'SV.A' : {'phen': 'SV', + 'sig': 'A', + 'color': 'palevioletred', + 'hdln': 'Severe Thunderstorm Watch'}, + 'SV.S' : {'phen': 'SV', + 'sig': 'S', + 'color': 'aqua', + 'hdln': 'Severe Weather Statement'}, + 'SV.W' : {'phen': 'SV', + 'sig': 'W', + 'color': 'orange', + 'hdln': 'Severe Thunderstorm Warning'}, + 'SW.Y' : {'phen': 'SW', + 'sig': 'Y', + 'color': 'thistle', + 'hdln': 'Small Craft Advisory for hazardous seas'}, + 'TO.A' : {'phen': 'TO', + 'sig': 'A', + 'color': 'yellow', + 'hdln': 'Tornado Watch'}, + 'TO.W' : {'phen': 'TO', + 'sig': 'W', + 'color': 'red', + 'hdln': 'Tornado Warning'}, + 'TR.A' : {'phen': 'TR', + 'sig': 'A', + 'color': '#f08080', + 'hdln': 'Tropical Storm Watch'}, + 'TR.W' : {'phen': 'TR', + 'sig': 'W', + 'color': '#b22222', + 'hdln': 'Tropical Storm Warning'}, + 'TS.A' : {'phen': 'TS', + 'sig': 'A', + 'color': 'fuchsia', + 'hdln': 'Tsunami Watch'}, + 'TS.W' : {'phen': 'TS', + 'sig': 'W', + 'color': '#FD6347', + 'hdln': 'Tsunami Warning'}, + 'TS.Y' : {'phen': 'TS', + 'sig': 'Y', + 'color': 'chocolate', + 'hdln': 'Tsunami Advisory'}, + 'TY.A' : {'phen': 'TY', + 'sig': 'A', + 'color': 'fuchsia', + 'hdln': 'Typhoon Watch'}, + 'TY.W' : {'phen': 'TY', + 'sig': 'W', + 'color': '#FD6347', + 'hdln': 'Typhoon Warning'}, + 'UP.A' : {'phen': 'UP', + 'sig': 'A', + 'color': 'RosyBrown', + 'hdln': 'Heavy Freezing Spray Watch'}, + 'UP.W' : {'phen': 'UP', + 'sig': 'W', + 'color': '#00C0FF', + 'hdln': 'Heavy Freezing Spray Warning'}, + 'UP.Y' : {'phen': 'UP', + 'sig': 'Y', + 'color': '#00C0FF', + 'hdln': 'Freezing Spray Advisory'}, + 'WC.A' : {'phen': 'WC', + 'sig': 'A', + 'color': 'cadetblue', + 'hdln': 'Wind Chill Watch'}, + 'WC.W' : {'phen': 'WC', + 'sig': 'W', + 'color': 'lightsteelblue', + 'hdln': 'Wind Chill Warning'}, + 'WC.Y' : {'phen': 'WC', + 'sig': 'Y', + 'color': 'paleturquoise', + 'hdln': 'Wind Chill Advisory'}, + 'WI.Y' : {'phen': 'WI', + 'sig': 'Y', + 'color': 'tan', + 'hdln': 'Wind Advisory'}, + 'WS.A' : {'phen': 'WS', + 'sig': 'A', + 'color': '#4682b4', + 'hdln': 'Winter Storm Watch'}, + 'WS.W' : {'phen': 'WS', + 'sig': 'W', + 'color': 'hotpink', + 'hdln': 'Winter Storm Warning'}, + 'WW.Y' : {'phen': 'WW', + 'sig': 'Y', + 'color': '#7b68ee', + 'hdln': 'Winter Weather Advisory'}, + 'ZF.Y' : {'phen': 'ZF', + 'sig': 'Y', + 'color': 'teal', + 'hdln': 'Freezing Fog Advisory'}, + 'ZR.Y' : {'phen': 'ZR', + 'sig': 'Y', + 'color': 'orchid', + 'hdln': 'Freezing Rain Advisory'}, + } + +# +# Upgrade Hazards Dictionary - upgradeHazardsDict is a dictionary of +# phen/sig combinations defining upgrades. Each key is the proposed hazard. +# The associated list are the hazards which are upgraded by the +# proposed hazard. 
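+# For example (an illustrative reading of the table below, not additional
+# logic): a proposed BZ.W (Blizzard Warning) upgrades an active WS.W
+# (Winter Storm Warning) or WW.Y (Winter Weather Advisory), so
+# checkForUpgrade('BZ', 'W', 'WS', 'W'), defined further down, returns 1,
+# while checkForUpgrade('WS', 'W', 'BZ', 'W') returns 0.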
+# + +upgradeHazardsDict = { +'WC.W': ['WC.A', 'WC.Y'], +'WC.Y': ['WC.A'], +'BZ.W': ['WS.W', 'LE.W', 'ZR.Y', 'LE.Y', 'WW.Y', + 'BZ.A', 'WS.A', 'LE.A'], +'IS.W': ['WS.W', 'LE.W', 'ZR.Y', 'LE.Y', 'WW.Y', + 'BZ.A', 'WS.A', 'LE.A'], +'LE.W': ['ZR.Y', 'LE.Y', 'WW.Y', + 'BZ.A', 'WS.A', 'LE.A'], +'WS.W': ['ZR.Y', 'LE.Y', 'WW.Y', + 'BZ.A', 'WS.A', 'LE.A'], +'ZR.Y': ['BZ.A', 'WS.A', 'LE.A'], +'LE.Y': ['BZ.A', 'WS.A', 'LE.A'], +'WW.Y': ['BZ.A', 'WS.A', 'LE.A'], +'EH.W': ['EH.A', 'HT.Y'], +'HT.Y': ['EH.A'], +'FZ.W': ['FZ.A', 'FR.Y', 'HZ.A'], +'HZ.W': ['FZ.A', 'FR.Y', 'HZ.A'], +'FR.Y': ['FZ.A', 'HZ.A'], +'HW.W': ['DU.Y', 'LW.Y', 'WI.Y', 'HW.A'], +'DS.W': ['DU.Y', 'LW.Y', 'WI.Y', 'HW.A'], +'WI.Y': ['HW.A'], +'EC.W': ['EC.A'], +'FW.W': ['FW.A'], +'CF.W': ['CF.A', 'CF.Y'], +'CF.Y': ['CF.A'], +'LS.W': ['LS.A', 'LS.Y'], +'LS.Y': ['LS.A'], +'BW.Y': ['GL.A', 'SR.A', 'HF.A', 'SE.A'], +'RB.Y': ['GL.A', 'SR.A', 'HF.A', 'SE.A'], +'SC.Y': ['GL.A', 'SR.A', 'HF.A', 'SE.A'], +'SI.Y': ['GL.A', 'SR.A', 'HF.A', 'SE.A'], +'SW.Y': ['SE.A'], +'UP.Y': ['UP.A'], +'HF.W': ['SR.W', 'GL.W', 'SC.Y', 'SW.Y', 'BW.Y', 'SI.Y', 'RB.Y', 'GL.A', 'SR.A', 'HF.A', 'SE.A'], +'SR.W': ['GL.W', 'SC.Y', 'SW.Y', 'BW.Y', 'SI.Y', 'RB.Y', 'GL.A', 'SR.A', 'HF.A', 'SE.A'], +'GL.W': ['SC.Y', 'SW.Y', 'BW.Y', 'SI.Y', 'RB.Y', 'GL.A', 'SR.A', 'HF.A', 'SE.A'], +'SE.W': ['SC.Y', 'RB.Y', 'GL.A', 'SR.A', 'HF.A', 'SE.A'], +'UP.W': ['UP.Y', 'UP.A'], +'SU.W': ['SU.Y'], +'SS.W': ['SS.A'], +'HU.W': ['HU.A', 'TR.W', 'TR.A'], +'HU.A': ['TR.A'], +'TR.W': ['TR.A', 'HU.A', 'TY.A'], +'TY.W': ['TY.A', 'TR.W', 'TR.A'], +'TY.A': ['TR.A'], +'AF.W': ['AF.Y'], +'MH.W': ['MH.Y'], + } + +# +# When passed a phen/sig for both the current hazard and the proposed hazard, +# checkForUpgrade returns a 1 if the proposed hazard is an upgrade, otherwise 0 +# + +def checkForUpgrade(pPhen, pSig, cPhen, cSig): + proposed = pPhen + "." + pSig + current = cPhen + "." + cSig + if proposed in upgradeHazardsDict: + if current in upgradeHazardsDict[proposed]: + return 1 + else: + return 0 + else: + return 0 + +# +# Downgrade Hazards Dictionary - downgradeHazardsDict is a dictionary of +# phen/sig combinations defining downgrades. Each key is the proposed hazard. +# The associated list are the hazards which are downgraded by the +# proposed hazard. +# + +downgradeHazardsDict = { +'ZR.Y': ['BZ.W', 'LE.W', 'IS.W', 'WS.W'], +'LE.Y': ['BZ.W', 'LE.W', 'IS.W', 'WS.W'], +'WW.Y': ['BZ.W', 'LE.W', 'IS.W', 'WS.W'], +'WC.Y': ['WC.W'], +'WS.W': ['BZ.W', 'IS.W'], +'LE.W': ['BZ.W', 'IS.W'], +'DU.Y': ['DS.W', 'HW.W'], +'LW.Y': ['DS.W', 'HW.W', 'WI.Y'], +'WI.Y': ['DS.W', 'HW.W'], +'HT.Y': ['EH.W'], +'FR.Y': ['FZ.W', 'HZ.W'], +'TR.W': ['HU.W', 'TY.W'], +'UP.Y': ['UP.W'], +'SR.W': ['HF.W'], +'GL.W': ['HF.W', 'SR.W'], +'SC.Y': ['HF.W', 'SR.W', 'GL.W', 'SE.W'], +'SW.Y': ['SE.W'], +'RB.Y': ['HF.W', 'SR.W', 'GL.W', 'SE.W'], +'SU.Y': ['SU.W'], +'BW.Y': ['HF.W', 'SR.W', 'GL.W'], +'SI.Y': ['HF.W', 'SR.W', 'GL.W'], +'LS.Y': ['LS.W'], +'CF.Y': ['CF.W'], +'AF.Y': ['AF.W'], +'MH.Y': ['MH.W'], +} + +# +# When passed a phen/sig for both the current hazard and the proposed hazard, +# checkForDowngrade returns a 1 if the proposed hazard is an downgrade, otherwise 0 +# + +def checkForDowngrade(pPhen, pSig, cPhen, cSig): + proposed = pPhen + "." + pSig + current = cPhen + "." 
+ cSig + if proposed in downgradeHazardsDict: + if current in downgradeHazardsDict[proposed]: + return 1 + else: + return 0 + else: + return 0 diff --git a/awips/test/Record.py b/awips/test/Record.py new file mode 100644 index 0000000..c9eac48 --- /dev/null +++ b/awips/test/Record.py @@ -0,0 +1,12 @@ +import sys + + +class Record(): + def __init__(self, level=0, msg='Test Message'): + self.levelno = level + self.message = msg + self.exc_info = sys.exc_info() + self.exc_text = "TEST" + + def getMessage(self): + return self.message diff --git a/awips/test/__init__.py b/awips/test/__init__.py new file mode 100644 index 0000000..b5757f8 --- /dev/null +++ b/awips/test/__init__.py @@ -0,0 +1,2 @@ + +__all__ = [] diff --git a/awips/test/dafTests/__init__.py b/awips/test/dafTests/__init__.py new file mode 100644 index 0000000..b5757f8 --- /dev/null +++ b/awips/test/dafTests/__init__.py @@ -0,0 +1,2 @@ + +__all__ = [] diff --git a/awips/test/dafTests/baseBufrMosTestCase.py b/awips/test/dafTests/baseBufrMosTestCase.py new file mode 100644 index 0000000..ac18a40 --- /dev/null +++ b/awips/test/dafTests/baseBufrMosTestCase.py @@ -0,0 +1,51 @@ +# +# Base TestCase for BufrMos* tests. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 12/07/16 5981 tgurney Parameterize +# 12/15/16 5981 tgurney Add envelope test +# +# + +from awips.dataaccess import DataAccessLayer as DAL + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class BufrMosTestCase(baseDafTestCase.DafTestCase): + """Base class for testing DAF support of bufrmos data""" + + data_params = "temperature", "dewpoint" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(params.OBS_STATION) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(params.OBS_STATION) + req.setParameters(*self.data_params) + self.runGeometryDataTest(req) + + def testGetGeometryDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters(*self.data_params) + req.setEnvelope(params.ENVELOPE) + data = self.runGeometryDataTest(req) + for item in data: + self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) diff --git a/awips/test/dafTests/baseDafTestCase.py b/awips/test/dafTests/baseDafTestCase.py new file mode 100644 index 0000000..ab0fb38 --- /dev/null +++ b/awips/test/dafTests/baseDafTestCase.py @@ -0,0 +1,192 @@ +# +# Base TestCase for DAF tests. This class provides helper methods and +# tests common to all DAF test cases. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/13/16 5379 tgurney Add identifier values tests +# 04/18/16 5548 tgurney More cleanup, plus new tests +# 04/26/16 5587 tgurney Move identifier values tests +# to subclasses +# 06/01/16 5587 tgurney Add testGet*Identifiers +# 06/07/16 5574 tgurney Make geometry/grid data tests +# return the retrieved data +# 06/10/16 5548 tgurney Make testDatatypeIsSupported +# case-insensitive +# 08/10/16 2416 tgurney Don't test identifier values +# for dataURI +# 10/05/16 5926 dgilling Better checks in runGeometryDataTest. +# 11/08/16 5985 tgurney Do not check data times on +# time-agnostic data +# 03/13/17 5981 tgurney Do not check valid period on +# data time +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException + +import os +import unittest + + +class DafTestCase(unittest.TestCase): + + sampleDataLimit = 5 + """ + Maximum number of levels, locations, times, and geometry/grid data to + display + """ + + numTimesToLimit = 3 + """ + When limiting geometry/grid data requests with times, only retrieve data + for this many times + """ + + datatype = None + """Name of the datatype""" + + @classmethod + def setUpClass(cls): + host = os.environ.get('DAF_TEST_HOST') + if host is None: + host = 'edex-cloud.unidata.ucar.edu' + DAL.changeEDEXHost(host) + + @staticmethod + def getTimesIfSupported(req): + """Return available times for req. If req refers to a time-agnostic + datatype, return an empty list instead. + """ + times = [] + try: + times = DAL.getAvailableTimes(req) + except ThriftRequestException as e: + if 'TimeAgnosticDataException' not in str(e): + raise + return times + + def testDatatypeIsSupported(self): + allSupported = DAL.getSupportedDatatypes() + self.assertIn(self.datatype, allSupported) + + def testGetRequiredIdentifiers(self): + req = DAL.newDataRequest(self.datatype) + required = DAL.getRequiredIdentifiers(req) + self.assertIsNotNone(required) + print("Required identifiers:", required) + + def testGetOptionalIdentifiers(self): + req = DAL.newDataRequest(self.datatype) + optional = DAL.getOptionalIdentifiers(req) + self.assertIsNotNone(optional) + print("Optional identifiers:", optional) + + def runGetIdValuesTest(self, identifiers): + for identifier in identifiers: + if identifier.lower() == 'datauri': + continue + req = DAL.newDataRequest(self.datatype) + idValues = DAL.getIdentifierValues(req, identifier) + self.assertTrue(hasattr(idValues, '__iter__')) + + def runInvalidIdValuesTest(self): + badString = 'id from ' + self.datatype + '; select 1;' + with self.assertRaises(ThriftRequestException): + req = DAL.newDataRequest(self.datatype) + DAL.getIdentifierValues(req, badString) + + def runNonexistentIdValuesTest(self): + with self.assertRaises(ThriftRequestException): + req = DAL.newDataRequest(self.datatype) + DAL.getIdentifierValues(req, 'idthatdoesnotexist') + + def runParametersTest(self, req): + params = DAL.getAvailableParameters(req) + self.assertIsNotNone(params) + print(params) + + def runLevelsTest(self, req): + levels = DAL.getAvailableLevels(req) + self.assertIsNotNone(levels) + print("Number of levels: " + str(len(levels))) + strLevels = [str(t) for t in levels[:self.sampleDataLimit]] + print("Sample levels:\n" + str(strLevels)) + + def runLocationsTest(self, req): + locs = DAL.getAvailableLocationNames(req) + self.assertIsNotNone(locs) + print("Number of location names: " + str(len(locs))) + print("Sample location names:\n" + 
str(locs[:self.sampleDataLimit])) + + def runTimesTest(self, req): + times = DAL.getAvailableTimes(req) + self.assertIsNotNone(times) + print("Number of times: " + str(len(times))) + strTimes = [str(t) for t in times[:self.sampleDataLimit]] + print("Sample times:\n" + str(strTimes)) + + def runTimeAgnosticTest(self, req): + with self.assertRaises(ThriftRequestException) as cm: + DAL.getAvailableTimes(req) + self.assertIn('TimeAgnosticDataException', str(cm.exception)) + + def runGeometryDataTest(self, req, checkDataTimes=True): + """ + Test that we are able to successfully retrieve geometry data for the + given request. + """ + times = DafTestCase.getTimesIfSupported(req) + geomData = DAL.getGeometryData(req, times[:self.numTimesToLimit]) + self.assertIsNotNone(geomData) + if not geomData: + raise unittest.SkipTest("No data available") + print("Number of geometry records: " + str(len(geomData))) + return geomData + + def runGeometryDataTestWithTimeRange(self, req, timeRange): + """ + Test that we are able to successfully retrieve geometry data for the + given request. + """ + geomData = DAL.getGeometryData(req, timeRange) + self.assertIsNotNone(geomData) + if not geomData: + raise unittest.SkipTest("No data available") + print("Number of geometry records: " + str(len(geomData))) + return geomData + + def runGridDataTest(self, req, testSameShape=True): + """ + Test that we are able to successfully retrieve grid data for the given + request. + + Args: + req: the grid request + testSameShape: whether or not to verify that all the retrieved data + have the same shape (most data don't change shape) + """ + times = DafTestCase.getTimesIfSupported(req) + gridData = DAL.getGridData(req, times[:self.numTimesToLimit]) + self.assertIsNotNone(gridData) + if not gridData: + raise unittest.SkipTest("No data available") + print("Number of grid records: " + str(len(gridData))) + if len(gridData) > 0: + print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") + print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") + print("Sample lat-lon data:\n" + str(gridData[0].getLatLonCoords()) + "\n") + + if testSameShape: + correctGridShape = gridData[0].getLatLonCoords()[0].shape + for record in gridData: + rawData = record.getRawData() + self.assertIsNotNone(rawData) + self.assertEqual(rawData.shape, correctGridShape) + return gridData diff --git a/awips/test/dafTests/baseRadarTestCase.py b/awips/test/dafTests/baseRadarTestCase.py new file mode 100644 index 0000000..afbe040 --- /dev/null +++ b/awips/test/dafTests/baseRadarTestCase.py @@ -0,0 +1,167 @@ +# +# Tests common to all radar factories +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 04/26/16 5587 tgurney Move identifier values tests +# out of base class +# 06/01/16 5587 tgurney Update testGetIdentifierValues +# 06/08/16 5574 mapeters Add advanced query tests +# 06/13/16 5574 tgurney Fix checks for None +# 06/14/16 5548 tgurney Undo previous change (broke +# test) +# 06/30/16 5725 tgurney Add test for NOT IN +# 08/25/16 2671 tgurney Rename to baseRadarTestCase +# and move factory-specific +# tests +# 12/07/16 5981 tgurney Parameterize +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class BaseRadarTestCase(baseDafTestCase.DafTestCase): + """Tests common to all radar factories""" + + # datatype is specified by subclass + datatype = None + + radarLoc = params.RADAR.lower() + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableLevels(self): + req = DAL.newDataRequest(self.datatype) + self.runLevelsTest(req) + + def testGetAvailableLevelsWithInvalidLevelIdentifierThrowsException(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('level.one.field', 'invalidLevelField') + with self.assertRaises(ThriftRequestException) as cm: + self.runLevelsTest(req) + self.assertIn('IncompatibleRequestException', str(cm.exception)) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + self.runTimesTest(req) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + requiredIds = set(DAL.getRequiredIdentifiers(req)) + self.runGetIdValuesTest(optionalIds | requiredIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def runConstraintTest(self, key, operator, value): + raise NotImplementedError + + def testGetDataWithEqualsString(self): + gridData = self.runConstraintTest('icao', '=', self.radarLoc) + for record in gridData: + self.assertEqual(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithEqualsInt(self): + gridData = self.runConstraintTest('icao', '=', 1000) + for record in gridData: + self.assertEqual(record.getAttribute('icao'), 1000) + + def testGetDataWithEqualsLong(self): + gridData = self.runConstraintTest('icao', '=', 1000) + for record in gridData: + self.assertEqual(record.getAttribute('icao'), 1000) + + def testGetDataWithEqualsFloat(self): + gridData = self.runConstraintTest('icao', '=', 1.0) + for record in gridData: + self.assertEqual(round(record.getAttribute('icao'), 1), 1.0) + + def testGetDataWithEqualsNone(self): + gridData = self.runConstraintTest('icao', '=', None) + for record in gridData: + self.assertIsNone(record.getAttribute('icao')) + + def testGetDataWithNotEquals(self): + gridData = self.runConstraintTest('icao', '!=', self.radarLoc) + for record in gridData: + self.assertNotEqual(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithNotEqualsNone(self): + gridData = self.runConstraintTest('icao', '!=', None) + for record in gridData: + 
self.assertIsNotNone(record.getAttribute('icao')) + + def testGetDataWithGreaterThan(self): + gridData = self.runConstraintTest('icao', '>', self.radarLoc) + for record in gridData: + self.assertGreater(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithLessThan(self): + gridData = self.runConstraintTest('icao', '<', self.radarLoc) + for record in gridData: + self.assertLess(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithGreaterThanEquals(self): + gridData = self.runConstraintTest('icao', '>=', self.radarLoc) + for record in gridData: + self.assertGreaterEqual(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithLessThanEquals(self): + gridData = self.runConstraintTest('icao', '<=', self.radarLoc) + for record in gridData: + self.assertLessEqual(record.getAttribute('icao'), self.radarLoc) + + def testGetDataWithInTuple(self): + gridData = self.runConstraintTest('icao', 'in', (self.radarLoc, 'tpbi')) + for record in gridData: + self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) + + def testGetDataWithInList(self): + gridData = self.runConstraintTest('icao', 'in', [self.radarLoc, 'tpbi']) + for record in gridData: + self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) + + def testGetDataWithInGenerator(self): + generator = (item for item in (self.radarLoc, 'tpbi')) + gridData = self.runConstraintTest('icao', 'in', generator) + for record in gridData: + self.assertIn(record.getAttribute('icao'), (self.radarLoc, 'tpbi')) + + def testGetDataWithNotInList(self): + gridData = self.runConstraintTest('icao', 'not in', ['zzzz', self.radarLoc]) + for record in gridData: + self.assertNotIn(record.getAttribute('icao'), ('zzzz', self.radarLoc)) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self.runConstraintTest('icao', 'junk', self.radarLoc) + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self.runConstraintTest('icao', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self.runConstraintTest('icao', 'in', []) diff --git a/awips/test/dafTests/params.py b/awips/test/dafTests/params.py new file mode 100644 index 0000000..09b91c5 --- /dev/null +++ b/awips/test/dafTests/params.py @@ -0,0 +1,23 @@ +# +# Site-specific parameters for DAF tests +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 12/07/16 5981 tgurney Initial creation +# 12/15/16 5981 tgurney Add ENVELOPE +# +# + +from shapely.geometry import box + + +AIRPORT = 'OMA' +OBS_STATION = 'KOMA' +SITE_ID = 'OAX' +STATION_ID = '72558' +RADAR = 'KOAX' +SAMPLE_AREA = (-97.0, 41.0, -96.0, 42.0) + +ENVELOPE = box(*SAMPLE_AREA) diff --git a/awips/test/dafTests/testAcars.py b/awips/test/dafTests/testAcars.py new file mode 100644 index 0000000..2a2b54f --- /dev/null +++ b/awips/test/dafTests/testAcars.py @@ -0,0 +1,39 @@ +# +# Test DAF support for ACARS data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from awips.test.dafTests import baseDafTestCase + + +class AcarsTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for ACARS data""" + + datatype = "acars" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("flightLevel", "tailNumber") + self.runGeometryDataTest(req) diff --git a/awips/test/dafTests/testAirep.py b/awips/test/dafTests/testAirep.py new file mode 100644 index 0000000..381ed11 --- /dev/null +++ b/awips/test/dafTests/testAirep.py @@ -0,0 +1,146 @@ +# +# Test DAF support for airep data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 bsteffen Add getIdentifierValues tests +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.test.dafTests import baseDafTestCase + + +class AirepTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for airep data""" + + datatype = "airep" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("flightLevel", "reportType") + self.runGeometryDataTest(req) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + self.runGetIdValuesTest(optionalIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.setParameters("flightLevel", "reportType") + req.addIdentifier(key, constraint) + return self.runGeometryDataTest(req) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('reportType', '=', 'AIREP') + for record in geometryData: + self.assertEqual(record.getString('reportType'), 'AIREP') + + # No numeric tests since no numeric identifiers are available. 
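+    # Illustrative sketch only (not one of the tests): the same constraint
+    # pattern can be used directly against the Data Access Framework,
+    # assuming an EDEX host with airep data is reachable, e.g.
+    #
+    #   req = DAL.newDataRequest('airep')
+    #   req.setParameters('flightLevel', 'reportType')
+    #   req.addIdentifier('reportType',
+    #                     RequestConstraint.new('in', ['AIREP', 'AMDAR']))
+    #   times = DAL.getAvailableTimes(req)
+    #   data = DAL.getGeometryData(req, times[:1])
+    #   print([rec.getString('reportType') for rec in data])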
+ + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '=', None) + for record in geometryData: + self.assertEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('reportType', '!=', 'AIREP') + for record in geometryData: + self.assertNotEqual(record.getString('reportType'), 'AIREP') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('reportType', '>', 'AIREP') + for record in geometryData: + self.assertGreater(record.getString('reportType'), 'AIREP') + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('reportType', '<', 'AIREP') + for record in geometryData: + self.assertLess(record.getString('reportType'), 'AIREP') + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('reportType', '>=', 'AIREP') + for record in geometryData: + self.assertGreaterEqual(record.getString('reportType'), 'AIREP') + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('reportType', '<=', 'AIREP') + for record in geometryData: + self.assertLessEqual(record.getString('reportType'), 'AIREP') + + def testGetDataWithInTuple(self): + collection = ('AIREP', 'AMDAR') + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInList(self): + collection = ['AIREP', 'AMDAR'] + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInGenerator(self): + collection = ('AIREP', 'AMDAR') + generator = (item for item in collection) + geometryData = self._runConstraintTest('reportType', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithNotInList(self): + collection = ['AMDAR'] + geometryData = self._runConstraintTest('reportType', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('reportType'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'junk', 'AIREP') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('AIREP', 'AMDAR', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', 'in', collection) diff --git a/awips/test/dafTests/testBinLightning.py b/awips/test/dafTests/testBinLightning.py new file mode 100644 index 0000000..02e6382 --- /dev/null +++ b/awips/test/dafTests/testBinLightning.py @@ -0,0 +1,170 @@ +# +# Test DAF support for binlightning data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 04/21/16 5551 tgurney Add tests to verify #5551 +# 04/25/16 5587 tgurney Enable skipped test added in +# #5551 +# 04/26/16 5587 tgurney Move identifier values tests +# out of base class +# 06/01/16 5587 tgurney Update testGetIdentifierValues +# 06/03/16 5574 tgurney Add advanced query tests +# 06/13/16 5574 tgurney Typo +# 06/30/16 5725 tgurney Add test for NOT IN +# 11/08/16 5985 tgurney Do not check data times +# +# +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.test.dafTests import baseDafTestCase + + +class BinLightningTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for binlightning data""" + + datatype = "binlightning" + source = "GLMfl" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('source', self.source) + self.runTimesTest(req) + + def testGetGeometryDataSingleSourceSingleParameter(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('source', self.source) + req.setParameters('intensity') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataInvalidParamRaisesIncompatibleRequestException(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('source', self.source) + req.setParameters('blahblahblah') + with self.assertRaises(ThriftRequestException) as cm: + self.runGeometryDataTest(req) + self.assertIn('IncompatibleRequestException', str(cm.exception)) + + def testGetGeometryDataSingleSourceAllParameters(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('source', self.source) + req.setParameters(*DAL.getAvailableParameters(req)) + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + requiredIds = set(DAL.getRequiredIdentifiers(req)) + self.runGetIdValuesTest(optionalIds | requiredIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters('intensity') + return self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetDataWithEqualsString(self): + geomData = self._runConstraintTest('source', '=', self.source) + for record in geomData: + self.assertEqual(record.getAttribute('source'), self.source) + + def testGetDataWithEqualsInt(self): + geomData = self._runConstraintTest('source', '=', 1000) + for record in geomData: + self.assertEqual(record.getAttribute('source'), 1000) + + def testGetDataWithEqualsLong(self): + geomData = self._runConstraintTest('source', '=', 1000) + for record in geomData: + self.assertEqual(record.getAttribute('source'), 1000) + + def testGetDataWithEqualsFloat(self): + geomData = self._runConstraintTest('source', '=', 1.0) + for record in geomData: + self.assertEqual(round(record.getAttribute('source'), 1), 1.0) + + def 
testGetDataWithEqualsNone(self): + geomData = self._runConstraintTest('source', '=', None) + for record in geomData: + self.assertIsNone(record.getAttribute('source')) + + def testGetDataWithNotEquals(self): + geomData = self._runConstraintTest('source', '!=', self.source) + for record in geomData: + self.assertNotEqual(record.getAttribute('source'), self.source) + + def testGetDataWithNotEqualsNone(self): + geomData = self._runConstraintTest('source', '!=', None) + for record in geomData: + self.assertIsNotNone(record.getAttribute('source')) + + def testGetDataWithGreaterThan(self): + geomData = self._runConstraintTest('source', '>', self.source) + for record in geomData: + self.assertGreater(record.getAttribute('source'), self.source) + + def testGetDataWithLessThan(self): + geomData = self._runConstraintTest('source', '<', self.source) + for record in geomData: + self.assertLess(record.getAttribute('source'), self.source) + + def testGetDataWithGreaterThanEquals(self): + geomData = self._runConstraintTest('source', '>=', self.source) + for record in geomData: + self.assertGreaterEqual(record.getAttribute('source'), self.source) + + def testGetDataWithLessThanEquals(self): + geomData = self._runConstraintTest('source', '<=', self.source) + for record in geomData: + self.assertLessEqual(record.getAttribute('source'), self.source) + + def testGetDataWithInTuple(self): + geomData = self._runConstraintTest('source', 'in', (self.source, 'GLMev')) + for record in geomData: + self.assertIn(record.getAttribute('source'), (self.source, 'GLMev')) + + def testGetDataWithInList(self): + geomData = self._runConstraintTest('source', 'in', [self.source, 'GLMev']) + for record in geomData: + self.assertIn(record.getAttribute('source'), (self.source, 'GLMev')) + + def testGetDataWithInGenerator(self): + generator = (item for item in (self.source, 'GLMev')) + geomData = self._runConstraintTest('source', 'in', generator) + for record in geomData: + self.assertIn(record.getAttribute('source'), (self.source, 'GLMev')) + + def testGetDataWithNotInList(self): + geomData = self._runConstraintTest('source', 'not in', [self.source, 'blah']) + for record in geomData: + self.assertNotIn(record.getAttribute('source'), (self.source, 'blah')) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('source', 'junk', self.source) + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('source', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('source', 'in', []) diff --git a/awips/test/dafTests/testBufrMosGfs.py b/awips/test/dafTests/testBufrMosGfs.py new file mode 100644 index 0000000..886f4d4 --- /dev/null +++ b/awips/test/dafTests/testBufrMosGfs.py @@ -0,0 +1,22 @@ +# +# Test DAF support for bufrmosGFS data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# +# + +from awips.test.dafTests import baseBufrMosTestCase + + +class BufrMosGfsTestCase(baseBufrMosTestCase.BufrMosTestCase): + """Test DAF support for bufrmosGFS data""" + + datatype = "bufrmosGFS" + + # All tests inherited from superclass diff --git a/awips/test/dafTests/testBufrUa.py b/awips/test/dafTests/testBufrUa.py new file mode 100644 index 0000000..d2b1c6a --- /dev/null +++ b/awips/test/dafTests/testBufrUa.py @@ -0,0 +1,195 @@ +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + +# +# Test DAF support for bufrua data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 bsteffen Add getIdentifierValues tests +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 12/07/16 5981 tgurney Parameterize +# 12/15/16 5981 tgurney Add envelope test +# +# + + +class BufrUaTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for bufrua data""" + + datatype = "bufrua" + + location = params.STATION_ID + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("reportType", "2020") + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(self.location) + req.addIdentifier("reportType", "2020") + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(self.location) + req.addIdentifier("reportType", "2020") + req.setParameters("sfcPressure", "staName", "rptType", "tdMan") + + print("Testing getGeometryData()") + + geomData = DAL.getGeometryData(req) + self.assertIsNotNone(geomData) + print("Number of geometry records: " + str(len(geomData))) + print("Sample geometry data:") + for record in geomData[:self.sampleDataLimit]: + print("level=", record.getLevel(), end="") + # One dimensional parameters are reported on the 0.0UNKNOWN level. + # 2D parameters are reported on MB levels from pressure. 
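+            # The branch below prints the surface fields (sfcPressure,
+            # staName, rptType, all read as strings) for the 0.0UNKNOWN level,
+            # and the numeric man-level parameter (tdMan) for the pressure
+            # levels, each followed by its unit and the record's geometry.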
+ if record.getLevel() == "0.0UNKNOWN": + print(" sfcPressure=" + record.getString("sfcPressure") + record.getUnit("sfcPressure"), end="") + print(" staName=" + record.getString("staName"), end="") + print(" rptType=" + record.getString("rptType") + record.getUnit("rptType"), end="") + else: + print(" tdMan=" + str(record.getNumber("tdMan")) + record.getUnit("tdMan"), end="") + print(" geometry=", record.getGeometry()) + + print("getGeometryData() complete\n\n") + + def testGetGeometryDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("staName", "rptType") + req.setEnvelope(params.ENVELOPE) + data = self.runGeometryDataTest(req) + for item in data: + self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + self.runGetIdValuesTest(optionalIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + # As an identifier it is "reportType" but as a parameter it is + # "rptType"... this is weird... + req.setParameters("staName", "rptType") + return self.runGeometryDataTest(req) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('reportType', '=', '2022') + for record in geometryData: + self.assertEqual(record.getString('rptType'), '2022') + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('reportType', '=', 2022) + for record in geometryData: + self.assertEqual(record.getString('rptType'), '2022') + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('reportType', '=', 2022) + for record in geometryData: + self.assertEqual(record.getString('rptType'), '2022') + + # No float test because no float identifiers are available + + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '=', None) + for record in geometryData: + self.assertEqual(record.getType('rptType'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('reportType', '!=', 2022) + for record in geometryData: + self.assertNotEqual(record.getString('rptType'), '2022') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('rptType'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('reportType', '>', 2022) + for record in geometryData: + self.assertGreater(record.getString('rptType'), '2022') + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('reportType', '<', 2022) + for record in geometryData: + self.assertLess(record.getString('rptType'), '2022') + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('reportType', '>=', 2022) + for record in geometryData: + self.assertGreaterEqual(record.getString('rptType'), '2022') + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('reportType', '<=', 2022) + for record in geometryData: + self.assertLessEqual(record.getString('rptType'), '2022') + + def 
testGetDataWithInTuple(self): + collection = ('2022', '2032') + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('rptType'), collection) + + def testGetDataWithInList(self): + collection = ['2022', '2032'] + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('rptType'), collection) + + def testGetDataWithInGenerator(self): + collection = ('2022', '2032') + generator = (item for item in collection) + geometryData = self._runConstraintTest('reportType', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('rptType'), collection) + + def testGetDataWithNotInList(self): + collection = ('2022', '2032') + geometryData = self._runConstraintTest('reportType', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('rptType'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'junk', '2022') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('rptType', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('2022', '2032', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('rptType', 'in', collection) diff --git a/awips/test/dafTests/testCombinedTimeQuery.py b/awips/test/dafTests/testCombinedTimeQuery.py new file mode 100644 index 0000000..08222c8 --- /dev/null +++ b/awips/test/dafTests/testCombinedTimeQuery.py @@ -0,0 +1,49 @@ +# +# Test the CombinedTimedQuery module +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/24/16 5591 bsteffen Initial Creation. +# 11/08/16 5895 tgurney Change grid model +# +# +# + +from awips.dataaccess import DataAccessLayer as DAL +from awips.dataaccess import CombinedTimeQuery as CTQ + +import unittest +import os + + +class CombinedTimeQueryTestCase(unittest.TestCase): + + modelName = "RAP13" + + @classmethod + def setUp(cls): + host = os.environ.get('DAF_TEST_HOST') + if host is None: + host = 'edex-cloud.unidata.ucar.edu' + DAL.changeEDEXHost(host) + + def testSuccessfulQuery(self): + req = DAL.newDataRequest('grid') + req.setLocationNames(self.modelName) + req.setParameters('T', 'GH') + req.setLevels('300MB', '500MB', '700MB') + times = CTQ.getAvailableTimes(req) + self.assertNotEqual(len(times), 0) + + def testNonIntersectingQuery(self): + """ + Test that when a parameter is only available on one of the levels that no times are returned. 
+ """ + req = DAL.newDataRequest('grid') + req.setLocationNames(self.modelName) + req.setParameters('T', 'GH', 'LgSP1hr') + req.setLevels('300MB', '500MB', '700MB', '0.0SFC') + times = CTQ.getAvailableTimes(req) + self.assertEqual(len(times), 0) diff --git a/awips/test/dafTests/testCommonObsSpatial.py b/awips/test/dafTests/testCommonObsSpatial.py new file mode 100644 index 0000000..acb9310 --- /dev/null +++ b/awips/test/dafTests/testCommonObsSpatial.py @@ -0,0 +1,150 @@ +# +# Test DAF support for common_obs_spatial data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 05/26/16 5587 njensen Added testGetIdentifierValues() +# 06/01/16 5587 tgurney Move testIdentifiers() to +# superclass +# 06/13/16 5574 tgurney Add advanced query tests +# 06/21/16 5548 tgurney Skip tests that cause errors +# 06/30/16 5725 tgurney Add test for NOT IN +# 12/07/16 5981 tgurney Parameterize +# 01/06/17 5981 tgurney Do not check data times +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class CommonObsSpatialTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for common_obs_spatial data""" + + datatype = "common_obs_spatial" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("country", ["US", "CN"]) + self.runLocationsTest(req) + + def testGetIdentifierValues(self): + self.runGetIdValuesTest(['country']) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setParameters("name", "stationid") + self.runGeometryDataTest(req, checkDataTimes=False) + + def testRequestingTimesThrowsTimeAgnosticDataException(self): + req = DAL.newDataRequest(self.datatype) + self.runTimeAgnosticTest(req) + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters('catalogtype', 'elevation', 'state') + return self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('state', '=', 'NE') + for record in geometryData: + self.assertEqual(record.getString('state'), 'NE') + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('catalogtype', '=', 32) + for record in geometryData: + self.assertEqual(record.getNumber('catalogtype'), 32) + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('elevation', '=', 0) + for record in geometryData: + self.assertEqual(record.getNumber('elevation'), 0) + + # No float test since there are no float identifiers available. Attempting + # to filter a non-float identifier on a float value raises an exception. 
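+    # In the tests below, string identifiers (state) are read back with
+    # getString() and numeric identifiers (catalogtype, elevation) with
+    # getNumber(); a null match is detected through getType() returning
+    # 'NULL'.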
+ + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('state', '=', None) + for record in geometryData: + self.assertEqual(record.getType('state'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('state', '!=', 'NE') + for record in geometryData: + self.assertNotEqual(record.getString('state'), 'NE') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('state', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('state'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('elevation', '>', 500) + for record in geometryData: + self.assertGreater(record.getNumber('elevation'), 500) + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('elevation', '<', 100) + for record in geometryData: + self.assertLess(record.getNumber('elevation'), 100) + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('elevation', '>=', 500) + for record in geometryData: + self.assertGreaterEqual(record.getNumber('elevation'), 500) + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('elevation', '<=', 100) + for record in geometryData: + self.assertLessEqual(record.getNumber('elevation'), 100) + + def testGetDataWithInTuple(self): + collection = ('NE', 'TX') + geometryData = self._runConstraintTest('state', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithInList(self): + collection = ['NE', 'TX'] + geometryData = self._runConstraintTest('state', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithInGenerator(self): + collection = ('NE', 'TX') + generator = (item for item in collection) + geometryData = self._runConstraintTest('state', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithNotInList(self): + collection = ('NE', 'TX') + geometryData = self._runConstraintTest('state', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('state'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('state', 'junk', 'NE') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('state', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('state', 'in', []) diff --git a/awips/test/dafTests/testDataTime.py b/awips/test/dafTests/testDataTime.py new file mode 100644 index 0000000..d543703 --- /dev/null +++ b/awips/test/dafTests/testDataTime.py @@ -0,0 +1,114 @@ +# +# Unit tests for Python implementation of RequestConstraint +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/02/16 2416 tgurney Initial creation +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime + +import unittest + + +class DataTimeTestCase(unittest.TestCase): + + def testFromStrRefTimeOnly(self): + s = '2016-08-02 01:23:45' + expected = s + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def 
testFromStrRefTimeOnlyZeroMillis(self): + s = '2016-08-02 01:23:45.0' + # result of str() will always drop trailing .0 milliseconds + expected = '2016-08-02 01:23:45' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrRefTimeOnlyWithMillis(self): + s = '2016-08-02 01:23:45.1' + expected = '2016-08-02 01:23:45.001000' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithFcstTimeHr(self): + s = '2016-08-02 01:23:45 (17)' + expected = s + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithFcstTimeHrZeroMillis(self): + s = '2016-08-02 01:23:45.0 (17)' + expected = '2016-08-02 01:23:45 (17)' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithFcstTimeHrAndMillis(self): + s = '2016-08-02 01:23:45.1 (17)' + expected = '2016-08-02 01:23:45.001000 (17)' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithFcstTimeHrMin(self): + s = '2016-08-02 01:23:45 (17:34)' + expected = s + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithFcstTimeHrMinZeroMillis(self): + s = '2016-08-02 01:23:45.0 (17:34)' + expected = '2016-08-02 01:23:45 (17:34)' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithPeriod(self): + s = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]' + expected = s + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithPeriodZeroMillis(self): + s = '2016-08-02 01:23:45.0[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]' + expected = '2016-08-02 01:23:45[2016-08-02 02:34:45--2016-08-02 03:45:56]' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testFromStrWithEverything(self): + s = '2016-08-02 01:23:45.0_(17:34)[2016-08-02 02:34:45.0--2016-08-02 03:45:56.0]' + expected = '2016-08-02 01:23:45 (17:34)[2016-08-02 02:34:45--2016-08-02 03:45:56]' + self.assertEqual(expected, str(DataTime(s))) + s = s.replace(' ', '_') + self.assertEqual(expected, str(DataTime(s))) + + def testDataTimeReconstructItselfFromString(self): + times = [ + '2016-08-02 01:23:45', + '2016-08-02 01:23:45.0', + '2016-08-02 01:23:45.1', + '2016-08-02 01:23:45.123000', + '2016-08-02 01:23:45 (17)', + '2016-08-02 01:23:45.0 (17)', + '2016-08-02 01:23:45.1 (17)', + '2016-08-02 01:23:45 (17:34)', + '2016-08-02 01:23:45.0 (17:34)', + '2016-08-02 01:23:45.1 (17:34)', + '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]', + '2016-08-02 01:23:45.0[2016-08-02_02:34:45.123--2016-08-02_03:45:56.456]', + '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]' + ] + for time in times: + self.assertEqual(DataTime(time), DataTime(str(DataTime(time))), time) diff --git a/awips/test/dafTests/testGfe.py b/awips/test/dafTests/testGfe.py new file mode 100644 index 0000000..6750db8 --- /dev/null +++ b/awips/test/dafTests/testGfe.py @@ -0,0 +1,194 @@ +# +# Test DAF support for GFE data +# +# SOFTWARE HISTORY +# +# 
Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 05/23/16 5637 bsteffen Test vectors +# 05/31/16 5587 tgurney Add getIdentifierValues tests +# 06/01/16 5587 tgurney Update testGetIdentifierValues +# 06/17/16 5574 mapeters Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 11/07/16 5991 bsteffen Improve vector tests +# 12/07/16 5981 tgurney Parameterize +# 12/15/16 6040 tgurney Add testGetGridDataWithDbType +# 12/20/16 5981 tgurney Add envelope test +# 10/19/17 6491 tgurney Add test for dbtype identifier +# 11/10/17 6491 tgurney Replace modelName with +# parmId.dbId.modelName +# +# + +from __future__ import print_function +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.dataaccess import DataAccessLayer as DAL +from shapely.geometry import box, Point + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params +import unittest + + +class GfeTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for GFE data""" + + datatype = 'gfe' + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('parmId.dbId.modelName', 'Fcst') + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('parmId.dbId.modelName', 'Fcst') + req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) + self.runTimesTest(req) + + def testGetGridData(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('parmId.dbId.modelName', 'Fcst') + req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) + req.setParameters('T') + gridDatas = self.runGridDataTest(req) + for gridData in gridDatas: + self.assertEqual(gridData.getAttribute('parmId.dbId.dbType'), '') + + def testGetGridDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('parmId.dbId.modelName', 'Fcst') + req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) + req.setParameters('T') + req.setEnvelope(params.ENVELOPE) + gridData = self.runGridDataTest(req) + if not gridData: + raise unittest.SkipTest('no data available') + lons, lats = gridData[0].getLatLonCoords() + lons = lons.reshape(-1) + lats = lats.reshape(-1) + + # Ensure all points are within one degree of the original box + # to allow slight margin of error for reprojection distortion. 
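+        # box() builds the widened lat/lon rectangle from the envelope's
+        # (minx, miny, maxx, maxy) bounds, expanded by one degree on each
+        # side; every returned grid point must then fall inside it.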
+ testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1, + params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1) + + for i in range(len(lons)): + self.assertTrue(testEnv.contains(Point(lons[i], lats[i]))) + + def testGetVectorGridData(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('parmId.dbId.modelName', 'Fcst') + req.addIdentifier('parmId.dbId.siteId', params.SITE_ID) + req.setParameters('Wind') + times = DAL.getAvailableTimes(req) + if not times: + raise unittest.SkipTest('No Wind Data available for testing') + gridData = DAL.getGridData(req, [times[0]]) + rawWind = None + rawDir = None + for grid in gridData: + if grid.getParameter() == 'Wind': + self.assertEqual(grid.getUnit(), 'kts') + rawWind = grid.getRawData() + elif grid.getParameter() == 'WindDirection': + self.assertEqual(grid.getUnit(), 'deg') + rawDir = grid.getRawData() + self.assertIsNotNone(rawWind, 'Wind Magnitude grid is not present') + self.assertIsNotNone(rawDir, 'Wind Direction grid is not present') + # rawWind and rawDir are numpy.ndarrays so comparison will result in boolean ndarrays. + self.assertTrue((rawWind >= 0).all(), 'Wind Speed should not contain negative values') + self.assertTrue((rawDir >= 0).all(), 'Wind Direction should not contain negative values') + self.assertTrue((rawDir <= 360).all(), 'Wind Direction should be less than or equal to 360') + self.assertFalse((rawDir == rawWind).all(), 'Wind Direction should be different from Wind Speed') + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + requiredIds = set(DAL.getRequiredIdentifiers(req)) + self.runGetIdValuesTest(optionalIds | requiredIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setLocationNames(params.SITE_ID) + req.setParameters('T') + return self.runGridDataTest(req) + + def testGetDataWithModelNameEqualsString(self): + gridData = self._runConstraintTest('parmId.dbId.modelName', '=', 'Fcst') + for record in gridData: + self.assertEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst') + + def testGetDataWithDbTypeEqualsString(self): + gridData = self._runConstraintTest('parmId.dbId.dbType', '=', 'Prac') + for record in gridData: + self.assertEqual(record.getAttribute('parmId.dbId.dbType'), 'Prac') + + # No numeric tests since no numeric identifiers are available. 
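+    # For grid records the identifier value is read back with getAttribute(),
+    # so a None constraint is checked with assertIsNone()/assertIsNotNone()
+    # instead of the getType() == 'NULL' convention used by the geometry
+    # tests.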
+ + def testGetDataWithEqualsNone(self): + gridData = self._runConstraintTest('parmId.dbId.modelName', '=', None) + for record in gridData: + self.assertIsNone(record.getAttribute('parmId.dbId.modelName')) + + def testGetDataWithNotEquals(self): + gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', 'Fcst') + for record in gridData: + self.assertNotEqual(record.getAttribute('parmId.dbId.modelName'), 'Fcst') + + def testGetDataWithNotEqualsNone(self): + gridData = self._runConstraintTest('parmId.dbId.modelName', '!=', None) + for record in gridData: + self.assertIsNotNone(record.getAttribute('parmId.dbId.modelName')) + + def testGetDataWithInTuple(self): + collection = ('Fcst', 'SAT') + gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection) + for record in gridData: + self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) + + def testGetDataWithInList(self): + collection = ['Fcst', 'SAT'] + gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', collection) + for record in gridData: + self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) + + def testGetDataWithInGenerator(self): + collection = ('Fcst', 'SAT') + generator = (item for item in collection) + gridData = self._runConstraintTest('parmId.dbId.modelName', 'in', generator) + for record in gridData: + self.assertIn(record.getAttribute('parmId.dbId.modelName'), collection) + + def testGetDataWithNotInList(self): + collection = ('Fcst', 'SAT') + gridData = self._runConstraintTest('parmId.dbId.modelName', 'not in', collection) + for record in gridData: + self.assertNotIn(record.getAttribute('parmId.dbId.modelName'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('parmId.dbId.modelName', 'junk', 'Fcst') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('parmId.dbId.modelName', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('parmId.dbId.modelName', 'in', []) diff --git a/awips/test/dafTests/testGrid.py b/awips/test/dafTests/testGrid.py new file mode 100644 index 0000000..6b86a93 --- /dev/null +++ b/awips/test/dafTests/testGrid.py @@ -0,0 +1,261 @@ +# +# Test DAF support for grid data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 tgurney Typo in id values test +# 07/06/16 5728 mapeters Add advanced query tests +# 08/03/16 5728 mapeters Add additional identifiers to testGetDataWith* +# tests to shorten run time and prevent EOFError +# 10/13/16 5942 bsteffen Test envelopes +# 11/08/16 5985 tgurney Skip certain tests when no +# data is available +# 12/07/16 5981 tgurney Parameterize +# 01/06/17 5981 tgurney Skip envelope test when no +# data is available +# + +from __future__ import print_function +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException +from shapely.geometry import box, Point + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params +import unittest + + +class GridTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for grid data""" + + datatype = 'grid' + + model = 'GFS160' + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + self.runLocationsTest(req) + + def testGetAvailableLevels(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + self.runLevelsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + req.setLevels('2FHAG') + self.runTimesTest(req) + + def testGetGridData(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + req.setLevels('2FHAG') + req.setParameters('T') + self.runGridDataTest(req) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', 'ENSEMBLE') + req.setLevels('2FHAG') + req.setParameters('T') + idValues = DAL.getIdentifierValues(req, 'info.ensembleId') + self.assertTrue(hasattr(idValues, '__iter__')) + if idValues: + self.assertIn('ctl1', idValues) + self.assertIn('p1', idValues) + self.assertIn('n1', idValues) + else: + raise unittest.SkipTest("no data available") + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def testGetDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('info.datasetId', self.model) + req.setLevels('2FHAG') + req.setParameters('T') + req.setEnvelope(params.ENVELOPE) + gridData = self.runGridDataTest(req) + if len(gridData) == 0: + raise unittest.SkipTest("No data available") + lons, lats = gridData[0].getLatLonCoords() + lons = lons.reshape(-1) + lats = lats.reshape(-1) + + # Ensure all points are within one degree of the original box + # to allow slight margin of error for reprojection distortion. 
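+        # getLatLonCoords() returns 2D longitude/latitude arrays; they were
+        # flattened with reshape(-1) above so each point can be checked
+        # individually against the widened envelope constructed below.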
+ testEnv = box(params.ENVELOPE.bounds[0] - 1, params.ENVELOPE.bounds[1] - 1, + params.ENVELOPE.bounds[2] + 1, params.ENVELOPE.bounds[3] + 1) + + for i in range(len(lons)): + self.assertTrue(testEnv.contains(Point(lons[i], lats[i]))) + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.addIdentifier('info.datasetId', self.model) + req.addIdentifier('info.level.masterLevel.name', 'FHAG') + req.addIdentifier('info.level.leveltwovalue', 3000.0) + req.setParameters('T') + return self.runGridDataTest(req) + + def testGetDataWithEqualsString(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '=', '2000.0') + for record in gridData: + self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithEqualsInt(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000) + for record in gridData: + self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000) + + def testGetDataWithEqualsLong(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000) + for record in gridData: + self.assertEqual(record.getAttribute('info.level.levelonevalue'), 2000) + + def testGetDataWithEqualsFloat(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '=', 2000.0) + for record in gridData: + self.assertEqual(round(record.getAttribute('info.level.levelonevalue'), 1), 2000.0) + + def testGetDataWithEqualsNone(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '=', None) + for record in gridData: + self.assertIsNone(record.getAttribute('info.level.levelonevalue')) + + def testGetDataWithNotEquals(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '!=', 2000.0) + for record in gridData: + self.assertNotEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithNotEqualsNone(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '!=', None) + for record in gridData: + self.assertIsNotNone(record.getAttribute('info.level.levelonevalue')) + + def testGetDataWithGreaterThan(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '>', 2000.0) + for record in gridData: + self.assertGreater(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithLessThan(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '<', 2000.0) + for record in gridData: + self.assertLess(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithGreaterThanEquals(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '>=', 2000.0) + for record in gridData: + self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithLessThanEquals(self): + gridData = self._runConstraintTest('info.level.levelonevalue', '<=', 2000.0) + for record in gridData: + self.assertLessEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) + + def testGetDataWithInList(self): + collection = [2000.0, 1000.0] + gridData = self._runConstraintTest('info.level.levelonevalue', 'in', collection) + for record in gridData: + self.assertIn(record.getAttribute('info.level.levelonevalue'), collection) + + def testGetDataWithNotInList(self): + collection = [2000.0, 1000.0] + gridData = self._runConstraintTest('info.level.levelonevalue', 'not in', collection) + for record in gridData: + 
self.assertNotIn(record.getAttribute('info.level.levelonevalue'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('info.level.levelonevalue', 'junk', '2000.0') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('info.level.levelonevalue', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('info.level.levelonevalue', 'in', []) + + def testGetDataWithLevelOneAndLevelTwoConstraints(self): + req = DAL.newDataRequest(self.datatype) + levelOneConstraint = RequestConstraint.new('>=', 2000.0) + req.addIdentifier('info.level.levelonevalue', levelOneConstraint) + levelTwoConstraint = RequestConstraint.new('in', (4000.0, 5000.0)) + req.addIdentifier('info.level.leveltwovalue', levelTwoConstraint) + req.addIdentifier('info.datasetId', self.model) + req.addIdentifier('info.level.masterLevel.name', 'FHAG') + req.setParameters('T') + gridData = self.runGridDataTest(req) + for record in gridData: + self.assertGreaterEqual(record.getAttribute('info.level.levelonevalue'), 2000.0) + self.assertIn(record.getAttribute('info.level.leveltwovalue'), (4000.0, 5000.0)) + + def testGetDataWithMasterLevelNameInConstraint(self): + req = DAL.newDataRequest(self.datatype) + masterLevelConstraint = RequestConstraint.new('in', ('FHAG', 'K')) + req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) + req.addIdentifier('info.level.levelonevalue', 2000.0) + req.addIdentifier('info.level.leveltwovalue', 3000.0) + req.addIdentifier('info.datasetId', 'GFS160') + req.setParameters('T') + gridData = self.runGridDataTest(req) + for record in gridData: + self.assertIn(record.getAttribute('info.level.masterLevel.name'), ('FHAG', 'K')) + + def testGetDataWithDatasetIdInConstraint(self): + req = DAL.newDataRequest(self.datatype) + # gfs160 is alias for GFS160 in this namespace + req.addIdentifier('namespace', 'gfeParamInfo') + datasetIdConstraint = RequestConstraint.new('in', ('gfs160', 'HRRR')) + req.addIdentifier('info.datasetId', datasetIdConstraint) + req.addIdentifier('info.level.masterLevel.name', 'FHAG') + req.addIdentifier('info.level.levelonevalue', 2000.0) + req.addIdentifier('info.level.leveltwovalue', 3000.0) + req.setParameters('T') + gridData = self.runGridDataTest(req, testSameShape=False) + for record in gridData: + self.assertIn(record.getAttribute('info.datasetId'), ('gfs160', 'HRRR')) + + def testGetDataWithMasterLevelNameLessThanEqualsConstraint(self): + req = DAL.newDataRequest(self.datatype) + masterLevelConstraint = RequestConstraint.new('<=', 'K') + req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) + req.addIdentifier('info.level.levelonevalue', 2000.0) + req.addIdentifier('info.level.leveltwovalue', 3000.0) + req.addIdentifier('info.datasetId', 'GFS160') + req.setParameters('T') + gridData = self.runGridDataTest(req) + for record in gridData: + self.assertLessEqual(record.getAttribute('info.level.masterLevel.name'), 'K') + + def testGetDataWithComplexConstraintAndNamespaceThrowsException(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('namespace', 'grib') + masterLevelConstraint = RequestConstraint.new('<=', 'K') + req.addIdentifier('info.level.masterLevel.name', masterLevelConstraint) + req.addIdentifier('info.datasetId', 'GFS160') + req.setParameters('T') + with 
self.assertRaises(ThriftRequestException) as cm: + self.runGridDataTest(req) + self.assertIn('IncompatibleRequestException', str(cm.exception)) + self.assertIn('info.level.masterLevel.name', str(cm.exception)) diff --git a/awips/test/dafTests/testMaps.py b/awips/test/dafTests/testMaps.py new file mode 100644 index 0000000..2afcecc --- /dev/null +++ b/awips/test/dafTests/testMaps.py @@ -0,0 +1,193 @@ +# +# Test DAF support for maps data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 04/26/16 5587 tgurney Add identifier values tests +# 06/13/16 5574 mapeters Add advanced query tests +# 06/21/16 5548 tgurney Skip tests that cause errors +# 06/30/16 5725 tgurney Add test for NOT IN +# 01/06/17 5981 tgurney Do not check data times +# +# + +from __future__ import print_function +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException + +from awips.test.dafTests import baseDafTestCase + + +class MapsTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for maps data""" + + datatype = 'maps' + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.county') + req.addIdentifier('geomField', 'the_geom') + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.county') + req.addIdentifier('geomField', 'the_geom') + req.addIdentifier('locationField', 'cwa') + self.runLocationsTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.county') + req.addIdentifier('geomField', 'the_geom') + req.addIdentifier('inLocation', 'true') + req.addIdentifier('locationField', 'cwa') + req.setLocationNames('OAX') + req.addIdentifier('cwa', 'OAX') + req.setParameters('countyname', 'state', 'fips') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testRequestingTimesThrowsTimeAgnosticDataException(self): + req = DAL.newDataRequest(self.datatype) + self.runTimeAgnosticTest(req) + + def testGetTableIdentifierValues(self): + self.runGetIdValuesTest(['table']) + + def testGetGeomFieldIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.county') + idValues = DAL.getIdentifierValues(req, 'geomField') + for idValue in idValues: + self.assertTrue(idValue.startswith('the_geom')) + + def testGetGeomFieldIdValuesWithoutTableThrowsException(self): + with self.assertRaises(ThriftRequestException): + self.runGetIdValuesTest(['geomField']) + + def testGetColumnIdValuesWithTable(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.county') + req.addIdentifier('geomField', 'the_geom') + idValues = DAL.getIdentifierValues(req, 'state') + self.assertIn('NE', idValues) + + def testGetColumnIdValuesWithoutTableThrowsException(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('geomField', 'the_geom') + with self.assertRaises(ThriftRequestException): + DAL.getIdentifierValues(req, 'state') + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + 
self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier('table', 'mapdata.ffmp_basins') + req.addIdentifier('geomField', 'the_geom') + req.addIdentifier('cwa', 'OAX') + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters('state', 'reservoir', 'area_sq_mi') + return self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('state', '=', 'NE') + for record in geometryData: + self.assertEqual(record.getString('state'), 'NE') + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('reservoir', '=', 1) + for record in geometryData: + self.assertEqual(record.getNumber('reservoir'), 1) + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('reservoir', '=', 1) + for record in geometryData: + self.assertEqual(record.getNumber('reservoir'), 1) + + def testGetDataWithEqualsFloat(self): + geometryData = self._runConstraintTest('area_sq_mi', '=', 5.00) + for record in geometryData: + self.assertEqual(round(record.getNumber('area_sq_mi'), 2), 5.00) + + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('state', '=', None) + for record in geometryData: + self.assertEqual(record.getType('state'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('state', '!=', 'NE') + for record in geometryData: + self.assertNotEqual(record.getString('state'), 'NE') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('state', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('state'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('area_sq_mi', '>', 5) + for record in geometryData: + self.assertGreater(record.getNumber('area_sq_mi'), 5) + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('area_sq_mi', '<', 5) + for record in geometryData: + self.assertLess(record.getNumber('area_sq_mi'), 5) + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('area_sq_mi', '>=', 5) + for record in geometryData: + self.assertGreaterEqual(record.getNumber('area_sq_mi'), 5) + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('area_sq_mi', '<=', 5) + for record in geometryData: + self.assertLessEqual(record.getNumber('area_sq_mi'), 5) + + def testGetDataWithInTuple(self): + collection = ('NE', 'TX') + geometryData = self._runConstraintTest('state', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithInList(self): + collection = ['NE', 'TX'] + geometryData = self._runConstraintTest('state', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithInGenerator(self): + collection = ('NE', 'TX') + generator = (item for item in collection) + geometryData = self._runConstraintTest('state', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('state'), collection) + + def testGetDataWithNotInList(self): + collection = ['IA', 'TX'] + geometryData = self._runConstraintTest('state', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('state'), collection) + + def 
testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('state', 'junk', 'NE') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('state', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('state', 'in', []) diff --git a/awips/test/dafTests/testModelSounding.py b/awips/test/dafTests/testModelSounding.py new file mode 100644 index 0000000..16acfa1 --- /dev/null +++ b/awips/test/dafTests/testModelSounding.py @@ -0,0 +1,193 @@ +# +# Test DAF support for modelsounding data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. +# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 bsteffen Add getIdentifierValues tests +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 11/10/16 5985 tgurney Mark expected failures prior +# to 17.3.1 +# 12/07/16 5981 tgurney Parameterize +# 12/19/16 5981 tgurney Remove pre-17.3 expected fails +# 12/20/16 5981 tgurney Add envelope test +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class ModelSoundingTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for modelsounding data""" + + datatype = "modelsounding" + + reporttype = "ETA" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("reportType", self.reporttype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("reportType", self.reporttype) + req.setLocationNames(params.OBS_STATION) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("reportType", self.reporttype) + req.setLocationNames(params.OBS_STATION) + req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2") + print("Testing getGeometryData()") + geomData = DAL.getGeometryData(req) + print("Number of geometry records: " + str(len(geomData))) + print("Sample geometry data:") + for record in geomData[:self.sampleDataLimit]: + print("level=" + record.getLevel(), end="") + # One dimensional parameters are reported on the 0.0UNKNOWN level. + # 2D parameters are reported on MB levels from pressure. 
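+            # The branch below prints the surface fields (sfcPress, temp2, q2)
+            # for the 0.0UNKNOWN level and the pressure-level fields
+            # (pressure, temperature, specHum) otherwise, each with its unit,
+            # followed by the record's geometry.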
+ if record.getLevel() == "0.0UNKNOWN": + print(" sfcPress=" + record.getString("sfcPress") + + record.getUnit("sfcPress"), end="") + print(" temp2=" + record.getString("temp2") + + record.getUnit("temp2"), end="") + print(" q2=" + record.getString("q2") + + record.getUnit("q2"), end="") + else: + print(" pressure=" + record.getString("pressure") + + record.getUnit("pressure"), end="") + print(" temperature=" + record.getString("temperature") + + record.getUnit("temperature"), end="") + print(" specHum=" + record.getString("specHum") + + record.getUnit("specHum"), end="") + print(" geometry=" + str(record.getGeometry())) + print("getGeometryData() complete\n\n") + + def testGetGeometryDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("reportType", self.reporttype) + req.setEnvelope(params.ENVELOPE) + req.setParameters("temperature", "pressure", "specHum", "sfcPress", "temp2", "q2") + print("Testing getGeometryData()") + data = DAL.getGeometryData(req) + for item in data: + self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + self.runGetIdValuesTest(optionalIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.setParameters('dataURI') + req.setLocationNames(params.OBS_STATION, 'KORD', 'KOFK', 'KLNK') + req.addIdentifier(key, constraint) + return self.runGeometryDataTest(req) + + # We can filter on reportType but it is not possible to retrieve the value + # of reportType directly. We can look inside the dataURI instead. + # + # For cases like '<=' and '>' the best we can do is send the request and + # see if it throws back an exception. + # + # Can also eyeball the number of returned records. + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('reportType', '=', 'ETA') + for record in geometryData: + self.assertIn('/ETA/', record.getString('dataURI')) + + # No numeric tests since no numeric identifiers are available. 
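+    # Since reportType cannot be requested as a parameter, the None and the
+    # relational (>, <, >=, <=) tests below only verify that the request
+    # completes, while the equality and IN/NOT IN tests confirm the filter by
+    # inspecting the returned dataURI.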
+ + def testGetDataWithEqualsNone(self): + self._runConstraintTest('reportType', '=', None) + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('reportType', '!=', 'ETA') + for record in geometryData: + self.assertNotIn('/ETA/', record.getString('dataURI')) + + def testGetDataWithNotEqualsNone(self): + self._runConstraintTest('reportType', '!=', None) + + def testGetDataWithGreaterThan(self): + self._runConstraintTest('reportType', '>', 'ETA') + + def testGetDataWithLessThan(self): + self._runConstraintTest('reportType', '<', 'ETA') + + def testGetDataWithGreaterThanEquals(self): + self._runConstraintTest('reportType', '>=', 'ETA') + + def testGetDataWithLessThanEquals(self): + self._runConstraintTest('reportType', '<=', 'ETA') + + def testGetDataWithInTuple(self): + collection = ('ETA', 'GFS') + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + dataURI = record.getString('dataURI') + self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) + + def testGetDataWithInList(self): + collection = ['ETA', 'GFS'] + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + dataURI = record.getString('dataURI') + self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) + + def testGetDataWithInGenerator(self): + collection = ('ETA', 'GFS') + generator = (item for item in collection) + geometryData = self._runConstraintTest('reportType', 'in', generator) + for record in geometryData: + dataURI = record.getString('dataURI') + self.assertTrue('/ETA/' in dataURI or '/GFS/' in dataURI) + + def testGetDataWithNotInList(self): + collection = ['ETA', 'GFS'] + geometryData = self._runConstraintTest('reportType', 'not in', collection) + for record in geometryData: + dataURI = record.getString('dataURI') + self.assertTrue('/ETA/' not in dataURI and '/GFS/' not in dataURI) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'junk', 'ETA') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('ETA', 'GFS', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', 'in', collection) diff --git a/awips/test/dafTests/testObs.py b/awips/test/dafTests/testObs.py new file mode 100644 index 0000000..b0d27d2 --- /dev/null +++ b/awips/test/dafTests/testObs.py @@ -0,0 +1,160 @@ +# +# Test DAF support for obs data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 bsteffen Add getIdentifierValues tests +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 12/07/16 5981 tgurney Parameterize +# 12/20/16 5981 tgurney Add envelope test +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class ObsTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for obs data""" + + datatype = "obs" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(params.OBS_STATION) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames(params.OBS_STATION) + req.setParameters("temperature", "seaLevelPress", "dewpoint") + self.runGeometryDataTest(req) + + def testGetGeometryDataWithEnvelope(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setParameters("temperature", "seaLevelPress", "dewpoint") + data = self.runGeometryDataTest(req) + for item in data: + self.assertTrue(params.ENVELOPE.contains(item.getGeometry())) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + self.runGetIdValuesTest(optionalIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.setParameters("temperature", "reportType") + req.setLocationNames(params.OBS_STATION) + req.addIdentifier(key, constraint) + return self.runGeometryDataTest(req) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('reportType', '=', 'METAR') + for record in geometryData: + self.assertEqual(record.getString('reportType'), 'METAR') + + # No numeric tests since no numeric identifiers are available. 
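# A hedged sketch (not patch content) of how the constraint keys used in tests
# like the ones above can be discovered at runtime: list the optional identifiers
# for the datatype and, for each, the values an '=', 'in', etc. constraint could
# match. Assumes a reachable EDEX server and that DataAccessLayer exposes
# getIdentifierValues(request, key), which the getIdentifierValues tests above
# exercise indirectly through runGetIdValuesTest.
from awips.dataaccess import DataAccessLayer as DAL

req = DAL.newDataRequest("obs")
for key in DAL.getOptionalIdentifiers(req):
    values = DAL.getIdentifierValues(req, key)
    print(key, list(values)[:5])   # show a few sample values per identifier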
+ + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '=', None) + for record in geometryData: + self.assertEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('reportType', '!=', 'METAR') + for record in geometryData: + self.assertNotEqual(record.getString('reportType'), 'METAR') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('reportType', '>', 'METAR') + for record in geometryData: + self.assertGreater(record.getString('reportType'), 'METAR') + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('reportType', '<', 'METAR') + for record in geometryData: + self.assertLess(record.getString('reportType'), 'METAR') + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('reportType', '>=', 'METAR') + for record in geometryData: + self.assertGreaterEqual(record.getString('reportType'), 'METAR') + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('reportType', '<=', 'METAR') + for record in geometryData: + self.assertLessEqual(record.getString('reportType'), 'METAR') + + def testGetDataWithInTuple(self): + collection = ('METAR', 'SPECI') + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInList(self): + collection = ['METAR', 'SPECI'] + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInGenerator(self): + collection = ('METAR', 'SPECI') + generator = (item for item in collection) + geometryData = self._runConstraintTest('reportType', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithNotInList(self): + collection = ['METAR', 'SPECI'] + geometryData = self._runConstraintTest('reportType', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('reportType'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'junk', 'METAR') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('METAR', 'SPECI', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', 'in', collection) diff --git a/awips/test/dafTests/testProfiler.py b/awips/test/dafTests/testProfiler.py new file mode 100644 index 0000000..c412c1c --- /dev/null +++ b/awips/test/dafTests/testProfiler.py @@ -0,0 +1,59 @@ +# +# Test DAF support for profiler data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL + +from awips.test.dafTests import baseDafTestCase + + +class ProfilerTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for profiler data""" + + datatype = "profiler" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("temperature", "pressure", "uComponent", "vComponent") + + print("Testing getGeometryData()") + + geomData = DAL.getGeometryData(req) + self.assertIsNotNone(geomData) + print("Number of geometry records: " + str(len(geomData))) + print("Sample geometry data:") + for record in geomData[:self.sampleDataLimit]: + print("level:", record.getLevel(), end="") + # One dimensional parameters are reported on the 0.0UNKNOWN level. + # 2D parameters are reported on MB levels from pressure. + if record.getLevel() == "0.0UNKNOWN": + print(" temperature=" + record.getString("temperature") + record.getUnit("temperature"), end="") + print(" pressure=" + record.getString("pressure") + record.getUnit("pressure"), end="") + else: + print(" uComponent=" + record.getString("uComponent") + record.getUnit("uComponent"), end="") + print(" vComponent=" + record.getString("vComponent") + record.getUnit("vComponent"), end="") + print(" geometry:", record.getGeometry()) + + print("getGeometryData() complete\n\n") diff --git a/awips/test/dafTests/testRadarGraphics.py b/awips/test/dafTests/testRadarGraphics.py new file mode 100644 index 0000000..b230bfe --- /dev/null +++ b/awips/test/dafTests/testRadarGraphics.py @@ -0,0 +1,74 @@ +# +# Test DAF support for radar graphics data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/25/16 2671 tgurney Initial creation. +# 08/31/16 2671 tgurney Add mesocyclone +# 09/08/16 2671 tgurney Add storm track +# 09/27/16 2671 tgurney Add hail index +# 09/30/16 2671 tgurney Add TVS +# 12/07/16 5981 tgurney Parameterize +# 12/19/16 5981 tgurney Do not check data times on +# returned data +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.dataaccess import DataAccessLayer as DAL + +from awips.test.dafTests import baseRadarTestCase +from awips.test.dafTests import params + + +class RadarGraphicsTestCase(baseRadarTestCase.BaseRadarTestCase): + """Test DAF support for radar data""" + + datatype = 'radar' + + def runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters('166') + # TODO: Cannot check datatimes on the result because the times returned + # by getAvailableTimes have level = -1.0, while the time on the actual + # data has the correct level set (>= 0.0). 
+ return self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataMeltingLayer(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters('166') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataMesocyclone(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters('141') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataStormTrack(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters('58') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataHailIndex(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters('59') + self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetGeometryDataTVS(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters('61') + self.runGeometryDataTest(req, checkDataTimes=False) diff --git a/awips/test/dafTests/testRadarGrid.py b/awips/test/dafTests/testRadarGrid.py new file mode 100644 index 0000000..abcbf85 --- /dev/null +++ b/awips/test/dafTests/testRadarGrid.py @@ -0,0 +1,40 @@ +# +# Test DAF support for radar grid data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/25/16 2671 tgurney Initial creation +# +# + +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseRadarTestCase +from awips.test.dafTests import params + + +class RadarTestCase(baseRadarTestCase.BaseRadarTestCase): + """Test DAF support for radar data""" + + datatype = 'radar' + + parameterList = ['94'] + + def runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters(*self.parameterList) + # Don't test shapes since they may differ. + return self.runGridDataTest(req, testSameShape=False) + + def testGetGridData(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + req.setLocationNames(self.radarLoc) + req.setParameters(*self.parameterList) + # Don't test shapes since they may differ. + self.runGridDataTest(req, testSameShape=False) diff --git a/awips/test/dafTests/testRadarSpatial.py b/awips/test/dafTests/testRadarSpatial.py new file mode 100644 index 0000000..64ce7f2 --- /dev/null +++ b/awips/test/dafTests/testRadarSpatial.py @@ -0,0 +1,153 @@ +# +# Test DAF support for radar_spatial data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 05/26/16 5587 njensen Added testGetIdentifierValues() +# 06/01/16 5587 tgurney Move testIdentifiers() to +# superclass +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 12/07/16 5981 tgurney Parameterize +# 01/06/17 5981 tgurney Do not check data times +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint +from awips.test.dafTests import baseDafTestCase +from awips.test.dafTests import params + + +class RadarSpatialTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for radar_spatial data""" + + datatype = "radar_spatial" + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + req.setEnvelope(params.ENVELOPE) + self.runLocationsTest(req) + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetIdentifierValues(self): + self.runGetIdValuesTest(['wfo_id']) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames("TORD", "TMDW") + req.setParameters("wfo_id", "name", "elevmeter") + self.runGeometryDataTest(req, checkDataTimes=False) + + def testRequestingTimesThrowsTimeAgnosticDataException(self): + req = DAL.newDataRequest(self.datatype) + self.runTimeAgnosticTest(req) + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters('elevmeter', 'eqp_elv', 'wfo_id', 'immutablex') + return self.runGeometryDataTest(req, checkDataTimes=False) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('wfo_id', '=', params.SITE_ID) + for record in geometryData: + self.assertEqual(record.getString('wfo_id'), params.SITE_ID) + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('immutablex', '=', 57) + for record in geometryData: + self.assertEqual(record.getNumber('immutablex'), 57) + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('immutablex', '=', 57) + for record in geometryData: + self.assertEqual(record.getNumber('immutablex'), 57) + + def testGetDataWithEqualsFloat(self): + geometryData = self._runConstraintTest('immutablex', '=', 57.0) + for record in geometryData: + self.assertEqual(round(record.getNumber('immutablex'), 1), 57.0) + + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('wfo_id', '=', None) + for record in geometryData: + self.assertEqual(record.getType('wfo_id'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('wfo_id', '!=', params.SITE_ID) + for record in geometryData: + self.assertNotEquals(record.getString('wfo_id'), params.SITE_ID) + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('wfo_id', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('wfo_id'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('elevmeter', '>', 1000) + for record in geometryData: + self.assertGreater(record.getNumber('elevmeter'), 1000) + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('elevmeter', '<', 1000) + for record in geometryData: + 
self.assertLess(record.getNumber('elevmeter'), 1000) + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('eqp_elv', '>=', 1295) + for record in geometryData: + self.assertGreaterEqual(record.getNumber('eqp_elv'), 1295) + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('eqp_elv', '<=', 138) + for record in geometryData: + self.assertLessEqual(record.getNumber('eqp_elv'), 138) + + def testGetDataWithInTuple(self): + collection = (params.SITE_ID, 'GID') + geometryData = self._runConstraintTest('wfo_id', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('wfo_id'), collection) + + def testGetDataWithInList(self): + collection = [params.SITE_ID, 'GID'] + geometryData = self._runConstraintTest('wfo_id', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('wfo_id'), collection) + + def testGetDataWithInGenerator(self): + collection = (params.SITE_ID, 'GID') + generator = (item for item in collection) + geometryData = self._runConstraintTest('wfo_id', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('wfo_id'), collection) + + def testGetDataWithNotInList(self): + collection = [params.SITE_ID, 'GID'] + geometryData = self._runConstraintTest('wfo_id', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('wfo_id'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('wfo_id', 'junk', params.SITE_ID) + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('wfo_id', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('wfo_id', 'in', []) diff --git a/awips/test/dafTests/testRequestConstraint.py b/awips/test/dafTests/testRequestConstraint.py new file mode 100644 index 0000000..3a28cc2 --- /dev/null +++ b/awips/test/dafTests/testRequestConstraint.py @@ -0,0 +1,158 @@ +# +# Unit tests for Python implementation of RequestConstraint +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/22/16 2416 tgurney Initial creation +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +import unittest + + +class RequestConstraintTestCase(unittest.TestCase): + + def _newRequestConstraint(self, constraintType, constraintValue): + constraint = RequestConstraint() + constraint.constraintType = constraintType + constraint.constraintValue = constraintValue + return constraint + + def testEvaluateEquals(self): + new = RequestConstraint.new + self.assertTrue(new('=', 3).evaluate(3)) + self.assertTrue(new('=', 3).evaluate('3')) + self.assertTrue(new('=', '3').evaluate(3)) + self.assertTrue(new('=', 12345).evaluate(12345)) + self.assertTrue(new('=', 'a').evaluate('a')) + self.assertTrue(new('=', 'a').evaluate(u'a')) + self.assertTrue(new('=', 1.0001).evaluate(2.0 - 0.999999)) + self.assertTrue(new('=', 1.00001).evaluate(1)) + self.assertFalse(new('=', 'a').evaluate(['a'])) + self.assertFalse(new('=', 'a').evaluate(['b'])) + self.assertFalse(new('=', 3).evaluate(4)) + self.assertFalse(new('=', 4).evaluate(3)) + self.assertFalse(new('=', 'a').evaluate('z')) + + def testEvaluateNotEquals(self): + new = RequestConstraint.new + self.assertTrue(new('!=', 
'a').evaluate(['a'])) + self.assertTrue(new('!=', 'a').evaluate(['b'])) + self.assertTrue(new('!=', 3).evaluate(4)) + self.assertTrue(new('!=', 4).evaluate(3)) + self.assertTrue(new('!=', 'a').evaluate('z')) + self.assertFalse(new('!=', 3).evaluate('3')) + self.assertFalse(new('!=', '3').evaluate(3)) + self.assertFalse(new('!=', 3).evaluate(3)) + self.assertFalse(new('!=', 12345).evaluate(12345)) + self.assertFalse(new('!=', 'a').evaluate('a')) + self.assertFalse(new('!=', 'a').evaluate(u'a')) + self.assertFalse(new('!=', 1.0001).evaluate(2.0 - 0.9999)) + + def testEvaluateGreaterThan(self): + new = RequestConstraint.new + self.assertTrue(new('>', 1.0001).evaluate(1.0002)) + self.assertTrue(new('>', 'a').evaluate('b')) + self.assertTrue(new('>', 3).evaluate(4)) + self.assertFalse(new('>', 20).evaluate(3)) + self.assertFalse(new('>', 12345).evaluate(12345)) + self.assertFalse(new('>', 'a').evaluate('a')) + self.assertFalse(new('>', 'z').evaluate('a')) + self.assertFalse(new('>', 4).evaluate(3)) + + def testEvaluateGreaterThanEquals(self): + new = RequestConstraint.new + self.assertTrue(new('>=', 3).evaluate(3)) + self.assertTrue(new('>=', 12345).evaluate(12345)) + self.assertTrue(new('>=', 'a').evaluate('a')) + self.assertTrue(new('>=', 1.0001).evaluate(1.0002)) + self.assertTrue(new('>=', 'a').evaluate('b')) + self.assertTrue(new('>=', 3).evaluate(20)) + self.assertFalse(new('>=', 1.0001).evaluate(1.0)) + self.assertFalse(new('>=', 'z').evaluate('a')) + self.assertFalse(new('>=', 40).evaluate(3)) + + def testEvaluateLessThan(self): + new = RequestConstraint.new + self.assertTrue(new('<', 'z').evaluate('a')) + self.assertTrue(new('<', 30).evaluate(4)) + self.assertFalse(new('<', 3).evaluate(3)) + self.assertFalse(new('<', 12345).evaluate(12345)) + self.assertFalse(new('<', 'a').evaluate('a')) + self.assertFalse(new('<', 1.0001).evaluate(1.0002)) + self.assertFalse(new('<', 'a').evaluate('b')) + self.assertFalse(new('<', 3).evaluate(40)) + + def testEvaluateLessThanEquals(self): + new = RequestConstraint.new + self.assertTrue(new('<=', 'z').evaluate('a')) + self.assertTrue(new('<=', 20).evaluate(3)) + self.assertTrue(new('<=', 3).evaluate(3)) + self.assertTrue(new('<=', 12345).evaluate(12345)) + self.assertTrue(new('<=', 'a').evaluate('a')) + self.assertFalse(new('<=', 1.0001).evaluate(1.0002)) + self.assertFalse(new('<=', 'a').evaluate('b')) + self.assertFalse(new('<=', 4).evaluate(30)) + + def testEvaluateIsNull(self): + new = RequestConstraint.new + self.assertTrue(new('=', None).evaluate(None)) + self.assertTrue(new('=', None).evaluate('null')) + self.assertFalse(new('=', None).evaluate(())) + self.assertFalse(new('=', None).evaluate(0)) + self.assertFalse(new('=', None).evaluate(False)) + + def testEvaluateIsNotNull(self): + new = RequestConstraint.new + self.assertTrue(new('!=', None).evaluate(())) + self.assertTrue(new('!=', None).evaluate(0)) + self.assertTrue(new('!=', None).evaluate(False)) + self.assertFalse(new('!=', None).evaluate(None)) + self.assertFalse(new('!=', None).evaluate('null')) + + def testEvaluateIn(self): + new = RequestConstraint.new + self.assertTrue(new('in', [3]).evaluate(3)) + self.assertTrue(new('in', ['a', 'b', 3]).evaluate(3)) + self.assertTrue(new('in', 'a').evaluate('a')) + self.assertTrue(new('in', [3, 4, 5]).evaluate('5')) + self.assertTrue(new('in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999)) + self.assertFalse(new('in', ['a', 'b', 'c']).evaluate('d')) + self.assertFalse(new('in', 'a').evaluate('b')) + + def testEvaluateNotIn(self): + new = 
RequestConstraint.new + self.assertTrue(new('not in', ['a', 'b', 'c']).evaluate('d')) + self.assertTrue(new('not in', [3, 4, 5]).evaluate(6)) + self.assertTrue(new('not in', 'a').evaluate('b')) + self.assertFalse(new('not in', [3]).evaluate(3)) + self.assertFalse(new('not in', ['a', 'b', 3]).evaluate(3)) + self.assertFalse(new('not in', 'a').evaluate('a')) + self.assertFalse(new('not in', [1.0001, 2, 3]).evaluate(2.0 - 0.9999)) + + def testEvaluateBetween(self): + # cannot make "between" with RequestConstraint.new() + new = self._newRequestConstraint + self.assertTrue(new('BETWEEN', '1--1').evaluate(1)) + self.assertTrue(new('BETWEEN', '1--10').evaluate(1)) + self.assertTrue(new('BETWEEN', '1--10').evaluate(5)) + self.assertTrue(new('BETWEEN', '1--10').evaluate(10)) + self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.0)) + self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.05)) + self.assertTrue(new('BETWEEN', '1.0--1.1').evaluate(1.1)) + self.assertTrue(new('BETWEEN', 'a--x').evaluate('a')) + self.assertTrue(new('BETWEEN', 'a--x').evaluate('j')) + self.assertTrue(new('BETWEEN', 'a--x').evaluate('x')) + self.assertFalse(new('BETWEEN', '1--1').evaluate(2)) + self.assertFalse(new('BETWEEN', '1--2').evaluate(10)) + self.assertFalse(new('BETWEEN', '1--10').evaluate(0)) + self.assertFalse(new('BETWEEN', '1--10').evaluate(11)) + self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(0.99)) + self.assertFalse(new('BETWEEN', '1.0--1.1').evaluate(1.11)) + self.assertFalse(new('BETWEEN', 'a--x').evaluate(' ')) + self.assertFalse(new('BETWEEN', 'a--x').evaluate('z')) + diff --git a/awips/test/dafTests/testSatellite.py b/awips/test/dafTests/testSatellite.py new file mode 100644 index 0000000..92c5264 --- /dev/null +++ b/awips/test/dafTests/testSatellite.py @@ -0,0 +1,166 @@ +# +# Test DAF support for satellite data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 04/26/16 5587 tgurney Move identifier values tests +# out of base class +# 06/01/16 5587 tgurney Update testGetIdentifierValues +# 06/07/16 5574 tgurney Add advanced query tests +# 06/13/16 5574 tgurney Typo +# 06/30/16 5725 tgurney Add test for NOT IN +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseDafTestCase + + +class SatelliteTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for satellite data""" + + datatype = "satellite" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames("West CONUS") + self.runTimesTest(req) + + def testGetGridData(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("Imager 11 micron IR") + req.setLocationNames("West CONUS") + self.runGridDataTest(req) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + requiredIds = set(DAL.getRequiredIdentifiers(req)) + self.runGetIdValuesTest(optionalIds | requiredIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters("Imager 11 micron IR") + req.setLocationNames("West CONUS") + return self.runGridDataTest(req) + + def testGetDataWithEqualsString(self): + gridData = self._runConstraintTest('creatingEntity', '=', 'Composite') + for record in gridData: + self.assertEqual(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithEqualsInt(self): + gridData = self._runConstraintTest('creatingEntity', '=', 1000) + for record in gridData: + self.assertEqual(record.getAttribute('creatingEntity'), 1000) + + def testGetDataWithEqualsLong(self): + gridData = self._runConstraintTest('creatingEntity', '=', 1000) + for record in gridData: + self.assertEqual(record.getAttribute('creatingEntity'), 1000) + + def testGetDataWithEqualsFloat(self): + gridData = self._runConstraintTest('creatingEntity', '=', 1.0) + for record in gridData: + self.assertEqual(round(record.getAttribute('creatingEntity'), 1), 1.0) + + def testGetDataWithEqualsNone(self): + gridData = self._runConstraintTest('creatingEntity', '=', None) + for record in gridData: + self.assertIsNone(record.getAttribute('creatingEntity')) + + def testGetDataWithNotEquals(self): + gridData = self._runConstraintTest('creatingEntity', '!=', 'Composite') + for record in gridData: + self.assertNotEqual(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithNotEqualsNone(self): + gridData = self._runConstraintTest('creatingEntity', '!=', None) + for record in gridData: + self.assertIsNotNone(record.getAttribute('creatingEntity')) + + def testGetDataWithGreaterThan(self): + gridData = self._runConstraintTest('creatingEntity', '>', 'Composite') + for record in gridData: + 
self.assertGreater(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithLessThan(self): + gridData = self._runConstraintTest('creatingEntity', '<', 'Composite') + for record in gridData: + self.assertLess(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithGreaterThanEquals(self): + gridData = self._runConstraintTest('creatingEntity', '>=', 'Composite') + for record in gridData: + self.assertGreaterEqual(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithLessThanEquals(self): + gridData = self._runConstraintTest('creatingEntity', '<=', 'Composite') + for record in gridData: + self.assertLessEqual(record.getAttribute('creatingEntity'), 'Composite') + + def testGetDataWithInTuple(self): + collection = ('Composite', 'Miscellaneous') + gridData = self._runConstraintTest('creatingEntity', 'in', collection) + for record in gridData: + self.assertIn(record.getAttribute('creatingEntity'), collection) + + def testGetDataWithInList(self): + collection = ('Composite', 'Miscellaneous') + gridData = self._runConstraintTest('creatingEntity', 'in', collection) + for record in gridData: + self.assertIn(record.getAttribute('creatingEntity'), collection) + + def testGetDataWithInGenerator(self): + collection = ('Composite', 'Miscellaneous') + generator = (item for item in collection) + gridData = self._runConstraintTest('creatingEntity', 'in', generator) + for record in gridData: + self.assertIn(record.getAttribute('creatingEntity'), collection) + + def testGetDataWithNotInList(self): + collection = ('Composite', 'Miscellaneous') + gridData = self._runConstraintTest('creatingEntity', 'not in', collection) + for record in gridData: + self.assertNotIn(record.getAttribute('creatingEntity'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('creatingEntity', 'junk', 'Composite') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('creatingEntity', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('creatingEntity', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('Composite', 'Miscellaneous', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('creatingEntity', 'in', collection) diff --git a/awips/test/dafTests/testSfcObs.py b/awips/test/dafTests/testSfcObs.py new file mode 100644 index 0000000..7abeda4 --- /dev/null +++ b/awips/test/dafTests/testSfcObs.py @@ -0,0 +1,166 @@ +# +# Test DAF support for sfcobs data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 06/09/16 5587 bsteffen Add getIdentifierValues tests +# 06/13/16 5574 tgurney Add advanced query tests +# 06/30/16 5725 tgurney Add test for NOT IN +# 01/20/17 6095 tgurney Add null identifiers test +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseDafTestCase + + +class SfcObsTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for sfcobs data""" + + datatype = "sfcobs" + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames("14547") + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames("14547") + req.setParameters("temperature", "seaLevelPress", "dewpoint") + self.runGeometryDataTest(req) + + def testGetGeometryDataNullIdentifiers(self): + req = DAL.newDataRequest(self.datatype) + req.setLocationNames("14547") + req.setParameters("temperature", "seaLevelPress", "dewpoint") + req.identifiers = None + self.runGeometryDataTest(req) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + self.runGetIdValuesTest(optionalIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters("temperature", "reportType") + return self.runGeometryDataTest(req) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('reportType', '=', '1004') + for record in geometryData: + self.assertEqual(record.getString('reportType'), '1004') + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('reportType', '=', 1004) + for record in geometryData: + self.assertEqual(record.getString('reportType'), '1004') + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('reportType', '=', 1004) + for record in geometryData: + self.assertEqual(record.getString('reportType'), '1004') + + # No float test because no float identifiers are available + + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '=', None) + for record in geometryData: + self.assertEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('reportType', '!=', 1004) + for record in geometryData: + self.assertNotEqual(record.getString('reportType'), '1004') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('reportType', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('reportType'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('reportType', '>', 1004) + for record in geometryData: + self.assertGreater(record.getString('reportType'), '1004') + + def 
testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('reportType', '<', 1004) + for record in geometryData: + self.assertLess(record.getString('reportType'), '1004') + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('reportType', '>=', 1004) + for record in geometryData: + self.assertGreaterEqual(record.getString('reportType'), '1004') + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('reportType', '<=', 1004) + for record in geometryData: + self.assertLessEqual(record.getString('reportType'), '1004') + + def testGetDataWithInTuple(self): + collection = ('1004', '1005') + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInList(self): + collection = ['1004', '1005'] + geometryData = self._runConstraintTest('reportType', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithInGenerator(self): + collection = ('1004', '1005') + generator = (item for item in collection) + geometryData = self._runConstraintTest('reportType', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('reportType'), collection) + + def testGetDataWithNotInList(self): + collection = ['1004', '1005'] + geometryData = self._runConstraintTest('reportType', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('reportType'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'junk', '1004') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('reportType', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('1004', '1005', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('reportType', 'in', collection) diff --git a/awips/test/dafTests/testTopo.py b/awips/test/dafTests/testTopo.py new file mode 100644 index 0000000..0ee4aa2 --- /dev/null +++ b/awips/test/dafTests/testTopo.py @@ -0,0 +1,74 @@ +# +# Test DAF support for topo data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 05/26/16 5587 tgurney Add test for +# getIdentifierValues() +# 06/01/16 5587 tgurney Update testGetIdentifierValues +# 07/18/17 6253 randerso Removed referenced to GMTED +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from awips.ThriftClient import ThriftRequestException +import shapely.geometry + +from awips.test.dafTests import baseDafTestCase + + +class TopoTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for topo data""" + + datatype = "topo" + + def testGetGridData(self): + print("defaultTopo") + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("group", "/") + req.addIdentifier("dataset", "full") + poly = shapely.geometry.LinearRing(((-70, 40), (-71, 40), (-71, 42), (-70, 42))) + req.setEnvelope(poly) + gridData = DAL.getGridData(req) + self.assertIsNotNone(gridData) + print("Number of grid records: " + str(len(gridData))) + print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") + print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") + + for topoFile in ["gtopo30"]: + print("\n" + topoFile) + req.addIdentifier("topoFile", topoFile) + gridData = DAL.getGridData(req) + self.assertIsNotNone(gridData) + print("Number of grid records: " + str(len(gridData))) + print("Sample grid data shape:\n" + str(gridData[0].getRawData().shape) + "\n") + print("Sample grid data:\n" + str(gridData[0].getRawData()) + "\n") + + def testRequestingTooMuchDataThrowsResponseTooLargeException(self): + req = DAL.newDataRequest(self.datatype) + req.addIdentifier("group", "/") + req.addIdentifier("dataset", "full") + points = ((-180, 90), (180, 90), (180, -90), (-180, -90)) + poly = shapely.geometry.LinearRing(points) + req.setEnvelope(poly) + + with self.assertRaises(ThriftRequestException) as cm: + DAL.getGridData(req) + self.assertIn('ResponseTooLargeException', str(cm.exception)) + + def testGetIdentifierValues(self): + req = DAL.newDataRequest(self.datatype) + optionalIds = set(DAL.getOptionalIdentifiers(req)) + requiredIds = set(DAL.getRequiredIdentifiers(req)) + self.runGetIdValuesTest(optionalIds | requiredIds) + + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() diff --git a/awips/test/dafTests/testWarning.py b/awips/test/dafTests/testWarning.py new file mode 100644 index 0000000..a125823 --- /dev/null +++ b/awips/test/dafTests/testWarning.py @@ -0,0 +1,208 @@ +# +# Test DAF support for warning data +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/19/16 4795 mapeters Initial Creation. 
+# 04/11/16 5548 tgurney Cleanup +# 04/18/16 5548 tgurney More cleanup +# 04/26/16 5587 tgurney Add identifier values tests +# 06/08/16 5574 tgurney Add advanced query tests +# 06/10/16 5548 tgurney Clean up references to name +# of data type +# 06/13/16 5574 tgurney Fix checks for None +# 06/21/16 5548 tgurney Skip tests that cause errors +# 06/30/16 5725 tgurney Add test for NOT IN +# 12/12/16 5981 tgurney Improve test performance +# +# + +from __future__ import print_function +from awips.dataaccess import DataAccessLayer as DAL +from dynamicserialize.dstypes.com.raytheon.uf.common.dataquery.requests import RequestConstraint + +from awips.test.dafTests import baseDafTestCase +import unittest + + +class WarningTestCase(baseDafTestCase.DafTestCase): + """Test DAF support for warning data""" + + datatype = "warning" + + def _getLocationNames(self): + req = DAL.newDataRequest() + req.setDatatype(self.datatype) + return DAL.getAvailableLocationNames(req) + + def _getAllRecords(self): + req = DAL.newDataRequest() + req.setDatatype(self.datatype) + req.setParameters('id') + return DAL.getGeometryData(req) + + def testGetAvailableParameters(self): + req = DAL.newDataRequest(self.datatype) + self.runParametersTest(req) + + def testGetAvailableLocations(self): + req = DAL.newDataRequest(self.datatype) + self.runLocationsTest(req) + + def testGetAvailableTimes(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("etn", "wmoid") + self.runTimesTest(req) + + def testGetGeometryData(self): + req = DAL.newDataRequest(self.datatype) + req.setParameters("etn", "wmoid") + self.runGeometryDataTest(req) + + def testFilterOnLocationName(self): + allLocationNames = self._getLocationNames() + if len(allLocationNames) == 0: + errmsg = "No {0} data exists on {1}. Try again with {0} data." 
+ raise unittest.SkipTest(errmsg.format(self.datatype, DAL.THRIFT_HOST)) + testCount = 3 # number of different location names to test + for locationName in allLocationNames[:testCount]: + req = DAL.newDataRequest() + req.setDatatype(self.datatype) + req.setParameters('id') + req.setLocationNames(locationName) + geomData = DAL.getGeometryData(req) + for geom in geomData: + self.assertEqual(geom.getLocationName(), locationName) + + def testFilterOnNonexistentLocationReturnsEmpty(self): + req = DAL.newDataRequest() + req.setDatatype(self.datatype) + req.setParameters('id') + req.setLocationNames('ZZZZ') + self.assertEqual(len(DAL.getGeometryData(req)), 0) + + def testFilterOnInvalidLocationThrowsIncompatibleRequestException(self): + req = DAL.newDataRequest() + req.setDatatype(self.datatype) + req.setParameters('id') + req.setLocationNames(') and 0=1') + with self.assertRaises(Exception) as cm: + DAL.getGeometryData(req) + self.assertIn('IncompatibleRequestException', str(cm.exception)) + + def testGetColumnIdentifierValues(self): + self.runGetIdValuesTest(['act']) + + @unittest.skip('avoid EDEX error') + def testGetInvalidIdentifierValuesThrowsException(self): + self.runInvalidIdValuesTest() + + @unittest.skip('avoid EDEX error') + def testGetNonexistentIdentifierValuesThrowsException(self): + self.runNonexistentIdValuesTest() + + def _runConstraintTest(self, key, operator, value): + req = DAL.newDataRequest(self.datatype) + constraint = RequestConstraint.new(operator, value) + req.addIdentifier(key, constraint) + req.setParameters("etn", "wmoid", "sig") + return self.runGeometryDataTest(req) + + def testGetDataWithEqualsString(self): + geometryData = self._runConstraintTest('sig', '=', 'Y') + for record in geometryData: + self.assertEqual(record.getString('sig'), 'Y') + + def testGetDataWithEqualsInt(self): + geometryData = self._runConstraintTest('etn', '=', 1000) + for record in geometryData: + self.assertEqual(record.getString('etn'), '1000') + + def testGetDataWithEqualsLong(self): + geometryData = self._runConstraintTest('etn', '=', 1000) + for record in geometryData: + self.assertEqual(record.getString('etn'), '1000') + + def testGetDataWithEqualsFloat(self): + geometryData = self._runConstraintTest('etn', '=', 1.0) + for record in geometryData: + self.assertEqual(round(float(record.getString('etn')), 1), 1.0) + + def testGetDataWithEqualsNone(self): + geometryData = self._runConstraintTest('sig', '=', None) + for record in geometryData: + self.assertEqual(record.getType('sig'), 'NULL') + + def testGetDataWithNotEquals(self): + geometryData = self._runConstraintTest('sig', '!=', 'Y') + for record in geometryData: + self.assertNotEqual(record.getString('sig'), 'Y') + + def testGetDataWithNotEqualsNone(self): + geometryData = self._runConstraintTest('sig', '!=', None) + for record in geometryData: + self.assertNotEqual(record.getType('sig'), 'NULL') + + def testGetDataWithGreaterThan(self): + geometryData = self._runConstraintTest('sig', '>', 'Y') + for record in geometryData: + self.assertGreater(record.getString('sig'), 'Y') + + def testGetDataWithLessThan(self): + geometryData = self._runConstraintTest('sig', '<', 'Y') + for record in geometryData: + self.assertLess(record.getString('sig'), 'Y') + + def testGetDataWithGreaterThanEquals(self): + geometryData = self._runConstraintTest('sig', '>=', 'Y') + for record in geometryData: + self.assertGreaterEqual(record.getString('sig'), 'Y') + + def testGetDataWithLessThanEquals(self): + geometryData = self._runConstraintTest('sig', '<=', 
'Y') + for record in geometryData: + self.assertLessEqual(record.getString('sig'), 'Y') + + def testGetDataWithInTuple(self): + collection = ('Y', 'A') + geometryData = self._runConstraintTest('sig', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('sig'), collection) + + def testGetDataWithInList(self): + collection = ['Y', 'A'] + geometryData = self._runConstraintTest('sig', 'in', collection) + for record in geometryData: + self.assertIn(record.getString('sig'), collection) + + def testGetDataWithInGenerator(self): + collection = ('Y', 'A') + generator = (item for item in collection) + geometryData = self._runConstraintTest('sig', 'in', generator) + for record in geometryData: + self.assertIn(record.getString('sig'), collection) + + def testGetDataWithNotInList(self): + collection = ['Y', 'W'] + geometryData = self._runConstraintTest('sig', 'not in', collection) + for record in geometryData: + self.assertNotIn(record.getString('sig'), collection) + + def testGetDataWithInvalidConstraintTypeThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('sig', 'junk', 'Y') + + def testGetDataWithInvalidConstraintValueThrowsException(self): + with self.assertRaises(TypeError): + self._runConstraintTest('sig', '=', {}) + + def testGetDataWithEmptyInConstraintThrowsException(self): + with self.assertRaises(ValueError): + self._runConstraintTest('sig', 'in', []) + + def testGetDataWithNestedInConstraintThrowsException(self): + collection = ('Y', 'A', ()) + with self.assertRaises(TypeError): + self._runConstraintTest('sig', 'in', collection) diff --git a/dynamicserialize/DynamicSerializationManager.py b/dynamicserialize/DynamicSerializationManager.py new file mode 100644 index 0000000..171628b --- /dev/null +++ b/dynamicserialize/DynamicSerializationManager.py @@ -0,0 +1,45 @@ +# +# A port of the Java DynamicSerializeManager. Should be used to read/write +# DynamicSerialize binary data. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/09/10 njensen Initial Creation. +# + +from thrift.transport import TTransport +from . 
import SelfDescribingBinaryProtocol, ThriftSerializationContext + + +class DynamicSerializationManager: + + def __init__(self): + self.transport = None + + def _deserialize(self, ctx): + return ctx.deserializeMessage() + + def deserializeBytes(self, sbytes): + ctx = self._buildSerializationContext(sbytes) + ctx.readMessageStart() + obj = self._deserialize(ctx) + ctx.readMessageEnd() + return obj + + def _buildSerializationContext(self, sbytes=None): + self.transport = TTransport.TMemoryBuffer(sbytes) + protocol = SelfDescribingBinaryProtocol.SelfDescribingBinaryProtocol(self.transport) + return ThriftSerializationContext.ThriftSerializationContext(self, protocol) + + def serializeObject(self, obj): + ctx = self._buildSerializationContext() + ctx.writeMessageStart("dynamicSerialize") + self._serialize(ctx, obj) + ctx.writeMessageEnd() + return self.transport.getvalue() + + def _serialize(self, ctx, obj): + ctx.serializeMessage(obj) diff --git a/dynamicserialize/SelfDescribingBinaryProtocol.py b/dynamicserialize/SelfDescribingBinaryProtocol.py new file mode 100644 index 0000000..85043ff --- /dev/null +++ b/dynamicserialize/SelfDescribingBinaryProtocol.py @@ -0,0 +1,114 @@ +# +# Partially compatible AWIPS-II Thrift Binary Protocol +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 11/11/09 chammack Initial Creation. +# 06/09/10 njensen Added float, list methods +# Apr 24, 2015 4425 nabowle Add F64List support. +# +# + +import struct +import numpy +from thrift.protocol.TProtocol import * +from thrift.protocol.TBinaryProtocol import * + +FLOAT = 64 + +intList = numpy.dtype(numpy.int32).newbyteorder('>') +floatList = numpy.dtype(numpy.float32).newbyteorder('>') +longList = numpy.dtype(numpy.int64).newbyteorder('>') +shortList = numpy.dtype(numpy.int16).newbyteorder('>') +byteList = numpy.dtype(numpy.int8).newbyteorder('>') +doubleList = numpy.dtype(numpy.float64).newbyteorder('>') + + +class SelfDescribingBinaryProtocol(TBinaryProtocol): + + def readFieldBegin(self): + ftype = self.readByte() + if ftype == TType.STOP: + return None, ftype, 0 + name = self.readString() + fid = self.readI16() + return name, ftype, fid + + def readStructBegin(self): + return self.readString() + + def writeStructBegin(self, name): + self.writeString(name) + + def writeFieldBegin(self, name, ftype, fid): + self.writeByte(ftype) + self.writeString(name) + self.writeI16(fid) + + def readFloat(self): + d = self.readI32() + dAsBytes = struct.pack('i', d) + f = struct.unpack('f', dAsBytes) + return f[0] + + def writeFloat(self, f): + dAsBytes = struct.pack('f', f) + i = struct.unpack('i', dAsBytes) + self.writeI32(i[0]) + + def readI32List(self, sz): + buff = self.trans.readAll(4*sz) + val = numpy.frombuffer(buff, dtype=intList, count=sz) + return val + + def readF32List(self, sz): + buff = self.trans.readAll(4*sz) + val = numpy.frombuffer(buff, dtype=floatList, count=sz) + return val + + def readF64List(self, sz): + buff = self.trans.readAll(8*sz) + val = numpy.frombuffer(buff, dtype=doubleList, count=sz) + return val + + def readI64List(self, sz): + buff = self.trans.readAll(8*sz) + val = numpy.frombuffer(buff, dtype=longList, count=sz) + return val + + def readI16List(self, sz): + buff = self.trans.readAll(2*sz) + val = numpy.frombuffer(buff, dtype=shortList, count=sz) + return val + + def readI8List(self, sz): + buff = self.trans.readAll(sz) + val = numpy.frombuffer(buff, dtype=byteList, count=sz) + return val + + def 
writeI32List(self, buff): + b = numpy.asarray(buff, intList) + self.trans.write(numpy.getbuffer(b)) + + def writeF32List(self, buff): + b = numpy.asarray(buff, floatList) + self.trans.write(numpy.getbuffer(b)) + + def writeF64List(self, buff): + b = numpy.asarray(buff, doubleList) + self.trans.write(numpy.getbuffer(b)) + + def writeI64List(self, buff): + b = numpy.asarray(buff, longList) + self.trans.write(numpy.getbuffer(b)) + + def writeI16List(self, buff): + b = numpy.asarray(buff, shortList) + self.trans.write(numpy.getbuffer(b)) + + def writeI8List(self, buff): + b = numpy.asarray(buff, byteList) + self.trans.write(numpy.getbuffer(b)) diff --git a/dynamicserialize/ThriftSerializationContext.py b/dynamicserialize/ThriftSerializationContext.py new file mode 100644 index 0000000..7d8780b --- /dev/null +++ b/dynamicserialize/ThriftSerializationContext.py @@ -0,0 +1,433 @@ +# +# A port of the Java ThriftSerializationContext, used for reading/writing +# DynamicSerialize objects to/from thrift. +# +# For serialization, it has no knowledge of the expected types in other +# languages, it is instead all based on inspecting the types of the objects +# passed to it. Therefore, ensure the types of python objects and primitives +# match what they should be in the destination language. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/09/10 njensen Initial Creation. +# 06/12/13 #2099 dgilling Implement readObject() and +# writeObject(). +# Apr 24, 2015 4425 nabowle Add Double support +# Oct 17, 2016 5919 njensen Optimized for speed +# Sep 06, 2018 mjames@ucar Python3 compliance +# +# + +import inspect +import sys +import types +import six +import numpy +from thrift.Thrift import TType +import dynamicserialize +from dynamicserialize import dstypes, adapters +from dynamicserialize import SelfDescribingBinaryProtocol + +DS_LEN = len('dynamicserialize.dstypes.') + +dsObjTypes = {} + + +def buildObjMap(module): + if '__all__' in module.__dict__: + for i in module.__all__: + name = module.__name__ + '.' 
+ i + __import__(name) + buildObjMap(sys.modules[name]) + else: + clzName = module.__name__[module.__name__.rfind('.') + 1:] + clz = module.__dict__[clzName] + tname = module.__name__ + tname = tname[DS_LEN:] + dsObjTypes[tname] = clz + + +buildObjMap(dstypes) + +if six.PY2: + pythonToThriftMap = { + types.StringType: TType.STRING, + types.IntType: TType.I32, + types.LongType: TType.I64, + types.ListType: TType.LIST, + unicode: TType.STRING, + types.DictionaryType: TType.MAP, + type(set([])): TType.SET, + types.FloatType: SelfDescribingBinaryProtocol.FLOAT, + # types.FloatType: TType.DOUBLE, + types.BooleanType: TType.BOOL, + types.InstanceType: TType.STRUCT, + types.NoneType: TType.VOID, + numpy.float32: SelfDescribingBinaryProtocol.FLOAT, + numpy.int32: TType.I32, + numpy.ndarray: TType.LIST, + numpy.object_: TType.STRING, # making an assumption here + numpy.string_: TType.STRING, + numpy.float64: TType.DOUBLE, + numpy.int16: TType.I16, + numpy.int8: TType.BYTE, + numpy.int64: TType.I64 + } +else: + pythonToThriftMap = { + bytes: TType.STRING, + int: TType.I32, + int: TType.I64, + list: TType.LIST, + dict: TType.MAP, + type(set([])): TType.SET, + float: SelfDescribingBinaryProtocol.FLOAT, + # types.FloatType: TType.DOUBLE, + bool: TType.BOOL, + object: TType.STRUCT, + str: TType.STRING, + type(None): TType.VOID, + numpy.float32: SelfDescribingBinaryProtocol.FLOAT, + numpy.int32: TType.I32, + numpy.ndarray: TType.LIST, + numpy.object_: TType.STRING, # making an assumption here + numpy.string_: TType.STRING, + numpy.float64: TType.DOUBLE, + numpy.int16: TType.I16, + numpy.int8: TType.BYTE, + numpy.int64: TType.I64 + } + +primitiveSupport = (TType.BYTE, TType.I16, TType.I32, TType.I64, + SelfDescribingBinaryProtocol.FLOAT, TType.DOUBLE) + + +class ThriftSerializationContext(object): + + def __init__(self, serializationManager, selfDescribingBinaryProtocol): + self.serializationManager = serializationManager + self.protocol = selfDescribingBinaryProtocol + self.typeDeserializationMethod = { + TType.STRING: self.protocol.readString, + TType.I16: self.protocol.readI16, + TType.I32: self.protocol.readI32, + TType.LIST: self._deserializeArray, + TType.MAP: self._deserializeMap, + TType.SET: self._deserializeSet, + SelfDescribingBinaryProtocol.FLOAT: self.protocol.readFloat, + TType.BYTE: self.protocol.readByte, + TType.I64: self.protocol.readI64, + TType.DOUBLE: self.protocol.readDouble, + TType.BOOL: self.protocol.readBool, + TType.STRUCT: self.deserializeMessage, + TType.VOID: lambda: None + } + self.typeSerializationMethod = { + TType.STRING: self.protocol.writeString, + TType.I16: self.protocol.writeI16, + TType.I32: self.protocol.writeI32, + TType.LIST: self._serializeArray, + TType.MAP: self._serializeMap, + TType.SET: self._serializeSet, + SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeFloat, + TType.BYTE: self.protocol.writeByte, + TType.I64: self.protocol.writeI64, + TType.DOUBLE: self.protocol.writeDouble, + TType.BOOL: self.protocol.writeBool, + TType.STRUCT: self.serializeMessage, + TType.VOID: lambda x: None + } + self.listDeserializationMethod = { + TType.BYTE: self.protocol.readI8List, + TType.I16: self.protocol.readI16List, + TType.I32: self.protocol.readI32List, + TType.I64: self.protocol.readI64List, + SelfDescribingBinaryProtocol.FLOAT: self.protocol.readF32List, + TType.DOUBLE: self.protocol.readF64List + } + self.listSerializationMethod = { + TType.BYTE: self.protocol.writeI8List, + TType.I16: self.protocol.writeI16List, + TType.I32: self.protocol.writeI32List, + 
TType.I64: self.protocol.writeI64List, + SelfDescribingBinaryProtocol.FLOAT: self.protocol.writeF32List, + TType.DOUBLE: self.protocol.writeF64List + } + + def readMessageStart(self): + msg = self.protocol.readMessageBegin() + return msg[0] + + def readMessageEnd(self): + self.protocol.readMessageEnd() + + def deserializeMessage(self): + name = self.protocol.readStructBegin() + name = name.decode('cp437') + name = name.replace('_', '.') + if name.isdigit(): + obj = self._deserializeType(int(name)) + return obj + if name in adapters.classAdapterRegistry: + return adapters.classAdapterRegistry[name].deserialize(self) + elif '$' in name: + # it's an inner class, we're going to hope it's an enum, treat it + # special + fieldName, fieldType, fieldId = self.protocol.readFieldBegin() + if fieldName.decode('utf8') != '__enumValue__': + raise dynamicserialize.SerializationException( + "Expected to find enum payload. Found: " + fieldName) + obj = self.protocol.readString() + self.protocol.readFieldEnd() + return obj + else: + clz = dsObjTypes[name] + obj = clz() + + while self._deserializeField(obj): + pass + + self.protocol.readStructEnd() + return obj + + def _deserializeType(self, b): + try: + return self.typeDeserializationMethod[b]() + except KeyError: + raise dynamicserialize.SerializationException( + "Unsupported type value " + str(b)) + + def _deserializeField(self, obj): + fieldName, fieldType, fieldId = self.protocol.readFieldBegin() + if fieldType == TType.STOP: + return False + elif fieldType != TType.VOID: + result = self._deserializeType(fieldType) + fn_str = bytes.decode(fieldName) + lookingFor = "set" + fn_str[0].upper() + fn_str[1:] + + try: + setMethod = getattr(obj, lookingFor) + setMethod(result) + except ValueError: + raise dynamicserialize.SerializationException( + "Couldn't find setter method " + lookingFor) + + self.protocol.readFieldEnd() + return True + + def _deserializeArray(self): + listType, size = self.protocol.readListBegin() + result = [] + if size: + if listType not in primitiveSupport: + m = self.typeDeserializationMethod[listType] + result = [m() for __ in range(size)] + else: + result = self.listDeserializationMethod[listType](size) + self.protocol.readListEnd() + return result + + def _deserializeMap(self): + keyType, valueType, size = self.protocol.readMapBegin() + result = {} + for __ in range(size): + # can't go off the type, due to java generics limitations dynamic serialize is + # serializing keys and values as void + key = self.typeDeserializationMethod[TType.STRUCT]() + value = self.typeDeserializationMethod[TType.STRUCT]() + result[key] = value + self.protocol.readMapEnd() + return result + + def _deserializeSet(self): + setType, setSize = self.protocol.readSetBegin() + result = set([]) + for __ in range(setSize): + result.add(self.typeDeserializationMethod[TType.STRUCT]()) + self.protocol.readSetEnd() + return result + + def _lookupType(self, obj): + pyt = type(obj) + if pyt in pythonToThriftMap: + return pythonToThriftMap[pyt] + elif pyt.__module__[:DS_LEN - 1] == ('dynamicserialize.dstypes'): + if six.PY2: + return pythonToThriftMap[types.InstanceType] + return pythonToThriftMap[object] + raise dynamicserialize.SerializationException( + "Don't know how to serialize object of type: " + str(pyt)) + + def serializeMessage(self, obj): + tt = self._lookupType(obj) + + if tt == TType.STRUCT: + fqn = obj.__module__[DS_LEN:] + if fqn in adapters.classAdapterRegistry: + # get proper class name when writing class name to serialization stream + # in case we have 
a special inner-class case + m = sys.modules[adapters.classAdapterRegistry[fqn].__name__] + if isinstance(m.ClassAdapter, list): + fqn = m.ClassAdapter[0] + self.protocol.writeStructBegin(fqn) + adapters.classAdapterRegistry[fqn].serialize(self, obj) + return + else: + self.protocol.writeStructBegin(fqn) + methods = inspect.getmembers(obj, inspect.ismethod) + fid = 1 + for m in methods: + methodName = m[0] + if methodName.startswith('get'): + fieldname = methodName[3].lower() + methodName[4:] + val = m[1]() + ft = self._lookupType(val) + if ft == TType.STRUCT: + self._serializeField(fieldname, ft, fid, val) + else: + self._serializeField(fieldname, ft, fid, val) + fid += 1 + self.protocol.writeFieldStop() + + self.protocol.writeStructEnd() + else: + # basic types + self.protocol.writeStructBegin(str(tt)) + self._serializeType(obj, tt) + self.protocol.writeStructEnd() + + def _serializeField(self, fieldName, fieldType, fieldId, fieldValue): + self.protocol.writeFieldBegin(fieldName, fieldType, fieldId) + self._serializeType(fieldValue, fieldType) + self.protocol.writeFieldEnd() + + def _serializeType(self, fieldValue, fieldType): + if fieldType in self.typeSerializationMethod: + return self.typeSerializationMethod[fieldType](fieldValue) + else: + raise dynamicserialize.SerializationException( + "Unsupported type value " + str(fieldType)) + + def _serializeArray(self, obj): + size = len(obj) + if size: + if isinstance(obj, numpy.ndarray): + t = pythonToThriftMap[obj.dtype.type] + size = obj.size + else: + t = self._lookupType(obj[0]) + else: + t = TType.STRUCT + self.protocol.writeListBegin(t, size) + if t == TType.STRING: + if isinstance(obj, numpy.ndarray): + if len(obj.shape) == 1: + for x in obj: + s = str(x).strip() + self.typeSerializationMethod[t](s) + else: + for x in obj: + for y in x: + s = str(y).strip() + self.typeSerializationMethod[t](s) + else: + for x in obj: + s = str(x) + self.typeSerializationMethod[t](s) + elif t not in primitiveSupport: + for x in obj: + self.typeSerializationMethod[t](x) + else: + self.listSerializationMethod[t](obj) + self.protocol.writeListEnd() + + def _serializeMap(self, obj): + size = len(obj) + self.protocol.writeMapBegin(TType.VOID, TType.VOID, size) + for k in list(obj.keys()): + self.typeSerializationMethod[TType.STRUCT](k) + self.typeSerializationMethod[TType.STRUCT](obj[k]) + self.protocol.writeMapEnd() + + def _serializeSet(self, obj): + size = len(obj) + self.protocol.writeSetBegin(TType.VOID, size) + for x in obj: + self.typeSerializationMethod[TType.STRUCT](x) + self.protocol.writeSetEnd() + + def writeMessageStart(self, name): + self.protocol.writeMessageBegin(name, TType.VOID, 0) + + def writeMessageEnd(self): + self.protocol.writeMessageEnd() + + def readBool(self): + return self.protocol.readBool() + + def writeBool(self, b): + self.protocol.writeBool(b) + + def readByte(self): + return self.protocol.readByte() + + def writeByte(self, b): + self.protocol.writeByte(b) + + def readDouble(self): + return self.protocol.readDouble() + + def writeDouble(self, d): + self.protocol.writeDouble(d) + + def readFloat(self): + return self.protocol.readFloat() + + def writeFloat(self, f): + self.protocol.writeFloat(f) + + def readI16(self): + return self.protocol.readI16() + + def writeI16(self, i): + self.protocol.writeI16(i) + + def readI32(self): + return self.protocol.readI32() + + def writeI32(self, i): + self.protocol.writeI32(i) + + def readI64(self): + return self.protocol.readI64() + + def writeI64(self, i): + self.protocol.writeI64(i) + + 
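# Illustrative sketch (assumes thrift, six, and numpy are installed): the
# module-level pythonToThriftMap above is what decides the wire type, purely
# from the Python type of each value, so mismatched Python types turn into
# mismatched Thrift types on the Java side.
import six
from thrift.Thrift import TType
from dynamicserialize import SelfDescribingBinaryProtocol
from dynamicserialize.ThriftSerializationContext import pythonToThriftMap

assert pythonToThriftMap[bool] == TType.BOOL
assert pythonToThriftMap[bytes] == TType.STRING
# floats use the protocol's four-byte FLOAT extension instead of TType.DOUBLE
assert pythonToThriftMap[float] == SelfDescribingBinaryProtocol.FLOAT
if not six.PY2:
    # the two int entries collapse to one, so plain ints are written as I64
    assert pythonToThriftMap[int] == TType.I64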
def readString(self): + return self.protocol.readString() + + def writeString(self, s): + self.protocol.writeString(s) + + def readBinary(self): + numBytes = self.protocol.readI32() + return self.protocol.readI8List(numBytes) + + def readFloatArray(self): + size = self.protocol.readI32() + return self.protocol.readF32List(size) + + def writeFloatArray(self, floats): + self.protocol.writeI32(len(floats)) + self.protocol.writeF32List(floats) + + def readObject(self): + return self.deserializeMessage() + + def writeObject(self, obj): + self.serializeMessage(obj) diff --git a/dynamicserialize/__init__.py b/dynamicserialize/__init__.py new file mode 100644 index 0000000..a825711 --- /dev/null +++ b/dynamicserialize/__init__.py @@ -0,0 +1,35 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/20/10 njensen Initial Creation. +# +# + +__all__ = ['SerializationException'] + +from . import dstypes, adapters +from . import DynamicSerializationManager + + +class SerializationException(Exception): + + def __init__(self, message=None): + self.message = message + + def __str__(self): + if self.message: + return self.message + else: + return "" + + +def serialize(obj): + dsm = DynamicSerializationManager.DynamicSerializationManager() + return dsm.serializeObject(obj) + + +def deserialize(objbytes): + dsm = DynamicSerializationManager.DynamicSerializationManager() + return dsm.deserializeBytes(objbytes) diff --git a/dynamicserialize/adapters/ByteBufferAdapter.py b/dynamicserialize/adapters/ByteBufferAdapter.py new file mode 100644 index 0000000..80063ef --- /dev/null +++ b/dynamicserialize/adapters/ByteBufferAdapter.py @@ -0,0 +1,21 @@ +# +# Adapter for java.nio.ByteBuffer +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/03/11 dgilling Initial Creation. +# + +ClassAdapter = ['java.nio.ByteBuffer', 'java.nio.HeapByteBuffer'] + + +def serialize(context, bufferset): + raise NotImplementedError("Serialization of ByteBuffers is not supported.") + + +def deserialize(context): + byteBuf = context.readBinary() + return byteBuf diff --git a/dynamicserialize/adapters/CalendarAdapter.py b/dynamicserialize/adapters/CalendarAdapter.py new file mode 100644 index 0000000..77d37b0 --- /dev/null +++ b/dynamicserialize/adapters/CalendarAdapter.py @@ -0,0 +1,25 @@ +# +# Adapter for java.util.Calendar +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/29/10 wldougher Initial Creation. +# + +from dynamicserialize.dstypes.java.util import Calendar + +ClassAdapter = 'java.util.Calendar' + + +def serialize(context, calendar): + calTiM = calendar.getTimeInMillis() + context.writeI64(calTiM) + + +def deserialize(context): + result = Calendar() + result.setTimeInMillis(context.readI64()) + return result diff --git a/dynamicserialize/adapters/CommutativeTimestampAdapter.py b/dynamicserialize/adapters/CommutativeTimestampAdapter.py new file mode 100644 index 0000000..7afba78 --- /dev/null +++ b/dynamicserialize/adapters/CommutativeTimestampAdapter.py @@ -0,0 +1,25 @@ +# +# Adapter for CommutativeTimestamp +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 9/21/2015 4486 rjpeter Initial creation. +# Jun 23, 2016 5696 rjpeter Handle CommutativeTimestamp. 
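# A minimal round-trip sketch for the serialize()/deserialize() helpers defined
# in dynamicserialize/__init__.py above; Date is one of the bundled dstypes and
# its adapter (DateAdapter, below) writes only the epoch milliseconds as an I64.
import dynamicserialize
from dynamicserialize.dstypes.java.util import Date

original = Date()
original.setTime(1623186113000)                  # epoch milliseconds
payload = dynamicserialize.serialize(original)   # -> thrift-encoded bytes
restored = dynamicserialize.deserialize(payload)
assert restored.getTime() == original.getTime()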
+# + +from dynamicserialize.dstypes.com.raytheon.uf.common.time import CommutativeTimestamp + +ClassAdapter = 'com.raytheon.uf.common.time.CommutativeTimestamp' + + +def serialize(context, date): + context.writeI64(date.getTime()) + + +def deserialize(context): + result = CommutativeTimestamp() + result.setTime(context.readI64()) + return result diff --git a/dynamicserialize/adapters/CoordAdapter.py b/dynamicserialize/adapters/CoordAdapter.py new file mode 100644 index 0000000..1da1006 --- /dev/null +++ b/dynamicserialize/adapters/CoordAdapter.py @@ -0,0 +1,28 @@ +# +# Adapter for com.vividsolutions.jts.geom.Coordinate +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/20/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Coordinate + +ClassAdapter = 'com.vividsolutions.jts.geom.Coordinate' + + +def serialize(context, coordinate): + context.writeDouble(coordinate.getX()) + context.writeDouble(coordinate.getY()) + + +def deserialize(context): + x = context.readDouble() + y = context.readDouble() + coord = Coordinate() + coord.setX(x) + coord.setY(y) + return coord diff --git a/dynamicserialize/adapters/DatabaseIDAdapter.py b/dynamicserialize/adapters/DatabaseIDAdapter.py new file mode 100644 index 0000000..87045aa --- /dev/null +++ b/dynamicserialize/adapters/DatabaseIDAdapter.py @@ -0,0 +1,23 @@ +# +# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 03/29/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID + +ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID' + + +def serialize(context, dbId): + context.writeString(str(dbId)) + + +def deserialize(context): + result = DatabaseID(context.readString()) + return result diff --git a/dynamicserialize/adapters/DateAdapter.py b/dynamicserialize/adapters/DateAdapter.py new file mode 100644 index 0000000..769c034 --- /dev/null +++ b/dynamicserialize/adapters/DateAdapter.py @@ -0,0 +1,24 @@ +# +# Adapter for java.util.Date +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 12/06/10 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.java.util import Date + +ClassAdapter = 'java.util.Date' + + +def serialize(context, date): + context.writeI64(date.getTime()) + + +def deserialize(context): + result = Date() + result.setTime(context.readI64()) + return result diff --git a/dynamicserialize/adapters/EnumSetAdapter.py b/dynamicserialize/adapters/EnumSetAdapter.py new file mode 100644 index 0000000..7df3192 --- /dev/null +++ b/dynamicserialize/adapters/EnumSetAdapter.py @@ -0,0 +1,32 @@ +# +# Adapter for java.util.EnumSet +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/28/11 dgilling Initial Creation. +# 12/02/13 2537 bsteffen Serialize empty enum sets. 
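# Sketch: string-backed adapters such as DatabaseIDAdapter above simply write
# str(obj) and re-parse it on the far side; the identifier here is an
# illustrative GFE-style ID, not one taken from a live server.
import dynamicserialize
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID

dbId = DatabaseID("OAX_GRID__Fcst_00000000_0000")
payload = dynamicserialize.serialize(dbId)       # a single string on the wire
assert payload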
+# + +from dynamicserialize.dstypes.java.util import EnumSet + +ClassAdapter = ['java.util.EnumSet', 'java.util.RegularEnumSet'] + + +def serialize(context, bufferset): + setSize = len(bufferset) + context.writeI32(setSize) + context.writeString(bufferset.getEnumClass()) + for val in bufferset: + context.writeString(val) + + +def deserialize(context): + setSize = context.readI32() + enumClassName = context.readString() + valList = [] + for __ in range(setSize): + valList.append(context.readString()) + return EnumSet(enumClassName, valList) diff --git a/dynamicserialize/adapters/FloatBufferAdapter.py b/dynamicserialize/adapters/FloatBufferAdapter.py new file mode 100644 index 0000000..ef23211 --- /dev/null +++ b/dynamicserialize/adapters/FloatBufferAdapter.py @@ -0,0 +1,21 @@ +# +# Adapter for java.nio.FloatBuffer +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/01/11 dgilling Initial Creation. +# + +ClassAdapter = ['java.nio.FloatBuffer', 'java.nio.HeapFloatBuffer'] + + +def serialize(context, bufferset): + raise NotImplementedError("Serialization of FloatBuffers is not supported.") + + +def deserialize(context): + floatBuf = context.readFloatArray() + return floatBuf diff --git a/dynamicserialize/adapters/FormattedDateAdapter.py b/dynamicserialize/adapters/FormattedDateAdapter.py new file mode 100644 index 0000000..4637967 --- /dev/null +++ b/dynamicserialize/adapters/FormattedDateAdapter.py @@ -0,0 +1,24 @@ +# +# Adapter for FormattedDate +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 9/21/2015 4486 rjpeter Initial creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.time import FormattedDate + +ClassAdapter = 'com.raytheon.uf.common.time.FormattedDate' + + +def serialize(context, date): + context.writeI64(date.getTime()) + + +def deserialize(context): + result = FormattedDate() + result.setTime(context.readI64()) + return result diff --git a/dynamicserialize/adapters/GeomDataRespAdapter.py b/dynamicserialize/adapters/GeomDataRespAdapter.py new file mode 100644 index 0000000..d1512f5 --- /dev/null +++ b/dynamicserialize/adapters/GeomDataRespAdapter.py @@ -0,0 +1,94 @@ +# +# Efficient adapter for GetGeometryDataResponse +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 17, 2016 5919 njensen Initial creation +# +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import GeometryResponseData +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import GetGeometryDataResponse + +ClassAdapter = 'com.raytheon.uf.common.dataaccess.response.GetGeometryDataResponse' + + +def serialize(context, resp): + wkbs = resp.getGeometryWKBs() + # write list size + context.writeI32(len(wkbs)) + # write byte arrays + for b in wkbs: + context.writeBinary(b) + + geoData = resp.getGeoData() + # write list size + context.writeI32(len(geoData)) + # write objects + for geo in geoData: + context.writeI32(geo.getGeometryWKBindex()) + context.writeObject(geo.getTime()) + context.writeObject(geo.getLevel()) + context.writeObject(geo.getLocationName()) + context.writeObject(geo.getAttributes()) + + # write data map + params = geo.getDataMap() + context.writeI32(len(params)) + for p in params: + context.writeString(p) + value = params[p] + # actual value + context.writeObject(value[0]) + # value 
type as string + context.writeString(str(value[1])) + # unit + context.writeObject(value[2]) + + +def deserialize(context): + size = context.readI32() + wkbs = [] + for __ in range(size): + wkb = context.readBinary() + wkbs.append(wkb) + + geoData = [] + size = context.readI32() + for _ in range(size): + data = GeometryResponseData() + # wkb index + wkbIndex = context.readI32() + data.setGeometryWKBindex(wkbIndex) + + time = context.readObject() + data.setTime(time) + level = context.readObject() + data.setLevel(level) + locName = context.readObject() + data.setLocationName(locName) + attrs = context.readObject() + data.setAttributes(attrs) + + # parameters + paramSize = context.readI32() + paramMap = {} + for __ in range(paramSize): + paramName = context.readString() + value = context.readObject() + tName = context.readString() + unit = context.readObject() + paramMap[paramName] = [value, tName, unit] + data.setDataMap(paramMap) + geoData.append(data) + + # make the response object + resp = GetGeometryDataResponse() + resp.setGeometryWKBs(wkbs) + resp.setGeoData(geoData) + + return resp diff --git a/dynamicserialize/adapters/GeometryTypeAdapter.py b/dynamicserialize/adapters/GeometryTypeAdapter.py new file mode 100644 index 0000000..3453bac --- /dev/null +++ b/dynamicserialize/adapters/GeometryTypeAdapter.py @@ -0,0 +1,36 @@ +# +# Adapter for com.vividsolutions.jts.geom.Polygon +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/20/11 dgilling Initial Creation. +# + +import dynamicserialize + +# TODO: Implement serialization/make deserialization useful. +# Deserialization was simply implemented to allow GridLocation objects to be +# passed through thrift, but the resulting Geometry object will not be transformed into +# useful data; the base byte array is passed to a worthless Geometry class. + +from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Geometry + +# NOTE: At the moment, EDEX serializes Polygon, MultiPolygons, Points, and +# Geometrys with the tag of the base class Geometry. Java's serialization +# adapter is smarter and can determine the exact object by reading the binary +# data. This adapter doesn't need this _yet_, so it has not been implemented. +ClassAdapter = 'com.vividsolutions.jts.geom.Geometry' + + +def serialize(context, coordinate): + raise dynamicserialize.SerializationException('Not implemented yet') + + +def deserialize(context): + data = context.readBinary() + geom = Geometry() + geom.setBinaryData(data) + return geom diff --git a/dynamicserialize/adapters/GregorianCalendarAdapter.py b/dynamicserialize/adapters/GregorianCalendarAdapter.py new file mode 100644 index 0000000..c94e874 --- /dev/null +++ b/dynamicserialize/adapters/GregorianCalendarAdapter.py @@ -0,0 +1,25 @@ +# +# Adapter for java.util.Calendar +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/29/10 wldougher Initial Creation. 
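# Sketch (assumes the shapely package, which python-awips already uses for its
# geometry data handling): the Geometry dstype that GeometryTypeAdapter above
# fills in is only a carrier for raw WKB bytes, so callers decode those bytes
# with a WKB reader themselves.
from shapely import wkb
from shapely.geometry import Point

raw_wkb = Point(-97.0, 35.0).wkb    # stand-in for the bytes readBinary() returns
assert wkb.loads(raw_wkb).x == -97.0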
+# + +from dynamicserialize.dstypes.java.util import GregorianCalendar + +ClassAdapter = 'java.util.GregorianCalendar' + + +def serialize(context, calendar): + calTiM = calendar.getTimeInMillis() + context.writeI64(calTiM) + + +def deserialize(context): + result = GregorianCalendar() + result.setTimeInMillis(context.readI64()) + return result diff --git a/dynamicserialize/adapters/GridDataHistoryAdapter.py b/dynamicserialize/adapters/GridDataHistoryAdapter.py new file mode 100644 index 0000000..50c231c --- /dev/null +++ b/dynamicserialize/adapters/GridDataHistoryAdapter.py @@ -0,0 +1,26 @@ +# +# Adapter for com.raytheon.uf.common.dataplugin.gfe.GridDataHistory +# +# TODO: REWRITE THIS ADAPTER when serialization/deserialization of this +# class has been finalized. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 03/29/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe import GridDataHistory + +ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.GridDataHistory' + + +def serialize(context, history): + context.writeString(history.getCodedString()) + + +def deserialize(context): + result = GridDataHistory(context.readString()) + return result diff --git a/dynamicserialize/adapters/JTSEnvelopeAdapter.py b/dynamicserialize/adapters/JTSEnvelopeAdapter.py new file mode 100644 index 0000000..fcdd683 --- /dev/null +++ b/dynamicserialize/adapters/JTSEnvelopeAdapter.py @@ -0,0 +1,30 @@ +# +# Adapter for com.vividsolutions.jts.geom.Envelope +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/29/13 2023 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope + +ClassAdapter = 'com.vividsolutions.jts.geom.Envelope' + + +def serialize(context, envelope): + context.writeDouble(envelope.getMinX()) + context.writeDouble(envelope.getMaxX()) + context.writeDouble(envelope.getMinY()) + context.writeDouble(envelope.getMaxY()) + + +def deserialize(context): + env = Envelope() + env.setMinX(context.readDouble()) + env.setMaxX(context.readDouble()) + env.setMinY(context.readDouble()) + env.setMaxY(context.readDouble()) + return env diff --git a/dynamicserialize/adapters/LocalizationLevelSerializationAdapter.py b/dynamicserialize/adapters/LocalizationLevelSerializationAdapter.py new file mode 100644 index 0000000..7b3a5e4 --- /dev/null +++ b/dynamicserialize/adapters/LocalizationLevelSerializationAdapter.py @@ -0,0 +1,31 @@ +# +# Adapter for com.raytheon.uf.common.localization.LocalizationContext$LocalizationLevel +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/11/11 dgilling Initial Creation. 
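# Sketch: JTSEnvelopeAdapter above sends exactly four doubles, so an Envelope
# survives a round trip with nothing but its bounds.
import dynamicserialize
from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope

env = Envelope()
env.setMinX(-110.0)
env.setMaxX(-90.0)
env.setMinY(30.0)
env.setMaxY(45.0)
restored = dynamicserialize.deserialize(dynamicserialize.serialize(env))
assert (restored.getMinX(), restored.getMaxX(),
        restored.getMinY(), restored.getMaxY()) == (-110.0, -90.0, 30.0, 45.0)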
+# + +from dynamicserialize.dstypes.com.raytheon.uf.common.localization import LocalizationLevel + +ClassAdapter = [ + 'com.raytheon.uf.common.localization.LocalizationContext$LocalizationLevel', + 'com.raytheon.uf.common.localization.LocalizationLevel' + ] + + +def serialize(context, level): + context.writeString(level.getText()) + context.writeI32(level.getOrder()) + context.writeBool(level.isSystemLevel()) + + +def deserialize(context): + text = context.readString() + order = context.readI32() + systemLevel = context.readBool() + level = LocalizationLevel(text, order, systemLevel=systemLevel) + return level diff --git a/dynamicserialize/adapters/LocalizationTypeSerializationAdapter.py b/dynamicserialize/adapters/LocalizationTypeSerializationAdapter.py new file mode 100644 index 0000000..929d4b0 --- /dev/null +++ b/dynamicserialize/adapters/LocalizationTypeSerializationAdapter.py @@ -0,0 +1,26 @@ +# +# Adapter for com.raytheon.uf.common.localization.LocalizationContext$LocalizationType +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/11/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.localization import LocalizationType + +ClassAdapter = [ + 'com.raytheon.uf.common.localization.LocalizationContext$LocalizationType', + 'com.raytheon.uf.common.localization.LocalizationType' + ] + + +def serialize(context, ltype): + context.writeString(ltype.getText()) + + +def deserialize(context): + typeString = context.readString() + return LocalizationType(typeString) diff --git a/dynamicserialize/adapters/ParmIDAdapter.py b/dynamicserialize/adapters/ParmIDAdapter.py new file mode 100644 index 0000000..54f4baf --- /dev/null +++ b/dynamicserialize/adapters/ParmIDAdapter.py @@ -0,0 +1,23 @@ +# +# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 03/29/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + +ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID' + + +def serialize(context, parmId): + context.writeString(str(parmId)) + + +def deserialize(context): + result = ParmID(context.readString()) + return result diff --git a/dynamicserialize/adapters/PointAdapter.py b/dynamicserialize/adapters/PointAdapter.py new file mode 100644 index 0000000..288aaa5 --- /dev/null +++ b/dynamicserialize/adapters/PointAdapter.py @@ -0,0 +1,28 @@ +# +# Adapter for java.awt.Point +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. 
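# Sketch: LocalizationTypeSerializationAdapter above writes only the type's
# text, and deserialization rebuilds the dstype from that same string;
# "COMMON_STATIC" is used here purely as an illustrative value.
from dynamicserialize.dstypes.com.raytheon.uf.common.localization import LocalizationType

ltype = LocalizationType("COMMON_STATIC")
assert ltype.getText() == "COMMON_STATIC"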
+# + +from dynamicserialize.dstypes.java.awt import Point + +ClassAdapter = 'java.awt.Point' + + +def serialize(context, point): + context.writeI32(point.getX()) + context.writeI32(point.getY()) + + +def deserialize(context): + x = context.readI32() + y = context.readI32() + point = Point() + point.setX(x) + point.setY(y) + return point diff --git a/dynamicserialize/adapters/StackTraceElementAdapter.py b/dynamicserialize/adapters/StackTraceElementAdapter.py new file mode 100644 index 0000000..7f4b71b --- /dev/null +++ b/dynamicserialize/adapters/StackTraceElementAdapter.py @@ -0,0 +1,28 @@ +# +# Adapter for java.lang.StackTraceElement[] +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/21/10 njensen Initial Creation. +# + +import dynamicserialize +from dynamicserialize.dstypes.java.lang import StackTraceElement + +ClassAdapter = 'java.lang.StackTraceElement' + + +def serialize(context, obj): + raise dynamicserialize.SerializationException('Not implemented yet') + + +def deserialize(context): + result = StackTraceElement() + result.setDeclaringClass(context.readString()) + result.setMethodName(context.readString()) + result.setFileName(context.readString()) + result.setLineNumber(context.readI32()) + return result diff --git a/dynamicserialize/adapters/TimeConstraintsAdapter.py b/dynamicserialize/adapters/TimeConstraintsAdapter.py new file mode 100644 index 0000000..5f604ad --- /dev/null +++ b/dynamicserialize/adapters/TimeConstraintsAdapter.py @@ -0,0 +1,25 @@ +# +# Adapter for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 03/20/13 #1774 randerso Initial Creation. +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import TimeConstraints + +ClassAdapter = 'com.raytheon.uf.common.dataplugin.gfe.db.objects.TimeConstraints' + + +def serialize(context, timeConstraints): + context.writeI32(timeConstraints.getDuration()) + context.writeI32(timeConstraints.getRepeatInterval()) + context.writeI32(timeConstraints.getStartTime()) + + +def deserialize(context): + result = TimeConstraints(context.readI32(), context.readI32(), context.readI32()) + return result diff --git a/dynamicserialize/adapters/TimeRangeTypeAdapter.py b/dynamicserialize/adapters/TimeRangeTypeAdapter.py new file mode 100644 index 0000000..ab8053e --- /dev/null +++ b/dynamicserialize/adapters/TimeRangeTypeAdapter.py @@ -0,0 +1,40 @@ +# +# Adapter for com.raytheon.uf.common.message.WsId +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/16/10 dgilling Initial Creation. 
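# Arithmetic sketch of the conversion the TimeRange adapter below performs:
# Java sends epoch milliseconds, while the Python TimeRange stores seconds plus
# leftover microseconds.
millis = 1623186113789
seconds = millis // 1000                  # MILLIS_IN_SECOND
extra_micros = (millis % 1000) * 1000     # MICROS_IN_MILLISECOND
assert (seconds, extra_micros) == (1623186113, 789000)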
+# 01/22/14 2667 bclement use method to get millis from time range +# 02/28/14 2667 bclement deserialize now converts millis to micros +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange + +ClassAdapter = 'com.raytheon.uf.common.time.TimeRange' + +MICROS_IN_MILLISECOND = 1000 +MILLIS_IN_SECOND = 1000 + + +def serialize(context, timeRange): + context.writeI64(timeRange.getStartInMillis()) + context.writeI64(timeRange.getEndInMillis()) + + +def deserialize(context): + startTime = context.readI64() + endTime = context.readI64() + + timeRange = TimeRange() + # java uses milliseconds, python uses microseconds + startSeconds = startTime // MILLIS_IN_SECOND + endSeconds = endTime // MILLIS_IN_SECOND + startExtraMicros = (startTime % MILLIS_IN_SECOND) * MICROS_IN_MILLISECOND + endExtraMicros = (endTime % MILLIS_IN_SECOND) * MICROS_IN_MILLISECOND + timeRange.setStart(startSeconds, startExtraMicros) + timeRange.setEnd(endSeconds, endExtraMicros) + + return timeRange diff --git a/dynamicserialize/adapters/TimestampAdapter.py b/dynamicserialize/adapters/TimestampAdapter.py new file mode 100644 index 0000000..986bbbc --- /dev/null +++ b/dynamicserialize/adapters/TimestampAdapter.py @@ -0,0 +1,23 @@ +# +# Adapter for java.sql.Timestamp +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/30/11 dgilling Initial Creation. +# + +from dynamicserialize.dstypes.java.sql import Timestamp + +ClassAdapter = 'java.sql.Timestamp' + + +def serialize(context, timestamp): + context.writeI64(timestamp.getTime()) + + +def deserialize(context): + result = Timestamp(context.readI64()) + return result diff --git a/dynamicserialize/adapters/WsIdAdapter.py b/dynamicserialize/adapters/WsIdAdapter.py new file mode 100644 index 0000000..40b7121 --- /dev/null +++ b/dynamicserialize/adapters/WsIdAdapter.py @@ -0,0 +1,32 @@ +# +# Adapter for com.raytheon.uf.common.message.WsId +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Sep 16, 2010 dgilling Initial Creation. +# Apr 25, 2012 545 randerso Repurposed the lockKey field as threadId +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId + +ClassAdapter = 'com.raytheon.uf.common.message.WsId' + + +def serialize(context, wsId): + context.writeString(str(wsId)) + + +def deserialize(context): + wsIdString = context.readString() + wsIdParts = wsIdString.split(":", 5) + wsId = WsId() + wsId.setNetworkId(wsIdParts[0]) + wsId.setUserName(wsIdParts[1]) + wsId.setProgName(wsIdParts[2]) + wsId.setPid(wsIdParts[3]) + wsId.setThreadId(int(wsIdParts[4])) + return wsId diff --git a/dynamicserialize/adapters/__init__.py b/dynamicserialize/adapters/__init__.py new file mode 100644 index 0000000..8da134c --- /dev/null +++ b/dynamicserialize/adapters/__init__.py @@ -0,0 +1,99 @@ +# +# __init__.py for Dynamic Serialize adapters. +# +# Plugins can contribute to dynamicserialize.adapters by either including their +# classes directly in pythonPackages/dynamicserialize/adapters/ within their +# plugin. The plugin's adapter will automatically be added to __all__ at runtime +# and registered. +# Plugins should not include a custom __init__.py in +# pythonPackages/dynamicserialize/adapters/ because it will overwrite this file. 
+# If custom package initialization is needed, a subpackage should be created +# with an __init__.py that includes the following: +# +# __all__ = ['CustomAdapter1', 'CustomAdapter2'] +# from dynamicserialize.adapters import registerAdapters +# registerAdapters(__name__, __all__) +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. +# 03/20/13 #1774 randerso Added TimeConstraintsAdapter +# 04/22/13 #1949 rjpeter Added LockTableAdapter +# 02/06/14 #2672 bsteffen Added JTSEnvelopeAdapter +# 09/21/2015 #4486 rjpeter Added FormattedDateAdapter +# 06/23/2016 #5696 rjpeter Added CommutativeTimestampAdapter +# 10/17/2016 #5919 njensen Added GeomDataRespAdapter +# 01/09/2017 #5997 nabowle Allow contribution from plugins. +# + +__all__ = [ + 'PointAdapter', + 'StackTraceElementAdapter', + 'WsIdAdapter', + 'CalendarAdapter', + 'GregorianCalendarAdapter', + 'DateAdapter', + 'FormattedDateAdapter', + 'LocalizationLevelSerializationAdapter', + 'LocalizationTypeSerializationAdapter', + 'GeometryTypeAdapter', + 'CoordAdapter', + 'TimeRangeTypeAdapter', + 'ParmIDAdapter', + 'DatabaseIDAdapter', + 'TimestampAdapter', + 'CommutativeTimestampAdapter', + 'EnumSetAdapter', + 'FloatBufferAdapter', + 'ByteBufferAdapter', + 'TimeConstraintsAdapter', + 'JTSEnvelopeAdapter' +] + +classAdapterRegistry = {} + + +def getAdapterRegistry(): + import pkgutil + + discoveredPackages = [] + # allow other plugins to contribute to adapters by dropping their adapter or + # package into the dynamicserialize.adapters package + for _, modname, ispkg in pkgutil.iter_modules(__path__): + if ispkg: + discoveredPackages.append(modname) + else: + if modname not in __all__: + __all__.append(modname) + + registerAdapters(__name__, __all__) + + for pkg in discoveredPackages: + __import__(__name__ + '.' + pkg) + + +def registerAdapters(package, modules): + import sys + if not package.endswith('.'): + package += '.' 
+ for x in modules: + # TODO: use importlib + exec('import ' + package + x) + m = sys.modules[package + x] + d = m.__dict__ + if 'ClassAdapter' in d: + if isinstance(m.ClassAdapter, list): + for clz in m.ClassAdapter: + classAdapterRegistry[clz] = m + else: + clzName = m.ClassAdapter + classAdapterRegistry[clzName] = m + else: + raise LookupError('Adapter class ' + x + ' has no ClassAdapter field ' + + 'and cannot be registered.') + + +getAdapterRegistry() diff --git a/dynamicserialize/dstypes/__init__.py b/dynamicserialize/dstypes/__init__.py new file mode 100644 index 0000000..c865331 --- /dev/null +++ b/dynamicserialize/dstypes/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'com', + 'gov', + 'java' + ] diff --git a/dynamicserialize/dstypes/com/__init__.py b/dynamicserialize/dstypes/com/__init__.py new file mode 100644 index 0000000..c85988c --- /dev/null +++ b/dynamicserialize/dstypes/com/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'raytheon', + 'vividsolutions' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/__init__.py b/dynamicserialize/dstypes/com/raytheon/__init__.py new file mode 100644 index 0000000..e984a6a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'uf' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/__init__.py new file mode 100644 index 0000000..c03e3e7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'common' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py new file mode 100644 index 0000000..498a169 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/__init__.py @@ -0,0 +1,17 @@ + +__all__ = [ + 'alertviz', + 'auth', + 'dataaccess', + 'dataplugin', + 'dataquery', + 'datastorage', + 'localization', + 'management', + 'message', + 'pointdata', + 'pypies', + 'serialization', + 'site', + 'time' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py new file mode 100755 index 0000000..b03a9f9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/AlertVizRequest.py @@ -0,0 +1,63 @@ +# Jul 27, 2015 4654 skorolev Added filters + + +class AlertVizRequest(object): + + def __init__(self): + self.message = None + self.machine = None + self.priority = None + self.sourceKey = None + self.category = None + self.audioFile = None + self.filters = None + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message + + def getMachine(self): + return self.machine + + def setMachine(self, machine): + self.machine = machine + + def getPriority(self): + return self.priority + + def setPriority(self, priority): + self.priority = priority + + def getSourceKey(self): + return self.sourceKey + + def setSourceKey(self, sourceKey): + self.sourceKey = sourceKey + + def getCategory(self): + return self.category + + def setCategory(self, category): + self.category = category + + def getAudioFile(self): + return self.audioFile + + def setAudioFile(self, audioFile): + self.audioFile = audioFile + + def getFilters(self): + return self.filters + + def setFilters(self, filters): + if filters is None: + self.filters = {} + elif not(None in filters + or filters.values().count(None) > 0 + or '' in filters + or filters.values().count('') > 0): + 
self.filters = filters + else: + raise ValueError('Filters must not contain None or empty keys or values: %s' % filters) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/__init__.py new file mode 100644 index 0000000..68a7109 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/alertviz/__init__.py @@ -0,0 +1,7 @@ + +__all__ = [ + 'AlertVizRequest' + ] + +from .AlertVizRequest import AlertVizRequest + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/__init__.py new file mode 100644 index 0000000..a53d346 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'resp', + 'user' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AbstractFailedResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AbstractFailedResponse.py new file mode 100644 index 0000000..9bd4f11 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AbstractFailedResponse.py @@ -0,0 +1,14 @@ +from six import with_metaclass +import abc + + +class AbstractFailedResponse(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.request = None + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AuthServerErrorResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AuthServerErrorResponse.py new file mode 100644 index 0000000..f8dd0d4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/AuthServerErrorResponse.py @@ -0,0 +1,10 @@ +# nothing to implement here that isn't already covered by ServerErrorResponse +# Just need the separate class for de-serialization. 
+ +from dynamicserialize.dstypes.com.raytheon.uf.common.serialization.comm.response import ServerErrorResponse + + +class AuthServerErrorResponse(ServerErrorResponse): + + def __init__(self): + super(AuthServerErrorResponse, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/SuccessfulExecution.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/SuccessfulExecution.py new file mode 100644 index 0000000..a529036 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/SuccessfulExecution.py @@ -0,0 +1,19 @@ + + +class SuccessfulExecution(object): + + def __init__(self): + self.response = None + self.updatedData = None + + def getResponse(self): + return self.response + + def setResponse(self, response): + self.response = response + + def getUpdatedData(self): + return self.updatedData + + def setUpdatedData(self, updatedData): + self.updatedData = updatedData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/UserNotAuthorized.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/UserNotAuthorized.py new file mode 100644 index 0000000..43f60a6 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/UserNotAuthorized.py @@ -0,0 +1,14 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.auth.resp import AbstractFailedResponse + + +class UserNotAuthorized(AbstractFailedResponse): + + def __init__(self): + super(UserNotAuthorized, self).__init__() + self.message = None + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/__init__.py new file mode 100644 index 0000000..67fb814 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/resp/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'AbstractFailedResponse', + 'AuthServerErrorResponse', + 'SuccessfulExecution', + 'UserNotAuthorized' + ] + +from .AbstractFailedResponse import AbstractFailedResponse +from .AuthServerErrorResponse import AuthServerErrorResponse +from .SuccessfulExecution import SuccessfulExecution +from .UserNotAuthorized import UserNotAuthorized diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/User.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/User.py new file mode 100644 index 0000000..30515f6 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/User.py @@ -0,0 +1,23 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.auth.user import UserId + + +class User(object): + + def __init__(self, userId=None): + if userId is None: + self.userId = UserId.UserId() + else: + self.userId = userId + self.authenticationData = None + + def getUserId(self): + return self.userId + + def setUserId(self, userId): + self.userId = userId + + def getAuthenticationData(self): + return self.authenticationData + + def setAuthenticationData(self, authenticationData): + self.authenticationData = authenticationData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/UserId.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/UserId.py new file mode 100644 index 0000000..60668b0 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/UserId.py @@ -0,0 +1,24 @@ +import os +try: + import pwd + pwd_error = False +except ImportError: + pwd_error = True + + +class UserId(object): + + def __init__(self, 
userid=None): + if userid is None: + if not pwd_error: + self.id = pwd.getpwuid(os.getuid()).pw_name + else: + self.id = "GenericUsername" + else: + self.id = userid + + def getId(self): + return self.id + + def setId(self, userid): + self.id = userid diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/__init__.py new file mode 100644 index 0000000..b05112e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/auth/user/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'User', + 'UserId' + ] + +from .User import User +from .UserId import UserId diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/__init__.py new file mode 100644 index 0000000..9f04796 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'impl', + 'request', + 'response' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultDataRequest.py new file mode 100644 index 0000000..b7c7cbc --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultDataRequest.py @@ -0,0 +1,77 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 2023 dgilling Initial Creation. +# 12/15/16 6040 tgurney Override __str__ +# +# + +from awips.dataaccess import IDataRequest +from dynamicserialize.dstypes.com.vividsolutions.jts.geom import Envelope +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level + + +class DefaultDataRequest(IDataRequest): + + def __init__(self): + self.datatype = None + self.identifiers = {} + self.parameters = [] + self.levels = [] + self.locationNames = [] + self.envelope = None + + def setDatatype(self, datatype): + self.datatype = str(datatype) + + def addIdentifier(self, key, value): + self.identifiers[key] = value + + def removeIdentifier(self, key): + del self.identifiers[key] + + def setParameters(self, *params): + self.parameters = list(map(str, params)) + + def setLevels(self, *levels): + self.levels = list(map(self.__makeLevel, levels)) + + def __makeLevel(self, level): + if isinstance(level, Level): + return level + elif isinstance(level, str): + return Level(level) + else: + raise TypeError("Invalid object type specified for level.") + + def setEnvelope(self, env): + self.envelope = Envelope(env.envelope) + + def setLocationNames(self, *locationNames): + self.locationNames = list(map(str, locationNames)) + + def getDatatype(self): + return self.datatype + + def getIdentifiers(self): + return self.identifiers + + def getParameters(self): + return self.parameters + + def getLevels(self): + return self.levels + + def getEnvelope(self): + return self.envelope + + def getLocationNames(self): + return self.locationNames + + def __str__(self): + fmt = ('DefaultDataRequest(datatype={}, identifiers={}, parameters={}, ' + + 'levels={}, locationNames={}, envelope={})') + return fmt.format(self.datatype, self.identifiers, self.parameters, self.levels, + self.locationNames, self.envelope) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultNotificationFilter.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultNotificationFilter.py new file mode 
100644 index 0000000..4fe2d97 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/DefaultNotificationFilter.py @@ -0,0 +1,38 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/03/16 2416 rjpeter Initial Creation. +# 08/01/16 2416 tgurney Implement accept() +# +# + +from awips.dataaccess import INotificationFilter +import sys + +if sys.version_info.major == 2: + from itertools import izip + # shadowing built-in zip + zip = izip + + +class DefaultNotificationFilter(INotificationFilter): + + def __init__(self): + self.constraints = None + + def getConstraints(self): + return self.constraints + + def setConstraints(self, constraints): + self.constraints = constraints + + def accept(self, dataUri): + tokens = dataUri.split('/')[1:] + if len(self.constraints) != len(tokens): + return False + for constraint, token in zip(self.constraints, tokens): + if not constraint.evaluate(token): + return False + return True diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/__init__.py new file mode 100644 index 0000000..facaec9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/impl/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'DefaultDataRequest', + 'DefaultNotificationFilter' + ] + +from .DefaultDataRequest import DefaultDataRequest +from .DefaultNotificationFilter import DefaultNotificationFilter diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractDataAccessRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractDataAccessRequest.py new file mode 100644 index 0000000..172646a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractDataAccessRequest.py @@ -0,0 +1,23 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 #2023 dgilling Initial Creation. +# +# + +from six import with_metaclass +import abc + + +class AbstractDataAccessRequest(with_metaclass(abc.ABCMeta, object)): + def __init__(self): + self.requestParameters = None + + def getRequestParameters(self): + return self.requestParameters + + def setRequestParameters(self, requestParameters): + self.requestParameters = requestParameters + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractIdentifierRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractIdentifierRequest.py new file mode 100644 index 0000000..37660b9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/AbstractIdentifierRequest.py @@ -0,0 +1,25 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. 
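# Sketch of DefaultNotificationFilter.accept() above, using a stand-in
# constraint object (purely hypothetical; any object with an evaluate(token)
# method behaves the same way): the dataURI must split into exactly as many
# tokens as there are constraints, and every constraint must accept its token.
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultNotificationFilter

class AcceptAnything(object):        # hypothetical helper for illustration only
    def evaluate(self, token):
        return True

notifFilter = DefaultNotificationFilter()
notifFilter.setConstraints([AcceptAnything()] * 5)
# five tokens follow the leading slash in this made-up dataURI
assert notifFilter.accept('/obs/2021-06-08_15:41:53.0/METAR/null/KOMA')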
+# Jun 01, 2016 5587 tgurney Change self.datatype to +# self.request +# +# + +from six import with_metaclass +import abc + + +class AbstractIdentifierRequest(with_metaclass(abc.ABCMeta, object)): + def __init__(self): + self.request = None + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLevelsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLevelsRequest.py new file mode 100644 index 0000000..2e12410 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLevelsRequest.py @@ -0,0 +1,16 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetAvailableLevelsRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetAvailableLevelsRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLocationNamesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLocationNamesRequest.py new file mode 100644 index 0000000..5e48b55 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableLocationNamesRequest.py @@ -0,0 +1,16 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 #2023 dgilling Initial Creation. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetAvailableLocationNamesRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetAvailableLocationNamesRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableParametersRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableParametersRequest.py new file mode 100644 index 0000000..432998b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableParametersRequest.py @@ -0,0 +1,16 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetAvailableParametersRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetAvailableParametersRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableTimesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableTimesRequest.py new file mode 100644 index 0000000..707ebae --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetAvailableTimesRequest.py @@ -0,0 +1,24 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 #2023 dgilling Initial Creation. +# 03/03/14 #2673 bsteffen Add ability to query only ref times. 
+# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetAvailableTimesRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetAvailableTimesRequest, self).__init__() + self.refTimeOnly = False + + def getRefTimeOnly(self): + return self.refTimeOnly + + def setRefTimeOnly(self, refTimeOnly): + self.refTimeOnly = refTimeOnly diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGeometryDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGeometryDataRequest.py new file mode 100644 index 0000000..68a2ca7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGeometryDataRequest.py @@ -0,0 +1,30 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 #2023 dgilling Initial Creation. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetGeometryDataRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetGeometryDataRequest, self).__init__() + self.requestedTimes = None + self.requestedPeriod = None + + def getRequestedTimes(self): + return self.requestedTimes + + def setRequestedTimes(self, requestedTimes): + self.requestedTimes = requestedTimes + + def getRequestedPeriod(self): + return self.requestedPeriod + + def setRequestedPeriod(self, requestedPeriod): + self.requestedPeriod = requestedPeriod diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridDataRequest.py new file mode 100644 index 0000000..aae54f9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridDataRequest.py @@ -0,0 +1,38 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/28/13 #2023 dgilling Initial Creation. 
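# Sketch: the request dstypes here are plain containers. A GetAvailableTimesRequest,
# for example, wraps a DefaultDataRequest plus the refTimeOnly flag before being
# sent to EDEX; in practice awips.dataaccess.DataAccessLayer builds and sends
# these for you.
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.impl import DefaultDataRequest
from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import GetAvailableTimesRequest

dataReq = DefaultDataRequest()
dataReq.setDatatype('grid')
dataReq.setParameters('T')

timesReq = GetAvailableTimesRequest()
timesReq.setRequestParameters(dataReq)
timesReq.setRefTimeOnly(True)
assert timesReq.getRefTimeOnly() is True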
+# 05/28/13 #5916 bsteffen Add includeLatLonData +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetGridDataRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetGridDataRequest, self).__init__() + self.requestedTimes = None + self.requestedPeriod = None + self.includeLatLonData = True + + def getRequestedTimes(self): + return self.requestedTimes + + def setRequestedTimes(self, requestedTimes): + self.requestedTimes = requestedTimes + + def getRequestedPeriod(self): + return self.requestedPeriod + + def setRequestedPeriod(self, requestedPeriod): + self.requestedPeriod = requestedPeriod + + def getIncludeLatLonData(self): + return self.includeLatLonData + + def setIncludeLatLonData(self, includeLatLonData): + self.includeLatLonData = includeLatLonData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridLatLonRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridLatLonRequest.py new file mode 100644 index 0000000..00e3ab1 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetGridLatLonRequest.py @@ -0,0 +1,40 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 10, 2016 5916 bsteffen Generated + + +class GetGridLatLonRequest(object): + + def __init__(self): + self.envelope = None + self.crsWkt = None + self.nx = None + self.ny = None + + def getEnvelope(self): + return self.envelope + + def setEnvelope(self, envelope): + self.envelope = envelope + + def getCrsWkt(self): + return self.crsWkt + + def setCrsWkt(self, crsWkt): + self.crsWkt = crsWkt + + def getNx(self): + return self.nx + + def setNx(self, nx): + self.nx = nx + + def getNy(self): + return self.ny + + def setNy(self, ny): + self.ny = ny + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetIdentifierValuesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetIdentifierValuesRequest.py new file mode 100644 index 0000000..a4c2e3c --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetIdentifierValuesRequest.py @@ -0,0 +1,23 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 04/15/2016 5379 tgurney Initial creation +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetIdentifierValuesRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetIdentifierValuesRequest, self).__init__() + self.identifierKey = None + + def getIdentifierKey(self): + return self.identifierKey + + def setIdentifierKey(self, identifierKey): + self.identifierKey = identifierKey diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetNotificationFilterRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetNotificationFilterRequest.py new file mode 100644 index 0000000..e97460f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetNotificationFilterRequest.py @@ -0,0 +1,16 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/26/16 2416 rjpeter Initial Creation. 
+# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractDataAccessRequest + + +class GetNotificationFilterRequest(AbstractDataAccessRequest): + + def __init__(self): + super(GetNotificationFilterRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetOptionalIdentifiersRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetOptionalIdentifiersRequest.py new file mode 100644 index 0000000..b76a5fe --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetOptionalIdentifiersRequest.py @@ -0,0 +1,17 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. +# 07/30/14 #3185 njensen Renamed valid to optional +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractIdentifierRequest + + +class GetOptionalIdentifiersRequest(AbstractIdentifierRequest): + + def __init__(self): + super(GetOptionalIdentifiersRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetRequiredIdentifiersRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetRequiredIdentifiersRequest.py new file mode 100644 index 0000000..2203134 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetRequiredIdentifiersRequest.py @@ -0,0 +1,16 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.request import AbstractIdentifierRequest + + +class GetRequiredIdentifiersRequest(AbstractIdentifierRequest): + + def __init__(self): + super(GetRequiredIdentifiersRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetSupportedDatatypesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetSupportedDatatypesRequest.py new file mode 100644 index 0000000..7a65257 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/GetSupportedDatatypesRequest.py @@ -0,0 +1,14 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/23/14 #3185 njensen Initial Creation. 
+# +# + + +class GetSupportedDatatypesRequest(object): + + def __init__(self): + pass diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/__init__.py new file mode 100644 index 0000000..a864cd5 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/request/__init__.py @@ -0,0 +1,33 @@ + +__all__ = [ + 'AbstractDataAccessRequest', + 'AbstractIdentifierRequest', + 'GetAvailableLevelsRequest', + 'GetAvailableLocationNamesRequest', + 'GetAvailableParametersRequest', + 'GetAvailableTimesRequest', + 'GetGeometryDataRequest', + 'GetGridDataRequest', + 'GetGridLatLonRequest', + 'GetIdentifierValuesRequest', + 'GetNotificationFilterRequest', + 'GetOptionalIdentifiersRequest', + 'GetRequiredIdentifiersRequest', + 'GetSupportedDatatypesRequest' + ] + +from .AbstractDataAccessRequest import AbstractDataAccessRequest +from .AbstractIdentifierRequest import AbstractIdentifierRequest +from .GetAvailableLevelsRequest import GetAvailableLevelsRequest +from .GetAvailableLocationNamesRequest import GetAvailableLocationNamesRequest +from .GetAvailableParametersRequest import GetAvailableParametersRequest +from .GetAvailableTimesRequest import GetAvailableTimesRequest +from .GetGeometryDataRequest import GetGeometryDataRequest +from .GetGridDataRequest import GetGridDataRequest +from .GetGridLatLonRequest import GetGridLatLonRequest +from .GetIdentifierValuesRequest import GetIdentifierValuesRequest +from .GetNotificationFilterRequest import GetNotificationFilterRequest +from .GetOptionalIdentifiersRequest import GetOptionalIdentifiersRequest +from .GetRequiredIdentifiersRequest import GetRequiredIdentifiersRequest +from .GetSupportedDatatypesRequest import GetSupportedDatatypesRequest + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/AbstractResponseData.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/AbstractResponseData.py new file mode 100644 index 0000000..7fe05a7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/AbstractResponseData.py @@ -0,0 +1,52 @@ +import abc +import six + + +class AbstractResponseData(six.with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.time = None + self.level = None + self.locationName = None + self.attributes = None + + def getTime(self): + return self.time + + def setTime(self, time): + self.time = time + + def getLevel(self): + return self.level + + def setLevel(self, level): + self.level = level + + def getLocationName(self): + if six.PY2: + return self.locationName + if self.locationName is not None: + return self.locationName.decode('utf-8') + return self.locationName + + def setLocationName(self, locationName): + self.locationName = locationName + + def getAttributes(self): + if six.PY2: + return self.attributes + return self.convert(self.attributes) + + def setAttributes(self, attributes): + self.attributes = attributes + + def convert(self, data): + if isinstance(data, dict): + return dict(map(self.convert, data.items())) + if isinstance(data, bytes): + return data.decode('utf-8') + if isinstance(data, tuple): + return tuple(map(self.convert, data)) + if isinstance(data, list): + return list(map(self.convert, data)) + return data diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GeometryResponseData.py 
b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GeometryResponseData.py new file mode 100644 index 0000000..7f93dfe --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GeometryResponseData.py @@ -0,0 +1,32 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/04/13 #2023 dgilling Initial Creation. +# 01/06/14 #2537 bsteffen Store geometry index instead of WKT. +# 06/30/15 #4569 nabowle Rename *WKT* to *WKB*. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import AbstractResponseData + + +class GeometryResponseData(AbstractResponseData): + + def __init__(self): + super(GeometryResponseData, self).__init__() + self.dataMap = None + self.geometryWKBindex = None + + def getDataMap(self): + return self.dataMap + + def setDataMap(self, dataMap): + self.dataMap = dataMap + + def getGeometryWKBindex(self): + return self.geometryWKBindex + + def setGeometryWKBindex(self, geometryWKBindex): + self.geometryWKBindex = geometryWKBindex diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py new file mode 100644 index 0000000..e76c656 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGeometryDataResponse.py @@ -0,0 +1,19 @@ + + +class GetGeometryDataResponse(object): + + def __init__(self): + self.geometryWKBs = None + self.geoData = None + + def getGeometryWKBs(self): + return self.geometryWKBs + + def setGeometryWKBs(self, geometryWKBs): + self.geometryWKBs = geometryWKBs + + def getGeoData(self): + return self.geoData + + def setGeoData(self, geoData): + self.geoData = geoData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridDataResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridDataResponse.py new file mode 100644 index 0000000..c52ce5e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridDataResponse.py @@ -0,0 +1,54 @@ + + +class GetGridDataResponse(object): + + def __init__(self): + self.gridData = None + self.siteNxValues = None + self.siteNyValues = None + self.siteLatGrids = None + self.siteLonGrids = None + self.siteEnvelopes = None + self.siteCrsWkt = None + + def getGridData(self): + return self.gridData + + def setGridData(self, gridData): + self.gridData = gridData + + def getSiteNxValues(self): + return self.siteNxValues + + def setSiteNxValues(self, siteNxValues): + self.siteNxValues = siteNxValues + + def getSiteNyValues(self): + return self.siteNyValues + + def setSiteNyValues(self, siteNyValues): + self.siteNyValues = siteNyValues + + def getSiteLatGrids(self): + return self.siteLatGrids + + def setSiteLatGrids(self, siteLatGrids): + self.siteLatGrids = siteLatGrids + + def getSiteLonGrids(self): + return self.siteLonGrids + + def setSiteLonGrids(self, siteLonGrids): + self.siteLonGrids = siteLonGrids + + def getSiteEnvelopes(self): + return self.siteEnvelopes + + def setSiteEnvelopes(self, siteEnvelopes): + self.siteEnvelopes = siteEnvelopes + + def getSiteCrsWkt(self): + return self.siteCrsWkt + + def setSiteCrsWkt(self, siteCrsWkt): + self.siteCrsWkt = siteCrsWkt diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridLatLonResponse.py 
b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridLatLonResponse.py new file mode 100644 index 0000000..9721f6a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetGridLatLonResponse.py @@ -0,0 +1,39 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 10, 2016 5916 bsteffen Generated + + +class GetGridLatLonResponse(object): + + def __init__(self): + self.lats = None + self.lons = None + self.nx = None + self.ny = None + + def getLats(self): + return self.lats + + def setLats(self, lats): + self.lats = lats + + def getLons(self): + return self.lons + + def setLons(self, lons): + self.lons = lons + + def getNx(self): + return self.nx + + def setNx(self, nx): + self.nx = nx + + def getNy(self): + return self.ny + + def setNy(self, ny): + self.ny = ny diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetNotificationFilterResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetNotificationFilterResponse.py new file mode 100644 index 0000000..fb0c571 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GetNotificationFilterResponse.py @@ -0,0 +1,19 @@ + + +class GetNotificationFilterResponse(object): + + def __init__(self): + self.notificationFilter = None + self.jmsConnectionInfo = None + + def getNotificationFilter(self): + return self.notificationFilter + + def setNotificationFilter(self, notificationFilter): + self.notificationFilter = notificationFilter + + def getJmsConnectionInfo(self): + return self.jmsConnectionInfo + + def setJmsConnectionInfo(self, jmsConnectionInfo): + self.jmsConnectionInfo = jmsConnectionInfo diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GridResponseData.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GridResponseData.py new file mode 100644 index 0000000..2c77bf1 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/GridResponseData.py @@ -0,0 +1,46 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/04/13 #2023 dgilling Initial Creation. 
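+#
+# Usage sketch (illustrative only; the parameter and unit values below are
+# made up). Under Python 3 the getParameter() and getUnit() accessors decode
+# the byte strings produced by deserialization, so callers receive str values:
+#
+#     data = GridResponseData()
+#     data.setParameter(b"T")
+#     data.setUnit(b"K")
+#     data.getParameter()   # "T"
+#     data.getUnit()        # "K"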
+# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataaccess.response import AbstractResponseData +import six + + +class GridResponseData(AbstractResponseData): + + def __init__(self): + super(GridResponseData, self).__init__() + self.parameter = None + self.unit = None + self.gridData = None + + def getParameter(self): + if six.PY2: + return self.parameter + if self.parameter is not None: + return self.parameter.decode('utf-8') + return self.parameter + + def setParameter(self, parameter): + self.parameter = parameter + + def getUnit(self): + if six.PY2: + return self.unit + if self.unit is not None: + return self.unit.decode('utf-8') + return self.unit + + def setUnit(self, unit): + self.unit = unit + + def getGridData(self): + return self.gridData + + def setGridData(self, gridData): + self.gridData = gridData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/__init__.py new file mode 100644 index 0000000..c9fc728 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataaccess/response/__init__.py @@ -0,0 +1,19 @@ + +__all__ = [ + 'AbstractResponseData', + 'GeometryResponseData', + 'GetGeometryDataResponse', + 'GetGridDataResponse', + 'GetGridLatLonResponse', + 'GetNotificationFilterResponse', + 'GridResponseData' + ] + +from .AbstractResponseData import AbstractResponseData +from .GeometryResponseData import GeometryResponseData +from .GetGeometryDataResponse import GetGeometryDataResponse +from .GetGridDataResponse import GetGridDataResponse +from .GetGridLatLonResponse import GetGridLatLonResponse +from .GetNotificationFilterResponse import GetNotificationFilterResponse +from .GridResponseData import GridResponseData + diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py new file mode 100644 index 0000000..d14b058 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/__init__.py @@ -0,0 +1,10 @@ + +__all__ = [ + 'events', + 'gfe', + 'grid', + 'level', + 'message', + 'radar', + 'text' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py new file mode 100644 index 0000000..da304bb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'hazards' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py new file mode 100644 index 0000000..ccf9030 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'requests' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py new file mode 100644 index 0000000..ff0479b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/RegionLookupRequest.py @@ -0,0 +1,25 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 08, 2014 reblum Generated + + +class 
RegionLookupRequest(object): + + def __init__(self): + self.region = None + self.site = None + + def getRegion(self): + return self.region + + def setRegion(self, region): + self.region = region + + def getSite(self): + return self.site + + def setSite(self, site): + self.site = site diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py new file mode 100644 index 0000000..701b487 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/events/hazards/requests/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'RegionLookupRequest' + ] + +from .RegionLookupRequest import RegionLookupRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/GridDataHistory.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/GridDataHistory.py new file mode 100644 index 0000000..a7f7e17 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/GridDataHistory.py @@ -0,0 +1,75 @@ + + +class GridDataHistory(object): + + def __init__(self): + self.origin = None + self.originParm = None + self.originTimeRange = None + self.timeModified = None + self.whoModified = None + self.updateTime = None + self.publishTime = None + self.lastSentTime = None + + def __str__(self): + return self.__repr__() + + def __repr__(self): + retVal = "Origin: " + self.origin + '\n' + retVal += "Origin Parm: " + str(self.originParm) + '\n' + retVal += "Origin Time Range: " + str(self.originTimeRange) +\ + " Time Modified: " + str(self.timeModified) +\ + " Who Modified: " + str(self.whoModified) + '\n' + retVal += "Update Time: " + str(self.updateTime) + '\n' + retVal += "Publish Time: " + str(self.publishTime) + '\n' + retVal += "Last Sent Time: " + str(self.lastSentTime) + '\n' + return retVal + + def getOrigin(self): + return self.origin + + def setOrigin(self, origin): + self.origin = origin + + def getOriginParm(self): + return self.originParm + + def setOriginParm(self, originParm): + self.originParm = originParm + + def getOriginTimeRange(self): + return self.originTimeRange + + def setOriginTimeRange(self, originTimeRange): + self.originTimeRange = originTimeRange + + def getTimeModified(self): + return self.timeModified + + def setTimeModified(self, timeModified): + self.timeModified = timeModified + + def getWhoModified(self): + return self.whoModified + + def setWhoModified(self, whoModified): + self.whoModified = whoModified + + def getUpdateTime(self): + return self.updateTime + + def setUpdateTime(self, updateTime): + self.updateTime = updateTime + + def getPublishTime(self): + return self.publishTime + + def setPublishTime(self, publishTime): + self.publishTime = publishTime + + def getLastSentTime(self): + return self.lastSentTime + + def setLastSentTime(self, lastSentTime): + self.lastSentTime = lastSentTime diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/__init__.py new file mode 100644 index 0000000..f3684be --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/__init__.py @@ -0,0 +1,22 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/22/2015 4573 randerso Added svcbu package +# 10/06/2015 mjames@ucar Removed svcbu package +# + +__all__ = [ + 'config', + 'db', + 
'discrete', + 'grid', + 'request', + 'server', + 'slice', + 'weather', + 'GridDataHistory' + ] + +from .GridDataHistory import GridDataHistory diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/ProjectionData.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/ProjectionData.py new file mode 100644 index 0000000..ddd87e9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/ProjectionData.py @@ -0,0 +1,95 @@ + + +class ProjectionData(object): + + def __init__(self): + self.projectionID = None + self.projectionType = None + self.latLonLL = None + self.latLonUR = None + self.latLonOrigin = None + self.stdParallelOne = None + self.stdParallelTwo = None + self.gridPointLL = None + self.gridPointUR = None + self.latIntersect = None + self.lonCenter = None + self.lonOrigin = None + + def getProjectionID(self): + return self.projectionID + + def setProjectionID(self, projectionID): + self.projectionID = projectionID + + def getProjectionType(self): + return self.projectionType + + def setProjectionType(self, projectionType): + self.projectionType = projectionType + + def getLatLonLL(self): + return self.latLonLL + + def setLatLonLL(self, latLonLL): + self.latLonLL = latLonLL + + def getLatLonUR(self): + return self.latLonUR + + def setLatLonUR(self, latLonUR): + self.latLonUR = latLonUR + + def getLatLonOrigin(self): + return self.latLonOrigin + + def setLatLonOrigin(self, latLonOrigin): + self.latLonOrigin = latLonOrigin + + def getStdParallelOne(self): + return self.stdParallelOne + + def setStdParallelOne(self, stdParallelOne): + self.stdParallelOne = stdParallelOne + + def getStdParallelTwo(self): + return self.stdParallelTwo + + def setStdParallelTwo(self, stdParallelTwo): + self.stdParallelTwo = stdParallelTwo + + def getGridPointLL(self): + return self.gridPointLL + + def setGridPointLL(self, gridPointLL): + self.gridPointLL = gridPointLL + + def getGridPointUR(self): + return self.gridPointUR + + def setGridPointUR(self, gridPointUR): + self.gridPointUR = gridPointUR + + def getLatIntersect(self): + return self.latIntersect + + def setLatIntersect(self, latIntersect): + self.latIntersect = latIntersect + + def getLonCenter(self): + return self.lonCenter + + def setLonCenter(self, lonCenter): + self.lonCenter = lonCenter + + def getLonOrigin(self): + return self.lonOrigin + + def setLonOrigin(self, lonOrigin): + self.lonOrigin = lonOrigin + + def keys(self): + return ['projectionID', 'projectionType', 'latLonLL', 'latLonUR', + 'latLonOrigin', 'stdParallelOne', 'stdParallelTwo', + 'gridPointLL', 'gridPointUR', 'latIntersect', 'lonCenter', + 'lonOrigin'] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/__init__.py new file mode 100644 index 0000000..0a311c4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/config/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'ProjectionData' + ] + +from .ProjectionData import ProjectionData diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/__init__.py new file mode 100644 index 0000000..d6332d8 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'objects' + ] diff --git 
a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.py new file mode 100644 index 0000000..6ea638e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/DatabaseID.py @@ -0,0 +1,194 @@ +# Modified by njensen to add __repr__ + +import time + + +class DatabaseID(object): + + def __init__(self, dbIdentifier=None): + self.siteId = None + self.format = "NONE" + self.dbType = None + self.modelName = None + self.modelTime = None + self.modelId = None + self.shortModelId = None + if dbIdentifier is not None: + if self.__decodeIdentifier(dbIdentifier): + self.__encodeIdentifier() + else: + self.format = "NONE" + self.dbType = "" + self.siteId = "" + self.modelName = "" + self.modelTime = "00000000_0000" + self.modelId = "" + self.shortModelId = "" + + def isValid(self): + return self.format != "NONE" + + def getSiteId(self): + return self.siteId + + def setSiteId(self, siteId): + self.siteId = siteId + + def getFormat(self): + return self.format + + def setFormat(self, dbformat): + self.format = dbformat + + def getDbType(self): + return self.dbType + + def setDbType(self, dbType): + self.dbType = dbType + + def getModelName(self): + return self.modelName + + def setModelName(self, modelName): + self.modelName = modelName + + def getModelTime(self): + return self.modelTime + + def setModelTime(self, modelTime): + self.modelTime = modelTime + + def getModelId(self): + return self.modelId + + def setModelId(self, modelId): + self.modelId = modelId + + def getShortModelId(self): + return self.shortModelId + + def setShortModelId(self, shortModelId): + self.shortModelId = shortModelId + + def __encodeIdentifier(self): + if self.dbType is not None: + self.modelId = self.siteId + "_" + self.format + "_" \ + + self.dbType + "_" + self.modelName + else: + self.modelId = self.siteId + "_" + self.format + "__" \ + + self.modelName + + self.shortModelId = self.modelName + if self.dbType is not None and self.dbType != "": + self.shortModelId += "_" + self.dbType + + if self.modelTime != "00000000_0000": + self.modelId += "_" + self.modelTime + self.shortModelId += "_" + self.modelTime[6:8] + self.modelTime[9:11] + else: + self.modelId += "_" + "00000000_0000" + + self.shortModelId += " (" + self.siteId + ")" + + def __decodeIdentifier(self, dbIdentifier): + self.format = "NONE" + self.dbType = "" + self.siteId = "" + self.modelName = "" + self.modelTime = "00000000_0000" + + # parse into '_' separated strings + strings = dbIdentifier.split("_") + if len(strings) != 6: + return False + + # store the data + if strings[1] == "GRID": + self.format = "GRID" + else: + return False + + self.siteId = strings[0] + self.dbType = strings[2] + self.modelName = strings[3] + + # date-time group + if len(strings[4]) != 8 or len(strings[5]) != 4: + return False + + # make sure the digits are there + dtg = strings[4] + '_' + strings[5] # back together + if dtg != "00000000_0000": + if not self.__decodeDtg(dtg): + return False + + return True + + @staticmethod + def decodeDtg(dtgString): + dateStruct = time.gmtime(0) + try: + dateStruct = time.strptime(dtgString, "%Y%m%d_%H%M") + except ValueError: + return False, dateStruct + return True, dateStruct + + def __decodeDtg(self, dtgString): + try: + time.strptime(dtgString, "%Y%m%d_%H%M") + self.modelTime = dtgString + except ValueError: + return False + return True + + def getModelTimeAsDate(self): + if self.modelTime 
== "00000000_0000": + return time.gmtime(0) + else: + return time.strptime(self.modelTime, "%Y%m%d_%H%M") + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return self.modelId + + def __hash__(self): + prime = 31 + result = 1 + result = prime * result + (0 if self.dbType is None else hash(self.dbType)) + result = prime * result + (0 if self.format is None else hash(self.format)) + result = prime * result + (0 if self.modelId is None else hash(self.modelId)) + result = prime * result + (0 if self.modelTime is None else hash(self.modelTime)) + result = prime * result + (0 if self.siteId is None else hash(self.siteId)) + return result + + def __cmp__(self, other): + if not isinstance(other, DatabaseID): + siteComp = cmp(self.siteId, other.siteId) + if siteComp != 0: + return siteComp + + formatComp = cmp(self.format, other.format) + if formatComp != 0: + return formatComp + + typeComp = cmp(self.dbType, other.dbType) + if typeComp != 0: + return typeComp + + nameComp = cmp(self.modelName, other.modelName) + if nameComp != 0: + return nameComp + + return -cmp(self.getModelTimeAsDate(), other.getModelTimeAsDate()) + else: + return NotImplemented + + def __eq__(self, other): + if not isinstance(other, DatabaseID): + return False + return str(self) == str(other) + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GFERecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GFERecord.py new file mode 100644 index 0000000..46105ca --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GFERecord.py @@ -0,0 +1,92 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID +from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime +from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange + + +class GFERecord(object): + + def __init__(self, parmId=None, timeRange=None): + self.gridHistory = [] + self.dataURI = None + self.pluginName = "gfe" + self.insertTime = None + self.messageData = None + self.identifier = None + self.dataTime = None + self.parmId = None + if timeRange is not None: + if isinstance(timeRange, TimeRange): + self.dataTime = DataTime(refTime=timeRange.getStart(), validPeriod=timeRange) + else: + raise TypeError("Invalid TimeRange object specified.") + if parmId is not None: + if isinstance(parmId, ParmID.ParmID): + self.parmId = parmId + self.parmName = parmId.getParmName() + self.parmLevel = parmId.getParmLevel() + self.dbId = parmId.getDbId() + else: + raise TypeError("Invalid ParmID object specified. 
Type:" + str(type(parmId))) + + def getParmName(self): + return self.parmName + + def setParmName(self, parmName): + self.parmName = parmName + + def getParmLevel(self): + return self.parmLevel + + def setParmLevel(self, parmLevel): + self.parmLevel = parmLevel + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getDbId(self): + return self.dbId + + def setDbId(self, dbId): + self.dbId = dbId + + def getGridHistory(self): + return self.gridHistory + + def setGridHistory(self, gridHistory): + self.gridHistory = gridHistory + + def getDataURI(self): + return self.dataURI + + def setDataURI(self, dataURI): + self.dataURI = dataURI + + def getPluginName(self): + return "gfe" + + def getDataTime(self): + return self.dataTime + + def setDataTime(self, dataTime): + self.dataTime = dataTime + + def getInsertTime(self): + return self.insertTime + + def setInsertTime(self, insertTime): + self.insertTime = insertTime + + def getMessageData(self): + return self.messageData + + def setMessageData(self, messageData): + self.messageData = messageData + + def getIdentifier(self): + return self.identifier + + def setIdentifier(self, identifier): + self.identifier = identifier diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.py new file mode 100644 index 0000000..ca4fecf --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridLocation.py @@ -0,0 +1,114 @@ + + +class GridLocation(object): + + def __init__(self): + self.siteId = None + self.nx = None + self.ny = None + self.timeZone = None + self.projection = None + self.origin = None + self.extent = None + self.geometry = None + self.crsWKT = None + self.identifier = None + + def __str__(self): + return self.__repr__() + + def __repr__(self): + # TODO: Handle geometry in dynamicserialize + # ,loc=" + this.geometry.getGeometryType() + s = "[SiteID =" + self.siteId + ",ProjID=" + self.projection.getProjectionID() +\ + ",gridSize=(" + str(self.nx) + ',' + str(self.ny) + ")]" + return s + + def __eq__(self, other): + if not isinstance(other, GridLocation): + return False + if self.siteId != other.siteId: + return False + if self.crsWKT != other.crsWKT: + return False + # FIXME: Geometry/Polygon objects don't really work in dynamicserialize + # commenting out this check unless it causes problems +# if self.geometry != other.geometry: +# return False + if self.nx != other.nx: + return False + if self.ny != other.ny: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def getSiteId(self): + return self.siteId + + def setSiteId(self, siteId): + self.siteId = siteId + + def getNx(self): + return self.nx + + def setNx(self, nx): + self.nx = nx + + def getNy(self): + return self.ny + + def setNy(self, ny): + self.ny = ny + + def getTimeZone(self): + return self.timeZone + + def setTimeZone(self, timeZone): + self.timeZone = timeZone + + def getProjection(self): + return self.projection + + def setProjection(self, projection): + self.projection = projection + + def getOrigin(self): + return self.origin + + def setOrigin(self, origin): + self.origin = origin + + def getExtent(self): + return self.extent + + def setExtent(self, extent): + self.extent = extent + + def getGeometry(self): + return self.geometry + + def setGeometry(self, geometry): + self.geometry = geometry + + def 
getCrsWKT(self): + return self.crsWKT + + def setCrsWKT(self, crsWKT): + self.crsWKT = crsWKT + + def getIdentifier(self): + return self.identifier + + def setIdentifier(self, identifier): + self.identifier = identifier + + def isValid(self): + if self.projection is None: + return False + if self.nx < 2 or self.ny < 2: + return False + if self.origin is None or self.extent is None: + return False + return True diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.py new file mode 100644 index 0000000..d78e9b2 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/GridParmInfo.py @@ -0,0 +1,183 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import TimeConstraints + + +class GridParmInfo(object): + + def __init__(self, parmid=None, gridLoc=None, gridType="NONE", unit=None, + descriptiveName="", minValue=0.0, maxValue=0.0, precision=0, + timeIndependentParm=False, timeConstraints=None, rateParm=False): + self.parmID = parmid + self.gridLoc = gridLoc + self.gridType = gridType + self.descriptiveName = descriptiveName + self.unitString = unit + self.minValue = float(minValue) + self.maxValue = float(maxValue) + self.precision = int(precision) + self.rateParm = rateParm + self.timeConstraints = timeConstraints + self.timeIndependentParm = timeIndependentParm + +# (valid, errors) = self.__validCheck() +# if not valid: +# errorMessage = "GridParmInfo is invalid: " + str(errors) +# warnings.warn(errorMessage) +# self.__setDefaultValues() + + def __str__(self): + return self.__repr__() + + def __repr__(self): + if self.isValid(): + out = "ParmID: " + str(self.parmID) + \ + " TimeConstraints: " + str(self.timeConstraints) + \ + " GridLoc: " + str(self.gridLoc) + \ + " Units: " + self.unitString + \ + " Name: " + self.descriptiveName + \ + " Min/Max AllowedValues: " + str(self.minValue) + "," + \ + str(self.maxValue) + " Precision: " + str(self.precision) + \ + " TimeIndependent: " + str(self.timeIndependentParm) + \ + " RateParm: " + str(self.rateParm) + \ + " GridType: " + self.gridType + return out + return "" + + def __eq__(self, other): + if not isinstance(other, GridParmInfo): + return False + if self.descriptiveName != other.descriptiveName: + return False + if self.gridLoc != other.gridLoc: + return False + if self.gridType != other.gridType: + return False + if self.minValue != other.minValue: + return False + if self.maxValue != other.maxValue: + return False + if self.parmID != other.parmID: + return False + if self.precision != other.precision: + return False + if self.rateParm != other.rateParm: + return False + if self.timeConstraints != other.timeConstraints: + return False + if self.timeIndependentParm != other.timeIndependentParm: + return False + if self.unitString != other.unitString: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def __validCheck(self): + status = [] + + if not self.parmID.isValid(): + status.append("GridParmInfo.ParmID is not valid [" + str(self.parmID) + "]") + if not self.timeConstraints.isValid(): + status.append("GridParmInfo.TimeConstraints are not valid [" + + str(self.timeConstraints) + "]") + if not 
self.gridLoc.isValid(): + status.append("GridParmInfo.GridLocation is not valid") + if self.timeIndependentParm and self.timeConstraints.anyConstraints(): + status.append("GridParmInfo is invalid. There are time constraints" + + " for a time independent parm. Constraints: " + + str(self.timeConstraints)) + if not self.unitString: + status.append("GridParmInfo.Units are not defined.") + if self.precision < -2 or self.precision > 5: + status.append("GridParmInfo is invalid. Precision out of limits." + + " Precision is: " + str(self.precision) + ". Must be between -2 and 5.") + + retVal = True + if status: + retVal = False + return retVal, status + + def isValid(self): + (valid, errors) = self.__validCheck() + return valid + + def __setDefaultValues(self): + self.parmID = ParmID() + self.gridLoc = GridLocation() + self.gridType = "NONE" + self.descriptiveName = "" + self.unitString = "" + self.minValue = 0.0 + self.maxValue = 0.0 + self.precision = 0 + self.rateParm = False + self.timeConstraints = TimeConstraints() + self.timeIndependentParm = False + + def getParmID(self): + return self.parmID + + def setParmID(self, parmID): + self.parmID = parmID + + def getGridLoc(self): + return self.gridLoc + + def setGridLoc(self, gridLoc): + self.gridLoc = gridLoc + + def getGridType(self): + return self.gridType + + def setGridType(self, gridType): + self.gridType = gridType + + def getDescriptiveName(self): + return self.descriptiveName + + def setDescriptiveName(self, descriptiveName): + self.descriptiveName = descriptiveName + + def getUnitString(self): + return self.unitString + + def setUnitString(self, unitString): + self.unitString = unitString + + def getMinValue(self): + return self.minValue + + def setMinValue(self, minValue): + self.minValue = minValue + + def getMaxValue(self): + return self.maxValue + + def setMaxValue(self, maxValue): + self.maxValue = maxValue + + def getPrecision(self): + return self.precision + + def setPrecision(self, precision): + self.precision = precision + + def getRateParm(self): + return self.rateParm + + def setRateParm(self, rateParm): + self.rateParm = rateParm + + def getTimeConstraints(self): + return self.timeConstraints + + def setTimeConstraints(self, timeConstraints): + self.timeConstraints = timeConstraints + + def getTimeIndependentParm(self): + return self.timeIndependentParm + + def setTimeIndependentParm(self, timeIndependentParm): + self.timeIndependentParm = timeIndependentParm diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/ParmID.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/ParmID.py new file mode 100644 index 0000000..bfb25a5 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/ParmID.py @@ -0,0 +1,131 @@ +# Modified by njensen to add __repr__ + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID + + +class ParmID(object): + + def __init__(self, parmIdentifier=None, dbId=None, level=None): + self.parmName = None + self.parmLevel = None + self.dbId = None + self.compositeName = None + self.shortParmId = None + self.parmId = None + + if (parmIdentifier is not None) and (dbId is not None): + self.parmName = parmIdentifier + + if isinstance(dbId, DatabaseID): + self.dbId = dbId + elif isinstance(dbId, str): + self.dbId = DatabaseID(dbId) + else: + raise TypeError("Invalid database ID specified.") + + if level is None: + self.parmLevel = self.defaultLevel() + else: + self.parmLevel = 
level + + self.__encodeIdentifier() + + elif parmIdentifier is not None: + self.__decodeIdentifier(parmIdentifier) + self.__encodeIdentifier() + + def getParmName(self): + return self.parmName + + def getParmLevel(self): + return self.parmLevel + + def getDbId(self): + return self.dbId + + def getCompositeName(self): + return self.compositeName + + def getShortParmId(self): + return self.shortParmId + + def getParmId(self): + return self.parmId + + def __decodeIdentifier(self, parmIdentifier): + parts = parmIdentifier.split(":") + nameLevel = parts[0].split("_") + self.dbId = DatabaseID(parts[1]) + if len(nameLevel) == 2: + self.parmName = nameLevel[0] + self.parmLevel = nameLevel[1] + else: + self.parmName = nameLevel[0] + self.parmLevel = self.defaultLevel() + + def __encodeIdentifier(self): + self.compositeName = self.parmName + "_" + self.parmLevel + self.shortParmId = self.compositeName + ":" + self.dbId.getShortModelId() + self.parmId = self.compositeName + ":" + self.dbId.getModelId() + + def isValid(self): + if len(self.parmName) is None or len(self.parmLevel) is None or self.dbId is None: + return False + if len(self.parmName) < 1 or len(self.parmLevel) < 1 or not self.dbId.isValid(): + return False + + if not self.parmName.isalnum(): + return False + if not self.parmLevel.isalnum(): + return False + + return True + + @staticmethod + def defaultLevel(): + return "SFC" + + @staticmethod + def parmNameAndLevel(composite): + pos = composite.find('_') + if pos != -1: + return composite[:pos], composite[pos+1:] + else: + return composite, "SFC" + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return self.parmName + '_' + self.parmLevel + ":" + str(self.dbId) + + def __hash__(self): + return hash(self.parmId) + + def __cmp__(self, other): + if isinstance(other, ParmID): + nameComp = cmp(self.parmName, other.parmName) + if nameComp != 0: + return nameComp + + levelComp = cmp(self.parmLevel, other.parmLevel) + if levelComp != 0: + return levelComp + + return cmp(self.dbId, other.dbId) + else: + return NotImplemented + + def __eq__(self, other): + if not isinstance(other, ParmID): + return False + if self.dbId != other.dbId: + return False + if self.parmLevel != other.parmLevel: + return False + if self.parmName != other.parmName: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/TimeConstraints.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/TimeConstraints.py new file mode 100644 index 0000000..1a242cb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/TimeConstraints.py @@ -0,0 +1,89 @@ +# +# 03/20/2013 #1774 randerso Removed setters, added isValid. 
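+#
+# The duration, repeatInterval and startTime arguments are expressed in
+# seconds (see the HOUR and DAY constants below), with startTime taken as an
+# offset from 0000Z. As an illustrative sketch only, a one-hour grid slot
+# repeating every three hours from 0000Z would be described by
+# TimeConstraints(HOUR, 3 * HOUR, 0).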
+ + +import logging + +HOUR = 3600 +DAY = 24 * HOUR + + +class TimeConstraints(object): + + def __init__(self, duration=0, repeatInterval=0, startTime=0): + duration = int(duration) + repeatInterval = int(repeatInterval) + startTime = int(startTime) + + self.valid = False + if duration == 0 and repeatInterval == 0 and startTime == 0: + self.valid = True + else: + if self.isInvalidInterval(repeatInterval, duration, startTime): + logging.warning("Bad init values for TimeConstraints: " + + str(duration) + ", " + + str(repeatInterval) + ", " + + str(startTime)) + self.valid = False + duration = 0 + repeatInterval = 0 + startTime = 0 + else: + self.valid = True + + self.duration = duration + self.repeatInterval = repeatInterval + self.startTime = startTime + + def __str__(self): + return self.__repr__() + + def __repr__(self): + if not self.isValid(): + return "" + elif not self.anyConstraints(): + return "" + else: + return "[s=" + str(self.startTime / HOUR) + "h, i=" + \ + str(self.repeatInterval / HOUR) + "h, d=" + \ + str(self.duration / HOUR) + "h]" + + def __eq__(self, other): + if not isinstance(other, TimeConstraints): + return False + if self.isValid() != other.isValid(): + return False + if self.duration != other.duration: + return False + if self.repeatInterval != other.repeatInterval: + return False + return self.startTime == other.startTime + + def __ne__(self, other): + return not self.__eq__(other) + + def anyConstraints(self): + return self.duration != 0 + + def isValid(self): + return self.valid + + def getDuration(self): + return self.duration + + def getRepeatInterval(self): + return self.repeatInterval + + def getStartTime(self): + return self.startTime + + def isInvalidInterval(self, interval, duration, startTime): + if interval <= 0 or interval > DAY or interval < duration: + return False + if startTime < 0 or startTime > DAY: + return False + if duration < 0 or duration > DAY: + return False + if DAY % interval != 0: + return False + return True diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/__init__.py new file mode 100644 index 0000000..63376e7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/db/objects/__init__.py @@ -0,0 +1,16 @@ + +__all__ = [ + 'DatabaseID', + 'GFERecord', + 'GridLocation', + 'GridParmInfo', + 'ParmID', + 'TimeConstraints' + ] + +from .DatabaseID import DatabaseID +from .GFERecord import GFERecord +from .GridLocation import GridLocation +from .GridParmInfo import GridParmInfo +from .ParmID import ParmID +from .TimeConstraints import TimeConstraints diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/DiscreteKey.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/DiscreteKey.py new file mode 100644 index 0000000..c9ef91d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/DiscreteKey.py @@ -0,0 +1,85 @@ +# NOTE: Because the pure python dynamicserialize code does not +# have a means of accessing the DiscreteDefinition, this class +# is only really useful as a container for deserialized data +# from EDEX. I would not recommend trying to use it for anything +# else. 
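+#
+# A minimal usage sketch for a deserialized key (the site and subkey values
+# here are invented for illustration):
+#
+#     key = DiscreteKey()
+#     key.setSiteId("OAX")
+#     key.setSubKeys(["SmallCraft:advisory", "Gale"])
+#     str(key)                        # "SmallCraft:advisory^Gale"
+#     DiscreteKey.baseData(key[0])    # "SmallCraft"
+#     DiscreteKey.auxData(key[0])     # "advisory"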
+ +SUBKEY_SEPARATOR = '^' +AUXDATA_SEPARATOR = ':' + + +class DiscreteKey(object): + + def __init__(self): + self.siteId = None + self.subKeys = None + self.parmID = None + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return SUBKEY_SEPARATOR.join(self.subKeys) + + def __getitem__(self, key): + try: + index = int(key) + except TypeError: + raise TypeError("list indices must be integers, not " + str(type(key))) + if index < 0 or index > len(self.subKeys): + raise IndexError("index out of range") + return self.subKeys[index] + + def __hash__(self): + prime = 31 + result = 1 + result = prime * result + (0 if self.parmID is None else hash(self.parmID)) + result = prime * result + (0 if self.siteId is None else hash(self.siteId)) + result = prime * result + (0 if self.subKeys is None else hash(self.subKeys)) + return result + + def __eq__(self, other): + if not isinstance(other, DiscreteKey): + return False + if self.parmID != other.parmID: + return False + if self.siteId != other.siteId: + return False + return self.subKeys == other.subKeys + + def __ne__(self, other): + return not self.__eq__(other) + + @staticmethod + def auxData(subkey): + pos = subkey.find(AUXDATA_SEPARATOR) + if pos != -1: + return subkey[pos + 1:] + else: + return "" + + @staticmethod + def baseData(subkey): + pos = subkey.find(AUXDATA_SEPARATOR) + if pos != -1: + return subkey[:pos] + else: + return subkey + + def getSiteId(self): + return self.siteId + + def setSiteId(self, siteId): + self.siteId = siteId + + def getSubKeys(self): + return self.subKeys + + def setSubKeys(self, subKeys): + self.subKeys = subKeys + + def getParmID(self): + return self.parmID + + def setParmID(self, parmID): + self.parmID = parmID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/__init__.py new file mode 100644 index 0000000..8de379b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/discrete/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'DiscreteKey' + ] + +from .DiscreteKey import DiscreteKey diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DByte.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DByte.py new file mode 100644 index 0000000..3a15994 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DByte.py @@ -0,0 +1,30 @@ +import numpy + + +class Grid2DByte(object): + + def __init__(self): + self.buffer = None + self.xdim = None + self.ydim = None + + def getBuffer(self): + return self.buffer + + def setBuffer(self, bytebuffer): + self.buffer = bytebuffer + + def getXdim(self): + return self.xdim + + def setXdim(self, xdim): + self.xdim = xdim + + def getYdim(self): + return self.ydim + + def setYdim(self, ydim): + self.ydim = ydim + + def getNumPyGrid(self): + return numpy.resize(self.buffer, (self.xdim, self.ydim)) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DFloat.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DFloat.py new file mode 100644 index 0000000..60ab294 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/Grid2DFloat.py @@ -0,0 +1,30 @@ +import numpy + + +class Grid2DFloat(object): + + def __init__(self): + self.buffer = None + self.xdim = None + self.ydim = None + + def getBuffer(self): + return self.buffer + + def setBuffer(self, 
bytebuffer): + self.buffer = bytebuffer + + def getXdim(self): + return self.xdim + + def setXdim(self, xdim): + self.xdim = xdim + + def getYdim(self): + return self.ydim + + def setYdim(self, ydim): + self.ydim = ydim + + def getNumPyGrid(self): + return numpy.resize(self.buffer, (self.xdim, self.ydim)) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/__init__.py new file mode 100644 index 0000000..972b3d7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/grid/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'Grid2DByte', + 'Grid2DFloat' + ] + +from .Grid2DByte import Grid2DByte +from .Grid2DFloat import Grid2DFloat diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/AbstractGfeRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/AbstractGfeRequest.py new file mode 100644 index 0000000..9a9522c --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/AbstractGfeRequest.py @@ -0,0 +1,21 @@ +import abc +from six import with_metaclass + + +class AbstractGfeRequest(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.siteID = None + self.workstationID = None + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/CommitGridsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/CommitGridsRequest.py new file mode 100644 index 0000000..52d3095 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/CommitGridsRequest.py @@ -0,0 +1,26 @@ + + +class CommitGridsRequest(object): + + def __init__(self): + self.commits = None + self.workstationID = None + self.siteID = None + + def getCommits(self): + return self.commits + + def setCommits(self, commits): + self.commits = commits + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py new file mode 100644 index 0000000..989c63d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ConfigureTextProductsRequest.py @@ -0,0 +1,33 @@ + + +class ConfigureTextProductsRequest(object): + + def __init__(self): + self.mode = None + self.template = None + self.site = None + self.destinationDir = None + + def getMode(self): + return self.mode + + def setMode(self, mode): + self.mode = mode + + def getTemplate(self): + return self.template + + def setTemplate(self, template): + self.template = template + + def getSite(self): + return self.site + + def setSite(self, site): + self.site = site + + def getDestinationDir(self): + return self.destinationDir + + def setDestinationDir(self, destinationDir): + self.destinationDir = destinationDir diff --git 
a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExecuteIfpNetCDFGridRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExecuteIfpNetCDFGridRequest.py new file mode 100644 index 0000000..1227dfa --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExecuteIfpNetCDFGridRequest.py @@ -0,0 +1,157 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# xx/xx/?? dgilling Initial Creation. +# 03/13/13 1759 dgilling Add software history header. +# 05/13/15 4427 dgilling Add siteIdOverride field. +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.message import WsId + + +class ExecuteIfpNetCDFGridRequest(AbstractGfeRequest): + + def __init__(self, outputFilename=None, parmList=[], databaseID=None, + startTime=None, endTime=None, mask=None, geoInfo=False, + compressFile=False, configFileName=None, compressFileFactor=0, + trim=False, krunch=False, userID=None, logFileName=None, siteIdOverride=None): + super(ExecuteIfpNetCDFGridRequest, self).__init__() + self.outputFilename = outputFilename + self.parmList = parmList + self.databaseID = databaseID + self.startTime = startTime + self.endTime = endTime + self.mask = mask + self.geoInfo = geoInfo + self.compressFile = compressFile + self.configFileName = configFileName + self.compressFileFactor = compressFileFactor + self.trim = trim + self.krunch = krunch + self.userID = userID + self.logFileName = logFileName + self.siteIdOverride = siteIdOverride + if self.userID is not None: + self.workstationID = WsId(progName='ifpnetCDF', userName=self.userID) + if self.databaseID is not None: + self.siteID = self.databaseID.getSiteId() + + def __cstr__(self): + retVal = "workstationID: " + str(self.workstationID) + ", " + retVal += "siteID: " + str(self.siteID) + ", " + retVal += "outputFilename: " + str(self.outputFilename) + ", " + retVal += "parmList: " + str(self.parmList) + ", " + retVal += "databaseID: " + str(self.databaseID) + ", " + retVal += "startTime: " + str(self.startTime) + ", " + retVal += "endTime: " + str(self.endTime) + ", " + retVal += "mask: " + str(self.mask) + ", " + retVal += "geoInfo: " + str(self.geoInfo) + ", " + retVal += "compressFile: " + str(self.compressFile) + ", " + retVal += "configFileName: " + str(self.configFileName) + ", " + retVal += "compressFileFactor: " + str(self.compressFileFactor) + ", " + retVal += "trim: " + str(self.trim) + ", " + retVal += "krunch: " + str(self.krunch) + ", " + retVal += "userID: " + str(self.userID) + ", " + retVal += "logFileName: " + str(self.logFileName) + ", " + retVal += "siteIdOverride: " + str(self.siteIdOverride) + return retVal + + def __str__(self): + return "ExecuteIfpNetCDFGridRequest[" + self.__cstr__() + "]" + + def __repr__(self): + return "ExecuteIfpNetCDFGridRequest(" + self.__cstr__() + ")" + + def getOutputFilename(self): + return self.outputFilename + + def setOutputFilename(self, outputFilename): + self.outputFilename = outputFilename + + def getParmList(self): + return self.parmList + + def setParmList(self, parmList): + self.parmList = parmList + + def getDatabaseID(self): + return self.databaseID + + def setDatabaseID(self, databaseID): + self.databaseID = databaseID + + def getStartTime(self): + return self.startTime + + def setStartTime(self, startTime): + self.startTime = startTime + + 
def getEndTime(self): + return self.endTime + + def setEndTime(self, endTime): + self.endTime = endTime + + def getMask(self): + return self.mask + + def setMask(self, mask): + self.mask = mask + + def getGeoInfo(self): + return self.geoInfo + + def setGeoInfo(self, geoInfo): + self.geoInfo = geoInfo + + def getCompressFile(self): + return self.compressFile + + def setCompressFile(self, compressFile): + self.compressFile = compressFile + + def getConfigFileName(self): + return self.configFileName + + def setConfigFileName(self, configFileName): + self.configFileName = configFileName + + def getCompressFileFactor(self): + return self.compressFileFactor + + def setCompressFileFactor(self, compressFileFactor): + self.compressFileFactor = compressFileFactor + + def getTrim(self): + return self.trim + + def setTrim(self, trim): + self.trim = trim + + def getKrunch(self): + return self.krunch + + def setKrunch(self, krunch): + self.krunch = krunch + + def getUserID(self): + return self.userID + + def setUserID(self, userID): + self.userID = userID + + def getLogFileName(self): + return self.logFileName + + def setLogFileName(self, logFileName): + self.logFileName = logFileName + + def getSiteIdOverride(self): + return self.siteIdOverride + + def setSiteIdOverride(self, siteIdOverride): + self.siteIdOverride = siteIdOverride diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExportGridsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExportGridsRequest.py new file mode 100644 index 0000000..794c235 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ExportGridsRequest.py @@ -0,0 +1,56 @@ +# +# A pure python implementation of com.raytheon.uf.common.dataplugin.gfe.request.ExportGridsRequest +# for use by the python implementation of DynamicSerialize. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 04/05/13 dgilling Initial Creation. +# +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest + + +class ExportGridsRequest(AbstractGfeRequest): + + def __init__(self): + super(ExportGridsRequest, self).__init__() + self.site = None + self.mode = None + + def getSite(self): + return self.site + + def setSite(self, site): + self.site = site + + def getMode(self): + return self.mode + + def setMode(self, mode): + validValues = ['CRON', 'MANUAL', 'GRIB2'] + inputVal = str(mode).upper() + if inputVal in validValues: + self.mode = mode + else: + raise ValueError(inputVal + " invalid ExportGridsMode. 
Must be " + str(validValues)) + + def __str__(self): + retVal = "ExportGridsRequest[" + retVal += "wokstationID: " + str(self.workstationID) + ", " + retVal += "siteID: " + str(self.siteID) + ", " + retVal += "site: " + str(self.site) + ", " + retVal += "mode: " + str(self.mode) + "]" + return retVal + + def __repr__(self): + retVal = "ExportGridsRequest(" + retVal += "wokstationID=" + repr(self.workstationID) + ", " + retVal += "siteID=" + repr(self.siteID) + ", " + retVal += "site=" + repr(self.site) + ", " + retVal += "mode=" + repr(self.mode) + ")" + return retVal diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetASCIIGridsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetASCIIGridsRequest.py new file mode 100644 index 0000000..f8d2610 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetASCIIGridsRequest.py @@ -0,0 +1,47 @@ + + +class GetASCIIGridsRequest(object): + + def __init__(self): + self.databaseIds = None + self.parmIds = None + self.timeRange = None + self.coordConversionString = None + self.workstationID = None + self.siteID = None + + def getDatabaseIds(self): + return self.databaseIds + + def setDatabaseIds(self, databaseIds): + self.databaseIds = databaseIds + + def getParmIds(self): + return self.parmIds + + def setParmIds(self, parmIds): + self.parmIds = parmIds + + def getTimeRange(self): + return self.timeRange + + def setTimeRange(self, timeRange): + self.timeRange = timeRange + + def getCoordConversionString(self): + return self.coordConversionString + + def setCoordConversionString(self, coordConversionString): + self.coordConversionString = coordConversionString + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridDataRequest.py new file mode 100644 index 0000000..c9de340 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridDataRequest.py @@ -0,0 +1,38 @@ +import abc +from six import with_metaclass +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.request import GetGridRequest + + +class GetGridDataRequest(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.requests = [] + self.workstationID = None + self.siteID = None + + def addRequest(self, gridDataReq): + if not isinstance(gridDataReq, GetGridRequest): + raise TypeError("Invalid request specified: " + str(type(gridDataReq)) + + ". 
Only GetGridRequests are supported.") + else: + self.requests.append(gridDataReq) + + def getRequests(self): + return self.requests + + def setRequests(self, requests): + del self.requests[:] + for req in requests: + self.addRequest(req) + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridInventoryRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridInventoryRequest.py new file mode 100644 index 0000000..53a8c90 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetGridInventoryRequest.py @@ -0,0 +1,26 @@ + + +class GetGridInventoryRequest(object): + + def __init__(self): + self.parmIds = None + self.workstationID = None + self.siteID = None + + def getParmIds(self): + return self.parmIds + + def setParmIds(self, parmIds): + self.parmIds = parmIds + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestDbTimeRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestDbTimeRequest.py new file mode 100644 index 0000000..3bf2e5d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestDbTimeRequest.py @@ -0,0 +1,47 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/22/13 2025 dgilling Initial Creation. 
+# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID + + +class GetLatestDbTimeRequest(AbstractGfeRequest): + + def __init__(self, dbId=None): + super(GetLatestDbTimeRequest, self).__init__() + if dbId is not None and isinstance(dbId, DatabaseID): + self.dbId = dbId + self.siteID = dbId.getSiteId() + elif dbId is not None and not isinstance(dbId, DatabaseID): + raise TypeError( + "Attempt to construct GetLatestDbTimeRequest without providing a valid DatabaseID.") + + def __str__(self): + retVal = "GetLatestDbTimeRequest[" + retVal += "workstationID: " + str(self.workstationID) + ", " + retVal += "siteID: " + str(self.siteID) + ", " + retVal += "dbId: " + str(self.dbId) + "]" + return retVal + + def __repr__(self): + retVal = "GetLatestDbTimeRequest(" + retVal += "workstationID=" + repr(self.workstationID) + ", " + retVal += "siteID=" + repr(self.siteID) + ", " + retVal += "dbId=" + repr(self.dbId) + ")" + return retVal + + def getDbId(self): + return self.dbId + + def setDbId(self, dbId): + if isinstance(dbId, DatabaseID): + self.dbId = dbId + else: + raise TypeError( + "Attempt to call GetLatestDbTimeRequest.setDbId() without providing a valid DatabaseID.") diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestModelDbIdRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestModelDbIdRequest.py new file mode 100644 index 0000000..b15b91a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLatestModelDbIdRequest.py @@ -0,0 +1,40 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/22/13 2025 dgilling Initial Creation. 
+# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest + + +class GetLatestModelDbIdRequest(AbstractGfeRequest): + + def __init__(self, siteId=None, modelName=None): + super(GetLatestModelDbIdRequest, self).__init__() + if siteId is not None: + self.siteID = str(siteId) + if modelName is not None: + self.modelName = str(modelName) + + def __str__(self): + retVal = "GetLatestModelDbIdRequest[" + retVal += "workstationID: " + str(self.workstationID) + ", " + retVal += "siteID: " + str(self.siteID) + ", " + retVal += "modelName: " + str(self.modelName) + "]" + return retVal + + def __repr__(self): + retVal = "GetLatestModelDbIdRequest(" + retVal += "workstationID=" + repr(self.workstationID) + ", " + retVal += "siteID=" + repr(self.siteID) + ", " + retVal += "modelName=" + repr(self.modelName) + ")" + return retVal + + def getModelName(self): + return self.modelName + + def setModelName(self, modelName): + self.modelName = str(modelName) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLockTablesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLockTablesRequest.py new file mode 100644 index 0000000..c65c562 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetLockTablesRequest.py @@ -0,0 +1,26 @@ + + +class GetLockTablesRequest(object): + + def __init__(self): + self.requests = None + self.workstationID = None + self.siteID = None + + def getRequests(self): + return self.requests + + def setRequests(self, requests): + self.requests = requests + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetOfficialDbNameRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetOfficialDbNameRequest.py new file mode 100644 index 0000000..b4b77b7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetOfficialDbNameRequest.py @@ -0,0 +1,19 @@ + + +class GetOfficialDbNameRequest(object): + + def __init__(self): + self.workstationID = None + self.siteID = None + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetParmListRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetParmListRequest.py new file mode 100644 index 0000000..dc6c256 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetParmListRequest.py @@ -0,0 +1,26 @@ + + +class GetParmListRequest(object): + + def __init__(self): + self.dbIds = None + self.workstationID = None + self.siteID = None + + def getDbIds(self): + return self.dbIds + + def setDbIds(self, dbIds): + self.dbIds = dbIds + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git 
a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSelectTimeRangeRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSelectTimeRangeRequest.py new file mode 100644 index 0000000..06fa3fd --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSelectTimeRangeRequest.py @@ -0,0 +1,26 @@ + + +class GetSelectTimeRangeRequest(object): + + def __init__(self): + self.name = None + self.workstationID = None + self.siteID = None + + def getName(self): + return self.name + + def setName(self, name): + self.name = name + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSingletonDbIdsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSingletonDbIdsRequest.py new file mode 100644 index 0000000..02676f9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSingletonDbIdsRequest.py @@ -0,0 +1,19 @@ + + +class GetSingletonDbIdsRequest(object): + + def __init__(self): + self.workstationID = None + self.siteID = None + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSiteTimeZoneInfoRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSiteTimeZoneInfoRequest.py new file mode 100644 index 0000000..da8fa63 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GetSiteTimeZoneInfoRequest.py @@ -0,0 +1,23 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# xx/xx/?? dgilling Initial Creation. +# 12/02/15 5129 dgilling Refactor based on AbstractGfeRequest. 
+# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest + + +class GetSiteTimeZoneInfoRequest(AbstractGfeRequest): + + def __init__(self): + super(GetSiteTimeZoneInfoRequest, self).__init__() + self.requestedSiteIDs = None + + def getRequestedSiteIDs(self): + return self.requestedSiteIDs + + def setRequestedSiteIDs(self, requestedSiteIDs): + self.requestedSiteIDs = requestedSiteIDs diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GfeClientRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GfeClientRequest.py new file mode 100644 index 0000000..afbf643 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GfeClientRequest.py @@ -0,0 +1,62 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request.AbstractGfeRequest import AbstractGfeRequest + +# Manually updated +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Dec 06, 2016 6092 randerso Initial Creation + + +class GfeClientRequest(AbstractGfeRequest): + + def __init__(self, script, siteID, configFile, user, args=[]): + super(GfeClientRequest, self).__init__() + self.script = script + self.siteID = siteID + self.configFile = configFile + self.user = user + self.args = args + self.time = None + + def getConfigFile(self): + return self.configFile + + def setConfigFile(self, configFile): + self.configFile = configFile + + def getUser(self): + return self.user + + def setUser(self, user): + self.user = user + + def getArgs(self): + return self.args + + def setArgs(self, args): + self.args = args + + def getTime(self): + return self.time + + def setTime(self, time): + self.time = time + + def getScript(self): + return self.script + + def setScript(self, script): + self.script = script + + def __str__(self): + retval = "GfeClientRequest(" + retval += "siteID:" + self.siteID + ", " + retval += "script:" + self.script + ", " + retval += "configFile:" + self.configFile + ", " + retval += "user:" + self.user + ", " + if self.time: + retval += "time:" + str(self.time) + ", " + retval += "args:" + str(self.args) + ")" + return retval diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GridLocRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GridLocRequest.py new file mode 100644 index 0000000..d8f7b27 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/GridLocRequest.py @@ -0,0 +1,19 @@ + + +class GridLocRequest(object): + + def __init__(self): + self.workstationID = None + self.siteID = None + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/LockChangeRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/LockChangeRequest.py new file mode 100644 index 0000000..b319021 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/LockChangeRequest.py @@ -0,0 +1,26 @@ + + +class LockChangeRequest(object): + + def __init__(self): + self.requests = None + self.workstationID = None + self.siteID = None + + def getRequests(self): + return self.requests 
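# A small construction sketch for the GfeClientRequest defined above; the script
# path, site, config module name and user below are illustrative values only:
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import GfeClientRequest

request = GfeClientRequest("/awips2/GFESuite/bin/ifpIMAGE",   # hypothetical script path
                           "OAX",                             # site identifier
                           "gfeConfig",                       # GFE config module name
                           "SITE",                            # user
                           args=["-o", "/tmp/images"])        # optional extra arguments
print(request)   # __str__ reports siteID, script, configFile, user and args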
+ + def setRequests(self, requests): + self.requests = requests + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedConfRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedConfRequest.py new file mode 100644 index 0000000..d23aae7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedConfRequest.py @@ -0,0 +1,26 @@ + + +class ProcessReceivedConfRequest(object): + + def __init__(self): + self.receivedConfFile = None + self.workstationID = None + self.siteID = None + + def getReceivedConfFile(self): + return self.receivedConfFile + + def setReceivedConfFile(self, receivedConfFile): + self.receivedConfFile = receivedConfFile + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedDigitalDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedDigitalDataRequest.py new file mode 100644 index 0000000..d35aaeb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/ProcessReceivedDigitalDataRequest.py @@ -0,0 +1,26 @@ + + +class ProcessReceivedDigitalDataRequest(object): + + def __init__(self): + self.receivedConfFile = None + self.workstationID = None + self.siteID = None + + def getReceivedDataFile(self): + return self.receivedConfFile + + def setReceivedDataFile(self, receivedConfFile): + self.receivedConfFile = receivedConfFile + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/PurgeGfeGridsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/PurgeGfeGridsRequest.py new file mode 100644 index 0000000..5444b9d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/PurgeGfeGridsRequest.py @@ -0,0 +1,21 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest + + +class PurgeGfeGridsRequest(AbstractGfeRequest): + + def __init__(self): + super(PurgeGfeGridsRequest, self).__init__() + self.databaseID = None + + def __str__(self): + retVal = "PurgeGfeGridsRequest[" + retVal += "wokstationID: " + str(self.workstationID) + ", " + retVal += "siteID: " + str(self.siteID) + ", " + retVal += "databaseID: " + str(self.databaseID) + "]" + return retVal + + def getDatabaseID(self): + return self.databaseID + + def setDatabaseID(self, databaseID): + self.databaseID = databaseID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/RsyncGridsToCWFRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/RsyncGridsToCWFRequest.py new file mode 100644 index 0000000..883f4a5 --- /dev/null +++ 
b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/RsyncGridsToCWFRequest.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jul 15, 2015 #4013 randerso Initial creation (hand generated) +# +# + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import AbstractGfeRequest + + +class RsyncGridsToCWFRequest(AbstractGfeRequest): + + def __init__(self, siteId=None): + super(RsyncGridsToCWFRequest, self).__init__() + if siteId is not None: + self.siteID = str(siteId) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SaveASCIIGridsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SaveASCIIGridsRequest.py new file mode 100644 index 0000000..014dfcb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SaveASCIIGridsRequest.py @@ -0,0 +1,26 @@ + + +class SaveASCIIGridsRequest(object): + + def __init__(self): + self.asciiGridData = None + self.workstationID = None + self.siteID = None + + def getAsciiGridData(self): + return self.asciiGridData + + def setAsciiGridData(self, asciiGridData): + self.asciiGridData = asciiGridData + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SmartInitRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SmartInitRequest.py new file mode 100644 index 0000000..e98bf41 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/SmartInitRequest.py @@ -0,0 +1,40 @@ + + +class SmartInitRequest(object): + + def __init__(self): + self.moduleName = None + self.modelTime = None + self.calculateAll = None + self.workstationID = None + self.siteID = None + + def getModuleName(self): + return self.moduleName + + def setModuleName(self, moduleName): + self.moduleName = moduleName + + def getModelTime(self): + return self.modelTime + + def setModelTime(self, modelTime): + self.modelTime = modelTime + + def getCalculateAll(self): + return self.calculateAll + + def setCalculateAll(self, calculateAll): + self.calculateAll = calculateAll + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/__init__.py new file mode 100644 index 0000000..dbba035 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/request/__init__.py @@ -0,0 +1,61 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jul 15, 2015 #4013 randerso Added RsyncGridsToCWFRequest +# + +__all__ = [ + 'AbstractGfeRequest', + 'CommitGridsRequest', + 'ConfigureTextProductsRequest', + 'ExecuteIfpNetCDFGridRequest', + 'ExportGridsRequest', + 'GetASCIIGridsRequest', + 'GetGridDataRequest', + 'GetGridInventoryRequest', + 'GetLatestDbTimeRequest', + 'GetLatestModelDbIdRequest', + 
'GetLockTablesRequest', + 'GetOfficialDbNameRequest', + 'GetParmListRequest', + 'GetSelectTimeRangeRequest', + 'GetSingletonDbIdsRequest', + 'GetSiteTimeZoneInfoRequest', + 'GfeClientRequest', + 'GridLocRequest', + 'LockChangeRequest', + 'ProcessReceivedConfRequest', + 'ProcessReceivedDigitalDataRequest', + 'PurgeGfeGridsRequest', + 'RsyncGridsToCWFRequest', + 'SaveASCIIGridsRequest', + 'SmartInitRequest' + ] + +from .AbstractGfeRequest import AbstractGfeRequest +from .CommitGridsRequest import CommitGridsRequest +from .ConfigureTextProductsRequest import ConfigureTextProductsRequest +from .ExecuteIfpNetCDFGridRequest import ExecuteIfpNetCDFGridRequest +from .ExportGridsRequest import ExportGridsRequest +from .GetASCIIGridsRequest import GetASCIIGridsRequest +from .GetGridDataRequest import GetGridDataRequest +from .GetGridInventoryRequest import GetGridInventoryRequest +from .GetLatestDbTimeRequest import GetLatestDbTimeRequest +from .GetLatestModelDbIdRequest import GetLatestModelDbIdRequest +from .GetLockTablesRequest import GetLockTablesRequest +from .GetOfficialDbNameRequest import GetOfficialDbNameRequest +from .GetParmListRequest import GetParmListRequest +from .GetSelectTimeRangeRequest import GetSelectTimeRangeRequest +from .GetSingletonDbIdsRequest import GetSingletonDbIdsRequest +from .GetSiteTimeZoneInfoRequest import GetSiteTimeZoneInfoRequest +from .GfeClientRequest import GfeClientRequest +from .GridLocRequest import GridLocRequest +from .LockChangeRequest import LockChangeRequest +from .ProcessReceivedConfRequest import ProcessReceivedConfRequest +from .ProcessReceivedDigitalDataRequest import ProcessReceivedDigitalDataRequest +from .PurgeGfeGridsRequest import PurgeGfeGridsRequest +from .SaveASCIIGridsRequest import SaveASCIIGridsRequest +from .SmartInitRequest import SmartInitRequest +from .RsyncGridsToCWFRequest import RsyncGridsToCWFRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/__init__.py new file mode 100644 index 0000000..40db808 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/__init__.py @@ -0,0 +1,7 @@ + +__all__ = [ + 'lock', + 'message', + 'notify', + 'request' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/Lock.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/Lock.py new file mode 100644 index 0000000..0a8b7e0 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/Lock.py @@ -0,0 +1,52 @@ +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# xx/xx/xxxx xxxxxxx Initial Creation. +# xx/xx/xxxx xxxx njensen Implemented __repr__. +# 06/12/2013 2099 dgilling Make class immutable, +# add getTimeRange(). 
+# + +import time +from dynamicserialize.dstypes.com.raytheon.uf.common.time import TimeRange + + +class Lock(object): + + def __init__(self, parmId, wsId, startTime, endTime): + self.parmId = parmId + self.wsId = wsId + self.startTime = startTime + self.endTime = endTime + self.timeRange = None + + def getParmId(self): + return self.parmId + + def getWsId(self): + return self.wsId + + def getStartTime(self): + return self.startTime + + def getEndTime(self): + return self.endTime + + def getTimeRange(self): + if not self.timeRange: + start = self.startTime / 1000.0 + end = self.endTime / 1000.0 + self.timeRange = TimeRange(start, end) + return self.timeRange + + def __repr__(self): + t0 = time.gmtime(self.getStartTime() / 1000.0) + t1 = time.gmtime(self.getEndTime() / 1000.0) + timeformat = '%b %d %y %H:%M:%S %Z' + msg = 'TR: (' + time.strftime(timeformat, t0) \ + + ', ' + time.strftime(timeformat, t1) + msg += " WsId: " + str(self.wsId) + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/LockTable.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/LockTable.py new file mode 100644 index 0000000..6570bbc --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/LockTable.py @@ -0,0 +1,42 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# + +# Modified by njensen to add __repr__ + + +class LockTable(object): + + def __init__(self): + self.locks = None + self.wsId = None + self.parmId = None + + def getLocks(self): + return self.locks + + def setLocks(self, locks): + self.locks = locks + + def getWsId(self): + return self.wsId + + def setWsId(self, wsId): + self.wsId = wsId + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def __repr__(self): + msg = "ParmID: " + str(self.parmId) + msg += " LockTable WsId: " + str(self.wsId) + for i in self.locks: + msg += "\n Lock: " + str(i) + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/__init__.py new file mode 100644 index 0000000..c03beee --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/lock/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'Lock', + 'LockTable' + ] + +from .Lock import Lock +from .LockTable import LockTable diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerMsg.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerMsg.py new file mode 100644 index 0000000..900b724 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerMsg.py @@ -0,0 +1,12 @@ + + +class ServerMsg(object): + + def __init__(self): + self.message = None + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerResponse.py new file mode 100644 index 0000000..9be2b7e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/ServerResponse.py @@ -0,0 +1,45 @@ + + 
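# A brief sketch of the millisecond-to-TimeRange conversion done by the Lock class
# above (the parm and workstation identifiers are placeholders):
import time
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.server.lock import Lock

start_ms = int(time.time()) * 1000          # Lock stores epoch times in milliseconds
end_ms = start_ms + 3600 * 1000             # one hour later
lock = Lock("parmId-placeholder", "wsId-placeholder", start_ms, end_ms)
tr = lock.getTimeRange()                    # lazily built as TimeRange(start / 1000.0, end / 1000.0)
print(lock)                                 # __repr__ formats both ends with time.strftime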
+class ServerResponse(object): + + def __init__(self): + self.messages = None + self.payload = None + self.notifications = None + + def getMessages(self): + return self.messages + + def setMessages(self, messages): + self.messages = messages + + def getPayload(self): + return self.payload + + def setPayload(self, payload): + self.payload = payload + + def getNotifications(self): + return self.notifications + + def setNotifications(self, notifications): + self.notifications = notifications + + def isOkay(self): + return self.messages is None or len(self.messages) == 0 + + def message(self): + if self.isOkay(): + return "" + else: + compMessage = "" + for serverMsg in self.messages: + compMessage += serverMsg.getMessage() + "\n" + + return compMessage + + def __str__(self): + return self.message() + + def __bool__(self): + return self.isOkay() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/__init__.py new file mode 100644 index 0000000..7536226 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/message/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'ServerMsg', + 'ServerResponse' + ] + +from .ServerMsg import ServerMsg +from .ServerResponse import ServerResponse diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/CombinationsFileChangedNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/CombinationsFileChangedNotification.py new file mode 100644 index 0000000..b4c84ce --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/CombinationsFileChangedNotification.py @@ -0,0 +1,34 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/22/2015 4573 randerso Initial creation (hand generated) +# + +from . import GfeNotification + + +class CombinationsFileChangedNotification(GfeNotification.GfeNotification): + + def __init__(self): + super(CombinationsFileChangedNotification, self).__init__() + self.combinationsFileName = None + self.whoChanged = None + + def __str__(self): + msg = "fileName: " + str(self.combinationsFileName) + msg += '\n' + "whoChanged: " + str(self.whoChanged) + return msg + + def getCombinationsFileName(self): + return self.combinationsFileName + + def setCombinationsFileName(self, combinationsFileName): + self.combinationsFileName = combinationsFileName + + def getWhoChanged(self): + return self.whoChanged + + def setWhoChanged(self, whoChanged): + self.whoChanged = whoChanged diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/DBInvChangeNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/DBInvChangeNotification.py new file mode 100644 index 0000000..6bb1f41 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/DBInvChangeNotification.py @@ -0,0 +1,36 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/???? ???? njensen Modified to add __repr__ +# 06/22/2015 4573 randerso Change to extend GfeNotification +# removed inventory methods +# + +from . 
import GfeNotification + + +class DBInvChangeNotification(GfeNotification.GfeNotification): + + def __init__(self): + super(DBInvChangeNotification, self).__init__() + self.additions = None + self.deletions = None + + def getAdditions(self): + return self.additions + + def setAdditions(self, additions): + self.additions = additions + + def getDeletions(self): + return self.deletions + + def setDeletions(self, deletions): + self.deletions = deletions + + def __str__(self): + msg = 'Additions' + str(self.additions) + '\n' + msg += 'Deletions' + str(self.deletions) + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GfeNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GfeNotification.py new file mode 100644 index 0000000..dd18061 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GfeNotification.py @@ -0,0 +1,29 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 10/07/2014 3684 randerso Manually updated to add sourceID +# + +import abc +from six import with_metaclass + + +class GfeNotification(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.siteID = None + self.sourceID = None + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID + + def getSourceID(self): + return self.sourceID + + def setSourceID(self, sourceID): + self.sourceID = sourceID diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridHistoryUpdateNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridHistoryUpdateNotification.py new file mode 100644 index 0000000..8edec6f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridHistoryUpdateNotification.py @@ -0,0 +1,41 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/22/2015 4573 randerso Initial creation (hand generated) +# + +from . import GfeNotification + + +class GridHistoryUpdateNotification(GfeNotification): + + def __init__(self): + super(GridHistoryUpdateNotification, self).__init__() + self.parmId = None + self.workstationID = None + self.histories = None + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getHistories(self): + return self.histories + + def setHistories(self, histories): + self.histories = histories + + def __str__(self): + msg = "ParmID: " + str(self.parmId) + msg += '\n' + "Histories: " + str(self.histories) + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridUpdateNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridUpdateNotification.py new file mode 100644 index 0000000..bd0e897 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/GridUpdateNotification.py @@ -0,0 +1,50 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/???? ???? 
njensen Modified to add __repr__ +# 06/22/2015 4573 randerso Change to extend GfeNotification +# + +from . import GfeNotification + + +class GridUpdateNotification(GfeNotification): + + def __init__(self): + super(GridUpdateNotification, self).__init__() + self.parmId = None + self.replacementTimeRange = None + self.workstationID = None + self.histories = None + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getReplacementTimeRange(self): + return self.replacementTimeRange + + def setReplacementTimeRange(self, replacementTimeRange): + self.replacementTimeRange = replacementTimeRange + + def getWorkstationID(self): + return self.workstationID + + def setWorkstationID(self, workstationID): + self.workstationID = workstationID + + def getHistories(self): + return self.histories + + def setHistories(self, histories): + self.histories = histories + + def __str__(self): + msg = "ParmID: " + str(self.parmId) + msg += '\n' + "Replacement TimeRange: " + str(self.replacementTimeRange) + msg += '\n' + "Histories: " + str(self.histories) + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/LockNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/LockNotification.py new file mode 100644 index 0000000..74288df --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/LockNotification.py @@ -0,0 +1,26 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/???? ???? njensen Modified to add __repr__ +# 06/22/2015 4573 randerso Change to extend GfeNotification +# + +from . import GfeNotification + + +class LockNotification(GfeNotification): + + def __init__(self): + super(LockNotification, self).__init__() + self.lockTable = None + + def getLockTable(self): + return self.lockTable + + def setLockTable(self, lockTable): + self.lockTable = lockTable + + def __str__(self): + return str(self.lockTable) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/UserMessageNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/UserMessageNotification.py new file mode 100644 index 0000000..33183d1 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/UserMessageNotification.py @@ -0,0 +1,42 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/22/2015 4573 randerso Change to extend GfeNotification +# + +from . 
import GfeNotification + + +class UserMessageNotification(GfeNotification): + + def __init__(self): + super(UserMessageNotification, self).__init__() + self.category = None + self.priority = None + self.message = None + + def getCategory(self): + return self.category + + def setCategory(self, category): + self.category = category + + def getPriority(self): + return self.priority + + def setPriority(self, priority): + self.priority = priority + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message + + def __str__(self): + msg = 'Message: ' + str(self.message) + '\n' + msg += 'Priority: ' + str(self.priority) + '\n' + msg += 'Category: ' + str(self.category) + '\n' + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/__init__.py new file mode 100644 index 0000000..b0ca85e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/notify/__init__.py @@ -0,0 +1,18 @@ + +__all__ = [ + 'CombinationsFileChangedNotification', + 'DBInvChangeNotification', + 'GfeNotification', + 'GridHistoryUpdateNotification', + 'GridUpdateNotification', + 'LockNotification', + 'UserMessageNotification' + ] + +from .CombinationsFileChangedNotification import CombinationsFileChangedNotification +from .DBInvChangeNotification import DBInvChangeNotification +from .GfeNotification import GfeNotification +from .GridHistoryUpdateNotification import GridHistoryUpdateNotification +from .GridUpdateNotification import GridUpdateNotification +from .LockNotification import LockNotification +from .UserMessageNotification import UserMessageNotification diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/CommitGridRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/CommitGridRequest.py new file mode 100644 index 0000000..81f6b2c --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/CommitGridRequest.py @@ -0,0 +1,33 @@ + + +class CommitGridRequest(object): + + def __init__(self): + self.parmId = None + self.dbId = None + self.timeRange = None + self.clientSendStatus = False + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getDbId(self): + return self.dbId + + def setDbId(self, dbId): + self.dbId = dbId + + def getTimeRange(self): + return self.timeRange + + def setTimeRange(self, timeRange): + self.timeRange = timeRange + + def getClientSendStatus(self): + return self.clientSendStatus + + def setClientSendStatus(self, clientSendStatus): + self.clientSendStatus = bool(clientSendStatus) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/GetGridRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/GetGridRequest.py new file mode 100644 index 0000000..46bd13f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/GetGridRequest.py @@ -0,0 +1,30 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.db.objects import GFERecord + + +class GetGridRequest(object): + + def __init__(self, parmId=None, trs=[]): + self.convertUnit = False + self.records = [] + self.parmId = parmId + if self.parmId is not None: + for tr in trs: + self.records.append(GFERecord(parmId, tr)) + + def getRecords(self): + return 
self.records + + def setRecords(self, records): + self.records = records + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getConvertUnit(self): + return self.convertUnit + + def setConvertUnit(self, convertUnit): + self.convertUnit = convertUnit diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockRequest.py new file mode 100644 index 0000000..0811432 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockRequest.py @@ -0,0 +1,33 @@ + + +class LockRequest(object): + + def __init__(self): + self.timeRange = None + self.parmId = None + self.dbId = None + self.mode = None + + def getTimeRange(self): + return self.timeRange + + def setTimeRange(self, timeRange): + self.timeRange = timeRange + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getDbId(self): + return self.dbId + + def setDbId(self, dbId): + self.dbId = dbId + + def getMode(self): + return self.mode + + def setMode(self, mode): + self.mode = mode diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockTableRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockTableRequest.py new file mode 100644 index 0000000..e48e3c4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/LockTableRequest.py @@ -0,0 +1,19 @@ + + +class LockTableRequest(object): + + def __init__(self): + self.parmId = None + self.dbId = None + + def getParmId(self): + return self.parmId + + def setParmId(self, parmId): + self.parmId = parmId + + def getDbId(self): + return self.dbId + + def setDbId(self, dbId): + self.dbId = dbId diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/__init__.py new file mode 100644 index 0000000..355b490 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/server/request/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'CommitGridRequest', + 'GetGridRequest', + 'LockRequest', + 'LockTableRequest' + ] + +from .CommitGridRequest import CommitGridRequest +from .GetGridRequest import GetGridRequest +from .LockRequest import LockRequest +from .LockTableRequest import LockTableRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/AbstractGridSlice.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/AbstractGridSlice.py new file mode 100644 index 0000000..0129cad --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/AbstractGridSlice.py @@ -0,0 +1,32 @@ +import abc +from six import with_metaclass + + +class AbstractGridSlice(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.validTime = None + self.gridParmInfo = None + self.gridDataHistory = None + + @abc.abstractmethod + def getNumPyGrid(self): + raise NotImplementedError + + def getValidTime(self): + return self.validTime + + def setValidTime(self, validTime): + self.validTime = validTime + + def getGridParmInfo(self): + return self.gridParmInfo + + def setGridParmInfo(self, gridParmInfo): + self.gridParmInfo = gridParmInfo + + def getGridDataHistory(self): + return 
self.gridDataHistory + + def setGridDataHistory(self, gridDataHistory): + self.gridDataHistory = gridDataHistory diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/DiscreteGridSlice.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/DiscreteGridSlice.py new file mode 100644 index 0000000..1a26e0a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/DiscreteGridSlice.py @@ -0,0 +1,24 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.slice import AbstractGridSlice + + +class DiscreteGridSlice(AbstractGridSlice): + + def __init__(self): + super(DiscreteGridSlice, self).__init__() + self.discreteGrid = None + self.key = [] + + def getDiscreteGrid(self): + return self.discreteGrid + + def setDiscreteGrid(self, discreteGrid): + self.discreteGrid = discreteGrid + + def getNumPyGrid(self): + return self.discreteGrid.getNumPyGrid(), self.key + + def getKey(self): + return self.key + + def setKey(self, key): + self.key = key diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/ScalarGridSlice.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/ScalarGridSlice.py new file mode 100644 index 0000000..8cc9ec4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/ScalarGridSlice.py @@ -0,0 +1,17 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.slice import AbstractGridSlice + + +class ScalarGridSlice(AbstractGridSlice): + + def __init__(self): + super(ScalarGridSlice, self).__init__() + self.scalarGrid = None + + def getNumPyGrid(self): + return self.scalarGrid.getNumPyGrid() + + def getScalarGrid(self): + return self.scalarGrid + + def setScalarGrid(self, scalarGrid): + self.scalarGrid = scalarGrid diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/VectorGridSlice.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/VectorGridSlice.py new file mode 100644 index 0000000..229d47b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/VectorGridSlice.py @@ -0,0 +1,23 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.slice import ScalarGridSlice + + +class VectorGridSlice(ScalarGridSlice): + + def __init__(self): + super(VectorGridSlice, self).__init__() + self.dirGrid = None + + def getNumPyGrid(self): + return self.scalarGrid.getNumPyGrid(), self.dirGrid.getNumPyGrid() + + def getDirGrid(self): + return self.dirGrid + + def setDirGrid(self, dirGrid): + self.dirGrid = dirGrid + + def getMagGrid(self): + return self.scalarGrid + + def setMagGrid(self, magGrid): + self.scalarGrid = magGrid diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/WeatherGridSlice.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/WeatherGridSlice.py new file mode 100644 index 0000000..92e75e3 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/WeatherGridSlice.py @@ -0,0 +1,24 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.slice import AbstractGridSlice + + +class WeatherGridSlice(AbstractGridSlice): + + def __init__(self): + super(WeatherGridSlice, self).__init__() + self.weatherGrid = None + self.keys = [] + + def getNumPyGrid(self): + pass + + def getWeatherGrid(self): + return self.weatherGrid + + def setWeatherGrid(self, weatherGrid): + self.weatherGrid = weatherGrid + + def getKeys(self): 
+ return self.keys + + def setKeys(self, keys): + self.keys = keys diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/__init__.py new file mode 100644 index 0000000..6109545 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/slice/__init__.py @@ -0,0 +1,14 @@ + +__all__ = [ + 'AbstractGridSlice', + 'DiscreteGridSlice', + 'ScalarGridSlice', + 'VectorGridSlice', + 'WeatherGridSlice' + ] + +from .AbstractGridSlice import AbstractGridSlice +from .DiscreteGridSlice import DiscreteGridSlice +from .ScalarGridSlice import ScalarGridSlice +from .VectorGridSlice import VectorGridSlice +from .WeatherGridSlice import WeatherGridSlice diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherKey.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherKey.py new file mode 100644 index 0000000..bbdbfc3 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherKey.py @@ -0,0 +1,47 @@ +SUBKEY_SEPARATOR = '^' + + +class WeatherKey(object): + # FIXME: Implement WeatherSubKey and use it in this class when needed. + + def __init__(self, siteId="", subKeys=[]): + self.siteId = siteId + if isinstance(subKeys, str): + self.__parseString(str(subKeys)) + else: + self.subKeys = subKeys + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return SUBKEY_SEPARATOR.join(self.subKeys) + + def __eq__(self, other): + if not isinstance(other, WeatherKey): + return False + return self.subKeys == other.subKeys + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + prime = 31 + result = 1 + result = prime * result + (0 if self.subKeys is None else hash(tuple(self.subKeys))) + return result + + def getSiteId(self): + return self.siteId + + def setSiteId(self, siteId): + self.siteId = siteId + + def getSubKeys(self): + return self.subKeys + + def setSubKeys(self, subKeys): + self.subKeys = subKeys + + def __parseString(self, subKeys): + self.subKeys = subKeys.split(SUBKEY_SEPARATOR) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherSubKey.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherSubKey.py new file mode 100644 index 0000000..c122883 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/WeatherSubKey.py @@ -0,0 +1,8 @@ +# TODO: Implement WeatherSubKey when it is explicitly needed. For now +# WeatherSubKeys will be list of str within the WeatherKey class. 
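# A short sketch of the WeatherKey behaviour defined above: a '^'-separated string
# is split into subKeys, and two keys compare equal when their subKeys match
# (the site and weather strings here are illustrative only):
from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.weather import WeatherKey

key1 = WeatherKey("OAX", "Sct:RW:-:<NoVis>:^Iso:T:<NoInten>:<NoVis>:")
key2 = WeatherKey("OAX", ["Sct:RW:-:<NoVis>:", "Iso:T:<NoInten>:<NoVis>:"])
print(key1)           # subKeys re-joined with the SUBKEY_SEPARATOR '^'
print(key1 == key2)   # True: equality compares the subKeys lists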
+ + +class WeatherSubKey(object): + + def __init__(self): + raise NotImplementedError("WeatherSubKey is not currently supported.") diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/__init__.py new file mode 100644 index 0000000..3d74183 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/gfe/weather/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'WeatherKey', + 'WeatherSubKey' + ] + +from .WeatherKey import WeatherKey +from .WeatherSubKey import WeatherSubKey diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/__init__.py new file mode 100644 index 0000000..ed91455 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'request' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/DeleteAllGridDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/DeleteAllGridDataRequest.py new file mode 100644 index 0000000..6f0f0c7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/DeleteAllGridDataRequest.py @@ -0,0 +1,12 @@ + + +class DeleteAllGridDataRequest(object): + + def __init__(self, modelName=None): + self.modelName = modelName + + def getModelName(self): + return self.modelName + + def setModelName(self, modelName): + self.modelName = modelName diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/__init__.py new file mode 100644 index 0000000..ad09320 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/grid/request/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'DeleteAllGridDataRequest' + ] + +from .DeleteAllGridDataRequest import DeleteAllGridDataRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/Level.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/Level.py new file mode 100644 index 0000000..c92c114 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/Level.py @@ -0,0 +1,178 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/29/13 2023 dgilling Initial Creation. +# 02/12/14 2672 bsteffen Allow String constructor to parse floats. +# 06/29/15 4480 dgilling Implement __hash__, __eq__, +# __str__ and rich comparison operators. 
+# + +import re +import numpy + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import MasterLevel + +LEVEL_NAMING_REGEX = re.compile("^(\d*(?:\.\d*)?)(?:_(\d*(?:\.\d*)?))?([a-zA-Z]+)$") +INVALID_VALUE = numpy.float64(-999999) + + +class Level(object): + + def __init__(self, levelString=None): + self.id = 0 + self.identifier = None + self.masterLevel = None + self.levelonevalue = INVALID_VALUE + self.leveltwovalue = INVALID_VALUE + + if levelString is not None: + matcher = LEVEL_NAMING_REGEX.match(str(levelString)) + if matcher is not None: + self.levelonevalue = numpy.float64(matcher.group(1)) + self.masterLevel = MasterLevel.MasterLevel(matcher.group(3)) + levelTwo = matcher.group(2) + if levelTwo: + self.leveltwovalue = numpy.float64(levelTwo) + + def __hash__(self): + # XOR-ing the 3 items in a tuple ensures that order of the + # values matters + hashCode = hash(self.masterLevel) ^ hash(self.levelonevalue) ^ hash(self.leveltwovalue) + hashCode ^= hash((self.masterLevel, self.levelonevalue, self.leveltwovalue)) + return hashCode + + def __eq__(self, other): + if isinstance(self, type(other)): + return (self.masterLevel, self.levelonevalue, self.leveltwovalue) == \ + (other.masterLevel, other.levelonevalue, other.leveltwovalue) + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + elif self.masterLevel.getName() != other.masterLevel.getName(): + return NotImplemented + + myLevel1 = self.levelonevalue + myLevel2 = self.leveltwovalue + otherLevel1 = other.levelonevalue + otherLevel2 = other.leveltwovalue + if myLevel1 == INVALID_VALUE and myLevel2 != INVALID_VALUE: + myLevel1 = myLevel2 + myLevel2 = INVALID_VALUE + if otherLevel1 == INVALID_VALUE and otherLevel2 != INVALID_VALUE: + otherLevel1 = otherLevel2 + otherLevel2 = INVALID_VALUE + + # We default to descending order to make sorting levels from the DAF easier + compareType = self.masterLevel.getType() if self.masterLevel.getType() else "DEC" + if myLevel1 != INVALID_VALUE and otherLevel1 != INVALID_VALUE: + level1Cmp = self.__compareLevelValues(compareType, myLevel1, otherLevel1) + if level1Cmp == -1: + if myLevel2 != INVALID_VALUE and otherLevel2 != INVALID_VALUE: + level2Cmp = self.__compareLevelValues(compareType, myLevel2, otherLevel2) + return level2Cmp == -1 + elif myLevel2 != INVALID_VALUE: + level2Cmp = self.__compareLevelValues(compareType, myLevel2, otherLevel1) + return level2Cmp == -1 + return True + return False + + def __le__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + elif self.masterLevel.getName() != other.masterLevel.getName(): + return NotImplemented + + return self.__lt__(other) or self.__eq__(other) + + def __gt__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + elif self.masterLevel.getName() != other.masterLevel.getName(): + return NotImplemented + + myLevel1 = self.levelonevalue + myLevel2 = self.leveltwovalue + otherLevel1 = other.levelonevalue + otherLevel2 = other.leveltwovalue + if myLevel1 == INVALID_VALUE and myLevel2 != INVALID_VALUE: + myLevel1 = myLevel2 + myLevel2 = INVALID_VALUE + if otherLevel1 == INVALID_VALUE and otherLevel2 != INVALID_VALUE: + otherLevel1 = otherLevel2 + otherLevel2 = INVALID_VALUE + + # We default to descending order to make sorting levels from the DAF easier + compareType = self.masterLevel.getType() if self.masterLevel.getType() else "DEC" + if myLevel1 != INVALID_VALUE 
and otherLevel1 != INVALID_VALUE: + level1Cmp = self.__compareLevelValues(compareType, myLevel1, otherLevel1) + if level1Cmp == 1: + if myLevel2 != INVALID_VALUE and otherLevel2 != INVALID_VALUE: + level2Cmp = self.__compareLevelValues(compareType, myLevel2, otherLevel2) + return level2Cmp == 1 + elif otherLevel2 != INVALID_VALUE: + level2Cmp = self.__compareLevelValues(compareType, myLevel1, otherLevel2) + return level2Cmp == 1 + return True + return False + + def __ge__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + elif self.masterLevel.getName() != other.masterLevel.getName(): + return NotImplemented + + return self.__gt__(other) or self.__eq__(other) + + def __compareLevelValues(self, compareType, val1, val2): + returnVal = 0 + if val1 < val2: + returnVal = -1 if compareType == 'INC' else 1 + elif val2 < val1: + returnVal = 1 if compareType == 'INC' else -1 + return returnVal + + def __str__(self): + retVal = "" + if INVALID_VALUE != self.levelonevalue: + retVal += str(self.levelonevalue) + if INVALID_VALUE != self.leveltwovalue: + retVal += "_" + str(self.leveltwovalue) + retVal += str(self.masterLevel.getName()) + return retVal + + def getId(self): + return self.id + + def setId(self, levelid): + self.id = levelid + + def getMasterLevel(self): + return self.masterLevel + + def setMasterLevel(self, masterLevel): + self.masterLevel = masterLevel + + def getLevelonevalue(self): + return self.levelonevalue + + def setLevelonevalue(self, levelonevalue): + self.levelonevalue = numpy.float64(levelonevalue) + + def getLeveltwovalue(self): + return self.leveltwovalue + + def setLeveltwovalue(self, leveltwovalue): + self.leveltwovalue = numpy.float64(leveltwovalue) + + def getIdentifier(self): + return self.identifier + + def setIdentifier(self, identifier): + self.identifier = identifier diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/MasterLevel.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/MasterLevel.py new file mode 100644 index 0000000..2879500 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/MasterLevel.py @@ -0,0 +1,99 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/29/13 2023 dgilling Initial Creation. +# 06/29/15 4480 dgilling Implement __hash__, __eq__ +# and __str__. 
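A short illustration of the Level string constructor and the descending-by-default comparison behavior implemented above (values are hypothetical):

    from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.level import Level

    # "500MB" parses to levelonevalue=500.0 on the "MB" master level; a range
    # such as "0.5_0.6SIGMA" would also fill leveltwovalue via the "_" group.
    upper = Level("500MB")
    lower = Level("850MB")
    print(str(upper))                        # 500.0MB
    print(upper.getMasterLevel().getName())  # MB
    # With no master-level type set, comparisons fall back to descending
    # ("DEC") order, so the higher-pressure level compares as the smaller one.
    print(lower < upper)                     # True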
+ +import six + + +class MasterLevel(object): + + def __init__(self, name=None): + self.name = name + self.description = None + self.unitString = None + self.type = None + self.identifier = None + + def __hash__(self): + return hash(self.name) + + def __eq__(self, other): + if not isinstance(self, type(other)): + return False + else: + return self.name == other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + if six.PY2: + retVal = "MasterLevel[" + retVal += "name=" + str(self.name) + "," + retVal += "type=" + str(self.type) + "," + retVal += "unit=" + str(self.unitString) + "," + retVal += "description=" + str(self.description) + retVal += "]" + else: + retVal = "MasterLevel[" + retVal += "name=" + str(self.name.decode('utf-8')) + "," + retVal += "type=" + str(self.type.decode('utf-8')) + "," + retVal += "unit=" + str(self.unitString.decode('utf-8')) + "," + retVal += "description=" + str(self.description.decode('utf-8')) + retVal += "]" + return retVal + + def getName(self): + if six.PY2: + return self.name + if (self.name is not None) and (not isinstance(self.name, str)): + return self.name.decode('utf-8') + return self.name + + def setName(self, name): + self.name = name + + def getDescription(self): + if six.PY2: + return self.description + if self.description is not None: + return self.description.decode('utf-8') + return self.description + + def setDescription(self, description): + self.description = description + + def getUnitString(self): + if six.PY2: + return self.unitString + if self.unitString is not None: + return self.unitString.decode('utf-8') + return self.unitString + + def setUnitString(self, unitString): + self.unitString = unitString + + def getType(self): + if six.PY2: + return self.type + if self.type is not None: + return self.type.decode('utf-8') + return self.type + + def setType(self, leveltype): + self.type = leveltype + + def getIdentifier(self): + if six.PY2: + return self.identifier + if self.identifier is not None: + return self.identifier.decode('utf-8') + return self.identifier + + def setIdentifier(self, identifier): + self.identifier = identifier diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/__init__.py new file mode 100644 index 0000000..61c8dd7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/level/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'Level', + 'MasterLevel' + ] + +from .Level import Level +from .MasterLevel import MasterLevel diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/DataURINotificationMessage.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/DataURINotificationMessage.py new file mode 100644 index 0000000..872afec --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/DataURINotificationMessage.py @@ -0,0 +1,19 @@ + + +class DataURINotificationMessage(object): + + def __init__(self): + self.dataURIs = None + self.ids = None + + def getDataURIs(self): + return self.dataURIs + + def setDataURIs(self, dataURIs): + self.dataURIs = dataURIs + + def getIds(self): + return self.ids + + def setIds(self, ids): + self.ids = ids diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py new file mode 100644 index 0000000..8982397 --- /dev/null +++ 
b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/message/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'DataURINotificationMessage' + ] + +from .DataURINotificationMessage import DataURINotificationMessage diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/__init__.py new file mode 100644 index 0000000..87fb079 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'request', + 'response' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/GetRadarDataRecordRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/GetRadarDataRecordRequest.py new file mode 100644 index 0000000..8349d5e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/GetRadarDataRecordRequest.py @@ -0,0 +1,41 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Aug 19, 2014 nabowle Generated + +import numpy + + +class GetRadarDataRecordRequest(object): + + def __init__(self): + self.timeRange = None + self.productCode = None + self.radarId = None + self.primaryElevationAngle = None + + def getTimeRange(self): + return self.timeRange + + def setTimeRange(self, timeRange): + self.timeRange = timeRange + + def getProductCode(self): + return self.productCode + + def setProductCode(self, productCode): + self.productCode = productCode + + def getRadarId(self): + return self.radarId + + def setRadarId(self, radarId): + self.radarId = radarId + + def getPrimaryElevationAngle(self): + return self.primaryElevationAngle + + def setPrimaryElevationAngle(self, primaryElevationAngle): + self.primaryElevationAngle = numpy.float64(primaryElevationAngle) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/__init__.py new file mode 100644 index 0000000..2c5b7f9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/request/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'GetRadarDataRecordRequest' + ] + +from .GetRadarDataRecordRequest import GetRadarDataRecordRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/GetRadarDataRecordResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/GetRadarDataRecordResponse.py new file mode 100644 index 0000000..f6797eb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/GetRadarDataRecordResponse.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Aug 19, 2014 nabowle Generated + + +class GetRadarDataRecordResponse(object): + + def __init__(self): + self.data = None + + def getData(self): + return self.data + + def setData(self, data): + self.data = data diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/RadarDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/RadarDataRecord.py new file mode 100644 index 0000000..ab4e315 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/RadarDataRecord.py @@ -0,0 +1,67 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# 
Engineer Description +# ------------ ---------- ----------- -------------------------- +# Aug 19, 2014 nabowle Generated + + +class RadarDataRecord(object): + + def __init__(self): + self.hdf5Data = None + self.trueElevationAngle = None + self.elevationNumber = None + self.elevation = None + self.longitude = None + self.latitude = None + self.dataTime = None + self.volumeCoveragePattern = None + + def getHdf5Data(self): + return self.hdf5Data + + def setHdf5Data(self, hdf5Data): + self.hdf5Data = hdf5Data + + def getTrueElevationAngle(self): + return self.trueElevationAngle + + def setTrueElevationAngle(self, trueElevationAngle): + self.trueElevationAngle = trueElevationAngle + + def getElevationNumber(self): + return self.elevationNumber + + def setElevationNumber(self, elevationNumber): + self.elevationNumber = elevationNumber + + def getElevation(self): + return self.elevation + + def setElevation(self, elevation): + self.elevation = elevation + + def getLongitude(self): + return self.longitude + + def setLongitude(self, longitude): + self.longitude = longitude + + def getLatitude(self): + return self.latitude + + def setLatitude(self, latitude): + self.latitude = latitude + + def getDataTime(self): + return self.dataTime + + def setDataTime(self, dataTime): + self.dataTime = dataTime + + def getVolumeCoveragePattern(self): + return self.volumeCoveragePattern + + def setVolumeCoveragePattern(self, volumeCoveragePattern): + self.volumeCoveragePattern = volumeCoveragePattern diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/__init__.py new file mode 100644 index 0000000..b024490 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/radar/response/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'GetRadarDataRecordResponse', + 'RadarDataRecord' + ] + +from .GetRadarDataRecordResponse import GetRadarDataRecordResponse +from .RadarDataRecord import RadarDataRecord diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/__init__.py new file mode 100644 index 0000000..0163bda --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'dbsrv', + 'subscription' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/TextDBRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/TextDBRequest.py new file mode 100644 index 0000000..2b127cb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/TextDBRequest.py @@ -0,0 +1,12 @@ + + +class TextDBRequest(object): + + def __init__(self): + self.message = None + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/__init__.py new file mode 100644 index 0000000..f3d27aa --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/dbsrv/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'TextDBRequest' + ] + +from .TextDBRequest import TextDBRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/__init__.py 
b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/__init__.py new file mode 100644 index 0000000..ed91455 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'request' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/SubscriptionRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/SubscriptionRequest.py new file mode 100644 index 0000000..1fe746f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/SubscriptionRequest.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 05, 2014 bclement Generated + + +class SubscriptionRequest(object): + + def __init__(self): + self.message = None + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/__init__.py new file mode 100644 index 0000000..2812cb7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataplugin/text/subscription/request/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'SubscriptionRequest' + ] + +from .SubscriptionRequest import SubscriptionRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/__init__.py new file mode 100644 index 0000000..ccf9030 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'requests' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/RequestConstraint.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/RequestConstraint.py new file mode 100644 index 0000000..c6558f2 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/RequestConstraint.py @@ -0,0 +1,279 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jun 01, 2016 5574 tgurney Initial creation +# Jun 27, 2016 5725 tgurney Add NOT IN +# Jul 22, 2016 2416 tgurney Add evaluate() +# Oct 05, 2018 mjames@ucar Python 3 types +# +# + +import re +import six +from dynamicserialize.dstypes.com.raytheon.uf.common.time import DataTime + + +class RequestConstraint(object): + + TOLERANCE = 0.0001 + + IN_PATTERN = re.compile(',\s?') + + def __init__(self): + self.constraintValue = None + self.constraintType = None + + def getConstraintValue(self): + return self.constraintValue + + def setConstraintValue(self, constraintValue): + if hasattr(self, '_evalValue'): + del self._evalValue + self.constraintValue = constraintValue + + def getConstraintType(self): + return self.constraintType + + def setConstraintType(self, constraintType): + if hasattr(self, '_evalValue'): + del self._evalValue + self.constraintType = constraintType + + def evaluate(self, value): + if not hasattr(self, '_evalValue'): + self._setupEvalValue() + + if self.constraintType == 'EQUALS': + return self._evalEquals(value) + elif self.constraintType == 'NOT_EQUALS': + return not self._evalEquals(value) + elif self.constraintType == 'GREATER_THAN': + 
return self._evalGreaterThan(value) + elif self.constraintType == 'GREATER_THAN_EQUALS': + return self._evalGreaterThanEquals(value) + elif self.constraintType == 'LESS_THAN': + return self._evalLessThan(value) + elif self.constraintType == 'LESS_THAN_EQUALS': + return self._evalLessThanEquals(value) + elif self.constraintType == 'BETWEEN': + return self._evalBetween(value) + elif self.constraintType == 'IN': + return self._evalIn(value) + elif self.constraintType == 'NOT_IN': + return not self._evalIn(value) + elif self.constraintType == 'LIKE': + return self._evalLike(value) + # setupConstraintType already adds correct flags for ilike + # on regex pattern + elif self.constraintType == 'ILIKE': + return self._evalLike(value) + elif self.constraintType == 'ISNULL': + return self._evalIsNull(value) + elif self.constraintType == 'ISNOTNULL': + return not self._evalIsNull(value) + else: + errmsg = '{} is not a valid constraint type.' + raise ValueError(errmsg.format(self.constraintType)) + + def _makeRegex(self, pattern, flags): + """Make a pattern using % wildcard into a regex""" + pattern = re.escape(pattern) + pattern = pattern.replace('\\%', '.*') + pattern = pattern.replace('\\_', '.') + pattern = pattern + '$' + return re.compile(pattern, flags) + + def _setupEvalValue(self): + if self.constraintType == 'BETWEEN': + self._evalValue = self.constraintValue.split('--') + self._evalValue[0] = self._adjustValueType(self._evalValue[0]) + self._evalValue[1] = self._adjustValueType(self._evalValue[1]) + elif self.constraintType in ('IN', 'NOT_IN'): + splitValue = self.IN_PATTERN.split(self.constraintValue) + self._evalValue = { + self._adjustValueType(value) + for value in splitValue + } + # if collection now contains multiple types we have to force + # everything to string instead + initialType = next(iter(self._evalValue)).__class__ + for item in self._evalValue: + if item.__class__ is not initialType: + self._evalValue = {str(value) for value in splitValue} + break + elif self.constraintType == 'LIKE': + self._evalValue = self._makeRegex(self.constraintValue, re.DOTALL) + elif self.constraintType == 'ILIKE': + self._evalValue = self._makeRegex(self.constraintValue, re.IGNORECASE | re.DOTALL) + elif self.constraintValue is None: + self._evalValue = None + else: + self._evalValue = self._adjustValueType(self.constraintValue) + + def _adjustValueType(self, value): + """ + Try to take part of a constraint value, encoded as a string, and + return it as its 'true type'. + + _adjustValueType('3.0') -> 3.0 + _adjustValueType('3') -> 3.0 + _adjustValueType('a string') -> 'a string' + """ + try: + return float(value) + except ValueError: + pass + try: + return DataTime(value) + except ValueError: + pass + return value + + def _matchType(self, value, otherValue): + """ + Return value coerced to be the same type as otherValue. If this is + not possible, just return value unmodified. 
+ """ + if not isinstance(value, otherValue.__class__): + try: + return otherValue.__class__(value) + except ValueError: + pass + return value + + def _evalEquals(self, value): + value = self._matchType(value, self._evalValue) + if isinstance(value, float): + return abs(float(self._evalValue) - value) < self.TOLERANCE + return value == self._evalValue + + def _evalGreaterThan(self, value): + value = self._matchType(value, self._evalValue) + return value > self._evalValue + + def _evalGreaterThanEquals(self, value): + value = self._matchType(value, self._evalValue) + return value >= self._evalValue + + def _evalLessThan(self, value): + value = self._matchType(value, self._evalValue) + return value < self._evalValue + + def _evalLessThanEquals(self, value): + value = self._matchType(value, self._evalValue) + return value <= self._evalValue + + def _evalBetween(self, value): + value = self._matchType(value, self._evalValue[0]) + return self._evalValue[0] <= value <= self._evalValue[1] + + def _evalIn(self, value): + anEvalValue = next(iter(self._evalValue)) + if isinstance(anEvalValue, float): + for otherValue in self._evalValue: + try: + if abs(otherValue - float(value)) < self.TOLERANCE: + return True + except ValueError: + pass + return False + value = self._matchType(value, anEvalValue) + return value in self._evalValue + + def _evalLike(self, value): + value = self._matchType(value, self._evalValue) + if self.constraintValue == '%': + return True + return self._evalValue.match(value) is not None + + def _evalIsNull(self, value): + return value is None or value == 'null' + + # DAF-specific stuff begins here ########################################## + + CONSTRAINT_MAP = {'=': 'EQUALS', + '!=': 'NOT_EQUALS', + '>': 'GREATER_THAN', + '>=': 'GREATER_THAN_EQUALS', + '<': 'LESS_THAN', + '<=': 'LESS_THAN_EQUALS', + 'IN': 'IN', + 'NOT IN': 'NOT_IN' + } + + @staticmethod + def _stringify(value): + if six.PY2: + if isinstance(value, (str, int, long, bool, float, unicode)): + return str(value) + else: + # Collections are not allowed; they are handled separately. + # Arbitrary objects are not allowed because the string + # representation may not be sufficient to reconstruct the object. + raise TypeError('Constraint values of type ' + repr(type(value)) + + 'are not allowed') + else: + if isinstance(value, (str, int, bool, float)): + return str(value) + else: + # Collections are not allowed; they are handled separately. + # Arbitrary objects are not allowed because the string + # representation may not be sufficient to reconstruct the object. + raise TypeError('Constraint values of type ' + repr(type(value)) + + 'are not allowed') + + @classmethod + def _constructIn(cls, constraintType, constraintValue): + """Build a new "IN" or "NOT IN" constraint from an iterable.""" + try: + iterator = iter(constraintValue) + except TypeError: + raise TypeError("value for IN / NOT IN constraint must be an iterable") + stringValue = ', '.join(cls._stringify(item) for item in iterator) + if not stringValue: + raise ValueError('cannot use IN / NOT IN with empty collection') + obj = cls() + obj.setConstraintType(constraintType) + obj.setConstraintValue(stringValue) + return obj + + @classmethod + def _constructEq(cls, constraintType, constraintValue): + """Build a new = or != constraint. Handle None specially by making an + "is null" or "is not null" instead. 
+ """ + obj = cls() + if constraintValue is None: + if constraintType == 'EQUALS': + obj.setConstraintType('ISNULL') + elif constraintType == 'NOT_EQUALS': + obj.setConstraintType('ISNOTNULL') + else: + obj = cls._construct(constraintType, constraintValue) + return obj + + @classmethod + def _construct(cls, constraintType, constraintValue): + """Build a new constraint.""" + stringValue = cls._stringify(constraintValue) + obj = cls() + obj.setConstraintType(constraintType) + obj.setConstraintValue(stringValue) + return obj + + @classmethod + def new(cls, operator, constraintValue): + """Build a new RequestConstraint.""" + try: + constraintType = cls.CONSTRAINT_MAP[operator.upper()] + except KeyError: + errmsg = '{} is not a valid operator. Valid operators are: {}' + validOperators = list(sorted(cls.CONSTRAINT_MAP.keys())) + raise ValueError(errmsg.format(operator, validOperators)) + if constraintType in ('IN', 'NOT_IN'): + return cls._constructIn(constraintType, constraintValue) + elif constraintType in {'EQUALS', 'NOT_EQUALS'}: + return cls._constructEq(constraintType, constraintValue) + return cls._construct(constraintType, constraintValue) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/__init__.py new file mode 100644 index 0000000..6c79752 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/dataquery/requests/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'RequestConstraint' + ] + +from .RequestConstraint import RequestConstraint diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/Request.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/Request.py new file mode 100644 index 0000000..f975420 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/Request.py @@ -0,0 +1,40 @@ + + +class Request(object): + + def __init__(self): + self.points = None + self.indices = None + self.minIndexForSlab = None + self.maxIndexForSlab = None + self.type = None + + def getPoints(self): + return self.points + + def setPoints(self, points): + self.points = points + + def getIndices(self): + return self.indices + + def setIndices(self, indices): + self.indices = indices + + def getMinIndexForSlab(self): + return self.minIndexForSlab + + def setMinIndexForSlab(self, minIndexForSlab): + self.minIndexForSlab = minIndexForSlab + + def getMaxIndexForSlab(self): + return self.maxIndexForSlab + + def setMaxIndexForSlab(self, maxIndexForSlab): + self.maxIndexForSlab = maxIndexForSlab + + def getType(self): + return self.type + + def setType(self, requesttype): + self.type = requesttype diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageProperties.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageProperties.py new file mode 100644 index 0000000..9cd9c0e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageProperties.py @@ -0,0 +1,19 @@ + + +class StorageProperties(object): + + def __init__(self): + self.compression = None + self.chunked = None + + def getCompression(self): + return self.compression + + def setCompression(self, compression): + self.compression = compression + + def getChunked(self): + return self.chunked + + def setChunked(self, chunked): + self.chunked = chunked diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageStatus.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageStatus.py 
new file mode 100644 index 0000000..4e97635 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/StorageStatus.py @@ -0,0 +1,19 @@ + + +class StorageStatus(object): + + def __init__(self): + self.operationPerformed = None + self.indexOfAppend = None + + def getOperationPerformed(self): + return self.operationPerformed + + def setOperationPerformed(self, operationPerformed): + self.operationPerformed = operationPerformed + + def getIndexOfAppend(self): + return self.indexOfAppend + + def setIndexOfAppend(self, indexOfAppend): + self.indexOfAppend = indexOfAppend diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/__init__.py new file mode 100644 index 0000000..be939ee --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/__init__.py @@ -0,0 +1,21 @@ +# +# Package definition for com.raytheon.uf.common.datastorage +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. +# + +__all__ = [ + 'records', + 'Request', + 'StorageProperties', + 'StorageStatus' + ] + +from .Request import Request +from .StorageProperties import StorageProperties +from .StorageStatus import StorageStatus diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/AbstractDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/AbstractDataRecord.py new file mode 100644 index 0000000..fd450a3 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/AbstractDataRecord.py @@ -0,0 +1,78 @@ + +from six import with_metaclass +import abc + + +class AbstractDataRecord(with_metaclass(abc.ABCMeta, object)): + + def __init__(self): + self.name = None + self.dimension = None + self.sizes = None + self.maxSizes = None + self.props = None + self.minIndex = None + self.group = None + self.dataAttributes = None + self.fillValue = None + self.maxChunkSize = None + + def getName(self): + return self.name + + def setName(self, name): + self.name = name + + def getDimension(self): + return self.dimension + + def setDimension(self, dimension): + self.dimension = dimension + + def getSizes(self): + return self.sizes + + def setSizes(self, sizes): + self.sizes = sizes + + def getMaxSizes(self): + return self.maxSizes + + def setMaxSizes(self, maxSizes): + self.maxSizes = maxSizes + + def getProps(self): + return self.props + + def setProps(self, props): + self.props = props + + def getMinIndex(self): + return self.minIndex + + def setMinIndex(self, minIndex): + self.minIndex = minIndex + + def getGroup(self): + return self.group + + def setGroup(self, group): + self.group = group + + def getDataAttributes(self): + return self.dataAttributes + + def setDataAttributes(self, dataAttributes): + self.dataAttributes = dataAttributes + + def getFillValue(self): + return self.fillValue + + def setFillValue(self, fillValue): + self.fillValue = fillValue + + def getMaxChunkSize(self): + return self.maxChunkSize + + def setMaxChunkSize(self, maxChunkSize): + self.maxChunkSize = maxChunkSize diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py new file mode 100644 index 0000000..0621c22 --- /dev/null +++ 
b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class ByteDataRecord(AbstractDataRecord): + + def __init__(self): + super(ByteDataRecord, self).__init__() + self.byteData = None + + def getByteData(self): + return self.byteData + + def setByteData(self, byteData): + self.byteData = byteData + + def retrieveDataObject(self): + return self.getByteData() + + def putDataObject(self, obj): + self.setByteData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/DoubleDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/DoubleDataRecord.py new file mode 100644 index 0000000..368226e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/DoubleDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class DoubleDataRecord(AbstractDataRecord): + + def __init__(self): + super(DoubleDataRecord, self).__init__() + self.doubleData = None + + def getDoubleData(self): + return self.doubleData + + def setDoubleData(self, doubleData): + self.doubleData = doubleData + + def retrieveDataObject(self): + return self.getDoubleData() + + def putDataObject(self, obj): + self.setDoubleData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/FloatDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/FloatDataRecord.py new file mode 100644 index 0000000..8e4d029 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/FloatDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class FloatDataRecord(AbstractDataRecord): + + def __init__(self): + super(FloatDataRecord, self).__init__() + self.floatData = None + + def getFloatData(self): + return self.floatData + + def setFloatData(self, floatData): + self.floatData = floatData + + def retrieveDataObject(self): + return self.getFloatData() + + def putDataObject(self, obj): + self.setFloatData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/IntegerDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/IntegerDataRecord.py new file mode 100644 index 0000000..ec63a28 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/IntegerDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class IntegerDataRecord(AbstractDataRecord): + + def __init__(self): + super(IntegerDataRecord, self).__init__() + self.intData = None + + def getIntData(self): + return self.intData + + def setIntData(self, intData): + self.intData = intData + + def retrieveDataObject(self): + return self.getIntData() + + def putDataObject(self, obj): + self.setIntData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/LongDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/LongDataRecord.py new file mode 100644 index 0000000..5fec3d9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/LongDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + 
+class LongDataRecord(AbstractDataRecord): + + def __init__(self): + super(LongDataRecord, self).__init__() + self.longData = None + + def getLongData(self): + return self.longData + + def setLongData(self, longData): + self.longData = longData + + def retrieveDataObject(self): + return self.getLongData() + + def putDataObject(self, obj): + self.setLongData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ShortDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ShortDataRecord.py new file mode 100644 index 0000000..be53273 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ShortDataRecord.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class ShortDataRecord(AbstractDataRecord): + + def __init__(self): + super(ShortDataRecord, self).__init__() + self.shortData = None + + def getShortData(self): + return self.shortData + + def setShortData(self, shortData): + self.shortData = shortData + + def retrieveDataObject(self): + return self.getShortData() + + def putDataObject(self, obj): + self.setShortData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/StringDataRecord.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/StringDataRecord.py new file mode 100644 index 0000000..86d0a5e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/StringDataRecord.py @@ -0,0 +1,37 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord + + +class StringDataRecord(AbstractDataRecord): + + def __init__(self): + super(StringDataRecord, self).__init__() + self.stringData = None + self.maxLength = None + self.numpyData = None + + def getStringData(self): + return self.stringData + + def setStringData(self, stringData): + self.stringData = stringData + + def getMaxLength(self): + return self.maxLength + + def setMaxLength(self, maxLength): + self.maxLength = maxLength + + def retrieveDataObject(self): + if not self.numpyData: + import numpy + from h5py import h5t + if self.maxLength: + dtype = h5t.py_create('S' + str(self.maxLength)) + else: + from pypies.impl.H5pyDataStore import vlen_str_type as dtype + # dtype.set_strpad(h5t.STR_NULLTERM) + return numpy.asarray(self.getStringData(), dtype) + return self.numpyData + + def putDataObject(self, obj): + self.setStringData(obj) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/__init__.py new file mode 100644 index 0000000..1978888 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/__init__.py @@ -0,0 +1,31 @@ +# +# Package definition for com.raytheon.uf.common.datastorage.records +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. 
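All of the typed record subclasses above implement the same putDataObject()/retrieveDataObject() pair; a minimal sketch with made-up values:

    from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import FloatDataRecord

    rec = FloatDataRecord()
    rec.setName("Data")
    rec.setGroup("/GFS20/Temperature")    # hypothetical HDF5 group path
    rec.putDataObject([273.15, 274.0])    # delegates to setFloatData()
    print(rec.retrieveDataObject())       # [273.15, 274.0]
    # Note that StringDataRecord.retrieveDataObject() additionally imports
    # numpy, h5py, and pypies to build its array, so it only works where
    # those packages are available.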
+# Apr 24, 2015 4425 nabowle Add DoubleDataRecord +# + +__all__ = [ + 'AbstractDataRecord', + 'ByteDataRecord', + 'DoubleDataRecord', + 'FloatDataRecord', + 'IntegerDataRecord', + 'LongDataRecord', + 'ShortDataRecord', + 'StringDataRecord' + ] + +from .AbstractDataRecord import AbstractDataRecord +from .ByteDataRecord import ByteDataRecord +from .DoubleDataRecord import DoubleDataRecord +from .FloatDataRecord import FloatDataRecord +from .IntegerDataRecord import IntegerDataRecord +from .LongDataRecord import LongDataRecord +from .ShortDataRecord import ShortDataRecord +from .StringDataRecord import StringDataRecord diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationContext.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationContext.py new file mode 100644 index 0000000..c0ce181 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationContext.py @@ -0,0 +1,35 @@ + + +class LocalizationContext(object): + + def __init__(self): + self.localizationType = None + self.localizationLevel = None + self.contextName = None + + def getLocalizationType(self): + return self.localizationType + + def setLocalizationType(self, localizationType): + self.localizationType = localizationType + + def getLocalizationLevel(self): + return self.localizationLevel + + def setLocalizationLevel(self, localizationLevel): + self.localizationLevel = localizationLevel + + def getContextName(self): + return self.contextName + + def setContextName(self, contextName): + self.contextName = contextName + + def __str__(self): + return self.__repr__() + + def __repr__(self): + delimitedString = str(self.localizationType).lower() + "." + str(self.localizationLevel).lower() + if self.contextName is not None and self.contextName != "": + delimitedString += "." 
+ self.contextName + return delimitedString diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationLevel.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationLevel.py new file mode 100644 index 0000000..e791020 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationLevel.py @@ -0,0 +1,42 @@ + +knownLevels = { + "BASE": {"text": "BASE", "order": 0, "systemLevel": True}, + "CONFIGURED": {"text": "CONFIGURED", "order": 250, "systemLevel": True}, + "SITE": {"text": "SITE", "order": 500, "systemLevel": False}, + "USER": {"text": "USER", "order": 1000, "systemLevel": False}, + "UNKNOWN": {"text": "UNKNOWN", "order": -1} +} + + +class LocalizationLevel(object): + + def __init__(self, level, order=750, systemLevel=False): + if level.upper() in knownLevels: + self.text = level.upper() + self.order = knownLevels[self.text]["order"] + self.systemLevel = knownLevels[self.text]["systemLevel"] + else: + self.text = level.upper() + self.order = int(order) + self.systemLevel = systemLevel + + def getText(self): + return self.text + + def setText(self, text): + self.text = text + + def getOrder(self): + return self.order + + def setOrder(self, order): + self.order = int(order) + + def isSystemLevel(self): + return self.systemLevel + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return str(self.text) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationType.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationType.py new file mode 100644 index 0000000..d24decd --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/LocalizationType.py @@ -0,0 +1,18 @@ + + +class LocalizationType(object): + + def __init__(self, text=None): + self.text = text + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return str(self.text) + + def getText(self): + return self.text + + def setText(self, text): + self.text = text diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/__init__.py new file mode 100644 index 0000000..d8e1699 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'msgs', + 'stream', + 'LocalizationContext', + 'LocalizationLevel', + 'LocalizationType' + ] + +from .LocalizationContext import LocalizationContext +from .LocalizationLevel import LocalizationLevel +from .LocalizationType import LocalizationType diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityCommand.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityCommand.py new file mode 100644 index 0000000..807eee6 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityCommand.py @@ -0,0 +1,26 @@ + + +class DeleteUtilityCommand(object): + + def __init__(self): + self.filename = None + self.context = None + self.myContextName = None + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context + + def getMyContextName(self): + return self.myContextName + + def setMyContextName(self, contextName): + self.myContextName = str(contextName) diff --git 
a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityResponse.py new file mode 100644 index 0000000..c86ea04 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/DeleteUtilityResponse.py @@ -0,0 +1,39 @@ + + +class DeleteUtilityResponse(object): + + def __init__(self): + self.context = None + self.pathName = None + self.errorText = None + self.timeStamp = None + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context + + def getPathName(self): + return self.pathName + + def setPathName(self, pathName): + self.pathName = pathName + + def getErrorText(self): + return self.errorText + + def setErrorText(self, errorText): + self.errorText = errorText + + def getTimeStamp(self): + return self.timeStamp + + def setTimeStamp(self, timeStamp): + self.timeStamp = timeStamp + + def getFormattedErrorMessage(self): + return "Error deleting " + self.getContextRelativePath() + ": " + self.getErrorText() + + def getContextRelativePath(self): + return str(self.getContext()) + "/" + self.getPathName() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListResponseEntry.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListResponseEntry.py new file mode 100644 index 0000000..906be7d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListResponseEntry.py @@ -0,0 +1,60 @@ + + +class ListResponseEntry(object): + + def __init__(self): + self.fileName = None + self.context = None + self.date = None + self.checksum = None + self.directory = None + self.protectedLevel = None + self.existsOnServer = None + + def getFileName(self): + return self.fileName + + def setFileName(self, fileName): + self.fileName = fileName + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context + + def getDate(self): + return self.date + + def setDate(self, date): + self.date = date + + def getChecksum(self): + return self.checksum + + def setChecksum(self, checksum): + self.checksum = checksum + + def getDirectory(self): + return self.directory + + def setDirectory(self, directory): + self.directory = directory + + def getProtectedFile(self): + return self.protectedLevel is not None + + def getProtectedLevel(self): + return self.protectedLevel + + def setProtectedLevel(self, protectedLevel): + self.protectedLevel = protectedLevel + + def getExistsOnServer(self): + return self.existsOnServer + + def setExistsOnServer(self, existsOnServer): + self.existsOnServer = existsOnServer + + def __str__(self): + return self.fileName diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityCommand.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityCommand.py new file mode 100644 index 0000000..1219cc2 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityCommand.py @@ -0,0 +1,40 @@ + + +class ListUtilityCommand(object): + + def __init__(self): + self.subDirectory = None + self.recursive = None + self.filesOnly = None + self.localizedSite = None + self.context = None + + def getSubDirectory(self): + return self.subDirectory + + def setSubDirectory(self, subDirectory): + self.subDirectory = subDirectory + + def getRecursive(self): + return self.recursive + + def setRecursive(self, recursive): + self.recursive = 
recursive + + def getFilesOnly(self): + return self.filesOnly + + def setFilesOnly(self, filesOnly): + self.filesOnly = filesOnly + + def getLocalizedSite(self): + return self.localizedSite + + def setLocalizedSite(self, localizedSite): + self.localizedSite = localizedSite + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityResponse.py new file mode 100644 index 0000000..58cf158 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/ListUtilityResponse.py @@ -0,0 +1,40 @@ + + +class ListUtilityResponse(object): + + def __init__(self): + self.entries = None + self.context = None + self.pathName = None + self.errorText = None + + def getEntries(self): + return self.entries + + def setEntries(self, entries): + self.entries = entries + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context + + def getPathName(self): + return self.pathName + + def setPathName(self, pathName): + self.pathName = pathName + + def getErrorText(self): + return self.errorText + + def setErrorText(self, errorText): + self.errorText = errorText + + def __str__(self): + if self.errorText is None: + return str(self.entries) + else: + return "Error retrieving file listing for " + self.pathName + ": " + \ + self.errorText diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/PrivilegedUtilityRequestMessage.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/PrivilegedUtilityRequestMessage.py new file mode 100644 index 0000000..9fb8604 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/PrivilegedUtilityRequestMessage.py @@ -0,0 +1,20 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.auth.user import User + + +class PrivilegedUtilityRequestMessage(object): + + def __init__(self): + self.commands = None + self.user = User() + + def getCommands(self): + return self.commands + + def setCommands(self, commands): + self.commands = commands + + def getUser(self): + return self.user + + def setUser(self, user): + self.user = user diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityRequestMessage.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityRequestMessage.py new file mode 100644 index 0000000..606dd10 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityRequestMessage.py @@ -0,0 +1,12 @@ + + +class UtilityRequestMessage(object): + + def __init__(self): + self.commands = None + + def getCommands(self): + return self.commands + + def setCommands(self, commands): + self.commands = commands diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityResponseMessage.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityResponseMessage.py new file mode 100644 index 0000000..65be40f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/UtilityResponseMessage.py @@ -0,0 +1,12 @@ + + +class UtilityResponseMessage(object): + + def __init__(self): + self.responses = None + + def getResponses(self): + return self.responses + + def setResponses(self, responses): + self.responses = responses diff --git 
a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/__init__.py new file mode 100644 index 0000000..2d9f587 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/msgs/__init__.py @@ -0,0 +1,20 @@ + +__all__ = [ + 'DeleteUtilityCommand', + 'DeleteUtilityResponse', + 'ListResponseEntry', + 'ListUtilityCommand', + 'ListUtilityResponse', + 'PrivilegedUtilityRequestMessage', + 'UtilityRequestMessage', + 'UtilityResponseMessage' + ] + +from .DeleteUtilityCommand import DeleteUtilityCommand +from .DeleteUtilityResponse import DeleteUtilityResponse +from .ListResponseEntry import ListResponseEntry +from .ListUtilityCommand import ListUtilityCommand +from .ListUtilityResponse import ListUtilityResponse +from .PrivilegedUtilityRequestMessage import PrivilegedUtilityRequestMessage +from .UtilityRequestMessage import UtilityRequestMessage +from .UtilityResponseMessage import UtilityResponseMessage diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/AbstractLocalizationStreamRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/AbstractLocalizationStreamRequest.py new file mode 100644 index 0000000..8fff94e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/AbstractLocalizationStreamRequest.py @@ -0,0 +1,39 @@ +from six import with_metaclass +import abc +import os +from dynamicserialize.dstypes.com.raytheon.uf.common.auth.user import User + + +class AbstractLocalizationStreamRequest(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __init__(self): + self.context = None + self.fileName = None + self.myContextName = None + self.user = User() + + def getContext(self): + return self.context + + def setContext(self, context): + self.context = context + + def getFileName(self): + return self.fileName + + def setFileName(self, fileName): + if fileName[0] == os.sep: + fileName = fileName[1:] + self.fileName = fileName + + def getMyContextName(self): + return self.myContextName + + def setMyContextName(self, contextName): + self.myContextName = str(contextName) + + def getUser(self): + return self.user + + def setUser(self, user): + self.user = user diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamGetRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamGetRequest.py new file mode 100644 index 0000000..c1479f7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamGetRequest.py @@ -0,0 +1,21 @@ +from dynamicserialize.dstypes.com.raytheon.uf.common.localization.stream import AbstractLocalizationStreamRequest + + +class LocalizationStreamGetRequest(AbstractLocalizationStreamRequest): + + def __init__(self): + super(LocalizationStreamGetRequest, self).__init__() + self.offset = None + self.numBytes = None + + def getOffset(self): + return self.offset + + def setOffset(self, offset): + self.offset = offset + + def getNumBytes(self): + return self.numBytes + + def setNumBytes(self, numBytes): + self.numBytes = numBytes diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamPutRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamPutRequest.py new file mode 100644 index 0000000..f46ec7d --- /dev/null +++ 
b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/LocalizationStreamPutRequest.py @@ -0,0 +1,44 @@ + +import uuid +from dynamicserialize.dstypes.com.raytheon.uf.common.localization.stream import AbstractLocalizationStreamRequest + + +class LocalizationStreamPutRequest(AbstractLocalizationStreamRequest): + + def __init__(self): + super(LocalizationStreamPutRequest, self).__init__() + self.id = str(uuid.uuid4()) + self.bytes = None + self.end = None + self.offset = None + self.localizedSite = None + + def getId(self): + return self.id + + def setId(self, requestid): + self.id = requestid + + def getBytes(self): + return self.bytes + + def setBytes(self, streambytes): + self.bytes = streambytes + + def getEnd(self): + return self.end + + def setEnd(self, end): + self.end = end + + def getOffset(self): + return self.offset + + def setOffset(self, offset): + self.offset = offset + + def getLocalizedSite(self): + return self.localizedSite + + def setLocalizedSite(self, localizedSite): + self.localizedSite = localizedSite diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/__init__.py new file mode 100644 index 0000000..61fefc3 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/localization/stream/__init__.py @@ -0,0 +1,10 @@ + +__all__ = [ + 'AbstractLocalizationStreamRequest', + 'LocalizationStreamGetRequest', + 'LocalizationStreamPutRequest' + ] + +from .AbstractLocalizationStreamRequest import AbstractLocalizationStreamRequest +from .LocalizationStreamGetRequest import LocalizationStreamGetRequest +from .LocalizationStreamPutRequest import LocalizationStreamPutRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/__init__.py new file mode 100644 index 0000000..87fb079 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'request', + 'response' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/ChangeContextRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/ChangeContextRequest.py new file mode 100644 index 0000000..675429d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/ChangeContextRequest.py @@ -0,0 +1,19 @@ + + +class ChangeContextRequest(object): + + def __init__(self): + self.action = None + self.contextName = None + + def getAction(self): + return self.action + + def setAction(self, action): + self.action = action + + def getContextName(self): + return self.contextName + + def setContextName(self, contextName): + self.contextName = contextName diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/PassThroughRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/PassThroughRequest.py new file mode 100644 index 0000000..0913594 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/PassThroughRequest.py @@ -0,0 +1,26 @@ + + +class PassThroughRequest(object): + + def __init__(self): + self.request = None + self.hostname = None + self.jvmName = None + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request + + def getHostname(self): + return self.hostname + + def setHostname(self, hostname): + self.hostname = hostname + + def getJvmName(self): + 
return self.jvmName + + def setJvmName(self, jvmName): + self.jvmName = jvmName diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/__init__.py new file mode 100644 index 0000000..8bf2c4d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/__init__.py @@ -0,0 +1,9 @@ + +__all__ = [ + 'diagnostic', + 'ChangeContextRequest', + 'PassThroughRequest' + ] + +from .ChangeContextRequest import ChangeContextRequest +from .PassThroughRequest import PassThroughRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetClusterMembersRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetClusterMembersRequest.py new file mode 100644 index 0000000..bed8155 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetClusterMembersRequest.py @@ -0,0 +1,6 @@ + + +class GetClusterMembersRequest(object): + + def __init__(self): + pass diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetContextsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetContextsRequest.py new file mode 100644 index 0000000..be04a3a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/GetContextsRequest.py @@ -0,0 +1,12 @@ + + +class GetContextsRequest(object): + + def __init__(self): + self.contextState = None + + def getContextState(self): + return self.contextState + + def setContextState(self, contextState): + self.contextState = contextState diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/StatusRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/StatusRequest.py new file mode 100644 index 0000000..72f5165 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/StatusRequest.py @@ -0,0 +1,6 @@ + + +class StatusRequest(object): + + def __init__(self): + pass diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/__init__.py new file mode 100644 index 0000000..dfa38a7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/request/diagnostic/__init__.py @@ -0,0 +1,10 @@ + +__all__ = [ + 'GetClusterMembersRequest', + 'GetContextsRequest', + 'StatusRequest' + ] + +from .GetClusterMembersRequest import GetClusterMembersRequest +from .GetContextsRequest import GetContextsRequest +from .StatusRequest import StatusRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/__init__.py new file mode 100644 index 0000000..5881904 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'diagnostic' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ClusterMembersResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ClusterMembersResponse.py new file mode 100644 index 0000000..4243ed4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ClusterMembersResponse.py @@ -0,0 
+1,18 @@ + + +class ClusterMembersResponse(object): + + def __init__(self): + self.status = None + + def getStatus(self): + return self.status + + def setStatus(self, status): + self.status = status + + def __repr__(self): + msg = '' + for x in self.status: + msg += str(x) + '\n' + return msg diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ContextsResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ContextsResponse.py new file mode 100644 index 0000000..2aed7fa --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/ContextsResponse.py @@ -0,0 +1,22 @@ + + +class ContextsResponse(object): + + def __init__(self): + self.contextState = None + self.contexts = None + + def getContextState(self): + return self.contextState + + def setContextState(self, contextState): + self.contextState = contextState + + def getContexts(self): + return self.contexts + + def setContexts(self, contexts): + self.contexts = contexts + + def __repr__(self): + return str(self.contexts) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/StatusResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/StatusResponse.py new file mode 100644 index 0000000..5f9649b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/StatusResponse.py @@ -0,0 +1,29 @@ + + +class StatusResponse(object): + + def __init__(self): + self.hostname = None + self.jvmName = None + self.statistics = None + + def getHostname(self): + return self.hostname + + def setHostname(self, hostname): + self.hostname = hostname + + def getJvmName(self): + return self.jvmName + + def setJvmName(self, jvmName): + self.jvmName = jvmName + + def getStatistics(self): + return self.statistics + + def setStatistics(self, statistics): + self.statistics = statistics + + def __repr__(self): + return self.hostname + ':' + self.jvmName diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/__init__.py new file mode 100644 index 0000000..1cc4ead --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/management/response/diagnostic/__init__.py @@ -0,0 +1,10 @@ + +__all__ = [ + 'ClusterMembersResponse', + 'ContextsResponse', + 'StatusResponse' + ] + +from .ClusterMembersResponse import ClusterMembersResponse +from .ContextsResponse import ContextsResponse +from .StatusResponse import StatusResponse diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/message/Body.py b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Body.py new file mode 100644 index 0000000..b7dfffd --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Body.py @@ -0,0 +1,12 @@ + + +class Body(object): + + def __init__(self): + self.responses = None + + def getResponses(self): + return self.responses + + def setResponses(self, responses): + self.responses = responses diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/message/Header.py b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Header.py new file mode 100644 index 0000000..8d09aa7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Header.py @@ -0,0 +1,21 @@ +from .Property import Property + + +class Header(object): + + def __init__(self, properties=None, multimap=None): + if properties is 
None: + self.properties = [] + else: + self.properties = properties + + if multimap is not None: + for k, l in multimap.items(): + for v in l: + self.properties.append(Property(k, v)) + + def getProperties(self): + return self.properties + + def setProperties(self, properties): + self.properties = properties diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/message/Message.py b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Message.py new file mode 100644 index 0000000..2b38d9b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Message.py @@ -0,0 +1,19 @@ + + +class Message(object): + + def __init__(self, header=None, body=None): + self.header = header + self.body = body + + def getHeader(self): + return self.header + + def setHeader(self, header): + self.header = header + + def getBody(self): + return self.body + + def setBody(self, body): + self.body = body diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/message/Property.py b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Property.py new file mode 100644 index 0000000..5234a76 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/message/Property.py @@ -0,0 +1,19 @@ + + +class Property(object): + + def __init__(self, name=None, value=None): + self.name = name + self.value = value + + def getName(self): + return self.name + + def setName(self, name): + self.name = name + + def getValue(self): + return self.value + + def setValue(self, value): + self.value = value diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/message/WsId.py b/dynamicserialize/dstypes/com/raytheon/uf/common/message/WsId.py new file mode 100644 index 0000000..f0bd3c0 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/message/WsId.py @@ -0,0 +1,86 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Apr 25, 2012 545 randerso Repurposed the lockKey field as threadId +# Jun 12, 2013 2099 dgilling Implemented toPrettyString(). 
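A brief, illustrative sketch (not part of the patch itself) of the message types defined just above: the Header constructor expands a multimap, i.e. a dict mapping each key to a list of values, into one Property per key/value pair. The property name and values below are made up.

    from dynamicserialize.dstypes.com.raytheon.uf.common.message.Header import Header
    from dynamicserialize.dstypes.com.raytheon.uf.common.message.Message import Message
    from dynamicserialize.dstypes.com.raytheon.uf.common.message.Body import Body

    hdr = Header(multimap={'MSG_TYPE': ['status', 'ack']})
    # hdr.getProperties() now holds two Property objects:
    # ('MSG_TYPE', 'status') and ('MSG_TYPE', 'ack')
    body = Body()
    body.setResponses([])
    msg = Message(header=hdr, body=body)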
+# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# + +import struct +import socket +import os +try: + import pwd + pwd_error = False +except ImportError: + pwd_error = True +try: + import _thread +except ImportError: + import thread as _thread + + +class WsId(object): + + def __init__(self, networkId=None, userName=None, progName=None): + self.networkId = networkId + if networkId is None: + self.networkId = str(struct.unpack('') + compressedBuffer = numpy.getbuffer(self.compressedData) + self.compressedData = None + uncompressedSize = datatype.itemsize + for s in self.sizes: + uncompressedSize *= s + + # zlib.MAX_WBITS | 16, add 16 to window bits to support gzip header/trailer + # http://www.zlib.net/manual.html#Advanced + decompressedBuffer = zlib.decompress(compressedBuffer, zlib.MAX_WBITS | 16, uncompressedSize) + self.uncompressedData = numpy.frombuffer(decompressedBuffer, datatype) + + def retrieveDataObject(self): + if self.uncompressedData is None: + self.decompress() + return self.uncompressedData + + def putDataObject(self, obj): + self.compressedData = None + self.uncompressedData = obj + + prepareStore = decompress diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/records/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/records/__init__.py new file mode 100644 index 0000000..c21aec0 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/records/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'CompressedDataRecord' + ] + +from .CompressedDataRecord import CompressedDataRecord diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CopyRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CopyRequest.py new file mode 100644 index 0000000..adb09ee --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CopyRequest.py @@ -0,0 +1,47 @@ + + +class CopyRequest(object): + + def __init__(self): + self.repack = None + self.repackCompression = None + self.outputDir = None + self.minMillisSinceLastChange = None + self.maxMillisSinceLastChange = None + self.filename = None + + def getRepack(self): + return self.repack + + def setRepack(self, repack): + self.repack = repack + + def getRepackCompression(self): + return self.repackCompression + + def setRepackCompression(self, repackCompression): + self.repackCompression = repackCompression + + def getOutputDir(self): + return self.outputDir + + def setOutputDir(self, outputDir): + self.outputDir = outputDir + + def getMinMillisSinceLastChange(self): + return self.minMillisSinceLastChange + + def setMinMillisSinceLastChange(self, minMillisSinceLastChange): + self.minMillisSinceLastChange = minMillisSinceLastChange + + def getMaxMillisSinceLastChange(self): + return self.maxMillisSinceLastChange + + def setMaxMillisSinceLastChange(self, maxMillisSinceLastChange): + self.maxMillisSinceLastChange = maxMillisSinceLastChange + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CreateDatasetRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CreateDatasetRequest.py new file mode 100644 index 0000000..dc22048 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/CreateDatasetRequest.py @@ -0,0 +1,19 @@ + + +class CreateDatasetRequest(object): + + def __init__(self): + self.record = None + self.filename = None + + def 
getRecord(self): + return self.record + + def setRecord(self, record): + self.record = record + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetDataRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetDataRequest.py new file mode 100644 index 0000000..e1f36ce --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetDataRequest.py @@ -0,0 +1,26 @@ + + +class DatasetDataRequest(object): + + def __init__(self): + self.datasetGroupPath = None + self.request = None + self.filename = None + + def getDatasetGroupPath(self): + return self.datasetGroupPath + + def setDatasetGroupPath(self, datasetGroupPath): + self.datasetGroupPath = datasetGroupPath + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetNamesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetNamesRequest.py new file mode 100644 index 0000000..3eb6fbd --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DatasetNamesRequest.py @@ -0,0 +1,19 @@ + + +class DatasetNamesRequest(object): + + def __init__(self): + self.group = None + self.filename = None + + def getGroup(self): + return self.group + + def setGroup(self, group): + self.group = group + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteFilesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteFilesRequest.py new file mode 100644 index 0000000..c6da193 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteFilesRequest.py @@ -0,0 +1,18 @@ + + +class DeleteFilesRequest(object): + + def __init__(self): + self.datesToDelete = None + + def getDatesToDelete(self): + return self.datesToDelete + + def setDatesToDelete(self, datesToDelete): + self.datesToDelete = datesToDelete + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteOrphansRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteOrphansRequest.py new file mode 100644 index 0000000..a95fc00 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteOrphansRequest.py @@ -0,0 +1,26 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jul 27, 2015 1574 nabowle Generated +# Feb 23, 2016 5389 nabowle Regenerated + + +class DeleteOrphansRequest(object): + + def __init__(self): + self.oldestDateMap = None + self.filename = None + + def getOldestDateMap(self): + return self.oldestDateMap + + def setOldestDateMap(self, oldestDateMap): + self.oldestDateMap = oldestDateMap + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteRequest.py 
b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteRequest.py new file mode 100644 index 0000000..d6d7526 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/DeleteRequest.py @@ -0,0 +1,26 @@ + + +class DeleteRequest(object): + + def __init__(self): + self.datasets = None + self.groups = None + self.filename = None + + def getDatasets(self): + return self.datasets + + def setDatasets(self, datasets): + self.datasets = datasets + + def getGroups(self): + return self.groups + + def setGroups(self, groups): + self.groups = groups + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/GroupsRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/GroupsRequest.py new file mode 100644 index 0000000..82c10c3 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/GroupsRequest.py @@ -0,0 +1,26 @@ + + +class GroupsRequest(object): + + def __init__(self): + self.groups = None + self.request = None + self.filename = None + + def getGroups(self): + return self.groups + + def setGroups(self, groups): + self.groups = groups + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RepackRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RepackRequest.py new file mode 100644 index 0000000..481bf22 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RepackRequest.py @@ -0,0 +1,19 @@ + + +class RepackRequest(object): + + def __init__(self): + self.compression = None + self.filename = None + + def getCompression(self): + return self.compression + + def setCompression(self, compression): + self.compression = compression + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RetrieveRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RetrieveRequest.py new file mode 100644 index 0000000..839f287 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/RetrieveRequest.py @@ -0,0 +1,33 @@ + + +class RetrieveRequest(object): + + def __init__(self): + self.group = None + self.dataset = None + self.request = None + self.filename = None + + def getGroup(self): + return self.group + + def setGroup(self, group): + self.group = group + + def getDataset(self): + return self.dataset + + def setDataset(self, dataset): + self.dataset = dataset + + def getRequest(self): + return self.request + + def setRequest(self, request): + self.request = request + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/StoreRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/StoreRequest.py new file mode 100644 index 0000000..d18f0b4 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/StoreRequest.py @@ -0,0 +1,26 @@ + + +class StoreRequest(object): + + def __init__(self): + self.op = None + self.records = None + self.filename = None + + def 
getOp(self): + return self.op + + def setOp(self, op): + self.op = op + + def getRecords(self): + return self.records + + def setRecords(self, records): + self.records = records + + def getFilename(self): + return self.filename + + def setFilename(self, filename): + self.filename = filename diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/__init__.py new file mode 100644 index 0000000..766797d --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/request/__init__.py @@ -0,0 +1,26 @@ + +__all__ = [ + 'CopyRequest', + 'CreateDatasetRequest', + 'DatasetDataRequest', + 'DatasetNamesRequest', + 'DeleteFilesRequest', + 'DeleteOrphansRequest', + 'DeleteRequest', + 'GroupsRequest', + 'RepackRequest', + 'RetrieveRequest', + 'StoreRequest' + ] + +from .CopyRequest import CopyRequest +from .CreateDatasetRequest import CreateDatasetRequest +from .DatasetDataRequest import DatasetDataRequest +from .DatasetNamesRequest import DatasetNamesRequest +from .DeleteFilesRequest import DeleteFilesRequest +from .DeleteOrphansRequest import DeleteOrphansRequest +from .DeleteRequest import DeleteRequest +from .GroupsRequest import GroupsRequest +from .RepackRequest import RepackRequest +from .RetrieveRequest import RetrieveRequest +from .StoreRequest import StoreRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/DeleteResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/DeleteResponse.py new file mode 100644 index 0000000..f95180f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/DeleteResponse.py @@ -0,0 +1,12 @@ + + +class DeleteResponse(object): + + def __init__(self): + self.success = None + + def getSuccess(self): + return self.success + + def setSuccess(self, success): + self.success = success diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/ErrorResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/ErrorResponse.py new file mode 100644 index 0000000..526c34f --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/ErrorResponse.py @@ -0,0 +1,12 @@ + + +class ErrorResponse(object): + + def __init__(self): + self.error = None + + def getError(self): + return self.error + + def setError(self, error): + self.error = error diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/FileActionResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/FileActionResponse.py new file mode 100644 index 0000000..9b7a625 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/FileActionResponse.py @@ -0,0 +1,19 @@ + + +class FileActionResponse(object): + + def __init__(self): + self.successfulFiles = None + self.failedFiles = None + + def getSuccessfulFiles(self): + return self.successfulFiles + + def setSuccessfulFiles(self, successfulFiles): + self.successfulFiles = successfulFiles + + def getFailedFiles(self): + return self.failedFiles + + def setFailedFiles(self, failedFiles): + self.failedFiles = failedFiles diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/RetrieveResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/RetrieveResponse.py new file mode 100644 index 0000000..a354f92 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/RetrieveResponse.py @@ -0,0 +1,12 @@ + + +class 
RetrieveResponse(object): + + def __init__(self): + self.records = None + + def getRecords(self): + return self.records + + def setRecords(self, records): + self.records = records diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/StoreResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/StoreResponse.py new file mode 100644 index 0000000..f66a596 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/StoreResponse.py @@ -0,0 +1,26 @@ + + +class StoreResponse(object): + + def __init__(self): + self.status = None + self.exceptions = None + self.failedRecords = None + + def getStatus(self): + return self.status + + def setStatus(self, status): + self.status = status + + def getExceptions(self): + return self.exceptions + + def setExceptions(self, exceptions): + self.exceptions = exceptions + + def getFailedRecords(self): + return self.failedRecords + + def setFailedRecords(self, failedRecords): + self.failedRecords = failedRecords diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/__init__.py new file mode 100644 index 0000000..22ac327 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/pypies/response/__init__.py @@ -0,0 +1,14 @@ + +__all__ = [ + 'DeleteResponse', + 'ErrorResponse', + 'FileActionResponse', + 'RetrieveResponse', + 'StoreResponse' + ] + +from .DeleteResponse import DeleteResponse +from .ErrorResponse import ErrorResponse +from .FileActionResponse import FileActionResponse +from .RetrieveResponse import RetrieveResponse +from .StoreResponse import StoreResponse diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/SerializableExceptionWrapper.py b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/SerializableExceptionWrapper.py new file mode 100644 index 0000000..0014102 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/SerializableExceptionWrapper.py @@ -0,0 +1,55 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 2015-02-27 4174 nabowle Output full stacktrace. +# 2018-10-05 mjames@ucar Fix returned retVal encoding. 
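An illustrative sketch (not from the original files) of how the pypies request objects defined above are populated; the file, group, and dataset names are hypothetical.

    from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import RetrieveRequest

    req = RetrieveRequest()
    req.setFilename('/path/to/some/datastore.h5')   # hypothetical HDF5 file
    req.setGroup('/someGroup')                      # hypothetical group path
    req.setDataset('Data')                          # hypothetical dataset name
    # the matching RetrieveResponse (defined above) carries the records back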
+# + + +class SerializableExceptionWrapper(object): + + def __init__(self): + self.stackTrace = None + self.message = None + self.exceptionClass = None + self.wrapper = None + + def __str__(self): + return self.__repr__() + + def __repr__(self): + if not self.message: + self.message = '' + retVal = "" + str(self.exceptionClass) + " exception thrown: " + str(self.message) + "\n" + for element in self.stackTrace: + retVal += "\tat " + str(element) + "\n" + + if self.wrapper: + retVal += "Caused by: " + self.wrapper.__repr__() + return str(retVal) + + def getStackTrace(self): + return self.stackTrace + + def setStackTrace(self, stackTrace): + self.stackTrace = stackTrace + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message + + def getExceptionClass(self): + return self.exceptionClass + + def setExceptionClass(self, exceptionClass): + self.exceptionClass = exceptionClass + + def getWrapper(self): + return self.wrapper + + def setWrapper(self, wrapper): + self.wrapper = wrapper diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/__init__.py new file mode 100644 index 0000000..29e6b99 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/__init__.py @@ -0,0 +1,7 @@ + +__all__ = [ + 'comm', + 'SerializableExceptionWrapper' + ] + +from .SerializableExceptionWrapper import SerializableExceptionWrapper diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/__init__.py new file mode 100644 index 0000000..1406bfb --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'response' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/ServerErrorResponse.py b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/ServerErrorResponse.py new file mode 100644 index 0000000..832782b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/ServerErrorResponse.py @@ -0,0 +1,12 @@ + + +class ServerErrorResponse(object): + + def __init__(self): + self.exception = None + + def getException(self): + return self.exception + + def setException(self, exception): + self.exception = exception diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/__init__.py new file mode 100644 index 0000000..fcd7335 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/serialization/comm/response/__init__.py @@ -0,0 +1,18 @@ +# +# Package definition for com.raytheon.uf.common.serialization.comm.response +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/21/10 njensen Initial Creation. 
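An illustrative example (all values are made up) of how SerializableExceptionWrapper, defined above, renders a wrapped server-side exception:

    from dynamicserialize.dstypes.com.raytheon.uf.common.serialization import SerializableExceptionWrapper

    w = SerializableExceptionWrapper()
    w.setExceptionClass('java.lang.IllegalStateException')    # hypothetical class
    w.setMessage('no active site')                             # hypothetical message
    w.setStackTrace(['com.raytheon.Foo.bar(Foo.java:42)'])     # hypothetical frame
    print(w)
    # java.lang.IllegalStateException exception thrown: no active site
    #     at com.raytheon.Foo.bar(Foo.java:42)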
+# +# +# + +__all__ = [ + 'ServerErrorResponse' + ] + +from .ServerErrorResponse import ServerErrorResponse diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/__init__.py new file mode 100644 index 0000000..2005420 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/__init__.py @@ -0,0 +1,5 @@ + +__all__ = [ + 'notify', + 'requests' + ] diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/ClusterActivationNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/ClusterActivationNotification.py new file mode 100644 index 0000000..a19e0c7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/ClusterActivationNotification.py @@ -0,0 +1,39 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/10/14 #3623 randerso Manually created, do not regenerate +# + +from .SiteActivationNotification import SiteActivationNotification + + +class ClusterActivationNotification(SiteActivationNotification): + + def __init__(self): + self.clusterActive = False + SiteActivationNotification.__init__(self) + + def isClusterActive(self): + return self.clusterActive + + def setClusterActive(self, clusterActive): + self.clusterActive = clusterActive + + def __str__(self): + s = self.modifiedSite + + if self.type == 'ACTIVATE': + if self.status == 'FAILURE': + s += " has failed to activate on some or all cluster members. See logs for details" + else: + s += " has been successfully activated on all cluster members" + + else: + if self.status == 'FAILURE': + s += " has failed to deactivate on some or all cluster members. See logs for details" + else: + s += " has been successfully deactivated on all cluster members" + + return s diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/SiteActivationNotification.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/SiteActivationNotification.py new file mode 100644 index 0000000..8672a16 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/SiteActivationNotification.py @@ -0,0 +1,69 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/10/14 #3623 randerso Manually created, do not regenerate +# + + +class SiteActivationNotification(object): + + def __init__(self): + self.type = None + self.status = None + self.primarySite = None + self.modifiedSite = None + self.runMode = None + self.serverName = None + self.pluginName = None + + def getType(self): + return self.type + + def setType(self, notificationType): + self.type = notificationType + + def getStatus(self): + return self.status + + def setStatus(self, status): + self.status = status + + def getPrimarySite(self): + return self.primarySite + + def setPrimarySite(self, primarysite): + self.primarySite = primarysite + + def getModifiedSite(self): + return self.modifiedSite + + def setModifiedSite(self, modifiedSite): + self.modifiedSite = modifiedSite + + def getRunMode(self): + return self.runMode + + def setRunMode(self, runMode): + self.runMode = runMode + + def getServerName(self): + return self.serverName + + def setServerName(self, serverName): + self.serverName = serverName + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def __str__(self): + return 
self.pluginName.upper() + ":" \ + + self.status + ":" \ + + self.type + " " \ + + self.modifiedSite.upper() + " on " \ + + self.serverName + ":" \ + + self.runMode diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/__init__.py new file mode 100644 index 0000000..b8c384e --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/notify/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'ClusterActivationNotification', + 'SiteActivationNotification' + ] + +from .ClusterActivationNotification import ClusterActivationNotification +from .SiteActivationNotification import SiteActivationNotification diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ActivateSiteRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ActivateSiteRequest.py new file mode 100644 index 0000000..8b9f225 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ActivateSiteRequest.py @@ -0,0 +1,26 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/10/14 #3623 randerso Manually created, do not regenerate +# + + +class ActivateSiteRequest(object): + + def __init__(self, siteID=None, plugin=None): + self.siteID = siteID + self.plugin = plugin + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID + + def getPlugin(self): + return self.plugin + + def setPlugin(self, plugin): + self.plugin = plugin diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/DeactivateSiteRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/DeactivateSiteRequest.py new file mode 100644 index 0000000..d6adafe --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/DeactivateSiteRequest.py @@ -0,0 +1,26 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/10/14 #3623 randerso Manually created, do not regenerate +# + + +class DeactivateSiteRequest(object): + + def __init__(self, siteID=None, plugin=None): + self.siteID = siteID + self.plugin = plugin + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID + + def getPlugin(self): + return self.plugin + + def setPlugin(self, plugin): + self.plugin = plugin diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetActiveSitesRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetActiveSitesRequest.py new file mode 100644 index 0000000..8f04b7a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetActiveSitesRequest.py @@ -0,0 +1,6 @@ + + +class GetActiveSitesRequest(object): + + def __init__(self): + super(GetActiveSitesRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetPrimarySiteRequest.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetPrimarySiteRequest.py new file mode 100644 index 0000000..1f9c9a7 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/GetPrimarySiteRequest.py @@ -0,0 +1,6 @@ + + +class GetPrimarySiteRequest(object): + + def __init__(self): + super(GetPrimarySiteRequest, self).__init__() diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ValidateConfigRequest.py 
b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ValidateConfigRequest.py new file mode 100644 index 0000000..f9f391c --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/ValidateConfigRequest.py @@ -0,0 +1,26 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/12/16 #5888 dgilling Initial creation. +# + + +class ValidateConfigRequest(object): + + def __init__(self, siteID=None, plugin=None): + self.siteID = siteID + self.plugin = plugin + + def getSiteID(self): + return self.siteID + + def setSiteID(self, siteID): + self.siteID = siteID + + def getPlugin(self): + return self.plugin + + def setPlugin(self, plugin): + self.plugin = plugin diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/__init__.py new file mode 100644 index 0000000..3cebd9b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/site/requests/__init__.py @@ -0,0 +1,14 @@ + +__all__ = [ + 'ActivateSiteRequest', + 'DeactivateSiteRequest', + 'GetActiveSitesRequest', + 'GetPrimarySiteRequest', + 'ValidateConfigRequest' + ] + +from .ActivateSiteRequest import ActivateSiteRequest +from .DeactivateSiteRequest import DeactivateSiteRequest +from .GetActiveSitesRequest import GetActiveSitesRequest +from .GetPrimarySiteRequest import GetPrimarySiteRequest +from .ValidateConfigRequest import ValidateConfigRequest diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/time/CommutativeTimestamp.py b/dynamicserialize/dstypes/com/raytheon/uf/common/time/CommutativeTimestamp.py new file mode 100644 index 0000000..bc3c247 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/time/CommutativeTimestamp.py @@ -0,0 +1,15 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/23/2016 #5696 rjpeter Initial creation. +# + +from dynamicserialize.dstypes.java.sql import Timestamp + + +class CommutativeTimestamp(Timestamp): + + def __init__(self, timeInMillis=None): + super(CommutativeTimestamp, self).__init__(timeInMillis) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/time/DataTime.py b/dynamicserialize/dstypes/com/raytheon/uf/common/time/DataTime.py new file mode 100644 index 0000000..5df62f9 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/time/DataTime.py @@ -0,0 +1,265 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/?? xxxxxxxx Initial Creation. +# 05/28/13 2023 dgilling Implement __str__(). +# 01/22/14 2667 bclement preserved milliseconds in string representation +# 03/03/14 2673 bsteffen allow construction using a Date for refTime +# 06/24/14 3096 mnash implement __cmp__ +# 06/24/15 4480 dgilling implement __hash__ and __eq__, +# replace __cmp__ with rich comparison +# operators. +# 05/26/16 2416 rjpeter Added str based constructor. +# 08/02/16 2416 tgurney Forecast time regex bug fix, +# plus misc cleanup +# + +import calendar +import datetime +import re +import time +import numpy +from six.moves import cStringIO as StringIO + +from dynamicserialize.dstypes.java.util import Date +from dynamicserialize.dstypes.java.util import EnumSet + +from .TimeRange import TimeRange + +_DATE = r'(\d{4}-\d{2}-\d{2})' +_TIME = r'(\d{2}:\d{2}:\d{2})' +_MILLIS = '(?:\.(\d{1,3})(?:\d{1,4})?)?' 
# might have microsecond but that is thrown out +REFTIME_PATTERN_STR = _DATE + '[ _]' + _TIME + _MILLIS +FORECAST_PATTERN_STR = r'(?:[ _]\((\d+)(?::(\d{1,2}))?\))?' +VALID_PERIOD_PATTERN_STR = r'(?:\[' + REFTIME_PATTERN_STR + '--' + REFTIME_PATTERN_STR + r'\])?' +STR_PATTERN = re.compile(REFTIME_PATTERN_STR + FORECAST_PATTERN_STR + VALID_PERIOD_PATTERN_STR) + + +class DataTime(object): + + def __init__(self, refTime=None, fcstTime=None, validPeriod=None): + """ + Construct a new DataTime. + May also be called as DataTime(str) to parse a string and create a + DataTime from it. Some examples of valid DataTime strings: + + '2016-08-02 01:23:45.0' + '2016-08-02 01:23:45.123' + '2016-08-02 01:23:45.0 (17)', + '2016-08-02 01:23:45.0 (17:34)' + '2016-08-02 01:23:45.0[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]' + '2016-08-02 01:23:45.456_(17:34)[2016-08-02_02:34:45.0--2016-08-02_03:45:56.0]' + """ + if fcstTime is not None: + self.fcstTime = int(fcstTime) + else: + self.fcstTime = 0 + self.refTime = refTime + if validPeriod is not None and not isinstance(validPeriod, TimeRange): + raise ValueError("Invalid validPeriod object specified for DataTime.") + self.validPeriod = validPeriod + self.utilityFlags = EnumSet('com.raytheon.uf.common.time.DataTime$FLAG') + self.levelValue = numpy.float64(-1.0) + + if self.refTime is not None: + if isinstance(self.refTime, datetime.datetime): + self.refTime = int(calendar.timegm(self.refTime.utctimetuple()) * 1000) + elif isinstance(self.refTime, time.struct_time): + self.refTime = int(calendar.timegm(self.refTime) * 1000) + elif hasattr(self.refTime, 'getTime'): + # getTime should be returning ms, there is no way to check this + # This is expected for java Date + self.refTime = int(self.refTime.getTime()) + else: + try: + self.refTime = int(self.refTime) + except ValueError: + # Assume first arg is a string. Attempt to parse. 
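                    # Illustrative note (not in the original source): for a string
                    # such as '2016-08-02 01:23:45.0 (17:34)', STR_PATTERN.match()
                    # returns groups ('2016-08-02', '01:23:45', '0', '17', '34',
                    # None, None, None, None, None, None), i.e. date, time, millis,
                    # forecast hours, forecast minutes, then the six optional
                    # valid-period start/end fields consumed below.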
+ match = STR_PATTERN.match(self.refTime) + if match is None: + raise ValueError('Could not parse DataTime info from ' + + str(refTime)) + + groups = match.groups() + rMillis = groups[2] or 0 + fcstTimeHr = groups[3] + fcstTimeMin = groups[4] + periodStart = groups[5], groups[6], (groups[7] or 0) + periodEnd = groups[8], groups[9], (groups[10] or 0) + self.refTime = self._getTimeAsEpochMillis(groups[0], groups[1], rMillis) + + if fcstTimeHr is not None: + self.fcstTime = int(fcstTimeHr) * 3600 + if fcstTimeMin is not None: + self.fcstTime += int(fcstTimeMin) * 60 + + if periodStart[0] is not None: + self.validPeriod = TimeRange() + periodStartTime = self._getTimeAsEpochMillis(*periodStart) + self.validPeriod.setStart(periodStartTime / 1000) + periodEndTime = self._getTimeAsEpochMillis(*periodEnd) + self.validPeriod.setEnd(periodEndTime / 1000) + + self.refTime = Date(self.refTime) + + if self.validPeriod is None: + validTimeMillis = self.refTime.getTime() + int(self.fcstTime * 1000) + self.validPeriod = TimeRange() + self.validPeriod.setStart(validTimeMillis / 1000) + self.validPeriod.setEnd(validTimeMillis / 1000) + + # figure out utility flags + if self.fcstTime: + self.utilityFlags.add("FCST_USED") + if self.validPeriod and self.validPeriod.isValid(): + self.utilityFlags.add("PERIOD_USED") + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getFcstTime(self): + return self.fcstTime + + def setFcstTime(self, fcstTime): + self.fcstTime = fcstTime + + def getValidPeriod(self): + return self.validPeriod + + def setValidPeriod(self, validPeriod): + self.validPeriod = validPeriod + + def getUtilityFlags(self): + return self.utilityFlags + + def setUtilityFlags(self, utilityFlags): + self.utilityFlags = utilityFlags + + def getLevelValue(self): + return self.levelValue + + def setLevelValue(self, levelValue): + self.levelValue = numpy.float64(levelValue) + + def __str__(self): + sbuffer = StringIO() + + if self.refTime is not None: + refTimeInSecs = self.refTime.getTime() / 1000 + micros = (self.refTime.getTime() % 1000) * 1000 + dtObj = datetime.datetime.utcfromtimestamp(refTimeInSecs) + dtObj = dtObj.replace(microsecond=micros) + # This won't be compatible with java or string from java since its to microsecond + sbuffer.write(dtObj.isoformat(' ')) + + if "FCST_USED" in self.utilityFlags: + hrs = int(self.fcstTime / 3600) + mins = int((self.fcstTime - (hrs * 3600)) / 60) + sbuffer.write(" (" + str(hrs)) + if mins != 0: + sbuffer.write(":" + str(mins)) + sbuffer.write(")") + + if "PERIOD_USED" in self.utilityFlags: + sbuffer.write("[") + sbuffer.write(self.validPeriod.start.isoformat(' ')) + sbuffer.write("--") + sbuffer.write(self.validPeriod.end.isoformat(' ')) + sbuffer.write("]") + + strVal = sbuffer.getvalue() + sbuffer.close() + return strVal + + def __repr__(self): + return "" + + def __hash__(self): + hashCode = hash(self.refTime) ^ hash(self.fcstTime) + if self.validPeriod is not None and self.validPeriod.isValid(): + hashCode ^= hash(self.validPeriod.getStart()) + hashCode ^= hash(self.validPeriod.getEnd()) + hashCode ^= hash(self.levelValue) + return hashCode + + def __eq__(self, other): + if not isinstance(self, type(other)): + return False + + if other.getRefTime() is None: + return self.fcstTime == other.fcstTime + + dataTime1 = (self.refTime, self.fcstTime, self.validPeriod, self.levelValue) + dataTime2 = (other.refTime, other.fcstTime, other.validPeriod, other.levelValue) + return dataTime1 == dataTime2 + + def 
__ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + + myValidTime = self.getRefTime().getTime() + self.getFcstTime() + otherValidTime = other.getRefTime().getTime() + other.getFcstTime() + if myValidTime < otherValidTime: + return True + + if self.fcstTime < other.fcstTime: + return True + + if self.levelValue < other.levelValue: + return True + + myValidPeriod = self.validPeriod + otherValidPeriod = other.validPeriod + if myValidPeriod != otherValidPeriod: + if myValidPeriod.duration() < otherValidPeriod.duration(): + return True + return myValidPeriod.getStartInMillis() < otherValidPeriod.getStartInMillis() + return False + + def __le__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + + return self.__lt__(other) or self.__eq__(other) + + def __gt__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + + myValidTime = self.getRefTime().getTime() + self.getFcstTime() + otherValidTime = other.getRefTime().getTime() + other.getFcstTime() + if myValidTime > otherValidTime: + return True + + if self.fcstTime > other.fcstTime: + return True + + if self.levelValue > other.levelValue: + return True + + myValidPeriod = self.validPeriod + otherValidPeriod = other.validPeriod + if myValidPeriod != otherValidPeriod: + if myValidPeriod.duration() > otherValidPeriod.duration(): + return True + return myValidPeriod.getStartInMillis() > otherValidPeriod.getStartInMillis() + return False + + def __ge__(self, other): + if not isinstance(self, type(other)): + return NotImplemented + + return self.__gt__(other) or self.__eq__(other) + + def _getTimeAsEpochMillis(self, dateStr, timeStr, millis): + t = time.strptime(dateStr + ' ' + timeStr, '%Y-%m-%d %H:%M:%S') + epochSeconds = calendar.timegm(t) + return int(epochSeconds * 1000) + int(millis) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/time/FormattedDate.py b/dynamicserialize/dstypes/com/raytheon/uf/common/time/FormattedDate.py new file mode 100644 index 0000000..1987b8a --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/time/FormattedDate.py @@ -0,0 +1,17 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/21/2015 4486 rjpeter Initial creation. +# 06/23/2016 #5696 rjpeter Extend CommutativeTimestamp +# + +from .CommutativeTimestamp import CommutativeTimestamp + + +# TODO: Remove after 16.4.1 no longer in field +class FormattedDate(CommutativeTimestamp): + + def __init__(self, timeInMillis=None): + super(FormattedDate, self).__init__(timeInMillis) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/time/TimeRange.py b/dynamicserialize/dstypes/com/raytheon/uf/common/time/TimeRange.py new file mode 100644 index 0000000..53e972b --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/time/TimeRange.py @@ -0,0 +1,140 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/?? xxxxxxxx Initial Creation. +# 01/22/14 2667 bclement fixed millisecond support +# 02/28/14 2667 bclement constructor can take extra micros for start and end +# 06/24/15 4480 dgilling fix __eq__. 
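An illustrative round trip through the DataTime string constructor defined above, using one of the documented example strings; the expected values follow from the parsing and __str__ logic shown.

    from dynamicserialize.dstypes.com.raytheon.uf.common.time.DataTime import DataTime

    dt = DataTime('2016-08-02 01:23:45.0 (17)')
    dt.getFcstTime()                         # 61200 (17 hours, in seconds)
    str(dt)                                  # '2016-08-02 01:23:45 (17)'
    DataTime('2016-08-02 01:23:45.0') < dt   # True: earlier valid time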
+# +# + +import calendar +import datetime +import time + +MAX_TIME = 2147483647 +MICROS_IN_SECOND = 1000000 + + +class TimeRange(object): + def __init__(self, start=None, end=None, startExtraMicros=None, endExtraMicros=None): + self.start = self.__convertToDateTimeWithExtra(start, startExtraMicros) + self.end = self.__convertToDateTimeWithExtra(end, endExtraMicros) + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return "(" + self.start.strftime("%b %d %y %H:%M:%S %Z") + ", " + \ + self.end.strftime("%b %d %y %H:%M:%S %Z") + ")" + + def __eq__(self, other): + if not isinstance(self, type(other)): + return False + + if self.isValid() and other.isValid(): + return self.getStart() == other.getStart() and self.getEnd() == other.getEnd() + elif not self.isValid() and not other.isValid(): + return True + else: + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __convertToDateTimeWithExtra(self, timeArg, extraMicros): + rval = self.__convertToDateTime(timeArg) + if rval is not None and extraMicros is not None: + rval = rval + datetime.timedelta(microseconds=extraMicros) + return rval + + def __convertToDateTime(self, timeArg): + if timeArg is None: + return None + if isinstance(timeArg, datetime.datetime): + return timeArg + elif isinstance(timeArg, time.struct_time): + return datetime.datetime(*timeArg[:6]) + elif isinstance(timeArg, float): + # seconds as float, should be avoided due to floating point errors + totalSecs = int(timeArg) + micros = int((timeArg - totalSecs) * MICROS_IN_SECOND) + return self.__convertSecsAndMicros(totalSecs, micros) + elif isinstance(timeArg, int): + # seconds as integer + totalSecs = timeArg + return self.__convertSecsAndMicros(totalSecs, 0) + else: + return None + + def __convertSecsAndMicros(self, seconds, micros): + if seconds < MAX_TIME: + rval = datetime.datetime.utcfromtimestamp(seconds) + else: + extraTime = datetime.timedelta(seconds=(seconds - MAX_TIME)) + rval = datetime.datetime.utcfromtimestamp(MAX_TIME) + extraTime + return rval.replace(microsecond=micros) + + def getStart(self): + return self.start.utctimetuple() + + def getStartInMillis(self): + return self._getInMillis(self.start) + + def setStart(self, start, extraMicros=None): + self.start = self.__convertToDateTimeWithExtra(start, extraMicros) + + def getEnd(self): + return self.end.utctimetuple() + + def getEndInMillis(self): + return self._getInMillis(self.end) + + def _getInMillis(self, time): + rval = int(calendar.timegm(time.utctimetuple()) * 1000) + rval += time.microsecond // 1000 + return rval + + def setEnd(self, end, extraMicros=None): + self.end = self.__convertToDateTimeWithExtra(end, extraMicros) + + def duration(self): + delta = self.end - self.start + return int(delta.total_seconds()) + + def contains(self, timeArg): + if isinstance(timeArg, TimeRange): + if self.duration() == 0: + return self.__eq__(timeArg) + elif timeArg.duration() == 0: + return self.contains(timeArg.start) + return timeArg.start >= self.start and timeArg.end <= self.end + else: + convTime = self.__convertToDateTime(timeArg) + if not isinstance(convTime, datetime.datetime): + raise TypeError("Invalid type for argument time specified to TimeRange.contains().") + if self.duration() != 0: + return self.start <= convTime < self.end + return convTime == self.start + + def isValid(self): + return bool(self.start != self.end) + + def overlaps(self, timeRange): + return timeRange.contains(self.start) or self.contains(timeRange.start) + + def combineWith(self, 
timeRange): + if self.isValid() and timeRange.isValid(): + newStart = min(self.start, timeRange.start) + newEnd = max(self.end, timeRange.end) + return TimeRange(newStart, newEnd) + elif self.isValid(): + return self + + return timeRange + + @staticmethod + def allTimes(): + return TimeRange(0, MAX_TIME) diff --git a/dynamicserialize/dstypes/com/raytheon/uf/common/time/__init__.py b/dynamicserialize/dstypes/com/raytheon/uf/common/time/__init__.py new file mode 100644 index 0000000..14d6861 --- /dev/null +++ b/dynamicserialize/dstypes/com/raytheon/uf/common/time/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'CommutativeTimestamp', + 'DataTime', + 'FormattedDate', + 'TimeRange' + ] + +from .DataTime import DataTime +from .TimeRange import TimeRange +from .FormattedDate import FormattedDate +from .CommutativeTimestamp import CommutativeTimestamp diff --git a/dynamicserialize/dstypes/com/vividsolutions/__init__.py b/dynamicserialize/dstypes/com/vividsolutions/__init__.py new file mode 100644 index 0000000..074f0ea --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'jts' + ] diff --git a/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py b/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py new file mode 100644 index 0000000..0699c05 --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/jts/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'geom' + ] diff --git a/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py new file mode 100644 index 0000000..f143f0f --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Coordinate.py @@ -0,0 +1,25 @@ + + +class Coordinate(object): + + def __init__(self, x=None, y=None): + self.x = x + self.y = y + + def getX(self): + return self.x + + def getY(self): + return self.y + + def setX(self, x): + self.x = x + + def setY(self, y): + self.y = y + + def __str__(self): + return str((self.x, self.y)) + + def __repr__(self): + return self.__str__() diff --git a/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py new file mode 100644 index 0000000..7491741 --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Envelope.py @@ -0,0 +1,47 @@ +# This class is a dummy implementation of the +# com.vividsolutions.jts.geom.Envelope class. It was simply created to allow +# serialization/deserialization of IDataRequest objects from the Data Access +# Framework. This should be re-implemented if useful work needs to be +# performed against serialized Envelope objects. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/29/13 2023 dgilling Initial Creation. 
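A small, illustrative exercise of the TimeRange class defined above; integer inputs are treated as epoch seconds and converted to naive UTC datetimes by the constructor.

    from dynamicserialize.dstypes.com.raytheon.uf.common.time.TimeRange import TimeRange

    tr = TimeRange(0, 3600)
    tr.duration()                                     # 3600 seconds
    tr.contains(1800)                                 # True
    tr.overlaps(TimeRange(3000, 7200))                # True
    tr.combineWith(TimeRange(3000, 7200)).duration()  # 7200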
+# + + +class Envelope(object): + + def __init__(self, env=None): + self.maxx = -1.0 + self.maxy = -1.0 + self.minx = 0.0 + self.miny = 0.0 + if env is not None: + (self.minx, self.miny, self.maxx, self.maxy) = env.bounds + + def getMaxX(self): + return self.maxx + + def getMaxY(self): + return self.maxy + + def getMinX(self): + return self.minx + + def getMinY(self): + return self.miny + + def setMaxX(self, value): + self.maxx = value + + def setMaxY(self, value): + self.maxy = value + + def setMinX(self, value): + self.minx = value + + def setMinY(self, value): + self.miny = value diff --git a/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py new file mode 100644 index 0000000..2b134c7 --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/Geometry.py @@ -0,0 +1,17 @@ +# This class is a dummy implementation of the +# com.vividsolutions.jts.geom.Geometry class. It was simply created to allow +# serialization/deserialization of GridLocation objects. This should be +# reimplemented if useful work needs to be performed against serialized +# Geometry objects. + + +class Geometry(object): + + def __init__(self): + self.binaryData = None + + def getBinaryData(self): + return self.binaryData + + def setBinaryData(self, data): + self.binaryData = data diff --git a/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py new file mode 100644 index 0000000..93748c2 --- /dev/null +++ b/dynamicserialize/dstypes/com/vividsolutions/jts/geom/__init__.py @@ -0,0 +1,10 @@ + +__all__ = [ + 'Coordinate', + 'Envelope', + 'Geometry' + ] + +from .Coordinate import Coordinate +from .Envelope import Envelope +from .Geometry import Geometry diff --git a/dynamicserialize/dstypes/gov/__init__.py b/dynamicserialize/dstypes/gov/__init__.py new file mode 100644 index 0000000..5dd1b2b --- /dev/null +++ b/dynamicserialize/dstypes/gov/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'noaa' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/__init__.py b/dynamicserialize/dstypes/gov/noaa/__init__.py new file mode 100644 index 0000000..7d0a4a9 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'nws' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/__init__.py new file mode 100644 index 0000000..24524a7 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'ncep' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py new file mode 100644 index 0000000..c03e3e7 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'common' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py new file mode 100644 index 0000000..4d826f7 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'dataplugin' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py new file mode 100644 index 0000000..aed05b4 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/__init__.py @@ -0,0 +1,7 @@ + +__all__ = [ + 'atcf', + 'gempak', 
+ 'gpd', + 'pgen' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py new file mode 100644 index 0000000..ed91455 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'request' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py new file mode 100644 index 0000000..be5a1a7 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/RetrieveAtcfDeckRequest.py @@ -0,0 +1,12 @@ + + +class RetrieveAtcfDeckRequest(object): + + def __init__(self): + self.deckID = None + + def getDeckID(self): + return self.deckID + + def setDeckID(self, deckID): + self.deckID = deckID diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py new file mode 100644 index 0000000..a8c381a --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/atcf/request/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'RetrieveAtcfDeckRequest' + ] + +from .RetrieveAtcfDeckRequest import RetrieveAtcfDeckRequest diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py new file mode 100644 index 0000000..ed91455 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'request' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py new file mode 100644 index 0000000..5fccf34 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridDataRequest.py @@ -0,0 +1,67 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetGridDataRequest(object): + + def __init__(self): + self.vcoord = None + self.level2 = None + self.modelId = None + self.parm = None + self.level1 = None + self.reftime = None + self.pluginName = None + self.fcstsec = None + + def getVcoord(self): + return self.vcoord + + def setVcoord(self, vcoord): + self.vcoord = vcoord + + def getLevel2(self): + return self.level2 + + def setLevel2(self, level2): + self.level2 = level2 + + def getModelId(self): + return self.modelId + + def setModelId(self, modelId): + self.modelId = modelId + + def getParm(self): + return self.parm + + def setParm(self, parm): + self.parm = parm + + def getLevel1(self): + return self.level1 + + def setLevel1(self, level1): + self.level1 = level1 + + def getReftime(self): + return self.reftime + + def setReftime(self, reftime): + self.reftime = reftime + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getFcstsec(self): + return self.fcstsec + + def setFcstsec(self, fcstsec): + self.fcstsec = fcstsec diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py 
b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py new file mode 100644 index 0000000..7ecd148 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridInfoRequest.py @@ -0,0 +1,39 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetGridInfoRequest(object): + + def __init__(self): + self.modelId = None + self.reftime = None + self.pluginName = None + self.fcstsec = None + + def getModelId(self): + return self.modelId + + def setModelId(self, modelId): + self.modelId = modelId + + def getReftime(self): + return self.reftime + + def setReftime(self, reftime): + self.reftime = reftime + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getFcstsec(self): + return self.fcstsec + + def setFcstsec(self, fcstsec): + self.fcstsec = fcstsec diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py new file mode 100644 index 0000000..5284322 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetGridNavRequest.py @@ -0,0 +1,25 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetGridNavRequest(object): + + def __init__(self): + self.modelId = None + self.pluginName = None + + def getModelId(self): + return self.modelId + + def setModelId(self, modelId): + self.modelId = modelId + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py new file mode 100644 index 0000000..3670c42 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetStationsRequest.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetStationsRequest(object): + + def __init__(self): + self.pluginName = None + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py new file mode 100644 index 0000000..6387132 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesRequest.py @@ -0,0 +1,25 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetTimesRequest(object): + + def __init__(self): + self.pluginName = None + self.timeField = None + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getTimeField(self): + return self.timeField + + def 
setTimeField(self, timeField): + self.timeField = timeField diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py new file mode 100644 index 0000000..0f37030 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/GetTimesResponse.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class GetTimesResponse(object): + + def __init__(self): + self.times = None + + def getTimes(self): + return self.times + + def setTimes(self, times): + self.times = times diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py new file mode 100644 index 0000000..9bae221 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/Station.py @@ -0,0 +1,62 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + +import numpy + + +class Station(object): + + def __init__(self): + self.elevation = None + self.state = None + self.stationId = None + self.longitude = None + self.latitude = None + self.wmoIndex = None + self.country = None + + def getElevation(self): + return self.elevation + + def setElevation(self, elevation): + self.elevation = elevation + + def getState(self): + return self.state + + def setState(self, state): + self.state = state + + def getStationId(self): + return self.stationId + + def setStationId(self, stationId): + self.stationId = stationId + + def getLongitude(self): + return self.longitude + + def setLongitude(self, longitude): + self.longitude = numpy.float64(longitude) + + def getLatitude(self): + return self.latitude + + def setLatitude(self, latitude): + self.latitude = numpy.float64(latitude) + + def getWmoIndex(self): + return self.wmoIndex + + def setWmoIndex(self, wmoIndex): + self.wmoIndex = wmoIndex + + def getCountry(self): + return self.country + + def setCountry(self, country): + self.country = country diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py new file mode 100644 index 0000000..b587766 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/StationDataRequest.py @@ -0,0 +1,46 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class StationDataRequest(object): + + def __init__(self): + self.refTime = None + self.pluginName = None + self.parmList = None + self.stationId = None + self.partNumber = None + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getParmList(self): + return self.parmList + + def setParmList(self, parmList): + self.parmList = parmList + + def getStationId(self): + return self.stationId + + def setStationId(self, stationId): + self.stationId = stationId 
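The gempak request classes added in this part of the patch (GetTimesRequest, GetGridInfoRequest, StationDataRequest, and the rest) are plain getter/setter beans; the retrievers under awips/gempak fill one in and hand it to the Thrift client. A rough usage sketch of that pattern, assuming the ThriftClient interface provided elsewhere in this package — the host name and field values below are placeholders, not part of this patch:

from awips import ThriftClient
from dynamicserialize.dstypes.gov.noaa.nws.ncep.common.dataplugin.gempak.request import GetTimesRequest

client = ThriftClient.ThriftClient("edex-cloud.unidata.ucar.edu")  # placeholder EDEX host
request = GetTimesRequest()
request.setPluginName("grid")      # placeholder plugin name
request.setTimeField("refTime")    # placeholder time column
response = client.sendRequest(request)  # expected to come back as a GetTimesResponse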
+ + def getPartNumber(self): + return self.partNumber + + def setPartNumber(self, partNumber): + self.partNumber = partNumber diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py new file mode 100644 index 0000000..04ae692 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/SurfaceDataRequest.py @@ -0,0 +1,46 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class SurfaceDataRequest(object): + + def __init__(self): + self.refTime = None + self.pluginName = None + self.parmList = None + self.stationId = None + self.partNumber = None + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getParmList(self): + return self.parmList + + def setParmList(self, parmList): + self.parmList = parmList + + def getStationId(self): + return self.stationId + + def setStationId(self, stationId): + self.stationId = stationId + + def getPartNumber(self): + return self.partNumber + + def setPartNumber(self, partNumber): + self.partNumber = partNumber diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py new file mode 100644 index 0000000..ed692ed --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/UpperAirDataRequest.py @@ -0,0 +1,46 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Sep 16, 2016 pmoyer Generated + + +class UpperAirDataRequest(object): + + def __init__(self): + self.refTime = None + self.pluginName = None + self.parmList = None + self.stationId = None + self.partNumber = None + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getPluginName(self): + return self.pluginName + + def setPluginName(self, pluginName): + self.pluginName = pluginName + + def getParmList(self): + return self.parmList + + def setParmList(self, parmList): + self.parmList = parmList + + def getStationId(self): + return self.stationId + + def setStationId(self, stationId): + self.stationId = stationId + + def getPartNumber(self): + return self.partNumber + + def setPartNumber(self, partNumber): + self.partNumber = partNumber diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py new file mode 100644 index 0000000..ead3450 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gempak/request/__init__.py @@ -0,0 +1,24 @@ + +__all__ = [ + 'GetGridDataRequest', + 'GetGridInfoRequest', + 'GetGridNavRequest', + 'GetStationsRequest', + 'GetTimesRequest', + 'GetTimesResponse', + 'Station', + 'StationDataRequest', + 'SurfaceDataRequest', + 'UpperAirDataRequest' + ] + +from .GetGridDataRequest import GetGridDataRequest +from .GetGridInfoRequest import GetGridInfoRequest +from .GetGridNavRequest import GetGridNavRequest +from 
.GetStationsRequest import GetStationsRequest +from .GetTimesRequest import GetTimesRequest +from .GetTimesResponse import GetTimesResponse +from .Station import Station +from .StationDataRequest import StationDataRequest +from .SurfaceDataRequest import SurfaceDataRequest +from .UpperAirDataRequest import UpperAirDataRequest diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py new file mode 100644 index 0000000..8f0707c --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/__init__.py @@ -0,0 +1,4 @@ + +__all__ = [ + 'query' + ] diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py new file mode 100644 index 0000000..eafa73a --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/GenericPointDataReqMsg.py @@ -0,0 +1,82 @@ + + +class GenericPointDataReqMsg(object): + + def __init__(self): + self.reqType = None + self.refTime = None + self.productName = None + self.stnId = None + self.slat = None + self.slon = None + self.productVersion = None + self.querySpecifiedProductVersion = False + self.queryKey = None + self.gpdDataString = None + self.maxNumLevel = 1 + + def getReqType(self): + return self.reqType + + def setReqType(self, reqType): + self.reqType = reqType + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getProductName(self): + return self.productName + + def setProductName(self, productName): + self.productName = productName + + def getStnId(self): + return self.stnId + + def setStnId(self, stnId): + self.stnId = stnId + + def getSlat(self): + return self.slat + + def setSlat(self, slat): + self.slat = slat + + def getSlon(self): + return self.slon + + def setSlon(self, slon): + self.slon = slon + + def getMaxNumLevel(self): + return self.maxNumLevel + + def setMaxNumLevel(self, maxNumLevel): + self.maxNumLevel = maxNumLevel + + def getProductVersion(self): + return self.productVersion + + def setProductVersion(self, productVersion): + self.productVersion = productVersion + + def getQuerySpecifiedProductVersion(self): + return self.querySpecifiedProductVersion + + def setQuerySpecifiedProductVersion(self, querySpecifiedProductVersion): + self.querySpecifiedProductVersion = querySpecifiedProductVersion + + def getQueryKey(self): + return self.queryKey + + def setQueryKey(self, queryKey): + self.queryKey = queryKey + + def getGpdDataString(self): + return self.gpdDataString + + def setGpdDataString(self, gpdDataString): + self.gpdDataString = gpdDataString diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py new file mode 100644 index 0000000..ec61bb1 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/gpd/query/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'GenericPointDataReqMsg' + ] + +from .GenericPointDataReqMsg import GenericPointDataReqMsg diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py new file mode 100644 index 0000000..d72b906 --- /dev/null +++ 
b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ActivityInfo.py @@ -0,0 +1,75 @@ + + +class ActivityInfo(object): + + def __init__(self): + self.activityName = None + self.activityType = None + self.activitySubtype = None + self.activityLabel = None + self.site = None + self.desk = None + self.forecaster = None + self.refTime = None + self.mode = None + self.status = None + + def getActivityName(self): + return self.activityName + + def setActivityName(self, activityName): + self.activityName = activityName + + def getActivityType(self): + return self.activityType + + def setActivityType(self, activityType): + self.activityType = activityType + + def getActivitySubtype(self): + return self.activitySubtype + + def setActivitySubtype(self, activitySubtype): + self.activitySubtype = activitySubtype + + def getActivityLabel(self): + return self.activityLabel + + def setActivityLabel(self, activityLabel): + self.activityLabel = activityLabel + + def getSite(self): + return self.site + + def setSite(self, site): + self.site = site + + def getDesk(self): + return self.desk + + def setDesk(self, desk): + self.desk = desk + + def getForecaster(self): + return self.forecaster + + def setForecaster(self, forecaster): + self.forecaster = forecaster + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getMode(self): + return self.mode + + def setMode(self, mode): + self.mode = mode + + def getStatus(self): + return self.status + + def setStatus(self, status): + self.status = status diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py new file mode 100644 index 0000000..647ead2 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/DerivedProduct.py @@ -0,0 +1,26 @@ + + +class DerivedProduct(object): + + def __init__(self): + self.name = None + self.productType = None + self.product = None + + def getName(self): + return self.name + + def setName(self, name): + self.name = name + + def getProductType(self): + return self.productType + + def setProductType(self, productType): + self.productType = productType + + def getProduct(self): + return self.product + + def setProduct(self, product): + self.product = product diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py new file mode 100644 index 0000000..e6c2742 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/ResponseMessageValidate.py @@ -0,0 +1,40 @@ + + +class ResponseMessageValidate(object): + + def __init__(self): + self.result = None + self.message = None + self.fileType = None + self.dataURI = None + self.validTime = None + + def getResult(self): + return self.result + + def setResult(self, result): + self.result = result + + def getMessage(self): + return self.message + + def setMessage(self, message): + self.message = message + + def getFileType(self): + return self.fileType + + def setFileType(self, fileType): + self.fileType = fileType + + def getDataURI(self): + return self.dataURI + + def setDataURI(self, dataURI): + self.dataURI = dataURI + + def getValidTime(self): + return self.validTime + + def setValidTime(self, validTime): + self.validTime = validTime diff --git 
a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py new file mode 100644 index 0000000..bf0eb37 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'request', + 'response', + 'ActivityInfo', + 'DerivedProduct', + 'ResponseMessageValidate' + ] + +from .ActivityInfo import ActivityInfo +from .DerivedProduct import DerivedProduct +from .ResponseMessageValidate import ResponseMessageValidate diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py new file mode 100644 index 0000000..852420f --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveActivityMapRequest.py @@ -0,0 +1,12 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# May 05, 2016 root Generated + + +class RetrieveActivityMapRequest(object): + + def __init__(self): + return diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py new file mode 100644 index 0000000..73cd3a9 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/RetrieveAllProductsRequest.py @@ -0,0 +1,12 @@ + + +class RetrieveAllProductsRequest(object): + + def __init__(self): + self.dataURI = None + + def getDataURI(self): + return self.dataURI + + def setDataURI(self, dataURI): + self.dataURI = dataURI diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py new file mode 100644 index 0000000..1512011 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreActivityRequest.py @@ -0,0 +1,19 @@ + + +class StoreActivityRequest(object): + + def __init__(self): + self.activityInfo = None + self.activityXML = None + + def getActivityInfo(self): + return self.activityInfo + + def setActivityInfo(self, activityInfo): + self.activityInfo = activityInfo + + def getActivityXML(self): + return self.activityXML + + def setActivityXML(self, activityXML): + self.activityXML = activityXML diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py new file mode 100644 index 0000000..3b70af8 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/StoreDerivedProductRequest.py @@ -0,0 +1,19 @@ + + +class StoreDerivedProductRequest(object): + + def __init__(self): + self.dataURI = None + self.productList = None + + def getDataURI(self): + return self.dataURI + + def setDataURI(self, dataURI): + self.dataURI = dataURI + + def getProductList(self): + return self.productList + + def setProductList(self, productList): + self.productList = productList diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py 
b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py new file mode 100644 index 0000000..c6784eb --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/request/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'RetrieveActivityMapRequest', + 'RetrieveAllProductsRequest', + 'StoreActivityRequest', + 'StoreDerivedProductRequest' + ] + +from .RetrieveAllProductsRequest import RetrieveAllProductsRequest +from .StoreActivityRequest import StoreActivityRequest +from .StoreDerivedProductRequest import StoreDerivedProductRequest +from .RetrieveActivityMapRequest import RetrieveActivityMapRequest diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py new file mode 100644 index 0000000..0596caa --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/ActivityMapData.py @@ -0,0 +1,53 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# May 06, 2016 root Generated + + +class ActivityMapData(object): + + def __init__(self): + self.refTime = None + self.activityLabel = None + self.activitySubtype = None + self.dataURI = None + self.activityType = None + self.activityName = None + + def getRefTime(self): + return self.refTime + + def setRefTime(self, refTime): + self.refTime = refTime + + def getActivityLabel(self): + return self.activityLabel + + def setActivityLabel(self, activityLabel): + self.activityLabel = activityLabel + + def getActivitySubtype(self): + return self.activitySubtype + + def setActivitySubtype(self, activitySubtype): + self.activitySubtype = activitySubtype + + def getDataURI(self): + return self.dataURI + + def setDataURI(self, dataURI): + self.dataURI = dataURI + + def getActivityType(self): + return self.activityType + + def setActivityType(self, activityType): + self.activityType = activityType + + def getActivityName(self): + return self.activityName + + def setActivityName(self, activityName): + self.activityName = activityName diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py new file mode 100644 index 0000000..2f6d1bd --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/RetrieveActivityMapResponse.py @@ -0,0 +1,18 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# May 06, 2016 root Generated + + +class RetrieveActivityMapResponse(object): + + def __init__(self): + self.data = None + + def getData(self): + return self.data + + def setData(self, data): + self.data = data diff --git a/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py new file mode 100644 index 0000000..34367e3 --- /dev/null +++ b/dynamicserialize/dstypes/gov/noaa/nws/ncep/common/dataplugin/pgen/response/__init__.py @@ -0,0 +1,8 @@ + +__all__ = [ + 'ActivityMapData', + 'RetrieveActivityMapResponse' + ] + +from .ActivityMapData import ActivityMapData +from .RetrieveActivityMapResponse import RetrieveActivityMapResponse diff --git 
a/dynamicserialize/dstypes/java/__init__.py b/dynamicserialize/dstypes/java/__init__.py new file mode 100644 index 0000000..c0920bb --- /dev/null +++ b/dynamicserialize/dstypes/java/__init__.py @@ -0,0 +1,7 @@ + +__all__ = [ + 'awt', + 'lang', + 'sql', + 'util' + ] diff --git a/dynamicserialize/dstypes/java/awt/Point.py b/dynamicserialize/dstypes/java/awt/Point.py new file mode 100644 index 0000000..45f4028 --- /dev/null +++ b/dynamicserialize/dstypes/java/awt/Point.py @@ -0,0 +1,36 @@ +# +# Custom python class representing a java.awt.Point. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. +# +# + + +class Point(object): + + def __init__(self): + self.x = None + self.y = None + + def __str__(self): + return str((self.x, self.y)) + + def __repr__(self): + return self.__str__() + + def getX(self): + return self.x + + def getY(self): + return self.y + + def setX(self, x): + self.x = x + + def setY(self, y): + self.y = y diff --git a/dynamicserialize/dstypes/java/awt/__init__.py b/dynamicserialize/dstypes/java/awt/__init__.py new file mode 100644 index 0000000..5714b72 --- /dev/null +++ b/dynamicserialize/dstypes/java/awt/__init__.py @@ -0,0 +1,16 @@ +# +# Package definition for java.awt +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/31/10 njensen Initial Creation. +# + +__all__ = [ + 'Point' + ] + +from .Point import Point diff --git a/dynamicserialize/dstypes/java/lang/StackTraceElement.py b/dynamicserialize/dstypes/java/lang/StackTraceElement.py new file mode 100644 index 0000000..3262c40 --- /dev/null +++ b/dynamicserialize/dstypes/java/lang/StackTraceElement.py @@ -0,0 +1,51 @@ + + +class StackTraceElement(object): + + def __init__(self): + self.declaringClass = None + self.methodName = None + self.fileName = None + self.lineNumber = 0 + + def getDeclaringClass(self): + return self.declaringClass + + def setDeclaringClass(self, clz): + self.declaringClass = clz + + def getMethodName(self): + return self.methodName + + def setMethodName(self, methodName): + self.methodName = methodName + + def getFileName(self): + return self.fileName + + def setFileName(self, filename): + self.fileName = filename + + def getLineNumber(self): + return self.lineNumber + + def setLineNumber(self, lineNumber): + self.lineNumber = int(lineNumber) + + def isNativeMethod(self): + return self.lineNumber == -2 + + def __str__(self): + return self.__repr__() + + def __repr__(self): + msg = self.declaringClass.decode('cp437') + "." 
+ self.methodName.decode('cp437') + if self.isNativeMethod(): + msg += "(Native Method)" + elif self.fileName is not None and self.lineNumber >= 0: + msg += "(" + self.fileName.decode('cp437') + ":" + str(self.lineNumber) + ")" + elif self.fileName is not None: + msg += "(" + self.fileName.decode('cp437') + ")" + else: + msg += "(Unknown Source)" + return msg diff --git a/dynamicserialize/dstypes/java/lang/__init__.py b/dynamicserialize/dstypes/java/lang/__init__.py new file mode 100644 index 0000000..dabec42 --- /dev/null +++ b/dynamicserialize/dstypes/java/lang/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'StackTraceElement' + ] + +from .StackTraceElement import StackTraceElement diff --git a/dynamicserialize/dstypes/java/sql/Timestamp.py b/dynamicserialize/dstypes/java/sql/Timestamp.py new file mode 100644 index 0000000..fef3a19 --- /dev/null +++ b/dynamicserialize/dstypes/java/sql/Timestamp.py @@ -0,0 +1,22 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/?? xxxxxxxx Initial Creation. +# 06/24/15 4480 dgilling implement based on Date class. +# Jun 23, 2016 5696 rjpeter Make String version match java. +# + +from dynamicserialize.dstypes.java.util import Date +from time import gmtime, strftime + + +class Timestamp(Date): + + def __init__(self, time=None): + super(Timestamp, self).__init__(time) + + def __repr__(self): + return strftime("%Y-%m-%d %H:%M:%S.", gmtime(self.time/1000.0)) + \ + '{:03d}'.format(self.time % 1000) diff --git a/dynamicserialize/dstypes/java/sql/__init__.py b/dynamicserialize/dstypes/java/sql/__init__.py new file mode 100644 index 0000000..a6f3bc1 --- /dev/null +++ b/dynamicserialize/dstypes/java/sql/__init__.py @@ -0,0 +1,6 @@ + +__all__ = [ + 'Timestamp' + ] + +from .Timestamp import Timestamp diff --git a/dynamicserialize/dstypes/java/util/Calendar.py b/dynamicserialize/dstypes/java/util/Calendar.py new file mode 100644 index 0000000..85bcba0 --- /dev/null +++ b/dynamicserialize/dstypes/java/util/Calendar.py @@ -0,0 +1,30 @@ +# Custom python class representing a java.util.GregorianCalendar. +# +# This is a stripped-down version of the class that only supports +# minimal methods for serialization. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/29/10 wldougher Initial Creation. +# + +from . import GregorianCalendar + + +class Calendar(object): + + def __init__(self): + self.time = None + + # Methods from the real class that we typically use + @staticmethod + def getInstance(): + return GregorianCalendar() + + def getTimeInMillis(self): + return self.time + + def setTimeInMillis(self, timeInMillis): + self.time = timeInMillis diff --git a/dynamicserialize/dstypes/java/util/Date.py b/dynamicserialize/dstypes/java/util/Date.py new file mode 100644 index 0000000..693ab86 --- /dev/null +++ b/dynamicserialize/dstypes/java/util/Date.py @@ -0,0 +1,37 @@ +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 04/28/2015 4027 randerso Added optional construction parameter to set the time +# 06/26/2015 4480 dgilling Implement __eq__ and __hash__. 
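Both the Timestamp class above and the Date class it extends (whose body follows) keep epoch time in milliseconds; a small illustrative check of that convention, with the expected output shown in the comments:

from dynamicserialize.dstypes.java.util import Date
from dynamicserialize.dstypes.java.sql import Timestamp

print(Date(0))        # Jan 01 70 00:00:00 GMT
print(Timestamp(0))   # 1970-01-01 00:00:00.000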
+# + +from time import gmtime, strftime + + +class Date(object): + + def __init__(self, timeInMillis=None): + self.time = timeInMillis + + def getTime(self): + return self.time + + def setTime(self, timeInMillis): + self.time = timeInMillis + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return strftime("%b %d %y %H:%M:%S GMT", gmtime(self.time/1000.0)) + + def __eq__(self, other): + return self.time == other.time + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(self.time) diff --git a/dynamicserialize/dstypes/java/util/EnumSet.py b/dynamicserialize/dstypes/java/util/EnumSet.py new file mode 100644 index 0000000..1e93c54 --- /dev/null +++ b/dynamicserialize/dstypes/java/util/EnumSet.py @@ -0,0 +1,48 @@ +## +# NOTE: Please do not ever use this class unless you really must. It is not +# designed to be directly accessed from client code. Hide its use from end- +# users as best as you can. +## + +## +# IMPLEMENTATION DETAILS: +# This class is an attempt to simulate Java's EnumSet class. When creating +# a new instance of this class, you must specify the name of the Java enum +# contained within as this is needed for serialization. Do not append the +# "dynamicserialize.dstypes" portion of the Python package to the supplied +# class name as Java won't know what class that is when deserializing. +# +# Since Python has no concept of enums, this class cannot provide the value- +# checking that Java class does. Be very sure that you add only valid enum +# values to your EnumSet. +## + +import collections + + +class EnumSet(collections.MutableSet): + + def __init__(self, enumClassName, iterable=[]): + self.__enumClassName = enumClassName + self.__set = set(iterable) + + def __repr__(self): + return "EnumSet({0})".format(list(self.__set)) + + def __len__(self): + return len(self.__set) + + def __contains__(self, key): + return key in self.__set + + def __iter__(self): + return iter(self.__set) + + def add(self, value): + self.__set.add(value) + + def discard(self, value): + self.__set.discard(value) + + def getEnumClass(self): + return self.__enumClassName diff --git a/dynamicserialize/dstypes/java/util/GregorianCalendar.py b/dynamicserialize/dstypes/java/util/GregorianCalendar.py new file mode 100644 index 0000000..605ea46 --- /dev/null +++ b/dynamicserialize/dstypes/java/util/GregorianCalendar.py @@ -0,0 +1,29 @@ +# +# Custom python class representing a java.util.GregorianCalendar. +# +# This is a stripped-down version of the class that only supports +# minimal methods for serialization. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/29/10 wldougher Initial Creation. 
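A minimal usage sketch for the EnumSet class above: the constructor takes the fully qualified Java enum class name plus an optional iterable of enum constant names, and the class name is carried along only so the set can be serialized back to Java. The enum name and values below are placeholders, not classes defined in this patch:

from dynamicserialize.dstypes.java.util import EnumSet

flags = EnumSet('com.example.SomeJavaEnum', ['FIRST', 'SECOND'])  # placeholder Java enum
flags.add('THIRD')              # no value checking happens on the Python side
print(flags.getEnumClass(), list(flags))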
+# + + +class GregorianCalendar(object): + + def __init__(self): + self.time = None + + # Methods from the real class that we typically use + @staticmethod + def getInstance(): + return GregorianCalendar() + + def getTimeInMillis(self): + return self.time + + def setTimeInMillis(self, timeInMillis): + self.time = timeInMillis diff --git a/dynamicserialize/dstypes/java/util/__init__.py b/dynamicserialize/dstypes/java/util/__init__.py new file mode 100644 index 0000000..0fc1b2e --- /dev/null +++ b/dynamicserialize/dstypes/java/util/__init__.py @@ -0,0 +1,12 @@ + +__all__ = [ + 'Calendar', + 'Date', + 'EnumSet', + 'GregorianCalendar' + ] + +from .Calendar import Calendar +from .Date import Date +from .EnumSet import EnumSet +from .GregorianCalendar import GregorianCalendar From 24e9f9b20d2b178dc0b599e103f42ab9be9672a4 Mon Sep 17 00:00:00 2001 From: Shay Carter Date: Tue, 8 Jun 2021 15:47:41 -0600 Subject: [PATCH 2/3] Small change to the workflow to see if the dispatch button becomes available in github --- .github/workflows/sphinx_build_deploy.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/sphinx_build_deploy.yml b/.github/workflows/sphinx_build_deploy.yml index 62bd008..0ff5a86 100644 --- a/.github/workflows/sphinx_build_deploy.yml +++ b/.github/workflows/sphinx_build_deploy.yml @@ -1,6 +1,7 @@ name: Publish Sphinx Built Webpages to Github Pages on: + workflow_dispatch: push: branches: - website From 640e0c9de9d9c50ce5ce32515276a5d9c76451ef Mon Sep 17 00:00:00 2001 From: Shay Carter Date: Tue, 8 Jun 2021 15:56:28 -0600 Subject: [PATCH 3/3] Add back in the /thrift and /rpm dirs --- rpm/component.spec | 113 +++ rpm/patch.diff | 13 + thrift/TSCons.py | 35 + thrift/TSerialization.py | 38 + thrift/Thrift.py | 157 ++++ thrift/__init__.py | 20 + thrift/protocol/TBase.py | 81 ++ thrift/protocol/TBinaryProtocol.py | 264 ++++++ thrift/protocol/TCompactProtocol.py | 403 +++++++++ thrift/protocol/TProtocol.py | 406 +++++++++ thrift/protocol/__init__.py | 20 + thrift/protocol/fastbinary.c | 1219 +++++++++++++++++++++++++++ thrift/server/THttpServer.py | 87 ++ thrift/server/TNonblockingServer.py | 346 ++++++++ thrift/server/TProcessPoolServer.py | 119 +++ thrift/server/TServer.py | 269 ++++++ thrift/server/__init__.py | 20 + thrift/transport/THttpClient.py | 149 ++++ thrift/transport/TSSLSocket.py | 202 +++++ thrift/transport/TSocket.py | 176 ++++ thrift/transport/TTransport.py | 333 ++++++++ thrift/transport/TTwisted.py | 221 +++++ thrift/transport/TZlibTransport.py | 248 ++++++ thrift/transport/__init__.py | 20 + 24 files changed, 4959 insertions(+) create mode 100644 rpm/component.spec create mode 100644 rpm/patch.diff create mode 100644 thrift/TSCons.py create mode 100644 thrift/TSerialization.py create mode 100644 thrift/Thrift.py create mode 100644 thrift/__init__.py create mode 100644 thrift/protocol/TBase.py create mode 100644 thrift/protocol/TBinaryProtocol.py create mode 100644 thrift/protocol/TCompactProtocol.py create mode 100644 thrift/protocol/TProtocol.py create mode 100644 thrift/protocol/__init__.py create mode 100644 thrift/protocol/fastbinary.c create mode 100644 thrift/server/THttpServer.py create mode 100644 thrift/server/TNonblockingServer.py create mode 100644 thrift/server/TProcessPoolServer.py create mode 100644 thrift/server/TServer.py create mode 100644 thrift/server/__init__.py create mode 100644 thrift/transport/THttpClient.py create mode 100644 thrift/transport/TSSLSocket.py create mode 100644 thrift/transport/TSocket.py create mode 100644 
thrift/transport/TTransport.py create mode 100644 thrift/transport/TTwisted.py create mode 100644 thrift/transport/TZlibTransport.py create mode 100644 thrift/transport/__init__.py diff --git a/rpm/component.spec b/rpm/component.spec new file mode 100644 index 0000000..7e25240 --- /dev/null +++ b/rpm/component.spec @@ -0,0 +1,113 @@ +%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') +%define _build_arch %(uname -i) +%define _python_awips_version %(grep ^ver /awips2/repo/python-awips/setup.py | cut -d '"' -f 2) +%define _python_build_loc %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) +# +# Python AWIPS Spec File +# +Name: awips2-python-awips +Summary: Python AWIPS Distribution +Version: %{_python_awips_version} +Release: 1%{?dist} +Group: AWIPSII +BuildRoot: %{_build_root} +BuildArch: %{_build_arch} +URL: N/A +License: N/A +Distribution: N/A +Vendor: %{_build_vendor} +Packager: %{_build_site} + +AutoReq: no +Requires: awips2-python +Requires: awips2-python-numpy +Requires: awips2-python-six +Requires: awips2-python-shapely +Provides: awips2-python-awips = %{version} + +Obsoletes: awips2-python-ufpy < 15.1.3-1 +Obsoletes: awips2-python-dynamicserialize < 15.1.3-1 +Obsoletes: awips2-python-thrift < 20080411p1-4 + +BuildRequires: awips2-python +BuildRequires: awips2-python-numpy + +%description +Python AWIPS Site-Package + +%prep +# Verify That The User Has Specified A BuildRoot. +if [ "%{_build_root}" = "" ] +then + echo "A Build Root has not been specified." + echo "Unable To Continue ... Terminating" + exit 1 +fi + +rm -rf %{_build_root} +mkdir -p %{_build_root} +if [ -d %{_python_build_loc} ]; then + rm -rf %{_python_build_loc} +fi +mkdir -p %{_python_build_loc} + +%build +source /etc/profile.d/awips2.sh +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +AWIPS_SRC_DIR="%{_baseline_workspace}/python-awips" +cp -R ${AWIPS_SRC_DIR} %{_python_build_loc}/ +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi + +cd %{_python_build_loc}/python-awips + +pushd . > /dev/null +/awips2/python/bin/python setup.py clean +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +/awips2/python/bin/python setup.py build +RC=$? +if [ ${RC} -ne 0 ]; then + exit 1 +fi +popd > /dev/null + +%install +AWIPS_SRC_DIR="%{_baseline_workspace}/python-awips" + +pushd . > /dev/null +cd %{_python_build_loc}/python-awips +export LD_LIBRARY_PATH=/awips2/python/lib +/awips2/python/bin/python setup.py install \ + --root=%{_build_root} \ + --prefix=/awips2/python +RC=$? 
+if [ ${RC} -ne 0 ]; then + exit 1 +fi +popd > /dev/null + +%pre + +%post + +%preun + +%postun + +%clean +rm -rf %{_build_root} +rm -rf %{_python_build_loc} + +%files +%defattr(644,awips,fxalpha,755) +%dir /awips2/python/lib/python2.7/site-packages +/awips2/python/lib/python2.7/site-packages/* diff --git a/rpm/patch.diff b/rpm/patch.diff new file mode 100644 index 0000000..d96664f --- /dev/null +++ b/rpm/patch.diff @@ -0,0 +1,13 @@ +diff --git a/dynamicserialize/ThriftSerializationContext.py b/dynamicserialize/ThriftSerializationContext.py +index bbfb153..3691b7f 100644 +--- a/dynamicserialize/ThriftSerializationContext.py ++++ b/dynamicserialize/ThriftSerializationContext.py +@@ -59,7 +59,7 @@ buildObjMap(dstypes) + pythonToThriftMap = { + bytes: TType.STRING, + int: TType.I32, +- int: TType.I64, ++ long: TType.I64, + list: TType.LIST, + dict: TType.MAP, + type(set([])): TType.SET, diff --git a/thrift/TSCons.py b/thrift/TSCons.py new file mode 100644 index 0000000..d3176ed --- /dev/null +++ b/thrift/TSCons.py @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from os import path +from SCons.Builder import Builder + + +def scons_env(env, add=''): + opath = path.dirname(path.abspath('$TARGET')) + lstr = 'thrift --gen cpp -o ' + opath + ' ' + add + ' $SOURCE' + cppbuild = Builder(action=lstr) + env.Append(BUILDERS={'ThriftCpp': cppbuild}) + + +def gen_cpp(env, dir, file): + scons_env(env) + suffixes = ['_types.h', '_types.cpp'] + targets = ['gen-cpp/' + file + s for s in suffixes] + return env.ThriftCpp(targets, dir + file + '.thrift') diff --git a/thrift/TSerialization.py b/thrift/TSerialization.py new file mode 100644 index 0000000..fbbe768 --- /dev/null +++ b/thrift/TSerialization.py @@ -0,0 +1,38 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
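The serialize and deserialize helpers defined just below simply run a Thrift object's write or read method against a TMemoryBuffer wrapped in a binary protocol. A minimal round-trip sketch, where MyStruct stands in for any generated Thrift struct class (it is hypothetical, not something defined in this patch):

from thrift.TSerialization import serialize, deserialize

payload = serialize(obj)                      # obj: an instance of a generated struct such as MyStruct
restored = deserialize(MyStruct(), payload)   # fills a fresh MyStruct from the bytes and returns it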
+# + +from .protocol import TBinaryProtocol +from .transport import TTransport + + +def serialize(thrift_object, + protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): + transport = TTransport.TMemoryBuffer() + protocol = protocol_factory.getProtocol(transport) + thrift_object.write(protocol) + return transport.getvalue() + + +def deserialize(base, + buf, + protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): + transport = TTransport.TMemoryBuffer(buf) + protocol = protocol_factory.getProtocol(transport) + base.read(protocol) + return base diff --git a/thrift/Thrift.py b/thrift/Thrift.py new file mode 100644 index 0000000..707a8cc --- /dev/null +++ b/thrift/Thrift.py @@ -0,0 +1,157 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import sys + + +class TType: + STOP = 0 + VOID = 1 + BOOL = 2 + BYTE = 3 + I08 = 3 + DOUBLE = 4 + I16 = 6 + I32 = 8 + I64 = 10 + STRING = 11 + UTF7 = 11 + STRUCT = 12 + MAP = 13 + SET = 14 + LIST = 15 + UTF8 = 16 + UTF16 = 17 + + _VALUES_TO_NAMES = ('STOP', + 'VOID', + 'BOOL', + 'BYTE', + 'DOUBLE', + None, + 'I16', + None, + 'I32', + None, + 'I64', + 'STRING', + 'STRUCT', + 'MAP', + 'SET', + 'LIST', + 'UTF8', + 'UTF16') + + +class TMessageType: + CALL = 1 + REPLY = 2 + EXCEPTION = 3 + ONEWAY = 4 + + +class TProcessor: + """Base class for procsessor, which works on two streams.""" + + def process(iprot, oprot): + pass + + +class TException(Exception): + """Base class for all thrift exceptions.""" + + # BaseException.message is deprecated in Python v[2.6,3.0) + if (2, 6, 0) <= sys.version_info < (3, 0): + def _get_message(self): + return self._message + + def _set_message(self, message): + self._message = message + message = property(_get_message, _set_message) + + def __init__(self, message=None): + Exception.__init__(self, message) + self.message = message + + +class TApplicationException(TException): + """Application level thrift exceptions.""" + + UNKNOWN = 0 + UNKNOWN_METHOD = 1 + INVALID_MESSAGE_TYPE = 2 + WRONG_METHOD_NAME = 3 + BAD_SEQUENCE_ID = 4 + MISSING_RESULT = 5 + INTERNAL_ERROR = 6 + PROTOCOL_ERROR = 7 + + def __init__(self, type=UNKNOWN, message=None): + TException.__init__(self, message) + self.type = type + + def __str__(self): + if self.message: + return self.message + elif self.type == self.UNKNOWN_METHOD: + return 'Unknown method' + elif self.type == self.INVALID_MESSAGE_TYPE: + return 'Invalid message type' + elif self.type == self.WRONG_METHOD_NAME: + return 'Wrong method name' + elif self.type == self.BAD_SEQUENCE_ID: + return 'Bad sequence ID' + elif self.type == self.MISSING_RESULT: + return 'Missing result' + else: + return 'Default (unknown) TApplicationException' + + def read(self, iprot): + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if 
ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRING: + self.message = iprot.readString() + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.I32: + self.type = iprot.readI32() + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + oprot.writeStructBegin('TApplicationException') + if self.message is not None: + oprot.writeFieldBegin('message', TType.STRING, 1) + oprot.writeString(self.message) + oprot.writeFieldEnd() + if self.type is not None: + oprot.writeFieldBegin('type', TType.I32, 2) + oprot.writeI32(self.type) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() diff --git a/thrift/__init__.py b/thrift/__init__.py new file mode 100644 index 0000000..48d659c --- /dev/null +++ b/thrift/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +__all__ = ['Thrift', 'TSCons'] diff --git a/thrift/protocol/TBase.py b/thrift/protocol/TBase.py new file mode 100644 index 0000000..6cd6c28 --- /dev/null +++ b/thrift/protocol/TBase.py @@ -0,0 +1,81 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +from thrift.Thrift import * +from thrift.protocol import TBinaryProtocol +from thrift.transport import TTransport + +try: + from thrift.protocol import fastbinary +except: + fastbinary = None + + +class TBase(object): + __slots__ = [] + + def __repr__(self): + L = ['%s=%r' % (key, getattr(self, key)) + for key in self.__slots__] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + for attr in self.__slots__: + my_val = getattr(self, attr) + other_val = getattr(other, attr) + if my_val != other_val: + return False + return True + + def __ne__(self, other): + return not (self == other) + + def read(self, iprot): + if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and + isinstance(iprot.trans, TTransport.CReadableTransport) and + self.thrift_spec is not None and + fastbinary is not None): + fastbinary.decode_binary(self, + iprot.trans, + (self.__class__, self.thrift_spec)) + return + iprot.readStruct(self, self.thrift_spec) + + def write(self, oprot): + if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and + self.thrift_spec is not None and + fastbinary is not None): + oprot.trans.write( + fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStruct(self, self.thrift_spec) + + +class TExceptionBase(Exception): + # old style class so python2.4 can raise exceptions derived from this + # This can't inherit from TBase because of that limitation. + __slots__ = [] + + __repr__ = TBase.__repr__.__func__ + __eq__ = TBase.__eq__.__func__ + __ne__ = TBase.__ne__.__func__ + read = TBase.read.__func__ + write = TBase.write.__func__ diff --git a/thrift/protocol/TBinaryProtocol.py b/thrift/protocol/TBinaryProtocol.py new file mode 100644 index 0000000..dbcb1e9 --- /dev/null +++ b/thrift/protocol/TBinaryProtocol.py @@ -0,0 +1,264 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from .TProtocol import * +from struct import pack, unpack + + +class TBinaryProtocol(TProtocolBase): + """Binary implementation of the Thrift protocol driver.""" + + # NastyHaxx. Python 2.4+ on 32-bit machines forces hex constants to be + # positive, converting this into a long. If we hardcode the int value + # instead it'll stay in 32 bit-land. 
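Concretely, the negative constants defined next are the 32-bit masks from the commented-out hex literals reinterpreted as signed values; a quick illustrative check:

assert -65536 & 0xffffffff == 0xffff0000         # VERSION_MASK
assert -2147418112 & 0xffffffff == 0x80010000    # VERSION_1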
+ + # VERSION_MASK = 0xffff0000 + VERSION_MASK = -65536 + + # VERSION_1 = 0x80010000 + VERSION_1 = -2147418112 + + TYPE_MASK = 0x000000ff + + def __init__(self, trans, strictRead=False, strictWrite=True): + TProtocolBase.__init__(self, trans) + self.strictRead = strictRead + self.strictWrite = strictWrite + + def writeMessageBegin(self, name, type, seqid): + if self.strictWrite: + self.writeI32(TBinaryProtocol.VERSION_1 | type) + self.writeString(name) + self.writeI32(seqid) + else: + self.writeString(name) + self.writeByte(type) + self.writeI32(seqid) + + def writeMessageEnd(self): + pass + + def writeStructBegin(self, name): + pass + + def writeStructEnd(self): + pass + + def writeFieldBegin(self, name, type, id): + self.writeByte(type) + self.writeI16(id) + + def writeFieldEnd(self): + pass + + def writeFieldStop(self): + self.writeByte(TType.STOP) + + def writeMapBegin(self, ktype, vtype, size): + self.writeByte(ktype) + self.writeByte(vtype) + self.writeI32(size) + + def writeMapEnd(self): + pass + + def writeListBegin(self, etype, size): + self.writeByte(etype) + self.writeI32(size) + + def writeListEnd(self): + pass + + def writeSetBegin(self, etype, size): + self.writeByte(etype) + self.writeI32(size) + + def writeSetEnd(self): + pass + + def writeBool(self, bool): + if bool: + self.writeByte(1) + else: + self.writeByte(0) + + def writeByte(self, byte): + buff = pack("!b", byte) + self.trans.write(buff) + + def writeI16(self, i16): + buff = pack("!h", i16) + self.trans.write(buff) + + def writeI32(self, i32): + buff = pack("!i", i32) + self.trans.write(buff) + + def writeI64(self, i64): + buff = pack("!q", i64) + self.trans.write(buff) + + def writeDouble(self, dub): + buff = pack("!d", dub) + self.trans.write(buff) + + def writeString(self, str): + self.writeI32(len(str)) + self.trans.write(str) + + def readMessageBegin(self): + sz = self.readI32() + if sz < 0: + version = sz & TBinaryProtocol.VERSION_MASK + if version != TBinaryProtocol.VERSION_1: + raise TProtocolException( + type=TProtocolException.BAD_VERSION, + message='Bad version in readMessageBegin: %d' % (sz)) + type = sz & TBinaryProtocol.TYPE_MASK + name = self.readString() + seqid = self.readI32() + else: + if self.strictRead: + raise TProtocolException(type=TProtocolException.BAD_VERSION, + message='No protocol version header') + name = self.trans.readAll(sz) + type = self.readByte() + seqid = self.readI32() + return (name, type, seqid) + + def readMessageEnd(self): + pass + + def readStructBegin(self): + pass + + def readStructEnd(self): + pass + + def readFieldBegin(self): + type = self.readByte() + if type == TType.STOP: + return (None, type, 0) + id = self.readI16() + return (None, type, id) + + def readFieldEnd(self): + pass + + def readMapBegin(self): + ktype = self.readByte() + vtype = self.readByte() + size = self.readI32() + return (ktype, vtype, size) + + def readMapEnd(self): + pass + + def readListBegin(self): + etype = self.readByte() + size = self.readI32() + return (etype, size) + + def readListEnd(self): + pass + + def readSetBegin(self): + etype = self.readByte() + size = self.readI32() + return (etype, size) + + def readSetEnd(self): + pass + + def readBool(self): + byte = self.readByte() + if byte == 0: + return False + return True + + def readByte(self): + buff = self.trans.readAll(1) + val, = unpack('!b', buff) + return val + + def readI16(self): + buff = self.trans.readAll(2) + val, = unpack('!h', buff) + return val + + def readI32(self): + buff = self.trans.readAll(4) + try: + val, = 
unpack('!i', buff) + except TypeError: + #str does not support the buffer interface + val, = unpack('!i', buff) + return val + + def readI64(self): + buff = self.trans.readAll(8) + val, = unpack('!q', buff) + return val + + def readDouble(self): + buff = self.trans.readAll(8) + val, = unpack('!d', buff) + return val + + def readString(self): + len = self.readI32() + str = self.trans.readAll(len) + return str + + +class TBinaryProtocolFactory: + def __init__(self, strictRead=False, strictWrite=True): + self.strictRead = strictRead + self.strictWrite = strictWrite + + def getProtocol(self, trans): + prot = TBinaryProtocol(trans, self.strictRead, self.strictWrite) + return prot + + +class TBinaryProtocolAccelerated(TBinaryProtocol): + """C-Accelerated version of TBinaryProtocol. + + This class does not override any of TBinaryProtocol's methods, + but the generated code recognizes it directly and will call into + our C module to do the encoding, bypassing this object entirely. + We inherit from TBinaryProtocol so that the normal TBinaryProtocol + encoding can happen if the fastbinary module doesn't work for some + reason. (TODO(dreiss): Make this happen sanely in more cases.) + + In order to take advantage of the C module, just use + TBinaryProtocolAccelerated instead of TBinaryProtocol. + + NOTE: This code was contributed by an external developer. + The internal Thrift team has reviewed and tested it, + but we cannot guarantee that it is production-ready. + Please feel free to report bugs and/or success stories + to the public mailing list. + """ + pass + + +class TBinaryProtocolAcceleratedFactory: + def getProtocol(self, trans): + return TBinaryProtocolAccelerated(trans) diff --git a/thrift/protocol/TCompactProtocol.py b/thrift/protocol/TCompactProtocol.py new file mode 100644 index 0000000..a3385e1 --- /dev/null +++ b/thrift/protocol/TCompactProtocol.py @@ -0,0 +1,403 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
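The compact protocol implemented below stores integers zigzag-encoded and then as base-128 varints (see makeZigZag, fromZigZag, and writeVarint further down). A small worked illustration of those helpers, with values checked by hand:

from thrift.protocol.TCompactProtocol import makeZigZag, fromZigZag

assert makeZigZag(3, 32) == 6 and makeZigZag(-3, 32) == 5   # small magnitudes stay small, sign moves to the low bit
assert fromZigZag(5) == -3
# writeVarint then emits 7 bits per byte, least-significant group first, with the
# continuation bit set on every byte but the last: 300 (0x12C) becomes the bytes 0xAC 0x02.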
+# + +from .TProtocol import * +from struct import pack, unpack + +__all__ = ['TCompactProtocol', 'TCompactProtocolFactory'] + +CLEAR = 0 +FIELD_WRITE = 1 +VALUE_WRITE = 2 +CONTAINER_WRITE = 3 +BOOL_WRITE = 4 +FIELD_READ = 5 +CONTAINER_READ = 6 +VALUE_READ = 7 +BOOL_READ = 8 + + +def make_helper(v_from, container): + def helper(func): + def nested(self, *args, **kwargs): + assert self.state in (v_from, container), (self.state, v_from, container) + return func(self, *args, **kwargs) + return nested + return helper +writer = make_helper(VALUE_WRITE, CONTAINER_WRITE) +reader = make_helper(VALUE_READ, CONTAINER_READ) + + +def makeZigZag(n, bits): + return (n << 1) ^ (n >> (bits - 1)) + + +def fromZigZag(n): + return (n >> 1) ^ -(n & 1) + + +def writeVarint(trans, n): + out = [] + while True: + if n & ~0x7f == 0: + out.append(n) + break + else: + out.append((n & 0xff) | 0x80) + n = n >> 7 + trans.write(''.join(map(chr, out))) + + +def readVarint(trans): + result = 0 + shift = 0 + while True: + x = trans.readAll(1) + byte = ord(x) + result |= (byte & 0x7f) << shift + if byte >> 7 == 0: + return result + shift += 7 + + +class CompactType: + STOP = 0x00 + TRUE = 0x01 + FALSE = 0x02 + BYTE = 0x03 + I16 = 0x04 + I32 = 0x05 + I64 = 0x06 + DOUBLE = 0x07 + BINARY = 0x08 + LIST = 0x09 + SET = 0x0A + MAP = 0x0B + STRUCT = 0x0C + +CTYPES = {TType.STOP: CompactType.STOP, + TType.BOOL: CompactType.TRUE, # used for collection + TType.BYTE: CompactType.BYTE, + TType.I16: CompactType.I16, + TType.I32: CompactType.I32, + TType.I64: CompactType.I64, + TType.DOUBLE: CompactType.DOUBLE, + TType.STRING: CompactType.BINARY, + TType.STRUCT: CompactType.STRUCT, + TType.LIST: CompactType.LIST, + TType.SET: CompactType.SET, + TType.MAP: CompactType.MAP + } + +TTYPES = {} +for k, v in list(CTYPES.items()): + TTYPES[v] = k +TTYPES[CompactType.FALSE] = TType.BOOL +del k +del v + + +class TCompactProtocol(TProtocolBase): + """Compact implementation of the Thrift protocol driver.""" + + PROTOCOL_ID = 0x82 + VERSION = 1 + VERSION_MASK = 0x1f + TYPE_MASK = 0xe0 + TYPE_SHIFT_AMOUNT = 5 + + def __init__(self, trans): + TProtocolBase.__init__(self, trans) + self.state = CLEAR + self.__last_fid = 0 + self.__bool_fid = None + self.__bool_value = None + self.__structs = [] + self.__containers = [] + + def __writeVarint(self, n): + writeVarint(self.trans, n) + + def writeMessageBegin(self, name, type, seqid): + assert self.state == CLEAR + self.__writeUByte(self.PROTOCOL_ID) + self.__writeUByte(self.VERSION | (type << self.TYPE_SHIFT_AMOUNT)) + self.__writeVarint(seqid) + self.__writeString(name) + self.state = VALUE_WRITE + + def writeMessageEnd(self): + assert self.state == VALUE_WRITE + self.state = CLEAR + + def writeStructBegin(self, name): + assert self.state in (CLEAR, CONTAINER_WRITE, VALUE_WRITE), self.state + self.__structs.append((self.state, self.__last_fid)) + self.state = FIELD_WRITE + self.__last_fid = 0 + + def writeStructEnd(self): + assert self.state == FIELD_WRITE + self.state, self.__last_fid = self.__structs.pop() + + def writeFieldStop(self): + self.__writeByte(0) + + def __writeFieldHeader(self, type, fid): + delta = fid - self.__last_fid + if 0 < delta <= 15: + self.__writeUByte(delta << 4 | type) + else: + self.__writeByte(type) + self.__writeI16(fid) + self.__last_fid = fid + + def writeFieldBegin(self, name, type, fid): + assert self.state == FIELD_WRITE, self.state + if type == TType.BOOL: + self.state = BOOL_WRITE + self.__bool_fid = fid + else: + self.state = VALUE_WRITE + 
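# Field headers in the compact protocol are delta-encoded: the
# __writeFieldHeader() call just below emits a single byte with the field-id
# delta (1..15) in the high nibble and the compact type in the low nibble;
# larger or non-positive deltas fall back to a plain type byte followed by the
# absolute field id as a zig-zag varint (makeZigZag maps 0, -1, 1, -2, ... to
# 0, 1, 2, 3, ...). Boolean fields defer their header to writeBool() so the
# value itself can be folded into the type nibble.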
self.__writeFieldHeader(CTYPES[type], fid) + + def writeFieldEnd(self): + assert self.state in (VALUE_WRITE, BOOL_WRITE), self.state + self.state = FIELD_WRITE + + def __writeUByte(self, byte): + self.trans.write(pack('!B', byte)) + + def __writeByte(self, byte): + self.trans.write(pack('!b', byte)) + + def __writeI16(self, i16): + self.__writeVarint(makeZigZag(i16, 16)) + + def __writeSize(self, i32): + self.__writeVarint(i32) + + def writeCollectionBegin(self, etype, size): + assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state + if size <= 14: + self.__writeUByte(size << 4 | CTYPES[etype]) + else: + self.__writeUByte(0xf0 | CTYPES[etype]) + self.__writeSize(size) + self.__containers.append(self.state) + self.state = CONTAINER_WRITE + writeSetBegin = writeCollectionBegin + writeListBegin = writeCollectionBegin + + def writeMapBegin(self, ktype, vtype, size): + assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state + if size == 0: + self.__writeByte(0) + else: + self.__writeSize(size) + self.__writeUByte(CTYPES[ktype] << 4 | CTYPES[vtype]) + self.__containers.append(self.state) + self.state = CONTAINER_WRITE + + def writeCollectionEnd(self): + assert self.state == CONTAINER_WRITE, self.state + self.state = self.__containers.pop() + writeMapEnd = writeCollectionEnd + writeSetEnd = writeCollectionEnd + writeListEnd = writeCollectionEnd + + def writeBool(self, bool): + if self.state == BOOL_WRITE: + if bool: + ctype = CompactType.TRUE + else: + ctype = CompactType.FALSE + self.__writeFieldHeader(ctype, self.__bool_fid) + elif self.state == CONTAINER_WRITE: + if bool: + self.__writeByte(CompactType.TRUE) + else: + self.__writeByte(CompactType.FALSE) + else: + raise AssertionError("Invalid state in compact protocol") + + writeByte = writer(__writeByte) + writeI16 = writer(__writeI16) + + @writer + def writeI32(self, i32): + self.__writeVarint(makeZigZag(i32, 32)) + + @writer + def writeI64(self, i64): + self.__writeVarint(makeZigZag(i64, 64)) + + @writer + def writeDouble(self, dub): + self.trans.write(pack('!d', dub)) + + def __writeString(self, s): + self.__writeSize(len(s)) + self.trans.write(s) + writeString = writer(__writeString) + + def readFieldBegin(self): + assert self.state == FIELD_READ, self.state + type = self.__readUByte() + if type & 0x0f == TType.STOP: + return (None, 0, 0) + delta = type >> 4 + if delta == 0: + fid = self.__readI16() + else: + fid = self.__last_fid + delta + self.__last_fid = fid + type = type & 0x0f + if type == CompactType.TRUE: + self.state = BOOL_READ + self.__bool_value = True + elif type == CompactType.FALSE: + self.state = BOOL_READ + self.__bool_value = False + else: + self.state = VALUE_READ + return (None, self.__getTType(type), fid) + + def readFieldEnd(self): + assert self.state in (VALUE_READ, BOOL_READ), self.state + self.state = FIELD_READ + + def __readUByte(self): + result, = unpack('!B', self.trans.readAll(1)) + return result + + def __readByte(self): + result, = unpack('!b', self.trans.readAll(1)) + return result + + def __readVarint(self): + return readVarint(self.trans) + + def __readZigZag(self): + return fromZigZag(self.__readVarint()) + + def __readSize(self): + result = self.__readVarint() + if result < 0: + raise TException("Length < 0") + return result + + def readMessageBegin(self): + assert self.state == CLEAR + proto_id = self.__readUByte() + if proto_id != self.PROTOCOL_ID: + raise TProtocolException(TProtocolException.BAD_VERSION, + 'Bad protocol id in the message: %d' % proto_id) + ver_type = 
self.__readUByte() + type = (ver_type & self.TYPE_MASK) >> self.TYPE_SHIFT_AMOUNT + version = ver_type & self.VERSION_MASK + if version != self.VERSION: + raise TProtocolException(TProtocolException.BAD_VERSION, + 'Bad version: %d (expect %d)' % (version, self.VERSION)) + seqid = self.__readVarint() + name = self.__readString() + return (name, type, seqid) + + def readMessageEnd(self): + assert self.state == CLEAR + assert len(self.__structs) == 0 + + def readStructBegin(self): + assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state + self.__structs.append((self.state, self.__last_fid)) + self.state = FIELD_READ + self.__last_fid = 0 + + def readStructEnd(self): + assert self.state == FIELD_READ + self.state, self.__last_fid = self.__structs.pop() + + def readCollectionBegin(self): + assert self.state in (VALUE_READ, CONTAINER_READ), self.state + size_type = self.__readUByte() + size = size_type >> 4 + type = self.__getTType(size_type) + if size == 15: + size = self.__readSize() + self.__containers.append(self.state) + self.state = CONTAINER_READ + return type, size + readSetBegin = readCollectionBegin + readListBegin = readCollectionBegin + + def readMapBegin(self): + assert self.state in (VALUE_READ, CONTAINER_READ), self.state + size = self.__readSize() + types = 0 + if size > 0: + types = self.__readUByte() + vtype = self.__getTType(types) + ktype = self.__getTType(types >> 4) + self.__containers.append(self.state) + self.state = CONTAINER_READ + return (ktype, vtype, size) + + def readCollectionEnd(self): + assert self.state == CONTAINER_READ, self.state + self.state = self.__containers.pop() + readSetEnd = readCollectionEnd + readListEnd = readCollectionEnd + readMapEnd = readCollectionEnd + + def readBool(self): + if self.state == BOOL_READ: + return self.__bool_value == CompactType.TRUE + elif self.state == CONTAINER_READ: + return self.__readByte() == CompactType.TRUE + else: + raise AssertionError("Invalid state in compact protocol: %d" % + self.state) + + readByte = reader(__readByte) + __readI16 = __readZigZag + readI16 = reader(__readZigZag) + readI32 = reader(__readZigZag) + readI64 = reader(__readZigZag) + + @reader + def readDouble(self): + buff = self.trans.readAll(8) + val, = unpack('!d', buff) + return val + + def __readString(self): + len = self.__readSize() + return self.trans.readAll(len) + readString = reader(__readString) + + def __getTType(self, byte): + return TTYPES[byte & 0x0f] + + +class TCompactProtocolFactory: + def __init__(self): + pass + + def getProtocol(self, trans): + return TCompactProtocol(trans) diff --git a/thrift/protocol/TProtocol.py b/thrift/protocol/TProtocol.py new file mode 100644 index 0000000..56d323a --- /dev/null +++ b/thrift/protocol/TProtocol.py @@ -0,0 +1,406 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from thrift.Thrift import * + + +class TProtocolException(TException): + """Custom Protocol Exception class""" + + UNKNOWN = 0 + INVALID_DATA = 1 + NEGATIVE_SIZE = 2 + SIZE_LIMIT = 3 + BAD_VERSION = 4 + + def __init__(self, type=UNKNOWN, message=None): + TException.__init__(self, message) + self.type = type + + +class TProtocolBase: + """Base class for Thrift protocol driver.""" + + def __init__(self, trans): + self.trans = trans + + def writeMessageBegin(self, name, type, seqid): + pass + + def writeMessageEnd(self): + pass + + def writeStructBegin(self, name): + pass + + def writeStructEnd(self): + pass + + def writeFieldBegin(self, name, type, id): + pass + + def writeFieldEnd(self): + pass + + def writeFieldStop(self): + pass + + def writeMapBegin(self, ktype, vtype, size): + pass + + def writeMapEnd(self): + pass + + def writeListBegin(self, etype, size): + pass + + def writeListEnd(self): + pass + + def writeSetBegin(self, etype, size): + pass + + def writeSetEnd(self): + pass + + def writeBool(self, bool): + pass + + def writeByte(self, byte): + pass + + def writeI16(self, i16): + pass + + def writeI32(self, i32): + pass + + def writeI64(self, i64): + pass + + def writeDouble(self, dub): + pass + + def writeString(self, str): + pass + + def readMessageBegin(self): + pass + + def readMessageEnd(self): + pass + + def readStructBegin(self): + pass + + def readStructEnd(self): + pass + + def readFieldBegin(self): + pass + + def readFieldEnd(self): + pass + + def readMapBegin(self): + pass + + def readMapEnd(self): + pass + + def readListBegin(self): + pass + + def readListEnd(self): + pass + + def readSetBegin(self): + pass + + def readSetEnd(self): + pass + + def readBool(self): + pass + + def readByte(self): + pass + + def readI16(self): + pass + + def readI32(self): + pass + + def readI64(self): + pass + + def readDouble(self): + pass + + def readString(self): + pass + + def skip(self, type): + if type == TType.STOP: + return + elif type == TType.BOOL: + self.readBool() + elif type == TType.BYTE: + self.readByte() + elif type == TType.I16: + self.readI16() + elif type == TType.I32: + self.readI32() + elif type == TType.I64: + self.readI64() + elif type == TType.DOUBLE: + self.readDouble() + elif type == TType.STRING: + self.readString() + elif type == TType.STRUCT: + name = self.readStructBegin() + while True: + (name, type, id) = self.readFieldBegin() + if type == TType.STOP: + break + self.skip(type) + self.readFieldEnd() + self.readStructEnd() + elif type == TType.MAP: + (ktype, vtype, size) = self.readMapBegin() + for i in range(size): + self.skip(ktype) + self.skip(vtype) + self.readMapEnd() + elif type == TType.SET: + (etype, size) = self.readSetBegin() + for i in range(size): + self.skip(etype) + self.readSetEnd() + elif type == TType.LIST: + (etype, size) = self.readListBegin() + for i in range(size): + self.skip(etype) + self.readListEnd() + + # tuple of: ( 'reader method' name, is_container bool, 'writer_method' name ) + _TTYPE_HANDLERS = ( + (None, None, False), # 0 TType.STOP + (None, None, False), # 1 TType.VOID # TODO: handle void? 
+ ('readBool', 'writeBool', False), # 2 TType.BOOL + ('readByte', 'writeByte', False), # 3 TType.BYTE and I08 + ('readDouble', 'writeDouble', False), # 4 TType.DOUBLE + (None, None, False), # 5 undefined + ('readI16', 'writeI16', False), # 6 TType.I16 + (None, None, False), # 7 undefined + ('readI32', 'writeI32', False), # 8 TType.I32 + (None, None, False), # 9 undefined + ('readI64', 'writeI64', False), # 10 TType.I64 + ('readString', 'writeString', False), # 11 TType.STRING and UTF7 + ('readContainerStruct', 'writeContainerStruct', True), # 12 *.STRUCT + ('readContainerMap', 'writeContainerMap', True), # 13 TType.MAP + ('readContainerSet', 'writeContainerSet', True), # 14 TType.SET + ('readContainerList', 'writeContainerList', True), # 15 TType.LIST + (None, None, False), # 16 TType.UTF8 # TODO: handle utf8 types? + (None, None, False) # 17 TType.UTF16 # TODO: handle utf16 types? + ) + + def readFieldByTType(self, ttype, spec): + try: + (r_handler, w_handler, is_container) = self._TTYPE_HANDLERS[ttype] + except IndexError: + raise TProtocolException(type=TProtocolException.INVALID_DATA, + message='Invalid field type %d' % (ttype)) + if r_handler is None: + raise TProtocolException(type=TProtocolException.INVALID_DATA, + message='Invalid field type %d' % (ttype)) + reader = getattr(self, r_handler) + if not is_container: + return reader() + return reader(spec) + + def readContainerList(self, spec): + results = [] + ttype, tspec = spec[0], spec[1] + r_handler = self._TTYPE_HANDLERS[ttype][0] + reader = getattr(self, r_handler) + (list_type, list_len) = self.readListBegin() + if tspec is None: + # list values are simple types + for idx in range(list_len): + results.append(reader()) + else: + # this is like an inlined readFieldByTType + container_reader = self._TTYPE_HANDLERS[list_type][0] + val_reader = getattr(self, container_reader) + for idx in range(list_len): + val = val_reader(tspec) + results.append(val) + self.readListEnd() + return results + + def readContainerSet(self, spec): + results = set() + ttype, tspec = spec[0], spec[1] + r_handler = self._TTYPE_HANDLERS[ttype][0] + reader = getattr(self, r_handler) + (set_type, set_len) = self.readSetBegin() + if tspec is None: + # set members are simple types + for idx in range(set_len): + results.add(reader()) + else: + container_reader = self._TTYPE_HANDLERS[set_type][0] + val_reader = getattr(self, container_reader) + for idx in range(set_len): + results.add(val_reader(tspec)) + self.readSetEnd() + return results + + def readContainerStruct(self, spec): + (obj_class, obj_spec) = spec + obj = obj_class() + obj.read(self) + return obj + + def readContainerMap(self, spec): + results = dict() + key_ttype, key_spec = spec[0], spec[1] + val_ttype, val_spec = spec[2], spec[3] + (map_ktype, map_vtype, map_len) = self.readMapBegin() + # TODO: compare types we just decoded with thrift_spec and + # abort/skip if types disagree + key_reader = getattr(self, self._TTYPE_HANDLERS[key_ttype][0]) + val_reader = getattr(self, self._TTYPE_HANDLERS[val_ttype][0]) + # list values are simple types + for idx in range(map_len): + if key_spec is None: + k_val = key_reader() + else: + k_val = self.readFieldByTType(key_ttype, key_spec) + if val_spec is None: + v_val = val_reader() + else: + v_val = self.readFieldByTType(val_ttype, val_spec) + # this raises a TypeError with unhashable keys types + # i.e. 
this fails: d=dict(); d[[0,1]] = 2 + results[k_val] = v_val + self.readMapEnd() + return results + + def readStruct(self, obj, thrift_spec): + self.readStructBegin() + while True: + (fname, ftype, fid) = self.readFieldBegin() + if ftype == TType.STOP: + break + try: + field = thrift_spec[fid] + except IndexError: + self.skip(ftype) + else: + if field is not None and ftype == field[1]: + fname = field[2] + fspec = field[3] + val = self.readFieldByTType(ftype, fspec) + setattr(obj, fname, val) + else: + self.skip(ftype) + self.readFieldEnd() + self.readStructEnd() + + def writeContainerStruct(self, val, spec): + val.write(self) + + def writeContainerList(self, val, spec): + self.writeListBegin(spec[0], len(val)) + r_handler, w_handler, is_container = self._TTYPE_HANDLERS[spec[0]] + e_writer = getattr(self, w_handler) + if not is_container: + for elem in val: + e_writer(elem) + else: + for elem in val: + e_writer(elem, spec[1]) + self.writeListEnd() + + def writeContainerSet(self, val, spec): + self.writeSetBegin(spec[0], len(val)) + r_handler, w_handler, is_container = self._TTYPE_HANDLERS[spec[0]] + e_writer = getattr(self, w_handler) + if not is_container: + for elem in val: + e_writer(elem) + else: + for elem in val: + e_writer(elem, spec[1]) + self.writeSetEnd() + + def writeContainerMap(self, val, spec): + k_type = spec[0] + v_type = spec[2] + ignore, ktype_name, k_is_container = self._TTYPE_HANDLERS[k_type] + ignore, vtype_name, v_is_container = self._TTYPE_HANDLERS[v_type] + k_writer = getattr(self, ktype_name) + v_writer = getattr(self, vtype_name) + self.writeMapBegin(k_type, v_type, len(val)) + for m_key, m_val in val.items(): + if not k_is_container: + k_writer(m_key) + else: + k_writer(m_key, spec[1]) + if not v_is_container: + v_writer(m_val) + else: + v_writer(m_val, spec[3]) + self.writeMapEnd() + + def writeStruct(self, obj, thrift_spec): + self.writeStructBegin(obj.__class__.__name__) + for field in thrift_spec: + if field is None: + continue + fname = field[2] + val = getattr(obj, fname) + if val is None: + # skip writing out unset fields + continue + fid = field[0] + ftype = field[1] + fspec = field[3] + # get the writer method for this value + self.writeFieldBegin(fname, ftype, fid) + self.writeFieldByTType(ftype, val, fspec) + self.writeFieldEnd() + self.writeFieldStop() + self.writeStructEnd() + + def writeFieldByTType(self, ttype, val, spec): + r_handler, w_handler, is_container = self._TTYPE_HANDLERS[ttype] + writer = getattr(self, w_handler) + if is_container: + writer(val, spec) + else: + writer(val) + + +class TProtocolFactory: + def getProtocol(self, trans): + pass diff --git a/thrift/protocol/__init__.py b/thrift/protocol/__init__.py new file mode 100644 index 0000000..d53359b --- /dev/null +++ b/thrift/protocol/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# + +__all__ = ['TProtocol', 'TBinaryProtocol', 'fastbinary', 'TBase'] diff --git a/thrift/protocol/fastbinary.c b/thrift/protocol/fastbinary.c new file mode 100644 index 0000000..d5947a5 --- /dev/null +++ b/thrift/protocol/fastbinary.c @@ -0,0 +1,1219 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +#include +#include "cStringIO.h" +#include +#ifndef _WIN32 +# include +# include +#else +# include +# pragma comment (lib, "ws2_32.lib") +# define BIG_ENDIAN (4321) +# define LITTLE_ENDIAN (1234) +# define BYTE_ORDER LITTLE_ENDIAN +# if defined(_MSC_VER) && _MSC_VER < 1600 + typedef int _Bool; +# define bool _Bool +# define false 0 +# define true 1 +# endif +# define inline __inline +#endif + +/* Fix endianness issues on Solaris */ +#if defined (__SVR4) && defined (__sun) + #if defined(__i386) && !defined(__i386__) + #define __i386__ + #endif + + #ifndef BIG_ENDIAN + #define BIG_ENDIAN (4321) + #endif + #ifndef LITTLE_ENDIAN + #define LITTLE_ENDIAN (1234) + #endif + + /* I386 is LE, even on Solaris */ + #if !defined(BYTE_ORDER) && defined(__i386__) + #define BYTE_ORDER LITTLE_ENDIAN + #endif +#endif + +// TODO(dreiss): defval appears to be unused. Look into removing it. +// TODO(dreiss): Make parse_spec_args recursive, and cache the output +// permanently in the object. (Malloc and orphan.) +// TODO(dreiss): Why do we need cStringIO for reading, why not just char*? +// Can cStringIO let us work with a BufferedTransport? +// TODO(dreiss): Don't ignore the rv from cwrite (maybe). + +/* ====== BEGIN UTILITIES ====== */ + +#define INIT_OUTBUF_SIZE 128 + +// Stolen out of TProtocol.h. +// It would be a huge pain to have both get this from one place. +typedef enum TType { + T_STOP = 0, + T_VOID = 1, + T_BOOL = 2, + T_BYTE = 3, + T_I08 = 3, + T_I16 = 6, + T_I32 = 8, + T_U64 = 9, + T_I64 = 10, + T_DOUBLE = 4, + T_STRING = 11, + T_UTF7 = 11, + T_STRUCT = 12, + T_MAP = 13, + T_SET = 14, + T_LIST = 15, + T_UTF8 = 16, + T_UTF16 = 17 +} TType; + +#ifndef __BYTE_ORDER +# if defined(BYTE_ORDER) && defined(LITTLE_ENDIAN) && defined(BIG_ENDIAN) +# define __BYTE_ORDER BYTE_ORDER +# define __LITTLE_ENDIAN LITTLE_ENDIAN +# define __BIG_ENDIAN BIG_ENDIAN +# else +# error "Cannot determine endianness" +# endif +#endif + +// Same comment as the enum. Sorry. 
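/*
 * The conversions below back the writeI64/readI64 helpers: on big-endian
 * hosts htonll/ntohll are no-ops, on little-endian glibc builds they map to
 * bswap_64, and otherwise they are assembled from two 32-bit htonl/ntohl
 * swaps.
 */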
+#if __BYTE_ORDER == __BIG_ENDIAN +# define ntohll(n) (n) +# define htonll(n) (n) +#elif __BYTE_ORDER == __LITTLE_ENDIAN +# if defined(__GNUC__) && defined(__GLIBC__) +# include +# define ntohll(n) bswap_64(n) +# define htonll(n) bswap_64(n) +# else /* GNUC & GLIBC */ +# define ntohll(n) ( (((unsigned long long)ntohl(n)) << 32) + ntohl(n >> 32) ) +# define htonll(n) ( (((unsigned long long)htonl(n)) << 32) + htonl(n >> 32) ) +# endif /* GNUC & GLIBC */ +#else /* __BYTE_ORDER */ +# error "Can't define htonll or ntohll!" +#endif + +// Doing a benchmark shows that interning actually makes a difference, amazingly. +#define INTERN_STRING(value) _intern_ ## value + +#define INT_CONV_ERROR_OCCURRED(v) ( ((v) == -1) && PyErr_Occurred() ) +#define CHECK_RANGE(v, min, max) ( ((v) <= (max)) && ((v) >= (min)) ) + +// Py_ssize_t was not defined before Python 2.5 +#if (PY_VERSION_HEX < 0x02050000) +typedef int Py_ssize_t; +#endif + +/** + * A cache of the spec_args for a set or list, + * so we don't have to keep calling PyTuple_GET_ITEM. + */ +typedef struct { + TType element_type; + PyObject* typeargs; +} SetListTypeArgs; + +/** + * A cache of the spec_args for a map, + * so we don't have to keep calling PyTuple_GET_ITEM. + */ +typedef struct { + TType ktag; + TType vtag; + PyObject* ktypeargs; + PyObject* vtypeargs; +} MapTypeArgs; + +/** + * A cache of the spec_args for a struct, + * so we don't have to keep calling PyTuple_GET_ITEM. + */ +typedef struct { + PyObject* klass; + PyObject* spec; +} StructTypeArgs; + +/** + * A cache of the item spec from a struct specification, + * so we don't have to keep calling PyTuple_GET_ITEM. + */ +typedef struct { + int tag; + TType type; + PyObject* attrname; + PyObject* typeargs; + PyObject* defval; +} StructItemSpec; + +/** + * A cache of the two key attributes of a CReadableTransport, + * so we don't have to keep calling PyObject_GetAttr. + */ +typedef struct { + PyObject* stringiobuf; + PyObject* refill_callable; +} DecodeBuffer; + +/** Pointer to interned string to speed up attribute lookup. */ +static PyObject* INTERN_STRING(cstringio_buf); +/** Pointer to interned string to speed up attribute lookup. 
*/ +static PyObject* INTERN_STRING(cstringio_refill); + +static inline bool +check_ssize_t_32(Py_ssize_t len) { + // error from getting the int + if (INT_CONV_ERROR_OCCURRED(len)) { + return false; + } + if (!CHECK_RANGE(len, 0, INT32_MAX)) { + PyErr_SetString(PyExc_OverflowError, "string size out of range"); + return false; + } + return true; +} + +static inline bool +parse_pyint(PyObject* o, int32_t* ret, int32_t min, int32_t max) { + long val = PyInt_AsLong(o); + + if (INT_CONV_ERROR_OCCURRED(val)) { + return false; + } + if (!CHECK_RANGE(val, min, max)) { + PyErr_SetString(PyExc_OverflowError, "int out of range"); + return false; + } + + *ret = (int32_t) val; + return true; +} + + +/* --- FUNCTIONS TO PARSE STRUCT SPECIFICATOINS --- */ + +static bool +parse_set_list_args(SetListTypeArgs* dest, PyObject* typeargs) { + if (PyTuple_Size(typeargs) != 2) { + PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for list/set type args"); + return false; + } + + dest->element_type = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0)); + if (INT_CONV_ERROR_OCCURRED(dest->element_type)) { + return false; + } + + dest->typeargs = PyTuple_GET_ITEM(typeargs, 1); + + return true; +} + +static bool +parse_map_args(MapTypeArgs* dest, PyObject* typeargs) { + if (PyTuple_Size(typeargs) != 4) { + PyErr_SetString(PyExc_TypeError, "expecting 4 arguments for typeargs to map"); + return false; + } + + dest->ktag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0)); + if (INT_CONV_ERROR_OCCURRED(dest->ktag)) { + return false; + } + + dest->vtag = PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 2)); + if (INT_CONV_ERROR_OCCURRED(dest->vtag)) { + return false; + } + + dest->ktypeargs = PyTuple_GET_ITEM(typeargs, 1); + dest->vtypeargs = PyTuple_GET_ITEM(typeargs, 3); + + return true; +} + +static bool +parse_struct_args(StructTypeArgs* dest, PyObject* typeargs) { + if (PyTuple_Size(typeargs) != 2) { + PyErr_SetString(PyExc_TypeError, "expecting tuple of size 2 for struct args"); + return false; + } + + dest->klass = PyTuple_GET_ITEM(typeargs, 0); + dest->spec = PyTuple_GET_ITEM(typeargs, 1); + + return true; +} + +static int +parse_struct_item_spec(StructItemSpec* dest, PyObject* spec_tuple) { + + // i'd like to use ParseArgs here, but it seems to be a bottleneck. 
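  /*
   * Each non-None entry of a struct's thrift_spec is a 5-tuple of
   * (field tag, TType, attribute name, type args, default value); the
   * assignments below pull those positions out in that order.
   */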
+ if (PyTuple_Size(spec_tuple) != 5) { + PyErr_SetString(PyExc_TypeError, "expecting 5 arguments for spec tuple"); + return false; + } + + dest->tag = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 0)); + if (INT_CONV_ERROR_OCCURRED(dest->tag)) { + return false; + } + + dest->type = PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 1)); + if (INT_CONV_ERROR_OCCURRED(dest->type)) { + return false; + } + + dest->attrname = PyTuple_GET_ITEM(spec_tuple, 2); + dest->typeargs = PyTuple_GET_ITEM(spec_tuple, 3); + dest->defval = PyTuple_GET_ITEM(spec_tuple, 4); + return true; +} + +/* ====== END UTILITIES ====== */ + + +/* ====== BEGIN WRITING FUNCTIONS ====== */ + +/* --- LOW-LEVEL WRITING FUNCTIONS --- */ + +static void writeByte(PyObject* outbuf, int8_t val) { + int8_t net = val; + PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int8_t)); +} + +static void writeI16(PyObject* outbuf, int16_t val) { + int16_t net = (int16_t)htons(val); + PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int16_t)); +} + +static void writeI32(PyObject* outbuf, int32_t val) { + int32_t net = (int32_t)htonl(val); + PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int32_t)); +} + +static void writeI64(PyObject* outbuf, int64_t val) { + int64_t net = (int64_t)htonll(val); + PycStringIO->cwrite(outbuf, (char*)&net, sizeof(int64_t)); +} + +static void writeDouble(PyObject* outbuf, double dub) { + // Unfortunately, bitwise_cast doesn't work in C. Bad C! + union { + double f; + int64_t t; + } transfer; + transfer.f = dub; + writeI64(outbuf, transfer.t); +} + + +/* --- MAIN RECURSIVE OUTPUT FUCNTION -- */ + +static int +output_val(PyObject* output, PyObject* value, TType type, PyObject* typeargs) { + /* + * Refcounting Strategy: + * + * We assume that elements of the thrift_spec tuple are not going to be + * mutated, so we don't ref count those at all. Other than that, we try to + * keep a reference to all the user-created objects while we work with them. + * output_val assumes that a reference is already held. 
The *caller* is + * responsible for handling references + */ + + switch (type) { + + case T_BOOL: { + int v = PyObject_IsTrue(value); + if (v == -1) { + return false; + } + + writeByte(output, (int8_t) v); + break; + } + case T_I08: { + int32_t val; + + if (!parse_pyint(value, &val, INT8_MIN, INT8_MAX)) { + return false; + } + + writeByte(output, (int8_t) val); + break; + } + case T_I16: { + int32_t val; + + if (!parse_pyint(value, &val, INT16_MIN, INT16_MAX)) { + return false; + } + + writeI16(output, (int16_t) val); + break; + } + case T_I32: { + int32_t val; + + if (!parse_pyint(value, &val, INT32_MIN, INT32_MAX)) { + return false; + } + + writeI32(output, val); + break; + } + case T_I64: { + int64_t nval = PyLong_AsLongLong(value); + + if (INT_CONV_ERROR_OCCURRED(nval)) { + return false; + } + + if (!CHECK_RANGE(nval, INT64_MIN, INT64_MAX)) { + PyErr_SetString(PyExc_OverflowError, "int out of range"); + return false; + } + + writeI64(output, nval); + break; + } + + case T_DOUBLE: { + double nval = PyFloat_AsDouble(value); + if (nval == -1.0 && PyErr_Occurred()) { + return false; + } + + writeDouble(output, nval); + break; + } + + case T_STRING: { + Py_ssize_t len = PyString_Size(value); + + if (!check_ssize_t_32(len)) { + return false; + } + + writeI32(output, (int32_t) len); + PycStringIO->cwrite(output, PyString_AsString(value), (int32_t) len); + break; + } + + case T_LIST: + case T_SET: { + Py_ssize_t len; + SetListTypeArgs parsedargs; + PyObject *item; + PyObject *iterator; + + if (!parse_set_list_args(&parsedargs, typeargs)) { + return false; + } + + len = PyObject_Length(value); + + if (!check_ssize_t_32(len)) { + return false; + } + + writeByte(output, parsedargs.element_type); + writeI32(output, (int32_t) len); + + iterator = PyObject_GetIter(value); + if (iterator == NULL) { + return false; + } + + while ((item = PyIter_Next(iterator))) { + if (!output_val(output, item, parsedargs.element_type, parsedargs.typeargs)) { + Py_DECREF(item); + Py_DECREF(iterator); + return false; + } + Py_DECREF(item); + } + + Py_DECREF(iterator); + + if (PyErr_Occurred()) { + return false; + } + + break; + } + + case T_MAP: { + PyObject *k, *v; + Py_ssize_t pos = 0; + Py_ssize_t len; + + MapTypeArgs parsedargs; + + len = PyDict_Size(value); + if (!check_ssize_t_32(len)) { + return false; + } + + if (!parse_map_args(&parsedargs, typeargs)) { + return false; + } + + writeByte(output, parsedargs.ktag); + writeByte(output, parsedargs.vtag); + writeI32(output, len); + + // TODO(bmaurer): should support any mapping, not just dicts + while (PyDict_Next(value, &pos, &k, &v)) { + // TODO(dreiss): Think hard about whether these INCREFs actually + // turn any unsafe scenarios into safe scenarios. + Py_INCREF(k); + Py_INCREF(v); + + if (!output_val(output, k, parsedargs.ktag, parsedargs.ktypeargs) + || !output_val(output, v, parsedargs.vtag, parsedargs.vtypeargs)) { + Py_DECREF(k); + Py_DECREF(v); + return false; + } + Py_DECREF(k); + Py_DECREF(v); + } + break; + } + + // TODO(dreiss): Consider breaking this out as a function + // the way we did for decode_struct. 
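    /*
     * A struct is written as (type byte, 16-bit tag, value) for every spec
     * entry whose attribute is set and not None, terminated by a T_STOP byte.
     */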
+ case T_STRUCT: { + StructTypeArgs parsedargs; + Py_ssize_t nspec; + Py_ssize_t i; + + if (!parse_struct_args(&parsedargs, typeargs)) { + return false; + } + + nspec = PyTuple_Size(parsedargs.spec); + + if (nspec == -1) { + return false; + } + + for (i = 0; i < nspec; i++) { + StructItemSpec parsedspec; + PyObject* spec_tuple; + PyObject* instval = NULL; + + spec_tuple = PyTuple_GET_ITEM(parsedargs.spec, i); + if (spec_tuple == Py_None) { + continue; + } + + if (!parse_struct_item_spec (&parsedspec, spec_tuple)) { + return false; + } + + instval = PyObject_GetAttr(value, parsedspec.attrname); + + if (!instval) { + return false; + } + + if (instval == Py_None) { + Py_DECREF(instval); + continue; + } + + writeByte(output, (int8_t) parsedspec.type); + writeI16(output, parsedspec.tag); + + if (!output_val(output, instval, parsedspec.type, parsedspec.typeargs)) { + Py_DECREF(instval); + return false; + } + + Py_DECREF(instval); + } + + writeByte(output, (int8_t)T_STOP); + break; + } + + case T_STOP: + case T_VOID: + case T_UTF16: + case T_UTF8: + case T_U64: + default: + PyErr_SetString(PyExc_TypeError, "Unexpected TType"); + return false; + + } + + return true; +} + + +/* --- TOP-LEVEL WRAPPER FOR OUTPUT -- */ + +static PyObject * +encode_binary(PyObject *self, PyObject *args) { + PyObject* enc_obj; + PyObject* type_args; + PyObject* buf; + PyObject* ret = NULL; + + if (!PyArg_ParseTuple(args, "OO", &enc_obj, &type_args)) { + return NULL; + } + + buf = PycStringIO->NewOutput(INIT_OUTBUF_SIZE); + if (output_val(buf, enc_obj, T_STRUCT, type_args)) { + ret = PycStringIO->cgetvalue(buf); + } + + Py_DECREF(buf); + return ret; +} + +/* ====== END WRITING FUNCTIONS ====== */ + + +/* ====== BEGIN READING FUNCTIONS ====== */ + +/* --- LOW-LEVEL READING FUNCTIONS --- */ + +static void +free_decodebuf(DecodeBuffer* d) { + Py_XDECREF(d->stringiobuf); + Py_XDECREF(d->refill_callable); +} + +static bool +decode_buffer_from_obj(DecodeBuffer* dest, PyObject* obj) { + dest->stringiobuf = PyObject_GetAttr(obj, INTERN_STRING(cstringio_buf)); + if (!dest->stringiobuf) { + return false; + } + + if (!PycStringIO_InputCheck(dest->stringiobuf)) { + free_decodebuf(dest); + PyErr_SetString(PyExc_TypeError, "expecting stringio input"); + return false; + } + + dest->refill_callable = PyObject_GetAttr(obj, INTERN_STRING(cstringio_refill)); + + if(!dest->refill_callable) { + free_decodebuf(dest); + return false; + } + + if (!PyCallable_Check(dest->refill_callable)) { + free_decodebuf(dest); + PyErr_SetString(PyExc_TypeError, "expecting callable"); + return false; + } + + return true; +} + +static bool readBytes(DecodeBuffer* input, char** output, int len) { + int read; + + // TODO(dreiss): Don't fear the malloc. Think about taking a copy of + // the partial read instead of forcing the transport + // to prepend it to its buffer. 
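  /*
   * On a short read, the transport's cstringio_refill callable is invoked
   * with the partial data and the required length; it returns a fresh
   * cStringIO buffer that replaces input->stringiobuf before the read is
   * retried once.
   */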
+ + read = PycStringIO->cread(input->stringiobuf, output, len); + + if (read == len) { + return true; + } else if (read == -1) { + return false; + } else { + PyObject* newiobuf; + + // using building functions as this is a rare codepath + newiobuf = PyObject_CallFunction( + input->refill_callable, "s#i", *output, read, len, NULL); + if (newiobuf == NULL) { + return false; + } + + // must do this *AFTER* the call so that we don't deref the io buffer + Py_CLEAR(input->stringiobuf); + input->stringiobuf = newiobuf; + + read = PycStringIO->cread(input->stringiobuf, output, len); + + if (read == len) { + return true; + } else if (read == -1) { + return false; + } else { + // TODO(dreiss): This could be a valid code path for big binary blobs. + PyErr_SetString(PyExc_TypeError, + "refill claimed to have refilled the buffer, but didn't!!"); + return false; + } + } +} + +static int8_t readByte(DecodeBuffer* input) { + char* buf; + if (!readBytes(input, &buf, sizeof(int8_t))) { + return -1; + } + + return *(int8_t*) buf; +} + +static int16_t readI16(DecodeBuffer* input) { + char* buf; + if (!readBytes(input, &buf, sizeof(int16_t))) { + return -1; + } + + return (int16_t) ntohs(*(int16_t*) buf); +} + +static int32_t readI32(DecodeBuffer* input) { + char* buf; + if (!readBytes(input, &buf, sizeof(int32_t))) { + return -1; + } + return (int32_t) ntohl(*(int32_t*) buf); +} + + +static int64_t readI64(DecodeBuffer* input) { + char* buf; + if (!readBytes(input, &buf, sizeof(int64_t))) { + return -1; + } + + return (int64_t) ntohll(*(int64_t*) buf); +} + +static double readDouble(DecodeBuffer* input) { + union { + int64_t f; + double t; + } transfer; + + transfer.f = readI64(input); + if (transfer.f == -1) { + return -1; + } + return transfer.t; +} + +static bool +checkTypeByte(DecodeBuffer* input, TType expected) { + TType got = readByte(input); + if (INT_CONV_ERROR_OCCURRED(got)) { + return false; + } + + if (expected != got) { + PyErr_SetString(PyExc_TypeError, "got wrong ttype while reading field"); + return false; + } + return true; +} + +static bool +skip(DecodeBuffer* input, TType type) { +#define SKIPBYTES(n) \ + do { \ + if (!readBytes(input, &dummy_buf, (n))) { \ + return false; \ + } \ + } while(0) + + char* dummy_buf; + + switch (type) { + + case T_BOOL: + case T_I08: SKIPBYTES(1); break; + case T_I16: SKIPBYTES(2); break; + case T_I32: SKIPBYTES(4); break; + case T_I64: + case T_DOUBLE: SKIPBYTES(8); break; + + case T_STRING: { + // TODO(dreiss): Find out if these check_ssize_t32s are really necessary. 
+ int len = readI32(input); + if (!check_ssize_t_32(len)) { + return false; + } + SKIPBYTES(len); + break; + } + + case T_LIST: + case T_SET: { + TType etype; + int len, i; + + etype = readByte(input); + if (etype == -1) { + return false; + } + + len = readI32(input); + if (!check_ssize_t_32(len)) { + return false; + } + + for (i = 0; i < len; i++) { + if (!skip(input, etype)) { + return false; + } + } + break; + } + + case T_MAP: { + TType ktype, vtype; + int len, i; + + ktype = readByte(input); + if (ktype == -1) { + return false; + } + + vtype = readByte(input); + if (vtype == -1) { + return false; + } + + len = readI32(input); + if (!check_ssize_t_32(len)) { + return false; + } + + for (i = 0; i < len; i++) { + if (!(skip(input, ktype) && skip(input, vtype))) { + return false; + } + } + break; + } + + case T_STRUCT: { + while (true) { + TType type; + + type = readByte(input); + if (type == -1) { + return false; + } + + if (type == T_STOP) + break; + + SKIPBYTES(2); // tag + if (!skip(input, type)) { + return false; + } + } + break; + } + + case T_STOP: + case T_VOID: + case T_UTF16: + case T_UTF8: + case T_U64: + default: + PyErr_SetString(PyExc_TypeError, "Unexpected TType"); + return false; + + } + + return true; + +#undef SKIPBYTES +} + + +/* --- HELPER FUNCTION FOR DECODE_VAL --- */ + +static PyObject* +decode_val(DecodeBuffer* input, TType type, PyObject* typeargs); + +static bool +decode_struct(DecodeBuffer* input, PyObject* output, PyObject* spec_seq) { + int spec_seq_len = PyTuple_Size(spec_seq); + if (spec_seq_len == -1) { + return false; + } + + while (true) { + TType type; + int16_t tag; + PyObject* item_spec; + PyObject* fieldval = NULL; + StructItemSpec parsedspec; + + type = readByte(input); + if (type == -1) { + return false; + } + if (type == T_STOP) { + break; + } + tag = readI16(input); + if (INT_CONV_ERROR_OCCURRED(tag)) { + return false; + } + if (tag >= 0 && tag < spec_seq_len) { + item_spec = PyTuple_GET_ITEM(spec_seq, tag); + } else { + item_spec = Py_None; + } + + if (item_spec == Py_None) { + if (!skip(input, type)) { + return false; + } else { + continue; + } + } + + if (!parse_struct_item_spec(&parsedspec, item_spec)) { + return false; + } + if (parsedspec.type != type) { + if (!skip(input, type)) { + PyErr_SetString(PyExc_TypeError, "struct field had wrong type while reading and can't be skipped"); + return false; + } else { + continue; + } + } + + fieldval = decode_val(input, parsedspec.type, parsedspec.typeargs); + if (fieldval == NULL) { + return false; + } + + if (PyObject_SetAttr(output, parsedspec.attrname, fieldval) == -1) { + Py_DECREF(fieldval); + return false; + } + Py_DECREF(fieldval); + } + return true; +} + + +/* --- MAIN RECURSIVE INPUT FUCNTION --- */ + +// Returns a new reference. +static PyObject* +decode_val(DecodeBuffer* input, TType type, PyObject* typeargs) { + switch (type) { + + case T_BOOL: { + int8_t v = readByte(input); + if (INT_CONV_ERROR_OCCURRED(v)) { + return NULL; + } + + switch (v) { + case 0: Py_RETURN_FALSE; + case 1: Py_RETURN_TRUE; + // Don't laugh. This is a potentially serious issue. 
+ default: PyErr_SetString(PyExc_TypeError, "boolean out of range"); return NULL; + } + break; + } + case T_I08: { + int8_t v = readByte(input); + if (INT_CONV_ERROR_OCCURRED(v)) { + return NULL; + } + + return PyInt_FromLong(v); + } + case T_I16: { + int16_t v = readI16(input); + if (INT_CONV_ERROR_OCCURRED(v)) { + return NULL; + } + return PyInt_FromLong(v); + } + case T_I32: { + int32_t v = readI32(input); + if (INT_CONV_ERROR_OCCURRED(v)) { + return NULL; + } + return PyInt_FromLong(v); + } + + case T_I64: { + int64_t v = readI64(input); + if (INT_CONV_ERROR_OCCURRED(v)) { + return NULL; + } + // TODO(dreiss): Find out if we can take this fastpath always when + // sizeof(long) == sizeof(long long). + if (CHECK_RANGE(v, LONG_MIN, LONG_MAX)) { + return PyInt_FromLong((long) v); + } + + return PyLong_FromLongLong(v); + } + + case T_DOUBLE: { + double v = readDouble(input); + if (v == -1.0 && PyErr_Occurred()) { + return false; + } + return PyFloat_FromDouble(v); + } + + case T_STRING: { + Py_ssize_t len = readI32(input); + char* buf; + if (!readBytes(input, &buf, len)) { + return NULL; + } + + return PyString_FromStringAndSize(buf, len); + } + + case T_LIST: + case T_SET: { + SetListTypeArgs parsedargs; + int32_t len; + PyObject* ret = NULL; + int i; + + if (!parse_set_list_args(&parsedargs, typeargs)) { + return NULL; + } + + if (!checkTypeByte(input, parsedargs.element_type)) { + return NULL; + } + + len = readI32(input); + if (!check_ssize_t_32(len)) { + return NULL; + } + + ret = PyList_New(len); + if (!ret) { + return NULL; + } + + for (i = 0; i < len; i++) { + PyObject* item = decode_val(input, parsedargs.element_type, parsedargs.typeargs); + if (!item) { + Py_DECREF(ret); + return NULL; + } + PyList_SET_ITEM(ret, i, item); + } + + // TODO(dreiss): Consider biting the bullet and making two separate cases + // for list and set, avoiding this post facto conversion. + if (type == T_SET) { + PyObject* setret; +#if (PY_VERSION_HEX < 0x02050000) + // hack needed for older versions + setret = PyObject_CallFunctionObjArgs((PyObject*)&PySet_Type, ret, NULL); +#else + // official version + setret = PySet_New(ret); +#endif + Py_DECREF(ret); + return setret; + } + return ret; + } + + case T_MAP: { + int32_t len; + int i; + MapTypeArgs parsedargs; + PyObject* ret = NULL; + + if (!parse_map_args(&parsedargs, typeargs)) { + return NULL; + } + + if (!checkTypeByte(input, parsedargs.ktag)) { + return NULL; + } + if (!checkTypeByte(input, parsedargs.vtag)) { + return NULL; + } + + len = readI32(input); + if (!check_ssize_t_32(len)) { + return false; + } + + ret = PyDict_New(); + if (!ret) { + goto error; + } + + for (i = 0; i < len; i++) { + PyObject* k = NULL; + PyObject* v = NULL; + k = decode_val(input, parsedargs.ktag, parsedargs.ktypeargs); + if (k == NULL) { + goto loop_error; + } + v = decode_val(input, parsedargs.vtag, parsedargs.vtypeargs); + if (v == NULL) { + goto loop_error; + } + if (PyDict_SetItem(ret, k, v) == -1) { + goto loop_error; + } + + Py_DECREF(k); + Py_DECREF(v); + continue; + + // Yuck! Destructors, anyone? 
+ loop_error: + Py_XDECREF(k); + Py_XDECREF(v); + goto error; + } + + return ret; + + error: + Py_XDECREF(ret); + return NULL; + } + + case T_STRUCT: { + StructTypeArgs parsedargs; + PyObject* ret; + if (!parse_struct_args(&parsedargs, typeargs)) { + return NULL; + } + + ret = PyObject_CallObject(parsedargs.klass, NULL); + if (!ret) { + return NULL; + } + + if (!decode_struct(input, ret, parsedargs.spec)) { + Py_DECREF(ret); + return NULL; + } + + return ret; + } + + case T_STOP: + case T_VOID: + case T_UTF16: + case T_UTF8: + case T_U64: + default: + PyErr_SetString(PyExc_TypeError, "Unexpected TType"); + return NULL; + } +} + + +/* --- TOP-LEVEL WRAPPER FOR INPUT -- */ + +static PyObject* +decode_binary(PyObject *self, PyObject *args) { + PyObject* output_obj = NULL; + PyObject* transport = NULL; + PyObject* typeargs = NULL; + StructTypeArgs parsedargs; + DecodeBuffer input = {0, 0}; + + if (!PyArg_ParseTuple(args, "OOO", &output_obj, &transport, &typeargs)) { + return NULL; + } + + if (!parse_struct_args(&parsedargs, typeargs)) { + return NULL; + } + + if (!decode_buffer_from_obj(&input, transport)) { + return NULL; + } + + if (!decode_struct(&input, output_obj, parsedargs.spec)) { + free_decodebuf(&input); + return NULL; + } + + free_decodebuf(&input); + + Py_RETURN_NONE; +} + +/* ====== END READING FUNCTIONS ====== */ + + +/* -- PYTHON MODULE SETUP STUFF --- */ + +static PyMethodDef ThriftFastBinaryMethods[] = { + + {"encode_binary", encode_binary, METH_VARARGS, ""}, + {"decode_binary", decode_binary, METH_VARARGS, ""}, + + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +PyMODINIT_FUNC +initfastbinary(void) { +#define INIT_INTERN_STRING(value) \ + do { \ + INTERN_STRING(value) = PyString_InternFromString(#value); \ + if(!INTERN_STRING(value)) return; \ + } while(0) + + INIT_INTERN_STRING(cstringio_buf); + INIT_INTERN_STRING(cstringio_refill); +#undef INIT_INTERN_STRING + + PycString_IMPORT; + if (PycStringIO == NULL) return; + + (void) Py_InitModule("thrift.protocol.fastbinary", ThriftFastBinaryMethods); +} diff --git a/thrift/server/THttpServer.py b/thrift/server/THttpServer.py new file mode 100644 index 0000000..f6d1ff5 --- /dev/null +++ b/thrift/server/THttpServer.py @@ -0,0 +1,87 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import http.server + +from thrift.server import TServer +from thrift.transport import TTransport + + +class ResponseException(Exception): + """Allows handlers to override the HTTP response + + Normally, THttpServer always sends a 200 response. If a handler wants + to override this behavior (e.g., to simulate a misconfigured or + overloaded web server during testing), it can raise a ResponseException. + The function passed to the constructor will be called with the + RequestHandler as its only argument. 
+ """ + def __init__(self, handler): + self.handler = handler + + +class THttpServer(TServer.TServer): + """A simple HTTP-based Thrift server + + This class is not very performant, but it is useful (for example) for + acting as a mock version of an Apache-based PHP Thrift endpoint. + """ + def __init__(self, + processor, + server_address, + inputProtocolFactory, + outputProtocolFactory=None, + server_class=http.server.HTTPServer): + """Set up protocol factories and HTTP server. + + See BaseHTTPServer for server_address. + See TServer for protocol factories. + """ + if outputProtocolFactory is None: + outputProtocolFactory = inputProtocolFactory + + TServer.TServer.__init__(self, processor, None, None, None, + inputProtocolFactory, outputProtocolFactory) + + thttpserver = self + + class RequestHander(http.server.BaseHTTPRequestHandler): + def do_POST(self): + # Don't care about the request path. + itrans = TTransport.TFileObjectTransport(self.rfile) + otrans = TTransport.TFileObjectTransport(self.wfile) + itrans = TTransport.TBufferedTransport( + itrans, int(self.headers['Content-Length'])) + otrans = TTransport.TMemoryBuffer() + iprot = thttpserver.inputProtocolFactory.getProtocol(itrans) + oprot = thttpserver.outputProtocolFactory.getProtocol(otrans) + try: + thttpserver.processor.process(iprot, oprot) + except ResponseException as exn: + exn.handler(self) + else: + self.send_response(200) + self.send_header("content-type", "application/x-thrift") + self.end_headers() + self.wfile.write(otrans.getvalue()) + + self.httpd = server_class(server_address, RequestHander) + + def serve(self): + self.httpd.serve_forever() diff --git a/thrift/server/TNonblockingServer.py b/thrift/server/TNonblockingServer.py new file mode 100644 index 0000000..764c9ae --- /dev/null +++ b/thrift/server/TNonblockingServer.py @@ -0,0 +1,346 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +"""Implementation of non-blocking server. + +The main idea of the server is to receive and send requests +only from the main thread. 
+ +The thread poool should be sized for concurrent tasks, not +maximum connections +""" +import threading +import socket +import queue +import select +import struct +import logging + +from thrift.transport import TTransport +from thrift.protocol.TBinaryProtocol import TBinaryProtocolFactory + +__all__ = ['TNonblockingServer'] + + +class Worker(threading.Thread): + """Worker is a small helper to process incoming connection.""" + + def __init__(self, queue): + threading.Thread.__init__(self) + self.queue = queue + + def run(self): + """Process queries from task queue, stop if processor is None.""" + while True: + try: + processor, iprot, oprot, otrans, callback = self.queue.get() + if processor is None: + break + processor.process(iprot, oprot) + callback(True, otrans.getvalue()) + except Exception: + logging.exception("Exception while processing request") + callback(False, '') + +WAIT_LEN = 0 +WAIT_MESSAGE = 1 +WAIT_PROCESS = 2 +SEND_ANSWER = 3 +CLOSED = 4 + + +def locked(func): + """Decorator which locks self.lock.""" + def nested(self, *args, **kwargs): + self.lock.acquire() + try: + return func(self, *args, **kwargs) + finally: + self.lock.release() + return nested + + +def socket_exception(func): + """Decorator close object on socket.error.""" + def read(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + except socket.error: + self.close() + return read + + +class Connection: + """Basic class is represented connection. + + It can be in state: + WAIT_LEN --- connection is reading request len. + WAIT_MESSAGE --- connection is reading request. + WAIT_PROCESS --- connection has just read whole request and + waits for call ready routine. + SEND_ANSWER --- connection is sending answer string (including length + of answer). + CLOSED --- socket was closed and connection should be deleted. + """ + def __init__(self, new_socket, wake_up): + self.socket = new_socket + self.socket.setblocking(False) + self.status = WAIT_LEN + self.len = 0 + self.message = '' + self.lock = threading.Lock() + self.wake_up = wake_up + + def _read_len(self): + """Reads length of request. 
+ + It's a safer alternative to self.socket.recv(4) + """ + read = self.socket.recv(4 - len(self.message)) + if len(read) == 0: + # if we read 0 bytes and self.message is empty, then + # the client closed the connection + if len(self.message) != 0: + logging.error("can't read frame size from socket") + self.close() + return + self.message += read + if len(self.message) == 4: + self.len, = struct.unpack('!i', self.message) + if self.len < 0: + logging.error("negative frame size, it seems client " + "doesn't use FramedTransport") + self.close() + elif self.len == 0: + logging.error("empty frame, it's really strange") + self.close() + else: + self.message = '' + self.status = WAIT_MESSAGE + + @socket_exception + def read(self): + """Reads data from stream and switch state.""" + assert self.status in (WAIT_LEN, WAIT_MESSAGE) + if self.status == WAIT_LEN: + self._read_len() + # go back to the main loop here for simplicity instead of + # falling through, even though there is a good chance that + # the message is already available + elif self.status == WAIT_MESSAGE: + read = self.socket.recv(self.len - len(self.message)) + if len(read) == 0: + logging.error("can't read frame from socket (get %d of " + "%d bytes)" % (len(self.message), self.len)) + self.close() + return + self.message += read + if len(self.message) == self.len: + self.status = WAIT_PROCESS + + @socket_exception + def write(self): + """Writes data from socket and switch state.""" + assert self.status == SEND_ANSWER + sent = self.socket.send(self.message) + if sent == len(self.message): + self.status = WAIT_LEN + self.message = '' + self.len = 0 + else: + self.message = self.message[sent:] + + @locked + def ready(self, all_ok, message): + """Callback function for switching state and waking up main thread. + + This function is the only function witch can be called asynchronous. + + The ready can switch Connection to three states: + WAIT_LEN if request was oneway. + SEND_ANSWER if request was processed in normal way. + CLOSED if request throws unexpected exception. + + The one wakes up main thread. + """ + assert self.status == WAIT_PROCESS + if not all_ok: + self.close() + self.wake_up() + return + self.len = '' + if len(message) == 0: + # it was a oneway request, do not write answer + self.message = '' + self.status = WAIT_LEN + else: + self.message = struct.pack('!i', len(message)) + message + self.status = SEND_ANSWER + self.wake_up() + + @locked + def is_writeable(self): + """Return True if connection should be added to write list of select""" + return self.status == SEND_ANSWER + + # it's not necessary, but... 
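    # The framing used here matches TFramedTransport: every request and reply
    # is preceded by its length as a big-endian 32-bit int, i.e. a frame is
    # struct.pack('!i', len(payload)) + payload, which is what _read_len()
    # above decodes and ready() reproduces for the answer.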
+ @locked + def is_readable(self): + """Return True if connection should be added to read list of select""" + return self.status in (WAIT_LEN, WAIT_MESSAGE) + + @locked + def is_closed(self): + """Returns True if connection is closed.""" + return self.status == CLOSED + + def fileno(self): + """Returns the file descriptor of the associated socket.""" + return self.socket.fileno() + + def close(self): + """Closes connection""" + self.status = CLOSED + self.socket.close() + + +class TNonblockingServer: + """Non-blocking server.""" + + def __init__(self, + processor, + lsocket, + inputProtocolFactory=None, + outputProtocolFactory=None, + threads=10): + self.processor = processor + self.socket = lsocket + self.in_protocol = inputProtocolFactory or TBinaryProtocolFactory() + self.out_protocol = outputProtocolFactory or self.in_protocol + self.threads = int(threads) + self.clients = {} + self.tasks = queue.Queue() + self._read, self._write = socket.socketpair() + self.prepared = False + self._stop = False + + def setNumThreads(self, num): + """Set the number of worker threads that should be created.""" + # implement ThreadPool interface + assert not self.prepared, "Can't change number of threads after start" + self.threads = num + + def prepare(self): + """Prepares server for serve requests.""" + if self.prepared: + return + self.socket.listen() + for _ in range(self.threads): + thread = Worker(self.tasks) + thread.setDaemon(True) + thread.start() + self.prepared = True + + def wake_up(self): + """Wake up main thread. + + The server usualy waits in select call in we should terminate one. + The simplest way is using socketpair. + + Select always wait to read from the first socket of socketpair. + + In this case, we can just write anything to the second socket from + socketpair. + """ + self._write.send('1') + + def stop(self): + """Stop the server. + + This method causes the serve() method to return. stop() may be invoked + from within your handler, or from another thread. + + After stop() is called, serve() will return but the server will still + be listening on the socket. serve() may then be called again to resume + processing requests. Alternatively, close() may be called after + serve() returns to close the server socket and shutdown all worker + threads. + """ + self._stop = True + self.wake_up() + + def _select(self): + """Does select on open connections.""" + readable = [self.socket.handle.fileno(), self._read.fileno()] + writable = [] + for i, connection in list(self.clients.items()): + if connection.is_readable(): + readable.append(connection.fileno()) + if connection.is_writeable(): + writable.append(connection.fileno()) + if connection.is_closed(): + del self.clients[i] + return select.select(readable, writable, readable) + + def handle(self): + """Handle requests. + + WARNING! 
You must call prepare() BEFORE calling handle() + """ + assert self.prepared, "You have to call prepare before handle" + rset, wset, xset = self._select() + for readable in rset: + if readable == self._read.fileno(): + # don't care i just need to clean readable flag + self._read.recv(1024) + elif readable == self.socket.handle.fileno(): + client = self.socket.accept().handle + self.clients[client.fileno()] = Connection(client, + self.wake_up) + else: + connection = self.clients[readable] + connection.read() + if connection.status == WAIT_PROCESS: + itransport = TTransport.TMemoryBuffer(connection.message) + otransport = TTransport.TMemoryBuffer() + iprot = self.in_protocol.getProtocol(itransport) + oprot = self.out_protocol.getProtocol(otransport) + self.tasks.put([self.processor, iprot, oprot, + otransport, connection.ready]) + for writeable in wset: + self.clients[writeable].write() + for oob in xset: + self.clients[oob].close() + del self.clients[oob] + + def close(self): + """Closes the server.""" + for _ in range(self.threads): + self.tasks.put([None, None, None, None, None]) + self.socket.close() + self.prepared = False + + def serve(self): + """Serve requests. + + Serve requests forever, or until stop() is called. + """ + self._stop = False + self.prepare() + while not self._stop: + self.handle() diff --git a/thrift/server/TProcessPoolServer.py b/thrift/server/TProcessPoolServer.py new file mode 100644 index 0000000..3714ead --- /dev/null +++ b/thrift/server/TProcessPoolServer.py @@ -0,0 +1,119 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + + +import logging +from multiprocessing import Process, Value, Condition, reduction + +from .TServer import TServer +from thrift.transport.TTransport import TTransportException +import collections + + +class TProcessPoolServer(TServer): + """Server with a fixed size pool of worker subprocesses to service requests + + Note that if you need shared state between the handlers - it's up to you! 
+ Written by Dvir Volk, doat.com + """ + def __init__(self, *args): + TServer.__init__(self, *args) + self.numWorkers = 10 + self.workers = [] + self.isRunning = Value('b', False) + self.stopCondition = Condition() + self.postForkCallback = None + + def setPostForkCallback(self, callback): + if not isinstance(callback, collections.Callable): + raise TypeError("This is not a callback!") + self.postForkCallback = callback + + def setNumWorkers(self, num): + """Set the number of worker threads that should be created""" + self.numWorkers = num + + def workerProcess(self): + """Loop getting clients from the shared queue and process them""" + if self.postForkCallback: + self.postForkCallback() + + while self.isRunning.value: + try: + client = self.serverTransport.accept() + self.serveClient(client) + except (KeyboardInterrupt, SystemExit): + return 0 + except Exception as x: + logging.exception(x) + + def serveClient(self, client): + """Process input/output from a client for as long as possible""" + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + iprot = self.inputProtocolFactory.getProtocol(itrans) + oprot = self.outputProtocolFactory.getProtocol(otrans) + + try: + while True: + self.processor.process(iprot, oprot) + except TTransportException as tx: + pass + except Exception as x: + logging.exception(x) + + itrans.close() + otrans.close() + + def serve(self): + """Start workers and put into queue""" + # this is a shared state that can tell the workers to exit when False + self.isRunning.value = True + + # first bind and listen to the port + self.serverTransport.listen() + + # fork the children + for i in range(self.numWorkers): + try: + w = Process(target=self.workerProcess) + w.daemon = True + w.start() + self.workers.append(w) + except Exception as x: + logging.exception(x) + + # wait until the condition is set by stop() + while True: + self.stopCondition.acquire() + try: + self.stopCondition.wait() + break + except (SystemExit, KeyboardInterrupt): + break + except Exception as x: + logging.exception(x) + + self.isRunning.value = False + + def stop(self): + self.isRunning.value = False + self.stopCondition.acquire() + self.stopCondition.notify() + self.stopCondition.release() diff --git a/thrift/server/TServer.py b/thrift/server/TServer.py new file mode 100644 index 0000000..9e340f4 --- /dev/null +++ b/thrift/server/TServer.py @@ -0,0 +1,269 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +import queue +import logging +import os +import sys +import threading +import traceback + +from thrift.Thrift import TProcessor +from thrift.protocol import TBinaryProtocol +from thrift.transport import TTransport + + +class TServer: + """Base interface for a server, which must have a serve() method. + + Three constructors for all servers: + 1) (processor, serverTransport) + 2) (processor, serverTransport, transportFactory, protocolFactory) + 3) (processor, serverTransport, + inputTransportFactory, outputTransportFactory, + inputProtocolFactory, outputProtocolFactory) + """ + def __init__(self, *args): + if (len(args) == 2): + self.__initArgs__(args[0], args[1], + TTransport.TTransportFactoryBase(), + TTransport.TTransportFactoryBase(), + TBinaryProtocol.TBinaryProtocolFactory(), + TBinaryProtocol.TBinaryProtocolFactory()) + elif (len(args) == 4): + self.__initArgs__(args[0], args[1], args[2], args[2], args[3], args[3]) + elif (len(args) == 6): + self.__initArgs__(args[0], args[1], args[2], args[3], args[4], args[5]) + + def __initArgs__(self, processor, serverTransport, + inputTransportFactory, outputTransportFactory, + inputProtocolFactory, outputProtocolFactory): + self.processor = processor + self.serverTransport = serverTransport + self.inputTransportFactory = inputTransportFactory + self.outputTransportFactory = outputTransportFactory + self.inputProtocolFactory = inputProtocolFactory + self.outputProtocolFactory = outputProtocolFactory + + def serve(self): + pass + + +class TSimpleServer(TServer): + """Simple single-threaded server that just pumps around one transport.""" + + def __init__(self, *args): + TServer.__init__(self, *args) + + def serve(self): + self.serverTransport.listen() + while True: + client = self.serverTransport.accept() + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + iprot = self.inputProtocolFactory.getProtocol(itrans) + oprot = self.outputProtocolFactory.getProtocol(otrans) + try: + while True: + self.processor.process(iprot, oprot) + except TTransport.TTransportException as tx: + pass + except Exception as x: + logging.exception(x) + + itrans.close() + otrans.close() + + +class TThreadedServer(TServer): + """Threaded server that spawns a new thread per each connection.""" + + def __init__(self, *args, **kwargs): + TServer.__init__(self, *args) + self.daemon = kwargs.get("daemon", False) + + def serve(self): + self.serverTransport.listen() + while True: + try: + client = self.serverTransport.accept() + t = threading.Thread(target=self.handle, args=(client,)) + t.setDaemon(self.daemon) + t.start() + except KeyboardInterrupt: + raise + except Exception as x: + logging.exception(x) + + def handle(self, client): + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + iprot = self.inputProtocolFactory.getProtocol(itrans) + oprot = self.outputProtocolFactory.getProtocol(otrans) + try: + while True: + self.processor.process(iprot, oprot) + except TTransport.TTransportException as tx: + pass + except Exception as x: + logging.exception(x) + + itrans.close() + otrans.close() + + +class TThreadPoolServer(TServer): + """Server with a fixed size pool of threads which service requests.""" + + def __init__(self, *args, **kwargs): + TServer.__init__(self, *args) + self.clients = queue.Queue() + self.threads = 10 + self.daemon = kwargs.get("daemon", False) + + def setNumThreads(self, num): + """Set the number of worker threads that 
should be created""" + self.threads = num + + def serveThread(self): + """Loop around getting clients from the shared queue and process them.""" + while True: + try: + client = self.clients.get() + self.serveClient(client) + except Exception as x: + logging.exception(x) + + def serveClient(self, client): + """Process input/output from a client for as long as possible""" + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + iprot = self.inputProtocolFactory.getProtocol(itrans) + oprot = self.outputProtocolFactory.getProtocol(otrans) + try: + while True: + self.processor.process(iprot, oprot) + except TTransport.TTransportException as tx: + pass + except Exception as x: + logging.exception(x) + + itrans.close() + otrans.close() + + def serve(self): + """Start a fixed number of worker threads and put client into a queue""" + for i in range(self.threads): + try: + t = threading.Thread(target=self.serveThread) + t.setDaemon(self.daemon) + t.start() + except Exception as x: + logging.exception(x) + + # Pump the socket for clients + self.serverTransport.listen() + while True: + try: + client = self.serverTransport.accept() + self.clients.put(client) + except Exception as x: + logging.exception(x) + + +class TForkingServer(TServer): + """A Thrift server that forks a new process for each request + + This is more scalable than the threaded server as it does not cause + GIL contention. + + Note that this has different semantics from the threading server. + Specifically, updates to shared variables will no longer be shared. + It will also not work on windows. + + This code is heavily inspired by SocketServer.ForkingMixIn in the + Python stdlib. + """ + def __init__(self, *args): + TServer.__init__(self, *args) + self.children = [] + + def serve(self): + def try_close(file): + try: + file.close() + except IOError as e: + logging.warning(e, exc_info=True) + + self.serverTransport.listen() + while True: + client = self.serverTransport.accept() + try: + pid = os.fork() + + if pid: # parent + # add before collect, otherwise you race w/ waitpid + self.children.append(pid) + self.collect_children() + + # Parent must close socket or the connection may not get + # closed promptly + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + try_close(itrans) + try_close(otrans) + else: + itrans = self.inputTransportFactory.getTransport(client) + otrans = self.outputTransportFactory.getTransport(client) + + iprot = self.inputProtocolFactory.getProtocol(itrans) + oprot = self.outputProtocolFactory.getProtocol(otrans) + + ecode = 0 + try: + try: + while True: + self.processor.process(iprot, oprot) + except TTransport.TTransportException as tx: + pass + except Exception as e: + logging.exception(e) + ecode = 1 + finally: + try_close(itrans) + try_close(otrans) + + os._exit(ecode) + + except TTransport.TTransportException as tx: + pass + except Exception as x: + logging.exception(x) + + def collect_children(self): + while self.children: + try: + pid, status = os.waitpid(0, os.WNOHANG) + except os.error: + pid = None + + if pid: + self.children.remove(pid) + else: + break diff --git a/thrift/server/__init__.py b/thrift/server/__init__.py new file mode 100644 index 0000000..1bf6e25 --- /dev/null +++ b/thrift/server/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +__all__ = ['TServer', 'TNonblockingServer'] diff --git a/thrift/transport/THttpClient.py b/thrift/transport/THttpClient.py new file mode 100644 index 0000000..20be338 --- /dev/null +++ b/thrift/transport/THttpClient.py @@ -0,0 +1,149 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import http.client +import os +import socket +import sys +import urllib.request, urllib.parse, urllib.error +import urllib.parse +import warnings + +from io import StringIO + +from .TTransport import * + + +class THttpClient(TTransportBase): + """Http implementation of TTransport base.""" + + def __init__(self, uri_or_host, port=None, path=None): + """THttpClient supports two different types constructor parameters. + + THttpClient(host, port, path) - deprecated + THttpClient(uri) + + Only the second supports https. 
+ """ + if port is not None: + warnings.warn( + "Please use the THttpClient('http://host:port/path') syntax", + DeprecationWarning, + stacklevel=2) + self.host = uri_or_host + self.port = port + assert path + self.path = path + self.scheme = 'http' + else: + parsed = urllib.parse.urlparse(uri_or_host) + self.scheme = parsed.scheme + assert self.scheme in ('http', 'https') + if self.scheme == 'http': + self.port = parsed.port or http.client.HTTP_PORT + elif self.scheme == 'https': + self.port = parsed.port or http.client.HTTPS_PORT + self.host = parsed.hostname + self.path = parsed.path + if parsed.query: + self.path += '?%s' % parsed.query + self.__wbuf = StringIO() + self.__http = None + self.__timeout = None + self.__custom_headers = None + + def open(self): + if self.scheme == 'http': + self.__http = http.client.HTTP(self.host, self.port) + else: + self.__http = http.client.HTTPS(self.host, self.port) + + def close(self): + self.__http.close() + self.__http = None + + def isOpen(self): + return self.__http is not None + + def setTimeout(self, ms): + if not hasattr(socket, 'getdefaulttimeout'): + raise NotImplementedError + + if ms is None: + self.__timeout = None + else: + self.__timeout = ms / 1000.0 + + def setCustomHeaders(self, headers): + self.__custom_headers = headers + + def read(self, sz): + return self.__http.file.read(sz) + + def write(self, buf): + self.__wbuf.write(buf) + + def __withTimeout(f): + def _f(*args, **kwargs): + orig_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(args[0].__timeout) + result = f(*args, **kwargs) + socket.setdefaulttimeout(orig_timeout) + return result + return _f + + def flush(self): + if self.isOpen(): + self.close() + self.open() + + # Pull data out of buffer + data = self.__wbuf.getvalue() + self.__wbuf = StringIO() + + # HTTP request + self.__http.putrequest('POST', self.path) + + # Write headers + self.__http.putheader('Host', self.host) + self.__http.putheader('Content-Type', 'application/x-thrift') + self.__http.putheader('Content-Length', str(len(data))) + + if not self.__custom_headers or 'User-Agent' not in self.__custom_headers: + user_agent = 'Python/THttpClient' + script = os.path.basename(sys.argv[0]) + if script: + user_agent = '%s (%s)' % (user_agent, urllib.parse.quote(script)) + self.__http.putheader('User-Agent', user_agent) + + if self.__custom_headers: + for key, val in self.__custom_headers.items(): + self.__http.putheader(key, val) + + self.__http.endheaders() + + # Write payload + self.__http.send(data) + + # Get reply to flush the request + self.code, self.message, self.headers = self.__http.getreply() + + # Decorate if we know how to timeout + if hasattr(socket, 'getdefaulttimeout'): + flush = __withTimeout(flush) diff --git a/thrift/transport/TSSLSocket.py b/thrift/transport/TSSLSocket.py new file mode 100644 index 0000000..e0ff4f9 --- /dev/null +++ b/thrift/transport/TSSLSocket.py @@ -0,0 +1,202 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import os +import socket +import ssl + +from thrift.transport import TSocket +from thrift.transport.TTransport import TTransportException + + +class TSSLSocket(TSocket.TSocket): + """ + SSL implementation of client-side TSocket + + This class creates outbound sockets wrapped using the + python standard ssl module for encrypted connections. + + The protocol used is set using the class variable + SSL_VERSION, which must be one of ssl.PROTOCOL_* and + defaults to ssl.PROTOCOL_TLSv1 for greatest security. + """ + SSL_VERSION = ssl.PROTOCOL_TLSv1 + + def __init__(self, + host='localhost', + port=9090, + validate=True, + ca_certs=None, + unix_socket=None): + """Create SSL TSocket + + @param validate: Set to False to disable SSL certificate validation + @type validate: bool + @param ca_certs: Filename to the Certificate Authority pem file, possibly a + file downloaded from: http://curl.haxx.se/ca/cacert.pem This is passed to + the ssl_wrap function as the 'ca_certs' parameter. + @type ca_certs: str + + Raises an IOError exception if validate is True and the ca_certs file is + None, not present or unreadable. + """ + self.validate = validate + self.is_valid = False + self.peercert = None + if not validate: + self.cert_reqs = ssl.CERT_NONE + else: + self.cert_reqs = ssl.CERT_REQUIRED + self.ca_certs = ca_certs + if validate: + if ca_certs is None or not os.access(ca_certs, os.R_OK): + raise IOError('Certificate Authority ca_certs file "%s" ' + 'is not readable, cannot validate SSL ' + 'certificates.' % (ca_certs)) + TSocket.TSocket.__init__(self, host, port, unix_socket) + + def open(self): + try: + res0 = self._resolveAddr() + for res in res0: + sock_family, sock_type = res[0:2] + ip_port = res[4] + plain_sock = socket.socket(sock_family, sock_type) + self.handle = ssl.wrap_socket(plain_sock, + ssl_version=self.SSL_VERSION, + do_handshake_on_connect=True, + ca_certs=self.ca_certs, + cert_reqs=self.cert_reqs) + self.handle.settimeout(self._timeout) + try: + self.handle.connect(ip_port) + except socket.error as e: + if res is not res0[-1]: + continue + else: + raise e + break + except socket.error as e: + if self._unix_socket: + message = 'Could not connect to secure socket %s' % self._unix_socket + else: + message = 'Could not connect to %s:%d' % (self.host, self.port) + raise TTransportException(type=TTransportException.NOT_OPEN, + message=message) + if self.validate: + self._validate_cert() + + def _validate_cert(self): + """internal method to validate the peer's SSL certificate, and to check the + commonName of the certificate to ensure it matches the hostname we + used to make this connection. Does not support subjectAltName records + in certificates. + + raises TTransportException if the certificate fails validation. 
+ """ + cert = self.handle.getpeercert() + self.peercert = cert + if 'subject' not in cert: + raise TTransportException( + type=TTransportException.NOT_OPEN, + message='No SSL certificate found from %s:%s' % (self.host, self.port)) + fields = cert['subject'] + for field in fields: + # ensure structure we get back is what we expect + if not isinstance(field, tuple): + continue + cert_pair = field[0] + if len(cert_pair) < 2: + continue + cert_key, cert_value = cert_pair[0:2] + if cert_key != 'commonName': + continue + certhost = cert_value + if certhost == self.host: + # success, cert commonName matches desired hostname + self.is_valid = True + return + else: + raise TTransportException( + type=TTransportException.UNKNOWN, + message='Hostname we connected to "%s" doesn\'t match certificate ' + 'provided commonName "%s"' % (self.host, certhost)) + raise TTransportException( + type=TTransportException.UNKNOWN, + message='Could not validate SSL certificate from ' + 'host "%s". Cert=%s' % (self.host, cert)) + + +class TSSLServerSocket(TSocket.TServerSocket): + """SSL implementation of TServerSocket + + This uses the ssl module's wrap_socket() method to provide SSL + negotiated encryption. + """ + SSL_VERSION = ssl.PROTOCOL_TLSv1 + + def __init__(self, + host=None, + port=9090, + certfile='cert.pem', + unix_socket=None): + """Initialize a TSSLServerSocket + + @param certfile: filename of the server certificate, defaults to cert.pem + @type certfile: str + @param host: The hostname or IP to bind the listen socket to, + i.e. 'localhost' for only allowing local network connections. + Pass None to bind to all interfaces. + @type host: str + @param port: The port to listen on for inbound connections. + @type port: int + """ + self.setCertfile(certfile) + TSocket.TServerSocket.__init__(self, host, port) + + def setCertfile(self, certfile): + """Set or change the server certificate file used to wrap new connections. + + @param certfile: The filename of the server certificate, + i.e. '/etc/certs/server.pem' + @type certfile: str + + Raises an IOError exception if the certfile is not present or unreadable. + """ + if not os.access(certfile, os.R_OK): + raise IOError('No such certfile found: %s' % (certfile)) + self.certfile = certfile + + def accept(self): + plain_client, addr = self.handle.accept() + try: + client = ssl.wrap_socket(plain_client, certfile=self.certfile, + server_side=True, ssl_version=self.SSL_VERSION) + except ssl.SSLError as ssl_exc: + # failed handshake/ssl wrap, close socket to client + plain_client.close() + # raise ssl_exc + # We can't raise the exception, because it kills most TServer derived + # serve() methods. + # Instead, return None, and let the TServer instance deal with it in + # other exception handling. (but TSimpleServer dies anyway) + return None + result = TSocket.TSocket() + result.setHandle(client) + return result diff --git a/thrift/transport/TSocket.py b/thrift/transport/TSocket.py new file mode 100644 index 0000000..82ce568 --- /dev/null +++ b/thrift/transport/TSocket.py @@ -0,0 +1,176 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import errno +import os +import socket +import sys + +from .TTransport import * + + +class TSocketBase(TTransportBase): + def _resolveAddr(self): + if self._unix_socket is not None: + return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None, + self._unix_socket)] + else: + return socket.getaddrinfo(self.host, + self.port, + socket.AF_UNSPEC, + socket.SOCK_STREAM, + 0, + socket.AI_PASSIVE | socket.AI_ADDRCONFIG) + + def close(self): + if self.handle: + self.handle.close() + self.handle = None + + +class TSocket(TSocketBase): + """Socket implementation of TTransport base.""" + + def __init__(self, host='localhost', port=9090, unix_socket=None): + """Initialize a TSocket + + @param host(str) The host to connect to. + @param port(int) The (TCP) port to connect to. + @param unix_socket(str) The filename of a unix socket to connect to. + (host and port will be ignored.) + """ + self.host = host + self.port = port + self.handle = None + self._unix_socket = unix_socket + self._timeout = None + + def setHandle(self, h): + self.handle = h + + def isOpen(self): + return self.handle is not None + + def setTimeout(self, ms): + if ms is None: + self._timeout = None + else: + self._timeout = ms / 1000.0 + + if self.handle is not None: + self.handle.settimeout(self._timeout) + + def open(self): + try: + res0 = self._resolveAddr() + for res in res0: + self.handle = socket.socket(res[0], res[1]) + self.handle.settimeout(self._timeout) + try: + self.handle.connect(res[4]) + except socket.error as e: + if res is not res0[-1]: + continue + else: + raise e + break + except socket.error as e: + if self._unix_socket: + message = 'Could not connect to socket %s' % self._unix_socket + else: + message = 'Could not connect to %s:%d' % (self.host, self.port) + raise TTransportException(type=TTransportException.NOT_OPEN, + message=message) + + def read(self, sz): + try: + buff = self.handle.recv(sz) + except socket.error as e: + if (e.args[0] == errno.ECONNRESET and + (sys.platform == 'darwin' or sys.platform.startswith('freebsd'))): + # freebsd and Mach don't follow POSIX semantic of recv + # and fail with ECONNRESET if peer performed shutdown. + # See corresponding comment and code in TSocket::read() + # in lib/cpp/src/transport/TSocket.cpp. + self.close() + # Trigger the check to raise the END_OF_FILE exception below. 
+ buff = '' + else: + raise + if len(buff) == 0: + raise TTransportException(type=TTransportException.END_OF_FILE, + message='TSocket read 0 bytes') + return buff + + def write(self, buff): + if not self.handle: + raise TTransportException(type=TTransportException.NOT_OPEN, + message='Transport not open') + sent = 0 + have = len(buff) + while sent < have: + plus = self.handle.send(buff) + if plus == 0: + raise TTransportException(type=TTransportException.END_OF_FILE, + message='TSocket sent 0 bytes') + sent += plus + buff = buff[plus:] + + def flush(self): + pass + + +class TServerSocket(TSocketBase, TServerTransportBase): + """Socket implementation of TServerTransport base.""" + + def __init__(self, host=None, port=9090, unix_socket=None): + self.host = host + self.port = port + self._unix_socket = unix_socket + self.handle = None + + def listen(self): + res0 = self._resolveAddr() + for res in res0: + if res[0] is socket.AF_INET6 or res is res0[-1]: + break + + # We need remove the old unix socket if the file exists and + # nobody is listening on it. + if self._unix_socket: + tmp = socket.socket(res[0], res[1]) + try: + tmp.connect(res[4]) + except socket.error as err: + eno, message = err.args + if eno == errno.ECONNREFUSED: + os.unlink(res[4]) + + self.handle = socket.socket(res[0], res[1]) + self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if hasattr(self.handle, 'settimeout'): + self.handle.settimeout(None) + self.handle.bind(res[4]) + self.handle.listen(128) + + def accept(self): + client, addr = self.handle.accept() + result = TSocket() + result.setHandle(client) + return result diff --git a/thrift/transport/TTransport.py b/thrift/transport/TTransport.py new file mode 100644 index 0000000..dcedd3d --- /dev/null +++ b/thrift/transport/TTransport.py @@ -0,0 +1,333 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from six import BytesIO +from struct import pack, unpack +from thrift.Thrift import TException + + +class TTransportException(TException): + """Custom Transport Exception class""" + + UNKNOWN = 0 + NOT_OPEN = 1 + ALREADY_OPEN = 2 + TIMED_OUT = 3 + END_OF_FILE = 4 + + def __init__(self, type=UNKNOWN, message=None): + TException.__init__(self, message) + self.type = type + + +class TTransportBase: + """Base class for Thrift transport layer.""" + + def isOpen(self): + pass + + def open(self): + pass + + def close(self): + pass + + def read(self, sz): + pass + + def readAll(self, sz): + buff = b'' + have = 0 + while (have < sz): + chunk = self.read(sz - have) + have += len(chunk) + buff += chunk + + if len(chunk) == 0: + raise EOFError() + + return buff + + def write(self, buf): + pass + + def flush(self): + pass + + +# This class should be thought of as an interface. 
+class CReadableTransport: + """base class for transports that are readable from C""" + + # TODO(dreiss): Think about changing this interface to allow us to use + # a (Python, not c) StringIO instead, because it allows + # you to write after reading. + + # NOTE: This is a classic class, so properties will NOT work + # correctly for setting. + @property + def cstringio_buf(self): + """A cStringIO buffer that contains the current chunk we are reading.""" + pass + + def cstringio_refill(self, partialread, reqlen): + """Refills cstringio_buf. + + Returns the currently used buffer (which can but need not be the same as + the old cstringio_buf). partialread is what the C code has read from the + buffer, and should be inserted into the buffer before any more reads. The + return value must be a new, not borrowed reference. Something along the + lines of self._buf should be fine. + + If reqlen bytes can't be read, throw EOFError. + """ + pass + + +class TServerTransportBase: + """Base class for Thrift server transports.""" + + def listen(self): + pass + + def accept(self): + pass + + def close(self): + pass + + +class TTransportFactoryBase: + """Base class for a Transport Factory""" + + def getTransport(self, trans): + return trans + + +class TBufferedTransportFactory: + """Factory transport that builds buffered transports""" + + def getTransport(self, trans): + buffered = TBufferedTransport(trans) + return buffered + + +class TBufferedTransport(TTransportBase, CReadableTransport): + """Class that wraps another transport and buffers its I/O. + + The implementation uses a (configurable) fixed-size read buffer + but buffers all writes until a flush is performed. + """ + DEFAULT_BUFFER = 4096 + + def __init__(self, trans, rbuf_size=DEFAULT_BUFFER): + self.__trans = trans + self.__wbuf = BytesIO() + self.__rbuf = BytesIO("") + self.__rbuf_size = rbuf_size + + def isOpen(self): + return self.__trans.isOpen() + + def open(self): + return self.__trans.open() + + def close(self): + return self.__trans.close() + + def read(self, sz): + ret = self.__rbuf.read(sz) + if len(ret) != 0: + return ret + + self.__rbuf = BytesIO(self.__trans.read(max(sz, self.__rbuf_size))) + return self.__rbuf.read(sz) + + def write(self, buf): + self.__wbuf.write(buf) + + def flush(self): + out = self.__wbuf.getvalue() + # reset wbuf before write/flush to preserve state on underlying failure + self.__wbuf = BytesIO() + self.__trans.write(out) + self.__trans.flush() + + # Implement the CReadableTransport interface. + @property + def cstringio_buf(self): + return self.__rbuf + + def cstringio_refill(self, partialread, reqlen): + retstring = partialread + if reqlen < self.__rbuf_size: + # try to make a read of as much as we can. + retstring += self.__trans.read(self.__rbuf_size) + + # but make sure we do read reqlen bytes. + if len(retstring) < reqlen: + retstring += self.__trans.readAll(reqlen - len(retstring)) + + self.__rbuf = BytesIO(retstring) + return self.__rbuf + + +class TMemoryBuffer(TTransportBase, CReadableTransport): + """Wraps a cStringIO object as a TTransport. + + NOTE: Unlike the C++ version of this class, you cannot write to it + then immediately read from it. If you want to read from a + TMemoryBuffer, you must either pass a string to the constructor. + TODO(dreiss): Make this work like the C++ version. 
+ """ + + def __init__(self, value=None): + """value -- a value to read from for stringio + + If value is set, this will be a transport for reading, + otherwise, it is for writing""" + if value is not None: + self._buffer = BytesIO(value) + else: + self._buffer = BytesIO() + + def isOpen(self): + return not self._buffer.closed + + def open(self): + pass + + def close(self): + self._buffer.close() + + def read(self, sz): + return self._buffer.read(sz) + + def write(self, buf): + try: + self._buffer.write(buf) + except TypeError: + self._buffer.write(buf.encode('cp437')) + + def flush(self): + pass + + def getvalue(self): + return self._buffer.getvalue() + + # Implement the CReadableTransport interface. + @property + def cstringio_buf(self): + return self._buffer + + def cstringio_refill(self, partialread, reqlen): + # only one shot at reading... + raise EOFError() + + +class TFramedTransportFactory: + """Factory transport that builds framed transports""" + + def getTransport(self, trans): + framed = TFramedTransport(trans) + return framed + + +class TFramedTransport(TTransportBase, CReadableTransport): + """Class that wraps another transport and frames its I/O when writing.""" + + def __init__(self, trans,): + self.__trans = trans + self.__rbuf = BytesIO() + self.__wbuf = BytesIO() + + def isOpen(self): + return self.__trans.isOpen() + + def open(self): + return self.__trans.open() + + def close(self): + return self.__trans.close() + + def read(self, sz): + ret = self.__rbuf.read(sz) + if len(ret) != 0: + return ret + + self.readFrame() + return self.__rbuf.read(sz) + + def readFrame(self): + buff = self.__trans.readAll(4) + sz, = unpack('!i', buff) + self.__rbuf = BytesIO(self.__trans.readAll(sz)) + + def write(self, buf): + self.__wbuf.write(buf) + + def flush(self): + wout = self.__wbuf.getvalue() + wsz = len(wout) + # reset wbuf before write/flush to preserve state on underlying failure + self.__wbuf = BytesIO() + # N.B.: Doing this string concatenation is WAY cheaper than making + # two separate calls to the underlying socket object. Socket writes in + # Python turn out to be REALLY expensive, but it seems to do a pretty + # good job of managing string buffer operations without excessive copies + buf = pack("!i", wsz) + wout + self.__trans.write(buf) + self.__trans.flush() + + # Implement the CReadableTransport interface. + @property + def cstringio_buf(self): + return self.__rbuf + + def cstringio_refill(self, prefix, reqlen): + # self.__rbuf will already be empty here because fastbinary doesn't + # ask for a refill until the previous buffer is empty. Therefore, + # we can start reading new frames immediately. + while len(prefix) < reqlen: + self.readFrame() + prefix += self.__rbuf.getvalue() + self.__rbuf = BytesIO(prefix) + return self.__rbuf + + +class TFileObjectTransport(TTransportBase): + """Wraps a file-like object to make it work as a Thrift transport.""" + + def __init__(self, fileobj): + self.fileobj = fileobj + + def isOpen(self): + return True + + def close(self): + self.fileobj.close() + + def read(self, sz): + return self.fileobj.read(sz) + + def write(self, buf): + self.fileobj.write(buf) + + def flush(self): + self.fileobj.flush() diff --git a/thrift/transport/TTwisted.py b/thrift/transport/TTwisted.py new file mode 100644 index 0000000..ffe5494 --- /dev/null +++ b/thrift/transport/TTwisted.py @@ -0,0 +1,221 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from io import StringIO + +from zope.interface import implements, Interface, Attribute +from twisted.internet.protocol import Protocol, ServerFactory, ClientFactory, \ + connectionDone +from twisted.internet import defer +from twisted.protocols import basic +from twisted.python import log +from twisted.web import server, resource, http + +from thrift.transport import TTransport + + +class TMessageSenderTransport(TTransport.TTransportBase): + + def __init__(self): + self.__wbuf = StringIO() + + def write(self, buf): + self.__wbuf.write(buf) + + def flush(self): + msg = self.__wbuf.getvalue() + self.__wbuf = StringIO() + self.sendMessage(msg) + + def sendMessage(self, message): + raise NotImplementedError + + +class TCallbackTransport(TMessageSenderTransport): + + def __init__(self, func): + TMessageSenderTransport.__init__(self) + self.func = func + + def sendMessage(self, message): + self.func(message) + + +class ThriftClientProtocol(basic.Int32StringReceiver): + + MAX_LENGTH = 2 ** 31 - 1 + + def __init__(self, client_class, iprot_factory, oprot_factory=None): + self._client_class = client_class + self._iprot_factory = iprot_factory + if oprot_factory is None: + self._oprot_factory = iprot_factory + else: + self._oprot_factory = oprot_factory + + self.recv_map = {} + self.started = defer.Deferred() + + def dispatch(self, msg): + self.sendString(msg) + + def connectionMade(self): + tmo = TCallbackTransport(self.dispatch) + self.client = self._client_class(tmo, self._oprot_factory) + self.started.callback(self.client) + + def connectionLost(self, reason=connectionDone): + for k, v in self.client._reqs.items(): + tex = TTransport.TTransportException( + type=TTransport.TTransportException.END_OF_FILE, + message='Connection closed') + v.errback(tex) + + def stringReceived(self, frame): + tr = TTransport.TMemoryBuffer(frame) + iprot = self._iprot_factory.getProtocol(tr) + (fname, mtype, rseqid) = iprot.readMessageBegin() + + try: + method = self.recv_map[fname] + except KeyError: + method = getattr(self.client, 'recv_' + fname) + self.recv_map[fname] = method + + method(iprot, mtype, rseqid) + + +class ThriftServerProtocol(basic.Int32StringReceiver): + + MAX_LENGTH = 2 ** 31 - 1 + + def dispatch(self, msg): + self.sendString(msg) + + def processError(self, error): + self.transport.loseConnection() + + def processOk(self, _, tmo): + msg = tmo.getvalue() + + if len(msg) > 0: + self.dispatch(msg) + + def stringReceived(self, frame): + tmi = TTransport.TMemoryBuffer(frame) + tmo = TTransport.TMemoryBuffer() + + iprot = self.factory.iprot_factory.getProtocol(tmi) + oprot = self.factory.oprot_factory.getProtocol(tmo) + + d = self.factory.processor.process(iprot, oprot) + d.addCallbacks(self.processOk, self.processError, + callbackArgs=(tmo,)) + + +class IThriftServerFactory(Interface): + + processor = 
Attribute("Thrift processor") + + iprot_factory = Attribute("Input protocol factory") + + oprot_factory = Attribute("Output protocol factory") + + +class IThriftClientFactory(Interface): + + client_class = Attribute("Thrift client class") + + iprot_factory = Attribute("Input protocol factory") + + oprot_factory = Attribute("Output protocol factory") + + +class ThriftServerFactory(ServerFactory): + + implements(IThriftServerFactory) + + protocol = ThriftServerProtocol + + def __init__(self, processor, iprot_factory, oprot_factory=None): + self.processor = processor + self.iprot_factory = iprot_factory + if oprot_factory is None: + self.oprot_factory = iprot_factory + else: + self.oprot_factory = oprot_factory + + +class ThriftClientFactory(ClientFactory): + + implements(IThriftClientFactory) + + protocol = ThriftClientProtocol + + def __init__(self, client_class, iprot_factory, oprot_factory=None): + self.client_class = client_class + self.iprot_factory = iprot_factory + if oprot_factory is None: + self.oprot_factory = iprot_factory + else: + self.oprot_factory = oprot_factory + + def buildProtocol(self, addr): + p = self.protocol(self.client_class, self.iprot_factory, + self.oprot_factory) + p.factory = self + return p + + +class ThriftResource(resource.Resource): + + allowedMethods = ('POST',) + + def __init__(self, processor, inputProtocolFactory, + outputProtocolFactory=None): + resource.Resource.__init__(self) + self.inputProtocolFactory = inputProtocolFactory + if outputProtocolFactory is None: + self.outputProtocolFactory = inputProtocolFactory + else: + self.outputProtocolFactory = outputProtocolFactory + self.processor = processor + + def getChild(self, path, request): + return self + + def _cbProcess(self, _, request, tmo): + msg = tmo.getvalue() + request.setResponseCode(http.OK) + request.setHeader("content-type", "application/x-thrift") + request.write(msg) + request.finish() + + def render_POST(self, request): + request.content.seek(0, 0) + data = request.content.read() + tmi = TTransport.TMemoryBuffer(data) + tmo = TTransport.TMemoryBuffer() + + iprot = self.inputProtocolFactory.getProtocol(tmi) + oprot = self.outputProtocolFactory.getProtocol(tmo) + + d = self.processor.process(iprot, oprot) + d.addCallback(self._cbProcess, request, tmo) + return server.NOT_DONE_YET diff --git a/thrift/transport/TZlibTransport.py b/thrift/transport/TZlibTransport.py new file mode 100644 index 0000000..a21dc80 --- /dev/null +++ b/thrift/transport/TZlibTransport.py @@ -0,0 +1,248 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +"""TZlibTransport provides a compressed transport and transport factory +class, using the python standard library zlib module to implement +data compression. 
+""" + + +import zlib +from io import StringIO +from .TTransport import TTransportBase, CReadableTransport + + +class TZlibTransportFactory(object): + """Factory transport that builds zlib compressed transports. + + This factory caches the last single client/transport that it was passed + and returns the same TZlibTransport object that was created. + + This caching means the TServer class will get the _same_ transport + object for both input and output transports from this factory. + (For non-threaded scenarios only, since the cache only holds one object) + + The purpose of this caching is to allocate only one TZlibTransport where + only one is really needed (since it must have separate read/write buffers), + and makes the statistics from getCompSavings() and getCompRatio() + easier to understand. + """ + # class scoped cache of last transport given and zlibtransport returned + _last_trans = None + _last_z = None + + def getTransport(self, trans, compresslevel=9): + """Wrap a transport, trans, with the TZlibTransport + compressed transport class, returning a new + transport to the caller. + + @param compresslevel: The zlib compression level, ranging + from 0 (no compression) to 9 (best compression). Defaults to 9. + @type compresslevel: int + + This method returns a TZlibTransport which wraps the + passed C{trans} TTransport derived instance. + """ + if trans == self._last_trans: + return self._last_z + ztrans = TZlibTransport(trans, compresslevel) + self._last_trans = trans + self._last_z = ztrans + return ztrans + + +class TZlibTransport(TTransportBase, CReadableTransport): + """Class that wraps a transport with zlib, compressing writes + and decompresses reads, using the python standard + library zlib module. + """ + # Read buffer size for the python fastbinary C extension, + # the TBinaryProtocolAccelerated class. + DEFAULT_BUFFSIZE = 4096 + + def __init__(self, trans, compresslevel=9): + """Create a new TZlibTransport, wrapping C{trans}, another + TTransport derived object. + + @param trans: A thrift transport object, i.e. a TSocket() object. + @type trans: TTransport + @param compresslevel: The zlib compression level, ranging + from 0 (no compression) to 9 (best compression). Default is 9. + @type compresslevel: int + """ + self.__trans = trans + self.compresslevel = compresslevel + self.__rbuf = StringIO() + self.__wbuf = StringIO() + self._init_zlib() + self._init_stats() + + def _reinit_buffers(self): + """Internal method to initialize/reset the internal StringIO objects + for read and write buffers. + """ + self.__rbuf = StringIO() + self.__wbuf = StringIO() + + def _init_stats(self): + """Internal method to reset the internal statistics counters + for compression ratios and bandwidth savings. + """ + self.bytes_in = 0 + self.bytes_out = 0 + self.bytes_in_comp = 0 + self.bytes_out_comp = 0 + + def _init_zlib(self): + """Internal method for setting up the zlib compression and + decompression objects. + """ + self._zcomp_read = zlib.decompressobj() + self._zcomp_write = zlib.compressobj(self.compresslevel) + + def getCompRatio(self): + """Get the current measured compression ratios (in,out) from + this transport. 
+ + Returns a tuple of: + (inbound_compression_ratio, outbound_compression_ratio) + + The compression ratios are computed as: + compressed / uncompressed + + E.g., data that compresses by 10x will have a ratio of: 0.10 + and data that compresses to half of ts original size will + have a ratio of 0.5 + + None is returned if no bytes have yet been processed in + a particular direction. + """ + r_percent, w_percent = (None, None) + if self.bytes_in > 0: + r_percent = self.bytes_in_comp / self.bytes_in + if self.bytes_out > 0: + w_percent = self.bytes_out_comp / self.bytes_out + return (r_percent, w_percent) + + def getCompSavings(self): + """Get the current count of saved bytes due to data + compression. + + Returns a tuple of: + (inbound_saved_bytes, outbound_saved_bytes) + + Note: if compression is actually expanding your + data (only likely with very tiny thrift objects), then + the values returned will be negative. + """ + r_saved = self.bytes_in - self.bytes_in_comp + w_saved = self.bytes_out - self.bytes_out_comp + return (r_saved, w_saved) + + def isOpen(self): + """Return the underlying transport's open status""" + return self.__trans.isOpen() + + def open(self): + """Open the underlying transport""" + self._init_stats() + return self.__trans.open() + + def listen(self): + """Invoke the underlying transport's listen() method""" + self.__trans.listen() + + def accept(self): + """Accept connections on the underlying transport""" + return self.__trans.accept() + + def close(self): + """Close the underlying transport,""" + self._reinit_buffers() + self._init_zlib() + return self.__trans.close() + + def read(self, sz): + """Read up to sz bytes from the decompressed bytes buffer, and + read from the underlying transport if the decompression + buffer is empty. + """ + ret = self.__rbuf.read(sz) + if len(ret) > 0: + return ret + # keep reading from transport until something comes back + while True: + if self.readComp(sz): + break + ret = self.__rbuf.read(sz) + return ret + + def readComp(self, sz): + """Read compressed data from the underlying transport, then + decompress it and append it to the internal StringIO read buffer + """ + zbuf = self.__trans.read(sz) + zbuf = self._zcomp_read.unconsumed_tail + zbuf + buf = self._zcomp_read.decompress(zbuf) + self.bytes_in += len(zbuf) + self.bytes_in_comp += len(buf) + old = self.__rbuf.read() + self.__rbuf = StringIO(old + buf) + if len(old) + len(buf) == 0: + return False + return True + + def write(self, buf): + """Write some bytes, putting them into the internal write + buffer for eventual compression. 
+ """ + self.__wbuf.write(buf) + + def flush(self): + """Flush any queued up data in the write buffer and ensure the + compression buffer is flushed out to the underlying transport + """ + wout = self.__wbuf.getvalue() + if len(wout) > 0: + zbuf = self._zcomp_write.compress(wout) + self.bytes_out += len(wout) + self.bytes_out_comp += len(zbuf) + else: + zbuf = '' + ztail = self._zcomp_write.flush(zlib.Z_SYNC_FLUSH) + self.bytes_out_comp += len(ztail) + if (len(zbuf) + len(ztail)) > 0: + self.__wbuf = StringIO() + self.__trans.write(zbuf + ztail) + self.__trans.flush() + + @property + def cstringio_buf(self): + """Implement the CReadableTransport interface""" + return self.__rbuf + + def cstringio_refill(self, partialread, reqlen): + """Implement the CReadableTransport interface for refill""" + retstring = partialread + if reqlen < self.DEFAULT_BUFFSIZE: + retstring += self.read(self.DEFAULT_BUFFSIZE) + while len(retstring) < reqlen: + retstring += self.read(reqlen - len(retstring)) + self.__rbuf = StringIO(retstring) + return self.__rbuf diff --git a/thrift/transport/__init__.py b/thrift/transport/__init__.py new file mode 100644 index 0000000..c9596d9 --- /dev/null +++ b/thrift/transport/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +__all__ = ['TTransport', 'TSocket', 'THttpClient', 'TZlibTransport']
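For reviewers who have not worked with the Thrift transport stack before, here is a minimal usage sketch (not part of the patch itself) of how the classes added above compose. It uses only TMemoryBuffer and TFramedTransport from thrift/transport/TTransport.py, runs entirely in memory, and the byte strings are arbitrary placeholders:

from thrift.transport import TTransport

# Frame two writes into an in-memory buffer; flush() prepends the
# 4-byte big-endian length header that framed peers expect.
mem_out = TTransport.TMemoryBuffer()
framed_out = TTransport.TFramedTransport(mem_out)
framed_out.write(b"hello ")
framed_out.write(b"world")
framed_out.flush()

# Read the framed payload back through another framed transport
# wrapping a fresh memory buffer that holds the same bytes.
mem_in = TTransport.TMemoryBuffer(mem_out.getvalue())
framed_in = TTransport.TFramedTransport(mem_in)
assert framed_in.read(11) == b"hello world"

In a real client the outer TFramedTransport (or TBufferedTransport) would wrap a TSocket or THttpClient instead of a TMemoryBuffer, with a protocol such as TBinaryProtocol layered on top for serialization.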