commit 1600357d51178eee75f57217c345e9383fd35a00 Author: XANTRONIX Development Date: Mon Feb 10 20:05:00 2025 -0500 Initial commit diff --git a/db/nexrad.sql b/db/nexrad.sql new file mode 100644 index 0000000..79ea1b2 --- /dev/null +++ b/db/nexrad.sql @@ -0,0 +1,16 @@ +begin transaction; + +create table nexrad_station ( + id INTEGER PRIMARY KEY NOT NULL, + wban INTEGER, + call TEXT NOT NULL, + name TEXT NOT NULL, + site_elevation FLOAT NOT NULL, + tower_height FLOAT NOT NULL +); + +select AddGeometryColumn( + 'nexrad_station', 'coord', 4326, 'POINT', 'XY' +); + +commit; diff --git a/lib/nexrad/coord.py b/lib/nexrad/coord.py new file mode 100644 index 0000000..92e57e0 --- /dev/null +++ b/lib/nexrad/coord.py @@ -0,0 +1,11 @@ +import re + +class Coord(): + __slots__ = 'lat', 'lon', + + def __init__(self, lat: float, lon: float): + self.lat: float = lat + self.lon: float = lon + + def __str__(self): + return '%f, %f' % (self.lat, self.lon) diff --git a/lib/nexrad/db.py b/lib/nexrad/db.py new file mode 100644 index 0000000..b4fe427 --- /dev/null +++ b/lib/nexrad/db.py @@ -0,0 +1,186 @@ +import enum +import sqlite3 + +from xenu_nntp.config import Config + +class DatabaseOrder(enum.Enum): + DEFAULT = 0 + ASC = 1 + DESC = 2 + +class DatabaseTable(): + __slots__ = '__dirty__', '__dirty_columns__', + + def __init__(self): + object.__setattr__(self, '__dirty__', False) + object.__setattr__(self, '__dirty_columns__', dict()) + + def __reset__(self): + object.__setattr__(self, '__dirty__', False) + object.__setattr__(self, '__dirty_columns__', {k: 0 for k in self.columns}) + + def __setattr__(self, k, v): + object.__setattr__(self, k, v) + + values = object.__getattribute__(self, '__dirty_columns__') + + if k in values: + object.__setattr__(self, '__dirty__', True) + values[k] += 1 + +class DatabaseTableCursor(): + __slots__ = 'cr', 'table', + + def __init__(self, table, cr): + self.cr = cr + self.table = table + + def __getattr__(self, name): + return getattr(self.cr, name) + + 
def __map__(self, row):
+        fn = getattr(self.table, '__from_row__', None)
+
+        if fn is not None:
+            return fn(row)
+
+        obj = self.table()
+
+        for name in self.table.columns:
+            try:
+                setattr(obj, name, row[name])
+            except IndexError:
+                setattr(obj, name, None)
+
+        obj.__reset__()
+
+        return obj
+
+    def fetchone(self):
+        row = self.cr.fetchone()
+
+        return self.__map__(row) if row is not None else None
+
+    def fetchall(self):
+        return map(self.__map__, self.cr.fetchall())
+
+    def each(self):
+        while True:
+            obj = self.fetchone()
+
+            if obj is None:
+                break
+
+            yield obj
+
+class Database():
+    __slots__ = 'db',
+
+    def __init__(self, db):
+        self.db = db
+
+    def __getattr__(self, name):
+        return getattr(self.db, name)
+
+    @staticmethod
+    def connect(path: str):
+        db = sqlite3.connect(path)
+        db.row_factory = sqlite3.Row
+
+        return Database(db)
+
+    def add(self, obj):
+        table = type(obj)
+        sql = f"insert into {table.name} ("
+        sql += ", ".join([c for c in table.columns if c != table.key])
+        sql += ') values ('
+        sql += ", ".join(['?' for c in table.columns if c != table.key])
+        sql += f") returning {table.key}"
+
+        fn = getattr(obj, '__values__', None)
+
+        if fn is not None:
+            values = fn()
+        else:
+            values = list()
+
+            for column in table.columns:
+                if column != table.key:
+                    values.append(getattr(obj, column, None))
+
+        cr = self.db.execute(sql, values)
+
+        setattr(obj, table.key, cr.fetchone()[0])
+
+    def update(self, obj):
+        if not obj.__dirty__:
+            return
+
+        dirty = [k for k in obj.__dirty_columns__ if obj.__dirty_columns__[k] > 0]
+        table = type(obj)
+        sql = f"update {table.name} set "
+        sql += ", ".join([f"{k} = ?" for k in dirty])
+        sql += f" where {table.key} = ?"
+ + values = [getattr(obj, k) for k in dirty] + values.append(getattr(obj, table.key)) + + self.db.execute(sql, values) + + def query_sql(self, table, sql, values=list()): + cr = DatabaseTableCursor(table, self.db.cursor()) + cr.execute(sql, values) + + return cr + + def query(self, table, values=dict(), order_by=list()): + sql = "select %s from %s" % ( + ', '.join(table.columns), + table.name + ) + + if len(values) > 0: + sql += " where " + sql += " and ".join([f"{table.name}.{k} = ?" for k in values]) + + if len(order_by) > 0: + sql += " order by" + + first = True + + for column, order in order_by: + if first: + first = False + else: + sql += ", " + + if order is None or order is DatabaseOrder.DEFAULT: + sql += f" {column}" + elif order is DatabaseOrder.ASC: + sql += f" {column} asc" + elif order is DatabaseOrder.DESC: + sql += f" {column} desc" + + return self.query_sql(table, sql, list(values.values())) + + def get(self, table, values: dict=dict()): + return self.query(table, values).fetchone() + + def _call(self, table, fn: str, column: str, values: dict=dict()) -> int: + sql = f"select {fn}({column}) as ret from {table.name}" + + if len(values) > 0: + sql += " where " + sql += " and ".join([f"{k} = ?" 
for k in values]) + + row = self.db.execute(sql, list(values.values())).fetchone() + + return row[0] if row is not None else None + + def min(self, table, column: str, values: dict=dict()) -> int: + return self._call(table, 'min', column, values) + + def max(self, table, column: str, values: dict=dict()) -> int: + return self._call(table, 'max', column, values) + + def count(self, table, values: dict=dict()) -> int: + return self._call(table, 'count', table.key, values) diff --git a/lib/nexrad/station.py b/lib/nexrad/station.py new file mode 100644 index 0000000..4673ae9 --- /dev/null +++ b/lib/nexrad/station.py @@ -0,0 +1,77 @@ +import csv +import re + +from nexrad.coord import Coord + +RE_PARSE = re.compile(r'^\s*(\d+)([NS]*)\s+/\s+(\d+)([EW]*)\s*$') + +def parse_int(text: str): + size = len(text) + + degree = int(text[0:size-4]) + minute = int(text[size-4:size-2]) + second = int(text[size-2:]) + + return degree + (minute / 60) + (second / 3600) + +def parse(text: str): + match = RE_PARSE.match(text) + + if match is None: + raise Exception('Invalid coordinates \'%s\'' % text) + + sign_lat = -1 if match[2] == 'S' else 1 + sign_lon = 1 if match[4] == 'E' else -1 + + lat = parse_int(match[1]) + lon = parse_int(match[3]) + + return Coord(sign_lat * lat, sign_lon * lon) + +class Station(): + __slots__ = 'wban', 'call', 'name', 'coord', 'site_elevation', 'tower_height', + + wban: int + call: str + name: str + coord: Coord + site_elevation: float + tower_height: float + + @staticmethod + def from_tsv_row(row: list): + station = Station() + station.wban = int(row[0]) if row[0] != 'PENDING' else None + station.call = row[1] + station.name = row[2] + station.coord = parse(row[3]) + station.site_elevation = 0.3048 * float(row[4]) + station.tower_height = float(row[5]) + + return station + + @staticmethod + def each_from_tsv(file: str): + with open(file) as fh: + reader = csv.reader(fh, delimiter='\t') + + for row in reader: + for i in range(0, len(row)): + row[i] = 
row[i].rstrip() + + yield Station.from_tsv_row(row) + + def add_to_db(self, db): + sql = """ + insert into nexrad_station ( + wban, call, name, site_elevation, tower_height, coord + ) values ( + ?, ?, ?, ?, ?, MakePoint(?, ?, 4326) + ) + """ + + db.execute(sql, ( + self.wban, self.call, self.name, + self.site_elevation, self.tower_height, + self.coord.lat, self.coord.lon + )) diff --git a/lib/nexrad/storm.py b/lib/nexrad/storm.py new file mode 100644 index 0000000..3fe57d6 --- /dev/null +++ b/lib/nexrad/storm.py @@ -0,0 +1,135 @@ +import gzip +import csv +import datetime + +from nexrad.coord import Coord + +def time_from_str(time: str): + size = len(time) + + if size <= 2: + return ( + int(time) % 24, + 0 + ) + + return ( + int(time[0:size-2]) % 24, + int(time[size-2:]) % 60 + ) + +def timestamp_from_parts(yearmonth: str, day: str, time: str) -> datetime.datetime: + hour, minute = time_from_str(time) + + return datetime.datetime( + year = int(yearmonth[0:4]), + month = int(yearmonth[4:6]), + day = int(day), + hour = hour, + minute = minute + ) + +def coord_from_str(text_lat: str, text_lon: str): + lat = 0.0 if text_lat == '' else float(text_lat) + lon = 0.0 if text_lon == '' else float(text_lon) + + return Coord(lat, lon) + +class StormReport(): + __slots__ = ( + 'timestamp_start', 'timestamp_end', 'episode_id', 'event_id', + 'state', 'event_type', 'wfo', 'coord_start', 'coord_end', + 'locale_start', 'locale_end', 'tornado_f_rating' + ) + + timestamp_start: datetime.datetime + timestamp_end: datetime.datetime + episode_id: int + event_id: int + state: str + event_type: str + wfo: str + coord_start: Coord + coord_end: Coord + locale_start: str + locale_end: str + tornado_f_rating: str + + @staticmethod + def from_csv_row(row: dict): + report = StormReport() + + report.timestamp_start = timestamp_from_parts(row['BEGIN_YEARMONTH'], row['BEGIN_DAY'], row['BEGIN_TIME']) + report.timestamp_end = timestamp_from_parts(row['END_YEARMONTH'], row['END_DAY'], row['END_TIME']) 
+        report.episode_id = int(row['EPISODE_ID'])
+        report.event_id = int(row['EVENT_ID'])
+        report.state = row['STATE']
+        report.event_type = row['EVENT_TYPE']
+        report.wfo = row['WFO']
+        report.coord_start = coord_from_str(row['BEGIN_LAT'], row['BEGIN_LON'])
+        report.coord_end = coord_from_str(row['END_LAT'], row['END_LON'])
+        report.locale_start = row['BEGIN_LOCATION']
+        report.locale_end = row['END_LOCATION']
+        report.tornado_f_rating = row['TOR_F_SCALE']
+
+        return report
+
+    @staticmethod
+    def each_from_csv_file(file: str):
+        with gzip.open(file, 'rt') as fh:
+            reader = csv.DictReader(fh, dialect='excel')
+
+            for row in reader:
+                yield StormReport.from_csv_row(row)
+
+    RADAR_SIGNIFICANT_EVENT_TYPES = {
+        'Blizzard': True,
+        'Coastal Flood': True,
+        'Debris Flow': True,
+        'Dust Storm': True,
+        'Flash Flood': True,
+        'Flood': True,
+        'Funnel Cloud': True,
+        'Hail': True,
+        'Heavy Rain': True,
+        'Heavy Snow': True,
+        'Hurricane (Typhoon)': True,
+        'Ice Storm': True,
+        'Lake-Effect Snow': True,
+        'Lightning': True,
+        'Marine Hail': True,
+        'Marine Strong Wind': True,
+        'Marine Thunderstorm Wind': True,
+        'Seiche': True,
+        'Storm Surge/Tide': True,
+        'Thunderstorm Wind': True,
+        'Tornado': True,
+        'Tropical Depression': True,
+        'Tropical Storm': True,
+        'Waterspout': True,
+        'Winter Storm': True,
+    }
+
+    def is_radar_significant(self):
+        return self.event_type in self.RADAR_SIGNIFICANT_EVENT_TYPES
+
+    def nearest_station(self, db):
+        sql = """
+            select
+                id,
+                call,
+                ST_Distance(coord, MakePoint(?, ?, 4326), false) as distance,
+                ST_AsText(coord) as coord
+            from
+                nexrad_station
+            order by
+                distance asc
+            limit 3
+        """
+
+        print(sql)
+
+        st = db.execute(sql, (self.coord_start.lat,
+                              self.coord_start.lon))
+
+        return st.fetchone()