Report actual EVENT_ID field value while archiving

XANTRONIX Industrial 2025-02-13 19:19:27 -05:00
parent 9916fc8d9e
commit 2159f4fea0


@@ -23,11 +23,10 @@ db = Database.connect(args.db)
 bucket = S3Bucket()
 archive = Archive(getattr(args, 'archive-dir'), bucket)
 
-i = 0
-
 for path in getattr(args, 'csv-report-details'):
     for report in StormReport.each_from_csv_file(path):
-        i += 1
+        if report.coord_start is None or report.coord_end is None:
+            continue
 
         if not report.is_radar_significant():
             continue
@@ -37,13 +36,13 @@ for path in getattr(args, 'csv-report-details'):
         for key in bucket.each_matching_key(radars, report.timestamp_start, report.timestamp_end):
             if archive.is_downloaded(key):
                 if not args.quiet:
-                    print(f"{key} report {i} Already archived")
+                    print(f"{key} event {report.id} already archived")
             else:
                 if not args.quiet:
                     if args.dry_run:
-                        print(f"{key} report {i} Would archive")
+                        print(f"{key} report {report.id} would archive")
                     else:
-                        print(f"{key} report {i} Archiving")
+                        print(f"{key} report {report.id} archiving")
 
                 if not args.dry_run:
                     archive.download(key)
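
For context, a minimal sketch of the relationship the new log messages rely on: report.id is expected to carry the EVENT_ID value from the source CSV, so the archiver now prints a stable event identifier instead of a per-run loop counter. The StormReport class is not part of this diff, so the column names below (EVENT_ID, BEGIN_LAT, and so on, as found in storm event details CSVs) and the parsing shown are assumptions about how each_from_csv_file() might populate the fields, not the project's actual implementation.

import csv

class StormReport:
    """Hypothetical sketch of the StormReport fields this commit relies on.

    Assumes the input is a storm event details CSV whose EVENT_ID column
    becomes report.id; the real class is not shown in this commit.
    """

    def __init__(self, row: dict):
        self.id = row.get('EVENT_ID')  # value printed by the archiver
        self.coord_start = self._coord(row, 'BEGIN_LAT', 'BEGIN_LON')
        self.coord_end = self._coord(row, 'END_LAT', 'END_LON')

    @staticmethod
    def _coord(row: dict, lat_key: str, lon_key: str):
        # Rows without coordinates yield None, which the new
        # coord_start/coord_end check skips before matching any S3 keys.
        lat, lon = row.get(lat_key), row.get(lon_key)
        if not lat or not lon:
            return None
        return float(lat), float(lon)

    @classmethod
    def each_from_csv_file(cls, path: str):
        # Yield one StormReport per CSV row.
        with open(path, newline='') as fh:
            for row in csv.DictReader(fh):
                yield cls(row)

With report.id populated this way, the "already archived", "would archive", and "archiving" lines can be traced back to a specific row in the source CSV, which the removed counter i could not provide.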