Change-Id: I258b2f8c8ddb7f45b02b1035f6fa5006b9fabe1b
Former-commit-id: d20799e942 [formerly 1ae8fe99aa] [formerly 061583cd50 [formerly 0db3944372ecc381df4e8a918829530391d598db]]
Former-commit-id: 061583cd50
Former-commit-id: bc572fc945
This commit is contained in:
parent 5bb52ed311
commit 853526b594

6 changed files with 1379 additions and 982 deletions
@@ -67,34 +67,6 @@ class Procedure (SmartScript.SmartScript):
#                "Downtown Miami",
#            ],
#
#            # Threat statements can be overridden here; anything not overridden here
#            # will use the generic threat statements
#            'threatStatements': {
#                # Section name: "Wind", "Storm Surge", "Flooding Rain" or "Tornado"
#                "Wind": {
#                    # Threat level: "None", "Low", "Mod", "High" or "Extreme"
#                    "Extreme": {
#                        # tr: "nothing to see here", "recovery", "hunker down",
#                        #     "complete preparations" or "check plans"
#                        "hunker down": {
#                            # "Make plans: " will be prepended to this
#                            "planning": "For chance that wind could locally reach major " +\
#                                        "hurricane force; enact your emergency action plan " +\
#                                        "accordingly",
#                            # "Take action: " will be prepended to this
#                            "action": "For extremely dangerous and life threatening wind " +\
#                                      "to possibly occur; failure to act may result in " +\
#                                      "injury or loss of life",
#                            # "Prepare: " will be prepended to this
#                            "preparation": "Aggressively for chance of devastating to " +\
#                                           "catastrophic wind impacts based on threat " +\
#                                           "assessment that considers plausible worst " +\
#                                           "case scenario for safety",
#                        },
#                    },
#                },
#            },
#
#            # Potential impacts statements can be overridden here; anything not
#            # overridden here will use the generic potential impacts statements
#            'potentialImpactsStatements': {
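# Illustrative note (not part of this commit): the commented example above shows a
# per-zone 'threatStatements' override. A minimal sketch of how such an override
# could be layered over the generic statements; the helper name
# merge_threat_overrides and the exact dict shapes are assumptions for illustration.
def merge_threat_overrides(generic, overrides):
    # Walk section -> threat level -> tr and replace only the entries a site has
    # overridden; everything else falls back to the generic statements.
    merged = {}
    for section, levels in generic.items():
        merged[section] = {}
        for level, trDict in levels.items():
            merged[section][level] = dict(trDict)
            for tr, statements in overrides.get(section, {}).get(level, {}).items():
                merged[section][level][tr] = statements
    return merged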
@@ -151,7 +123,6 @@ TCV_AreaDictionary = {
        """
        self._zoneSkeletonContents = {
            'locationsAffected' : [],
            'threatStatements' : {},
            'potentialImpactsStatements' : {},
            'infoSection' : [],
            }
@@ -282,7 +282,7 @@ class TextProduct(GenericHazards.TextProduct):
        # Set up the time range for 0-120 hours

        # Create a time range from the issuanceHour out 120 hours
        startTime = self._calculateStartTime(time.localtime(self._issueTime_secs))
        startTime = self._calculateStartTime(time.gmtime(self._issueTime_secs))
        self._timeRange = self.makeTimeRange(startTime, startTime+120*3600)

        # Determine the time range list, making sure they are on hour boundaries
@@ -300,21 +300,142 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
day = localCreationTime[2]
|
||||
hour = localCreationTime[3]
|
||||
|
||||
# If we are more than halfway through a 6 hr period
|
||||
if hour % 6 > 3:
|
||||
adjust = 6 # move on to the next 6 hr block
|
||||
else:
|
||||
adjust = 0
|
||||
# print "MATT: _calculateStartTime %d adjust = %d" % (hour % 6, adjust)
|
||||
|
||||
# Now "truncate" to a 6-hourly boundary and compute startTime in local Time.
|
||||
hour = int (int(hour/6) * 6)
|
||||
hour = int( (hour/6) * 6) + adjust
|
||||
if hour > 23:
|
||||
hour -= 24
|
||||
elif hour < 0:
|
||||
hour += 24
|
||||
|
||||
startTime = absTimeYMD(year, month, day, hour)
|
||||
# Finally, convert back to GMT
|
||||
localTime, shift = self.determineTimeShift()
|
||||
startTime = startTime - shift
|
||||
|
||||
return startTime
|
||||
|
||||
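# Illustrative sketch (not from the commit): the logic above snaps the local issuance
# hour to a 6-hour synoptic boundary, moving forward to the next block when more than
# halfway through the current one. A standalone version for clarity:
def round_to_six_hour_block(hour):
    adjust = 6 if hour % 6 > 3 else 0
    rounded = (hour // 6) * 6 + adjust
    return rounded % 24  # wrap past midnight
# e.g. round_to_six_hour_block(14) -> 12, round_to_six_hour_block(16) -> 18,
#      round_to_six_hour_block(22) -> 0 (the next day's 00Z block)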
def _resolution(self):
|
||||
return 3
|
||||
|
||||
def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False,
|
||||
resolution=3):
|
||||
# Format period (a timeRange) resulting in
|
||||
# DAY + MORNING / AFTERNOON / EVENING / OVERNIGHT.
|
||||
# If wholePeriod, format FROM ... TO...
|
||||
|
||||
print "\nMATT Format period wholePeriod = %s, period = %s, useEndTime =%s" % (str(wholePeriod), str(period), str(useEndTime))
|
||||
if period is None: return ""
|
||||
if useEndTime:
|
||||
startTime = period.endTime()
|
||||
else:
|
||||
startTime = period.startTime()
|
||||
result = self._getTimeDesc(startTime, resolution, shiftToLocal)
|
||||
print "MATT result = '%s'" % (result)
|
||||
if wholePeriod:
|
||||
endResult = self._getTimeDesc(period.endTime(), resolution, shiftToLocal)
|
||||
print "MATT endResult = '%s'" % (endResult)
|
||||
if result != endResult:
|
||||
result=result + " TO "+ endResult
|
||||
return result
|
||||
|
||||
def _getTimeDesc(self, startTime, resolution=3, shiftToLocal=True):
|
||||
# Create phrase such as Tuesday morning
|
||||
# Handle today/tonight and "this" morning/afternoon/etc..
|
||||
#
|
||||
print "\n\n**************Formatting Period for GMT starttime ", startTime
|
||||
labels = self.Labels()["SimpleWorded"]
|
||||
currentTime = self._timeRange.startTime()
|
||||
print " currentTime = %s" % (repr(currentTime))
|
||||
if shiftToLocal:
|
||||
currentLocalTime, shift = self.determineTimeShift()
|
||||
startTime = startTime + shift
|
||||
currentTime = currentTime + shift
|
||||
print " shift, shifted start, current", shift/3600, startTime, currentTime
|
||||
hour = startTime.hour
|
||||
prevDay = False
|
||||
prevDay, partOfDay = self._getPartOfDay(hour, resolution)
|
||||
# if prevDay:
|
||||
# startTime = startTime - 24*3600
|
||||
todayFlag = currentTime.day == startTime.day
|
||||
if todayFlag:
|
||||
if partOfDay.upper().find("MIDNIGHT")>0: todayWord = "tonight"
|
||||
else: todayWord = "THIS"
|
||||
weekday = todayWord
|
||||
else:
|
||||
weekday = labels["Weekday"][startTime.weekday()]
|
||||
if partOfDay.find("<weekday>") >= 0:
|
||||
result = partOfDay.replace('<weekday>', weekday)
|
||||
else:
|
||||
result = weekday + " " + partOfDay
|
||||
print "Result = '%s'" % (result)
|
||||
return result
|
||||
|
||||
def _getPartOfDay(self, hour, resolution):
|
||||
prevDay = False
|
||||
if resolution == 3:
|
||||
if hour < 3:
|
||||
prevDay = True
|
||||
partOfDay = "early <weekday> morning"
|
||||
# partOfDay = "AFTER MIDNIGHT"
|
||||
elif hour < 6:
|
||||
partOfDay = "early <weekday> morning"
|
||||
elif hour < 9:
|
||||
partOfDay = "morning"
|
||||
elif hour < 12:
|
||||
partOfDay = "late <weekday> morning"
|
||||
elif hour < 15:
|
||||
partOfDay = "early <weekday> afternoon"
|
||||
elif hour < 18:
|
||||
partOfDay = "late <weekday> afternoon"
|
||||
elif hour < 21:
|
||||
partOfDay = "early <weekday> evening"
|
||||
else:
|
||||
partOfDay = "late <weekday> evening"
|
||||
else:
|
||||
if hour < 6:
|
||||
prevDay = True
|
||||
# partOfDay = "AFTER MIDNIGHT"
|
||||
partOfDay = "early <weekday> morning"
|
||||
elif hour < 12: partOfDay = "morning"
|
||||
elif hour < 18: partOfDay = "afternoon"
|
||||
else: partOfDay = "evening"
|
||||
return prevDay, partOfDay
|
||||
|
||||
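# Minimal usage sketch (assumption: `product` is an instance of this formatter):
# _getPartOfDay buckets the local start hour into a phrase template that
# _getTimeDesc later fills in, e.g.
#     product._getPartOfDay(7, resolution=3)   # -> (False, "morning")
#     product._getPartOfDay(13, resolution=3)  # -> (False, "early <weekday> afternoon")
#     product._getPartOfDay(2, resolution=6)   # -> (True, "early <weekday> morning")
# so a Tuesday 13:00 local start renders as "early Tuesday afternoon".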
###############################################################
|
||||
### Sampling and Statistics related methods
|
||||
|
||||
def moderated_dict(self, parmHisto, timeRange, componentName):
|
||||
"""
|
||||
Specifies the lower percentages and upper percentages of
|
||||
data to be thrown out for moderated stats.
|
||||
"""
|
||||
# COMMENT: This dictionary defines the low and high limit at which
|
||||
# outliers will be removed when calculating moderated stats.
|
||||
# By convention the first value listed is the percentage
|
||||
# allowed for low values and second the percentage allowed
|
||||
# for high values.
|
||||
|
||||
# Get Baseline thresholds
|
||||
dict = SampleAnalysis.SampleAnalysis.moderated_dict(
|
||||
self, parmHisto, timeRange, componentName)
|
||||
|
||||
# Change thresholds
|
||||
dict["Wind"] = (0, 15)
|
||||
dict["WindGust"] = (0, 15)
|
||||
dict["pws34int"] = (0, 5)
|
||||
dict["pws64int"] = (0, 5)
|
||||
dict["pwsD34"] = (0, 5)
|
||||
dict["pwsN34"] = (0, 5)
|
||||
dict["pwsD64"] = (0, 5)
|
||||
dict["pwsN64"] = (0, 5)
|
||||
dict["InundationMax"] = (0, 5)
|
||||
dict["InundationTiming"] = (0, 5)
|
||||
return dict
|
||||
|
||||
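# Illustrative sketch (not part of the commit): the (low, high) pairs above are the
# percentages of data trimmed from each end of the sampled distribution before a
# "moderated" statistic is computed, so dict["Wind"] = (0, 15) keeps the max after
# discarding the highest 15% of sampled values. A rough, unweighted approximation
# (the real SampleAnalysis works on area-weighted parm histograms):
def moderated_max(values, low_pct, high_pct):
    ordered = sorted(values)
    n = len(ordered)
    lo = int(n * low_pct / 100.0)
    hi = n - int(n * high_pct / 100.0)
    trimmed = ordered[lo:hi] or ordered  # guard against trimming everything away
    return max(trimmed)
# e.g. moderated_max([5, 8, 9, 10, 11, 12, 14, 15, 17, 90], 0, 15) -> 17
#      (the single high outlier, 90, falls in the discarded top 15%)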
def _getStatValue(self, statDict, element, method=None, dataType=None):
|
||||
stats = statDict.get(element, None)
|
||||
if stats is None: return None
|
||||
|
@@ -601,25 +722,44 @@ FORECASTER STEWART"""
|
|||
###############################################################
|
||||
### Advisory related methods
|
||||
|
||||
def _archiveCurrentAdvisory(self):
|
||||
### Determine if all actions are canceled
|
||||
allCAN = True
|
||||
for vtecRecord in self._getAllVTECRecords():
|
||||
action = vtecRecord['act']
|
||||
#print "vtecRecord", vtecRecord
|
||||
if action != "CAN":
|
||||
allCAN = False
|
||||
break
|
||||
def _synchronizeAdvisories(self):
|
||||
pathManager = PathManagerFactory.getPathManager()
|
||||
context = pathManager.getContextForSite(LocalizationType.CAVE_STATIC, self._site)
|
||||
|
||||
self._currentAdvisory["AllCAN"] = allCAN
|
||||
self._currentAdvisory["CreationTime"] = self._issueTime_secs
|
||||
self._currentAdvisory["Transmitted"] = False
|
||||
self._currentAdvisory["StormName"] = self._getStormNameFromTCP()
|
||||
self._saveAdvisory("pending", self._currentAdvisory)
|
||||
# Retrieving a directory causes synching to occur
|
||||
file = pathManager.getLocalizationFile(context, self._getAdvisoryPath()).getFile()
|
||||
|
||||
return file
|
||||
|
||||
def _getLocalAdvisoryDirectoryPath(self):
|
||||
file = self._synchronizeAdvisories()
|
||||
|
||||
path = file.getPath()
|
||||
return path
|
||||
|
||||
def _loadLastTwoAdvisories(self):
|
||||
advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath()
|
||||
filenames = os.listdir(advisoryDirectoryPath)
|
||||
allAdvisories = filter(lambda filename: filename[-5:] == ".json", filenames)
|
||||
|
||||
stormAdvisories = filter(lambda filename: self._getStormNameFromTCP() in filename,
|
||||
allAdvisories)
|
||||
stormAdvisories = map(lambda filename: filename[:-5], stormAdvisories)
|
||||
lastTwoAdvisories = sorted(stormAdvisories)[:2]
|
||||
|
||||
self._previousAdvisory = None
|
||||
if len(lastTwoAdvisories) >= 1:
|
||||
self._previousAdvisory = self._loadAdvisory(lastTwoAdvisories[0])
|
||||
|
||||
self._previousPreviousAdvisory = None
|
||||
if len(lastTwoAdvisories) >= 2:
|
||||
self._previousPreviousAdvisory = self._loadAdvisory(lastTwoAdvisories[1])
|
||||
|
||||
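# Minimal sketch (hypothetical filenames, not from the commit) of the filtering done
# by _loadLastTwoAdvisories above: keep only this storm's .json advisories, strip the
# extension, and take the first two names in sorted (lexicographic) order.
filenames = ["ARTHUR2014012.json", "ARTHUR2014013.json", "BERTHA2014001.json", "notes.txt"]
stormName = "ARTHUR"
stormAdvisories = [f[:-5] for f in filenames if f.endswith(".json") and stormName in f]
lastTwo = sorted(stormAdvisories)[:2]   # -> ["ARTHUR2014012", "ARTHUR2014013"]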
def _loadAdvisory(self, advisoryName):
|
||||
import json
|
||||
|
||||
self._synchronizeAdvisories()
|
||||
|
||||
try:
|
||||
jsonDict = self._getFileContents(LocalizationType.CAVE_STATIC,
|
||||
self._site,
|
||||
|
@@ -630,29 +770,24 @@ FORECASTER STEWART"""
|
|||
print pythonDict
|
||||
|
||||
# Only use transmitted advisories
|
||||
if pythonDict["Transmitted"] == False:
|
||||
if pythonDict["Transmitted"] == False and advisoryName != "pending":
|
||||
return None
|
||||
else:
|
||||
return pythonDict
|
||||
except Exception, e:
|
||||
print "SARAH Load Exception for", self._getAdvisoryFilename(advisoryName), ":", e
|
||||
print "SARAH Load Exception for %s : %s" % (self._getAdvisoryFilename(advisoryName), e)
|
||||
return None
|
||||
|
||||
def _saveAdvisory(self, advisoryName, advisoryDict):
|
||||
import json
|
||||
|
||||
try:
|
||||
self._writeFileContents(LocalizationType.CAVE_STATIC,
|
||||
self._site,
|
||||
self._getAdvisoryPath() + advisoryName + ".json",
|
||||
json.dumps(advisoryDict))
|
||||
print "SARAH: Wrote file contents for", (self._getAdvisoryPath() + advisoryName + ".json")
|
||||
except Exception, e:
|
||||
print "SARAH Save Exception for", (self._getAdvisoryPath() + advisoryName + ".json"), ":", e
|
||||
|
||||
def _getAdvisoryPath(self):
|
||||
return "gfe/tcvAdvisories/"
|
||||
|
||||
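# Hedged sketch (plain-file stand-in, not from the commit; the real methods above go
# through the CAVE localization PathManager rather than the local filesystem): the
# advisory payload is just a JSON dict keyed by zone, so a round trip looks like
import json
advisory = {"CreationTime": 1404000000, "Transmitted": False,
            "StormName": "ARTHUR", "ZoneData": {"FLZ072": {"WindThreat": "High"}}}
with open("/tmp/ARTHUR2014pending.json", "w") as f:   # path is illustrative only
    f.write(json.dumps(advisory))
with open("/tmp/ARTHUR2014pending.json") as f:
    assert json.loads(f.read())["ZoneData"]["FLZ072"]["WindThreat"] == "High"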
def _getLocalizationFile(self, loctype, siteID, filename):
|
||||
pathManager = PathManagerFactory.getPathManager()
|
||||
context = pathManager.getContextForSite(loctype, siteID)
|
||||
localizationFile = pathManager.getLocalizationFile(context, filename)
|
||||
|
||||
return localizationFile
|
||||
|
||||
def _getFileContents(self, loctype, siteID, filename):
|
||||
pathManager = PathManagerFactory.getPathManager()
|
||||
context = pathManager.getContextForSite(loctype, siteID)
|
||||
|
@@ -662,20 +797,8 @@ FORECASTER STEWART"""
|
|||
|
||||
return fileContents
|
||||
|
||||
def _writeFileContents(self, loctype, siteID, filename, contents):
|
||||
pathManager = PathManagerFactory.getPathManager()
|
||||
context = pathManager.getContextForSite(loctype, siteID)
|
||||
localizationFile = pathManager.getLocalizationFile(context, filename)
|
||||
with File(localizationFile.getFile(), filename, 'w') as pythonFile:
|
||||
pythonFile.write(contents)
|
||||
|
||||
localizationFile.save()
|
||||
|
||||
def _getAdvisoryFilename(self, advisoryName):
|
||||
year = time.gmtime(self._issueTime_secs).tm_year
|
||||
advisoryFilename = self._getAdvisoryPath() + \
|
||||
self._getStormNameFromTCP().upper() + \
|
||||
str(year) + \
|
||||
advisoryName + \
|
||||
".json"
|
||||
return advisoryFilename
|
||||
|
@@ -711,7 +834,7 @@ FORECASTER STEWART"""
|
|||
# Order and inclusion of GUI1 buttons
|
||||
# Each entry is (name of button in GUI code, desired label on GUI)
|
||||
"buttonList":[
|
||||
("Next","Next"),
|
||||
("Run","Run"),
|
||||
("Cancel","Cancel"),
|
||||
],
|
||||
}
|
||||
|
@@ -725,68 +848,6 @@ FORECASTER STEWART"""
|
|||
###############################################################
|
||||
### TCV Statistics
|
||||
|
||||
def _analysisList_TCV(self):
|
||||
# Sample over 120 hours beginning at current time
|
||||
analysisList = [
|
||||
# Wind Section
|
||||
("Wind", self.vectorModeratedMax, [6]),
|
||||
("WindGust", self.moderatedMax, [6]),
|
||||
("WindThreat", self.mostSignificantDiscreteValue),
|
||||
("pws34int", self.moderatedMax, [6]),
|
||||
("pws64int", self.moderatedMax, [6]),
|
||||
("pwsD34", self.moderatedMax),
|
||||
("pwsN34", self.moderatedMax),
|
||||
("pwsD64", self.moderatedMax),
|
||||
("pwsN64", self.moderatedMax),
|
||||
|
||||
# Flooding Rain Section
|
||||
("QPF", self.accumSum, [72]),
|
||||
("FloodingRainThreat", self.mostSignificantDiscreteValue),
|
||||
|
||||
# Tornado Section
|
||||
("TornadoThreat", self.mostSignificantDiscreteValue),
|
||||
]
|
||||
|
||||
return analysisList
|
||||
|
||||
def _intersectAnalysisList_TCV(self):
|
||||
# The grids for the Surge Section will be intersected with a special edit area
|
||||
analysisList = [
|
||||
("InundationMax", self.moderatedMax, [6]),
|
||||
("InundationTiming", self.moderatedMax, [6]),
|
||||
("StormSurgeThreat", self.mostSignificantDiscreteValue),
|
||||
]
|
||||
|
||||
return analysisList
|
||||
|
||||
def moderated_dict(self, parmHisto, timeRange, componentName):
|
||||
"""
|
||||
Specifies the lower percentages and upper percentages of
|
||||
data to be thrown out for moderated stats.
|
||||
"""
|
||||
# COMMENT: This dictionary defines the low and high limit at which
|
||||
# outliers will be removed when calculating moderated stats.
|
||||
# By convention the first value listed is the percentage
|
||||
# allowed for low values and second the percentage allowed
|
||||
# for high values.
|
||||
|
||||
# Get Baseline thresholds
|
||||
dict = SampleAnalysis.SampleAnalysis.moderated_dict(
|
||||
self, parmHisto, timeRange, componentName)
|
||||
|
||||
# Change thresholds
|
||||
dict["Wind"] = (0, 15)
|
||||
dict["WindGust"] = (0, 15)
|
||||
dict["pws34int"] = (0, 5)
|
||||
dict["pws64int"] = (0, 5)
|
||||
dict["pwsD34"] = (0, 5)
|
||||
dict["pwsN34"] = (0, 5)
|
||||
dict["pwsD64"] = (0, 5)
|
||||
dict["pwsN64"] = (0, 5)
|
||||
dict["InundationMax"] = (0, 5)
|
||||
dict["InundationTiming"] = (0, 5)
|
||||
return dict
|
||||
|
||||
def threatKeyOrder(self):
|
||||
return [None, "None", "Elevated", "Mod", "High", "Extreme"]
|
||||
|
||||
|
@@ -812,50 +873,7 @@ FORECASTER STEWART"""
|
|||
def _initializeAdvisories(self):
|
||||
self._currentAdvisory = dict()
|
||||
self._currentAdvisory['ZoneData'] = dict()
|
||||
self._previousAdvisory = self._loadAdvisory("previous")
|
||||
print "SARAH: loaded previous advisory =", self._previousAdvisory
|
||||
self._previousPreviousAdvisory = self._loadAdvisory("previousPrevious")
|
||||
print "SARAH: loaded previousPrevious advisory =", self._previousPreviousAdvisory
|
||||
|
||||
def _sampleTCVData(self, argDict):
|
||||
# Sample the data
|
||||
editAreas = self._makeSegmentEditAreas(argDict)
|
||||
cwa = self._cwa()
|
||||
editAreas.append((cwa, cwa))
|
||||
|
||||
self._sampler = self.getSampler(argDict,
|
||||
(self._analysisList_TCV(), self._timeRangeList, editAreas))
|
||||
|
||||
intersectAreas = self._computeIntersectAreas(editAreas, argDict)
|
||||
|
||||
self._intersectSampler = self.getSampler(argDict,
|
||||
(self._intersectAnalysisList_TCV(), self._timeRangeList, intersectAreas))
|
||||
|
||||
def _getTCVStats(self, argDict, segment, editAreaDict, timeRangeList):
|
||||
# Get statistics for this segment
|
||||
print "SARAH: issue time seconds =", self._issueTime_secs
|
||||
print "SARAH: GMT issue time =", time.gmtime(self._issueTime_secs)
|
||||
|
||||
editArea = editAreaDict[segment]
|
||||
statList = self.getStatList(self._sampler,
|
||||
self._analysisList_TCV(),
|
||||
timeRangeList,
|
||||
editArea)
|
||||
|
||||
windStats = WindSectionStats(self, segment, statList, timeRangeList)
|
||||
floodingRainStats = FloodingRainSectionStats(self, segment, statList, timeRangeList)
|
||||
tornadoStats = TornadoSectionStats(self, segment, statList, timeRangeList)
|
||||
|
||||
# The surge section needs sampling done with an intersected edit area
|
||||
intersectEditArea = "intersect_"+editArea
|
||||
intersectStatList = self.getStatList(self._intersectSampler,
|
||||
self._intersectAnalysisList_TCV(),
|
||||
timeRangeList,
|
||||
intersectEditArea)
|
||||
|
||||
stormSurgeStats = StormSurgeSectionStats(self, segment, intersectStatList, timeRangeList)
|
||||
|
||||
return (windStats, stormSurgeStats, floodingRainStats, tornadoStats)
|
||||
self._loadLastTwoAdvisories()
|
||||
|
||||
def _initializeSegmentZoneData(self, segment):
|
||||
# The current advisory will be populated when setting a section's stats
|
||||
|
@@ -894,10 +912,10 @@ FORECASTER STEWART"""
|
|||
if combos is None:
|
||||
LogStream.logVerbose("COMBINATION FILE NOT FOUND: " + self._defaultEditAreas)
|
||||
return [], None
|
||||
print "\nSegments from Zone Combiner", combos
|
||||
#print "\nSegments from Zone Combiner", combos
|
||||
# "Overlay" the forecaster-entered combinations onto the segments
|
||||
segmentList = self._refineSegments(hazSegments, combos)
|
||||
print "\nNew segments", segmentList
|
||||
#print "\nNew segments", segmentList
|
||||
|
||||
# Instead of a segment being a group of zones, it will be just a single zone.
|
||||
# So collapse this list of lists down to a list of zones (aka. segments)
|
||||
|
@@ -925,6 +943,7 @@ FORECASTER STEWART"""
|
|||
# (We need to define self._segmentList for the mapping function
|
||||
# to use)
|
||||
self._segmentList = hazSegments
|
||||
#print "self._segmentList = %s" % (repr(self._segmentList))
|
||||
segmentMapping = map(self._findSegment, combo)
|
||||
#print " segmentMapping", segmentMapping
|
||||
|
||||
|
@@ -966,568 +985,6 @@ FORECASTER STEWART"""
|
|||
return segment
|
||||
return []
|
||||
|
||||
|
||||
###############################################################
|
||||
### TCV Statistics Classes
|
||||
|
||||
class SectionCommonStats():
|
||||
def __init__(self, textProduct, segment):
|
||||
self._textProduct = textProduct
|
||||
self._segment = segment
|
||||
|
||||
self._initializeAdvisories()
|
||||
|
||||
self._maxThreat = None
|
||||
|
||||
def _initializeAdvisories(self):
|
||||
self._currentAdvisory = self._textProduct._currentAdvisory['ZoneData'][self._segment]
|
||||
|
||||
self._previousAdvisory = None
|
||||
# print "MATT textProduct._previousAdvisory = '%s'" % (textProduct._previousAdvisory)
|
||||
if self._textProduct._previousAdvisory is not None:
|
||||
self._previousAdvisory = self._textProduct._previousAdvisory['ZoneData'][self._segment]
|
||||
|
||||
# print "MATT textProduct._previousPreviousAdvisory = '%s'" % \
|
||||
# (textProduct._previousPreviousAdvisory)
|
||||
self._previousPreviousAdvisory = None
|
||||
if self._textProduct._previousPreviousAdvisory is not None:
|
||||
self._previousPreviousAdvisory = self._textProduct._previousPreviousAdvisory['ZoneData'][self._segment]
|
||||
|
||||
def _updateThreatStats(self, tr, statDict, threatGridName):
|
||||
print "SARAH: updateThreatStats for", threatGridName
|
||||
threatLevel = self._textProduct.getStats(statDict, threatGridName)
|
||||
if threatLevel is not None:
|
||||
threatLevels = self._textProduct.threatKeyOrder()
|
||||
print "SARAH: threatLevel =", threatLevel
|
||||
print "SARAH: maxThreat =", self._maxThreat
|
||||
if self._maxThreat is None or \
|
||||
threatLevels.index(threatLevel) > threatLevels.index(self._maxThreat):
|
||||
print "SARAH: updating max threat to =", threatLevel
|
||||
self._maxThreat = threatLevel
|
||||
|
||||
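# Illustrative sketch (not part of the commit): _updateThreatStats above compares
# threat categories by their position in threatKeyOrder(), so "higher" simply means
# later in that list. For example:
order = [None, "None", "Elevated", "Mod", "High", "Extreme"]
assert order.index("High") > order.index("Mod")              # "High" outranks "Mod"
maxThreat = max(["Mod", "High", "Elevated"], key=order.index)  # -> "High"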
def _calculateHourOffset(self, targetTime):
|
||||
seconds = targetTime.unixTime() - self._textProduct._issueTime_secs
|
||||
hour = int(round(seconds/60/60))
|
||||
if hour < 0:
|
||||
hour = 0
|
||||
|
||||
return hour
|
||||
|
||||
|
||||
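# Minimal sketch (not from the commit): the onset/end "hours" used throughout these
# stats classes are offsets from product issuance in whole hours, never negative.
# Assuming `issue` is the issuance time in epoch seconds:
def hour_offset(target_secs, issue):
    hour = int(round((target_secs - issue) / 60 / 60))
    return 0 if hour < 0 else hour
# hour_offset(issue + 7*3600, issue) -> 7;  hour_offset(issue - 3600, issue) -> 0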
class WindSectionStats(SectionCommonStats):
|
||||
def __init__(self, textProduct, segment, statList, timeRangeList):
|
||||
SectionCommonStats.__init__(self, textProduct, segment)
|
||||
self._maxWind = None
|
||||
self._maxGust = None
|
||||
self._onset34Hour = None
|
||||
self._end34Hour = None
|
||||
self._onset64Hour = None
|
||||
self._end64Hour = None
|
||||
self._windowTS = None
|
||||
self._windowHU = None
|
||||
|
||||
self._setStats(statList, timeRangeList)
|
||||
|
||||
class PwsXXintStats():
|
||||
max = None
|
||||
onsetHour = None
|
||||
|
||||
class PwsTXXStats():
|
||||
onsetHour = None
|
||||
endHour = None
|
||||
|
||||
class TimeInfo():
|
||||
onsetHour = None
|
||||
endHour = None
|
||||
|
||||
class EventsOccurring():
|
||||
pwsTXXEvent = False
|
||||
windXXEvent = False
|
||||
|
||||
def _setStats(self, statList, timeRangeList):
|
||||
pws34intStats = self.PwsXXintStats()
|
||||
pws64intStats = self.PwsXXintStats()
|
||||
pwsT34Stats = self.PwsTXXStats()
|
||||
pwsT64Stats = self.PwsTXXStats()
|
||||
wind34timeInfo = self.TimeInfo()
|
||||
wind64timeInfo = self.TimeInfo()
|
||||
|
||||
events34 = self.EventsOccurring()
|
||||
events64 = self.EventsOccurring()
|
||||
|
||||
for period in range(len(statList)):
|
||||
tr, _ = timeRangeList[period]
|
||||
statDict = statList[period]
|
||||
|
||||
self._updateStatsForPwsXXint(tr, statDict, "pws34int", pws34intStats)
|
||||
self._updateStatsForPwsXXint(tr, statDict, "pws64int", pws64intStats)
|
||||
|
||||
self._updateStatsForPwsTXX(tr, statDict, "pwsD34", "pwsN34", pwsT34Stats, events34, period)
|
||||
self._updateStatsForPwsTXX(tr, statDict, "pwsD64", "pwsN64", pwsT64Stats, events64, period)
|
||||
|
||||
wind = self._textProduct._getStatValue(statDict, "Wind", "Max", self._textProduct.VECTOR())
|
||||
if wind is not None:
|
||||
if wind >= 34:
|
||||
events34.windXXEvent = True
|
||||
if wind >= 64:
|
||||
events64.windXXEvent = True
|
||||
else:
|
||||
events64.windXXEvent = False
|
||||
else:
|
||||
events34.windXXEvent = False
|
||||
events64.windXXEvent = False
|
||||
|
||||
if self._maxWind is None or wind >= self._maxWind:
|
||||
self._maxWind = wind
|
||||
|
||||
self._updateWindTimeInfo(tr, wind34timeInfo, speed=34)
|
||||
self._updateWindTimeInfo(tr, wind64timeInfo, speed=64)
|
||||
|
||||
windGust = self._textProduct._getStatValue(statDict, "WindGust", "Max")
|
||||
if windGust is not None:
|
||||
if self._maxGust is None or windGust > self._maxGust:
|
||||
self._maxGust = windGust
|
||||
|
||||
self._updateThreatStats(tr, statDict, "WindThreat")
|
||||
|
||||
#Tropical Storm
|
||||
onsetEndInfo = self._computeWindOnsetAndEnd(wind34timeInfo, pws34intStats, pwsT34Stats)
|
||||
self._onset34Hour = onsetEndInfo.onsetHour
|
||||
self._end34Hour = onsetEndInfo.endHour
|
||||
|
||||
nonEnding34Event = False
|
||||
if events34.pwsTXXEvent and (wind34timeInfo.endHour is None or events34.windXXEvent):
|
||||
nonEnding34Event = True
|
||||
|
||||
print "SARAH: Tropical Storm Window:"
|
||||
self._windowTS = self._createWindow("Tropical Storm",
|
||||
self._onset34Hour,
|
||||
self._end34Hour,
|
||||
nonEnding34Event)
|
||||
|
||||
#Hurricane
|
||||
onsetEndInfo = self._computeWindOnsetAndEnd(wind64timeInfo, pws64intStats, pwsT64Stats)
|
||||
self._onset64Hour = onsetEndInfo.onsetHour
|
||||
self._end64Hour = onsetEndInfo.endHour
|
||||
|
||||
nonEnding64Event = False
|
||||
if events64.pwsTXXEvent and (wind64timeInfo.endHour is None or events64.windXXEvent):
|
||||
nonEnding64Event = True
|
||||
|
||||
print "SARAH: Hurricane Window:"
|
||||
self._windowHU = self._createWindow("Hurricane",
|
||||
self._onset64Hour,
|
||||
self._end64Hour,
|
||||
nonEnding64Event)
|
||||
|
||||
self._currentAdvisory["WindThreat"] = self._maxThreat
|
||||
self._currentAdvisory["WindForecast"] = self._maxWind
|
||||
|
||||
def _updateStatsForPwsXXint(self, tr, statDict, gridName, pwsXXintStats):
|
||||
pwsXXint = self._textProduct._getStatValue(statDict, gridName, "Max")
|
||||
|
||||
if pwsXXint is not None:
|
||||
if pwsXXintStats.max is None or pwsXXint > pwsXXintStats.max:
|
||||
pwsXXintStats.max = pwsXXint
|
||||
pwsXXintStats.onsetHour = self._calculateHourOffset(tr.startTime())
|
||||
|
||||
print "SARAH: Window Debug: pwsXXintStats gridName =", gridName
|
||||
print "SARAH: Window Debug: pwsXXintStats pwsXXint =", pwsXXint
|
||||
print "SARAH: Window Debug: pwsXXintStats tr =", tr
|
||||
print "SARAH: Window Debug: pwsXXintStats onsetHour =", pwsXXintStats.onsetHour
|
||||
|
||||
def _updateStatsForPwsTXX(self, tr, statDict, dayGridName, nightGridName, pwsTXXStats, events, period):
|
||||
|
||||
# Convert this time to local time
|
||||
trStartLocalHour = time.localtime(tr.startTime().unixTime()).tm_hour
|
||||
dayStartHour = self._textProduct.DAY()
|
||||
nightStartHour = self._textProduct.NIGHT()
|
||||
print "MATT _updateStatsForPwsTXX = %s localStartHr = %d" % (repr(tr),
|
||||
trStartLocalHour)
|
||||
print "MATT dayStart = %s nightStart = %s" % (repr(dayStartHour),
|
||||
repr(nightStartHour))
|
||||
|
||||
pwsDXX = self._textProduct._getStatValue(statDict, dayGridName, "Max")
|
||||
pwsNXX = self._textProduct._getStatValue(statDict, nightGridName, "Max")
|
||||
maxPws = None
|
||||
print "MATT pwsDXX = %s pwsNXX = %s " % (repr(pwsDXX), repr(pwsNXX))
|
||||
|
||||
# if pwsDXX is not None:
|
||||
# print "SARAH: Window Debug: pwsTXXStats DAY"
|
||||
# maxPws = pwsDXX
|
||||
# elif pwsNXX is not None:
|
||||
# print "SARAH: Window Debug: pwsTXXStats NIGHT"
|
||||
# maxPws = pwsNXX
|
||||
|
||||
# SARAH - if we are close to the end of a day/night period, the first
|
||||
# period we would really want to consider would be the next period.
|
||||
# This is hard-coded to 3 hours to prove the concept.
|
||||
if (nightStartHour >= trStartLocalHour and \
|
||||
(nightStartHour - trStartLocalHour) <= 3) or pwsDXX is None:
|
||||
print "MATT: Window Debug: pwsTXXStats NIGHT"
|
||||
maxPws = pwsNXX
|
||||
elif (dayStartHour >= trStartLocalHour and \
|
||||
(dayStartHour - trStartLocalHour) <= 3) or pwsNXX is None:
|
||||
print "MATT: Window Debug: pwsTXXStats DAY"
|
||||
maxPws = pwsDXX
|
||||
|
||||
threshold34index = 0
|
||||
threshold64index = 1
|
||||
if maxPws is not None:
|
||||
if "64" in dayGridName:
|
||||
index = threshold64index
|
||||
else: #if "34"
|
||||
index = threshold34index
|
||||
|
||||
threshold = None
|
||||
thresholds = self.windSpdProb_thresholds()
|
||||
if period == 0:
|
||||
(thresholdLow, thresholdHigh) = thresholds[period][index]
|
||||
threshold = thresholdLow
|
||||
else:
|
||||
if period >= 10: # SARAH: TODO - remove???
|
||||
period = 9
|
||||
threshold = thresholds[period][index]
|
||||
|
||||
if maxPws > threshold:
|
||||
events.pwsTXXEvent = True
|
||||
|
||||
configuredEndTime = self._getCorrespondingConfiguredTime(tr.endTime(), isOnset = False)
|
||||
pwsTXXStats.endHour = self._calculateHourOffset(configuredEndTime)
|
||||
|
||||
print "SARAH: Window Debug: pwsTXXStats dayGridName =", dayGridName
|
||||
print "SARAH: Window Debug: pwsTXXStats nightGridName =", nightGridName
|
||||
print "SARAH: Window Debug: pwsTXXStats original tr =", tr
|
||||
print "SARAH: Window Debug: pwsTXXStats maxPws =", maxPws
|
||||
print "SARAH: Window Debug: pwsTXXStats endHour =", pwsTXXStats.endHour
|
||||
|
||||
if pwsTXXStats.onsetHour is None:
|
||||
configuredStartTime = self._getCorrespondingConfiguredTime(tr.startTime(), isOnset = True)
|
||||
pwsTXXStats.onsetHour = self._calculateHourOffset(configuredStartTime)
|
||||
|
||||
print "SARAH: Window Debug: pwsTXXStats dayGridName =", dayGridName
|
||||
print "SARAH: Window Debug: pwsTXXStats nightGridName =", nightGridName
|
||||
print "SARAH: Window Debug: pwsTXXStats original tr =", tr
|
||||
print "SARAH: Window Debug: pwsTXXStats maxPws =", maxPws
|
||||
print "SARAH: Window Debug: pwsTXXStats onsetHour =", pwsTXXStats.onsetHour
|
||||
else:
|
||||
events.pwsTXXEvent = False
|
||||
|
||||
def _getCorrespondingConfiguredTime(self, gmtTime, isOnset):
|
||||
dayStartHour = self._textProduct.DAY()
|
||||
nightStartHour = self._textProduct.NIGHT()
|
||||
|
||||
print "SARAH: gmtTime =", gmtTime
|
||||
|
||||
gmtSeconds = gmtTime.unixTime()
|
||||
localTime = time.localtime(gmtSeconds)
|
||||
print "SARAH: localTime =", localTime
|
||||
|
||||
localHour = localTime.tm_hour
|
||||
print "SARAH: localHour =", localHour
|
||||
|
||||
if isOnset:
|
||||
print "SARAH: Window Debug: Adjusting start time"
|
||||
else:
|
||||
print "SARAH: Window Debug: Adjusting end time"
|
||||
|
||||
newHour = None
|
||||
if localHour < dayStartHour:
|
||||
if isOnset:
|
||||
# Subtract 24 hours to get to the previous day
|
||||
newGmtTime = gmtTime - 24*60*60
|
||||
gmtSeconds = newGmtTime.unixTime()
|
||||
localTime = time.localtime(gmtSeconds)
|
||||
print "SARAH: new localTime =", localTime
|
||||
|
||||
newHour = nightStartHour
|
||||
else:
|
||||
newHour = dayStartHour
|
||||
elif dayStartHour <= localHour and localHour < nightStartHour:
|
||||
if isOnset:
|
||||
newHour = dayStartHour
|
||||
else:
|
||||
newHour = nightStartHour
|
||||
else:
|
||||
if isOnset:
|
||||
newHour = nightStartHour
|
||||
else:
|
||||
# Add 24 hours to get to the next day
|
||||
newGmtTime = gmtTime + 24*60*60
|
||||
gmtSeconds = newGmtTime.unixTime()
|
||||
localTime = time.localtime(gmtSeconds)
|
||||
print "SARAH: new localTime =", localTime
|
||||
|
||||
newHour = dayStartHour
|
||||
|
||||
print "SARAH: new localHour =", localHour
|
||||
|
||||
newTimeTuple = localTime[:3] + (newHour,) + localTime[4:]
|
||||
import calendar
|
||||
seconds = calendar.timegm(newTimeTuple)
|
||||
adjustedGmtTime = AbsTime(seconds)
|
||||
print "SARAH: new local time =", adjustedGmtTime
|
||||
|
||||
seconds = time.mktime(newTimeTuple)
|
||||
adjustedGmtTime = AbsTime(seconds)
|
||||
print "SARAH: new GMT time =", adjustedGmtTime
|
||||
return adjustedGmtTime
|
||||
|
||||
# SARAH - we don't want this here. Use the inherited version from the
|
||||
# VectorRelatedPhrases module instead. This way, changes only need to be
|
||||
# made in one place.
|
||||
def windSpdProb_thresholds(self):
|
||||
return [
|
||||
((45.0, 80.0), (25.0, 60.0)), # Per 1
|
||||
(35.0, 20.0), # Per 2
|
||||
(30.0, 15.0), # Per 3
|
||||
(25.0, 12.5), # Per 4
|
||||
(22.5, 10.0), # Per 5
|
||||
(20.0, 8.0), # Per 6
|
||||
(17.5, 7.0), # Per 7
|
||||
(15.0, 6.0), # Per 8
|
||||
(12.5, 5.0), # Per 9
|
||||
(10.0, 4.0), # Per 10
|
||||
]
|
||||
|
||||
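# Illustrative sketch (not from the commit) of how the table above is indexed in
# _updateStatsForPwsTXX: index 0 selects the 34 kt column and index 1 the 64 kt
# column; the first forecast period carries a (low, high) pair and only the low value
# is used, while later periods hold plain numbers. The original clamps the period at
# row 9; this sketch generalizes that to the table length.
def lookup_threshold(thresholds, period, index):
    if period == 0:
        low, high = thresholds[period][index]
        return low
    if period >= len(thresholds):
        period = len(thresholds) - 1
    return thresholds[period][index]
# lookup_threshold(self.windSpdProb_thresholds(), 0, 0) -> 45.0
# lookup_threshold(self.windSpdProb_thresholds(), 3, 1) -> 12.5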
def _updateWindTimeInfo(self, tr, timeInfo, speed):
|
||||
if self._maxWind is not None and self._maxWind >= speed:
|
||||
timeInfo.endHour = self._calculateHourOffset(tr.endTime())
|
||||
|
||||
print "SARAH: Window Debug: timeInfo speed =", speed
|
||||
print "SARAH: Window Debug: timeInfo maxWind =", self._maxWind
|
||||
print "SARAH: Window Debug: timeInfo tr =", tr
|
||||
print "SARAH: Window Debug: timeInfo endHour =", timeInfo.endHour
|
||||
|
||||
if timeInfo.onsetHour is None:
|
||||
timeInfo.onsetHour = self._calculateHourOffset(tr.startTime())
|
||||
|
||||
print "SARAH: Window Debug: timeInfo speed =", speed
|
||||
print "SARAH: Window Debug: timeInfo maxWind =", self._maxWind
|
||||
print "SARAH: Window Debug: timeInfo tr =", tr
|
||||
print "SARAH: Window Debug: timeInfo onsetHour =", timeInfo.onsetHour
|
||||
|
||||
def _computeWindOnsetAndEnd(self, windTimeInfo, pwsXXintStats, pwsTXXStats):
|
||||
onsetEndInfo = self.TimeInfo()
|
||||
|
||||
print "SARAH: Window Debug: windTimeInfo.onsetHour =", windTimeInfo.onsetHour
|
||||
print "SARAH: Window Debug: pwsTXXStats.onsetHour =", pwsTXXStats.onsetHour
|
||||
print "SARAH: Window Debug: pwsXXintStats.onsetHour =", pwsXXintStats.onsetHour
|
||||
print "SARAH: Window Debug: windTimeInfo.endHour =", windTimeInfo.endHour
|
||||
print "SARAH: Window Debug: pwsTXXStats.endHour =", pwsTXXStats.endHour
|
||||
|
||||
if windTimeInfo.onsetHour is None:
|
||||
# print "SARAH: Window Debug: windTimeInfo.onsetHour was None; using pwsTXXStats"
|
||||
# windTimeInfo.onsetHour = pwsTXXStats.onsetHour
|
||||
# print "SARAH: Window Debug: pwsTXXStats.onsetHour =", pwsTXXStats.onsetHour
|
||||
|
||||
# Short-circuit this logic as a temporary measure. Basically, do
|
||||
# not include a window if the deterministic winds do not support
|
||||
# a particular threshold
|
||||
onsetEndInfo.endHour = None
|
||||
|
||||
if windTimeInfo.onsetHour is not None and pwsXXintStats.onsetHour is not None:
|
||||
print "SARAH: Window Debug: windTimeInfo.onsetHour & pwsXXintStats.onsetHour not None; taking min"
|
||||
onsetEndInfo.onsetHour = min(windTimeInfo.onsetHour, pwsXXintStats.onsetHour)
|
||||
print "SARAH: Window Debug: min onsetHour =", onsetEndInfo.onsetHour
|
||||
|
||||
if onsetEndInfo.onsetHour is not None:
|
||||
if windTimeInfo.endHour is None:
|
||||
print "SARAH: Window Debug: windTimeInfo.endHour was None; using pwsTXXStats"
|
||||
onsetEndInfo.endHour = pwsTXXStats.endHour
|
||||
print "SARAH: Window Debug: pwsTXXStats.endHour =", pwsTXXStats.endHour
|
||||
elif pwsTXXStats.endHour is not None:
|
||||
print "SARAH: windendHour =", windTimeInfo.endHour
|
||||
print "SARAH: probendHour =", pwsTXXStats.endHour
|
||||
onsetEndInfo.endHour = int(round(self._textProduct.average(windTimeInfo.endHour, pwsTXXStats.endHour)))
|
||||
print "SARAH: endHour =", onsetEndInfo.endHour
|
||||
return onsetEndInfo
|
||||
|
||||
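# Hedged sketch (not part of the commit) of the combination rule implemented above:
# the window onset is the earlier of the deterministic wind onset and the pwsXXint
# (probabilistic intensity) onset, and the window end is the pwsTXX end when the
# deterministic winds never reach the threshold, otherwise the average of the two ends.
def combine_onset_end(wind_onset, wind_end, prob_onset, prob_end):
    onset, end = None, None
    if wind_onset is not None and prob_onset is not None:
        onset = min(wind_onset, prob_onset)
    if onset is not None:
        if wind_end is None:
            end = prob_end
        elif prob_end is not None:
            end = int(round((wind_end + prob_end) / 2.0))
    return onset, end
# combine_onset_end(18, 36, 12, 42) -> (12, 39)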
def _createWindow(self, windowName, onsetHour, endHour, nonEndingEvent):
|
||||
window = "Window for " + windowName + " force winds: "
|
||||
print "SARAH: window stats:"
|
||||
print "SARAH: onsetHour =", onsetHour
|
||||
print "SARAH: endHour =", endHour
|
||||
print "SARAH: window nonEndingEvent =", nonEndingEvent
|
||||
|
||||
if onsetHour is None:
|
||||
|
||||
# SARAH - we do not want a statement of a non-existent window
|
||||
# window += "None"
|
||||
window = None
|
||||
else:
|
||||
startTime = AbsTime(self._textProduct._issueTime_secs + onsetHour*60*60)
|
||||
if endHour is not None:
|
||||
endTime = AbsTime(self._textProduct._issueTime_secs + endHour*60*60)
|
||||
windowPeriod = self._textProduct.makeTimeRange(startTime, endTime)
|
||||
else:
|
||||
windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1)
|
||||
print "SARAH: window period =", windowPeriod
|
||||
|
||||
startTimeDescriptor = ""
|
||||
if onsetHour >= 18:
|
||||
startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 6)
|
||||
elif 6 <= onsetHour and onsetHour < 18:
|
||||
startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 3)
|
||||
|
||||
if endHour is None or nonEndingEvent:
|
||||
if len(startTimeDescriptor) != 0:
|
||||
window += "Begins " + startTimeDescriptor
|
||||
else:
|
||||
window += "None"
|
||||
else:
|
||||
connector = "through "
|
||||
endTimeDescriptor = "the next few hours"
|
||||
|
||||
if endHour >= 18:
|
||||
endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod,
|
||||
useEndTime = True,
|
||||
resolution = 6)
|
||||
elif 6 <= endHour and endHour < 18:
|
||||
endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod,
|
||||
useEndTime = True,
|
||||
resolution = 3)
|
||||
|
||||
if len(startTimeDescriptor) != 0:
|
||||
connector = " " + connector
|
||||
window += startTimeDescriptor + connector + endTimeDescriptor
|
||||
|
||||
return window
|
||||
|
||||
|
||||
class StormSurgeSectionStats(SectionCommonStats):
|
||||
def __init__(self, textProduct, segment, intersectStatList, timeRangeList):
|
||||
SectionCommonStats.__init__(self, textProduct, segment)
|
||||
self._inundationMax = None
|
||||
self._onsetSurgeHour = None
|
||||
self._endSurgeHour = None
|
||||
self._windowSurge = None
|
||||
|
||||
self._setStats(intersectStatList, timeRangeList)
|
||||
|
||||
def _setStats(self, statList, timeRangeList):
|
||||
phishStartTime = None
|
||||
phishEndTime = None
|
||||
possibleStop = 0
|
||||
|
||||
# print "*"*100
|
||||
# print "MATT phishStartTime = %s phishEndTime = %s possibleStop = %d" % (str(phishStartTime), str(phishEndTime), possibleStop)
|
||||
|
||||
for period in range(len(statList)):
|
||||
tr, _ = timeRangeList[period]
|
||||
statDict = statList[period]
|
||||
|
||||
phishPeak = self._textProduct._getStatValue(statDict, "InundationMax", "Max")
|
||||
if phishPeak is not None:
|
||||
if self._inundationMax is None or phishPeak > self._inundationMax:
|
||||
self._inundationMax = phishPeak
|
||||
|
||||
curPhish = self._textProduct._getStatValue(statDict, "InundationTiming", "Max")
|
||||
# print "MATT tr = %s" % (repr(tr))
|
||||
# print "MATT curPhish = '%s' possibleStop = %d" % (str(curPhish), possibleStop)
|
||||
# print "MATT phishStartTime = %s phishEndTime = %s" % (str(phishStartTime), str(phishEndTime))
|
||||
|
||||
if curPhish is not None and possibleStop != 2:
|
||||
if curPhish > 0:
|
||||
if phishStartTime is None:
|
||||
phishStartTime = tr.startTime()
|
||||
possibleStop = 0
|
||||
phishEndTime = None
|
||||
elif phishStartTime is not None:
|
||||
possibleStop += 1
|
||||
|
||||
if phishEndTime is None:
|
||||
phishEndTime = tr.startTime()
|
||||
|
||||
self._updateThreatStats(tr, statDict, "StormSurgeThreat")
|
||||
|
||||
self._windowSurge = "Window for Storm Surge Inundation: "
|
||||
|
||||
if phishStartTime is None:
|
||||
self._windowSurge += "None"
|
||||
else:
|
||||
self._onsetSurgeHour = self._calculateHourOffset(phishStartTime)
|
||||
startTime = AbsTime(self._textProduct._issueTime_secs + self._onsetSurgeHour*60*60)
|
||||
|
||||
# print "MATT surge startTime = %s self._onsetSurgeHour = %s " % (repr(startTime), self._onsetSurgeHour)
|
||||
if phishEndTime is not None:
|
||||
self._endSurgeHour = self._calculateHourOffset(phishEndTime)
|
||||
endTime = AbsTime(self._textProduct._issueTime_secs + self._endSurgeHour*60*60)
|
||||
windowPeriod = self._textProduct.makeTimeRange(startTime, endTime)
|
||||
else:
|
||||
windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1)
|
||||
print "SARAH: window period =", windowPeriod
|
||||
|
||||
startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod)
|
||||
|
||||
if phishEndTime is None:
|
||||
self._windowSurge += "Begins " + startTimeDescriptor
|
||||
elif phishStartTime == phishEndTime:
|
||||
self._windowSurge += startTimeDescriptor
|
||||
else:
|
||||
endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, useEndTime = True)
|
||||
|
||||
if self._onsetSurgeHour > 12:
|
||||
self._windowSurge += startTimeDescriptor +\
|
||||
" through " +\
|
||||
endTimeDescriptor
|
||||
else:
|
||||
self._windowSurge += "through " + endTimeDescriptor
|
||||
|
||||
self._currentAdvisory["StormSurgeThreat"] = self._maxThreat
|
||||
if self._inundationMax is not None:
|
||||
# Round so we don't store values like 1.600000023841858
|
||||
self._currentAdvisory["StormSurgeForecast"] = \
|
||||
int(self._inundationMax * 10.0) / 10.0
|
||||
|
||||
|
||||
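# Hedged sketch (plain lists instead of grids; not from the commit) of the inundation
# timing scan in _setStats above: the surge window opens at the first period with
# positive InundationTiming and closes at the first period where it drops back to
# zero; a rebound after a single zero period re-opens the window, but once two
# consecutive zero periods occur the window is frozen.
def surge_window(timing):               # timing: list of (start_hour, value) pairs
    start, end, possible_stop = None, None, 0
    for start_hour, value in timing:
        if value is None or possible_stop == 2:
            continue
        if value > 0:
            if start is None:
                start = start_hour
            possible_stop = 0
            end = None
        elif start is not None:
            possible_stop += 1
            if end is None:
                end = start_hour
    return start, end
# surge_window([(0, 0), (6, 2.5), (12, 3.0), (18, 0), (24, 0), (30, 1.0)]) -> (6, 18)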
class FloodingRainSectionStats(SectionCommonStats):
|
||||
def __init__(self, textProduct, segment, statList, timeRangeList):
|
||||
SectionCommonStats.__init__(self, textProduct, segment)
|
||||
self._sumAccum = None
|
||||
|
||||
self._setStats(statList, timeRangeList)
|
||||
|
||||
def _setStats(self, statList, timeRangeList):
|
||||
for period in range(len(statList)):
|
||||
tr, _ = timeRangeList[period]
|
||||
statDict = statList[period]
|
||||
|
||||
stats = self._textProduct.getStats(statDict, "QPF")
|
||||
if stats is not None:
|
||||
for (value, tr) in stats:
|
||||
|
||||
if value is not None:
|
||||
if self._sumAccum is None:
|
||||
self._sumAccum = value
|
||||
else:
|
||||
self._sumAccum += value
|
||||
|
||||
self._updateThreatStats(tr, statDict, "FloodingRainThreat")
|
||||
|
||||
self._currentAdvisory["FloodingRainThreat"] = self._maxThreat
|
||||
if self._sumAccum is not None:
|
||||
# Round so that we don't end up with stats like 4.03143835067749
|
||||
self._currentAdvisory["FloodingRainForecast"] = \
|
||||
self._textProduct.round(self._sumAccum, "Nearest", 0.5)
|
||||
|
||||
|
||||
class TornadoSectionStats(SectionCommonStats):
|
||||
def __init__(self, textProduct, segment, statList, timeRangeList):
|
||||
SectionCommonStats.__init__(self, textProduct, segment)
|
||||
|
||||
self._setStats(statList, timeRangeList)
|
||||
|
||||
def _setStats(self, statList, timeRangeList):
|
||||
for period in range(len(statList)):
|
||||
tr, _ = timeRangeList[period]
|
||||
statDict = statList[period]
|
||||
|
||||
self._updateThreatStats(tr, statDict, "TornadoThreat")
|
||||
|
||||
self._currentAdvisory["TornadoThreat"] = self._maxThreat
|
||||
|
||||
|
||||
import Tkinter
|
||||
class Common_Dialog(Dialog):
|
||||
def __init__(self, parent, title, infoDict=None):
|
|
@@ -781,8 +781,8 @@ class TextUtils:
|
|||
self._debugDict[name] = count
|
||||
|
||||
# Print the traceback message
|
||||
print "\n\tDEBUG:",name, "in", file, "at line",\
|
||||
lineno,"Class=", self.__class__, count
|
||||
print "DEBUG: %s in %s at line %d" % (name, file, lineno)
|
||||
print "DEBUG: Class = %s %d\n\n" % (self.__class__, count)
|
||||
#print "Super classes:",self.__class__.__bases__
|
||||
|
||||
# If there is a message, print that too
|
||||
|
|
|
@@ -51,8 +51,9 @@ DirectFileToProductMapping = {
|
|||
'PublicMarine_EA_Site_MultiPil_Definition': 'AFD',
|
||||
'PublicMarineFireWx_EA_Site_MultiPil_Definition': 'AFD',
|
||||
'FireWxZones_EA_Site_MultiPil_Definition': 'RFW',
|
||||
'MarineZones_EA_Site_MultiPil_Definition': ['CFW', 'MWW'],
|
||||
'MarineZones_EA_Site_MultiPil_Definition': 'MWW',
|
||||
'PublicZones_EA_Site_MultiPil_Definition': ['NPW','WSW','CFW','FFA','AQA'],
|
||||
'Hazard_TCV': 'TCV',
|
||||
'Hazard_HLS': 'HLS',
|
||||
}
|
||||
|
||||
|
@@ -62,20 +63,19 @@ NWSProducts = ['ADR', 'AFD', 'AFM', 'AQA', 'AVA', 'AVW', 'CAE', 'CCF', 'CDW', 'C
|
|||
'CFW', 'CWF', 'EQR', 'EQW', 'ESF', 'EVI', 'FFA', 'FRW', 'FWF',
|
||||
'FWM', 'FWS', 'GLF', 'HLS', 'HMW', 'HWO', 'LAE', 'LEW', 'MWS',
|
||||
'MVF', 'NOW', 'NPW', 'NSH', 'NUW', 'OFF', 'PFM', 'PNS', 'RFD',
|
||||
'RFW', 'RHW', 'SAF', 'SFT', 'SPS', 'SPW', 'SRF', 'TOE', 'VOW',
|
||||
'WCN', 'WSW', 'ZFP', 'MWW']
|
||||
'RFW', 'RHW', 'SAF', 'SFT', 'SPS', 'SPW', 'SRF', 'TCV', 'TOE',
|
||||
'VOW', 'WCN', 'WSW', 'ZFP', 'MWW']
|
||||
|
||||
|
||||
#Templated files. Named with "Product" in them, will be replaced with the
|
||||
#actual product name. Dictionary contains the template filename, list
|
||||
#contains the products to be generated (e.g., AFM). These products
|
||||
#follow the Baseline, Region, Site technique.
|
||||
templateProds= ['AFM', 'ZFP', 'CCF', 'CWF', 'CWF_Pacific', 'FWF',
|
||||
'FWFTable', 'FWM', 'GLF', 'MVF', 'NSH', 'PFM', 'SFT', 'SRF', 'OFF', 'AFD',
|
||||
'Hazard_HLS']
|
||||
templateProdsWsaf= ['AFM', 'ZFP', 'CCF', 'CWF', 'CWF_Pacific', 'FWF',
|
||||
templateProds= ['AFM', 'ZFP', 'CCF', 'CWF', 'CWF_Pacific', 'FWF', 'HLS',
|
||||
'FWFTable', 'FWM', 'GLF', 'MVF', 'NSH', 'PFM', 'SFT', 'SRF', 'OFF', 'AFD']
|
||||
templateProdsWsaf= ['AFM', 'ZFP', 'CCF', 'CWF', 'CWF_Pacific', 'FWF', 'HLS',
|
||||
'FWFTable', 'FWM', 'GLF', 'MVF', 'NSH', 'PFM', 'SFT', 'SRF', 'OFF', 'AFD', 'SAF',
|
||||
'FWS', 'Hazard_HLS']
|
||||
'FWS', 'Hazard_TCV', 'Hazard_HLS']
|
||||
templateProds_minus_HLS = ['AFM', 'ZFP', 'CCF', 'CWF', 'CWF_Pacific', 'FWF',
|
||||
'FWFTable', 'FWM', 'GLF', 'MVF', 'NSH', 'PFM', 'SFT', 'SRF', 'OFF', 'AFD']
|
||||
TemplatedProducts = {
|
||||
|
@@ -85,3 +85,4 @@ TemplatedProducts = {
|
|||
'Product_Region_Overrides': templateProdsWsaf,
|
||||
'Product_Site_Overrides': templateProdsWsaf,
|
||||
}
|
||||
|
||||
|
|
|
@@ -20,20 +20,20 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
Definition["outputFile"] = "{prddir}/TEXT/HLS.txt"
|
||||
Definition["database"] = "Official" # Source database
|
||||
Definition["debug"] = 1
|
||||
Definition["mapNameForCombinations"] = "Zones_<site>"
|
||||
Definition["defaultEditAreas"] = "EditAreas_PublicMarine_<site>"
|
||||
Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display
|
||||
Definition["mapNameForCombinations"] = "Zones_MFL"
|
||||
Definition["defaultEditAreas"] = "Combinations_HLS_MFL"
|
||||
Definition["showZoneCombiner"] = 0 # 1 to cause zone combiner to display
|
||||
|
||||
Definition["productName"] = "LOCAL STATEMENT"
|
||||
|
||||
Definition["fullStationID" ] = "<fullStationID>"
|
||||
Definition["wmoID" ] = "<wmoID>"
|
||||
Definition["wfoCityState" ] = "<wfoCityState>"
|
||||
Definition["pil" ] = "<pil>"
|
||||
Definition["textdbPil" ] = "<textdbPil>"
|
||||
Definition["awipsWANPil" ] = "<awipsWANPil>"
|
||||
Definition["site"] = "<site>"
|
||||
Definition["wfoCity"] = "<wfoCity>"
|
||||
Definition["fullStationID" ] = "KMFL"
|
||||
Definition["wmoID" ] = "WTUS82"
|
||||
Definition["wfoCityState" ] = "MIAMI FL"
|
||||
Definition["pil" ] = "HLSMIA"
|
||||
Definition["textdbPil" ] = "MIAHLSMIA"
|
||||
Definition["awipsWANPil" ] = "KMFLHLSMIA"
|
||||
Definition["site"] = "MFL"
|
||||
Definition["wfoCity"] = "MIAMI"
|
||||
|
||||
Definition["areaName"] = "" #optional area name for product
|
||||
Definition["areaDictionary"] = "AreaDictionary"
|
||||
|
@@ -173,13 +173,16 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
analysisList = [
|
||||
# Wind Section
|
||||
("WindThreat", self.rankedDiscreteValue),
|
||||
("WindThreat", self.mostSignificantDiscreteValue),
|
||||
|
||||
# Flooding Rain Section
|
||||
("QPFtoFFGRatio", self.moderatedMax, [6]),
|
||||
("FloodingRainThreat", self.rankedDiscreteValue),
|
||||
("FloodingRainThreat", self.mostSignificantDiscreteValue),
|
||||
|
||||
# Tornado Section
|
||||
("TornadoThreat", self.rankedDiscreteValue),
|
||||
("TornadoThreat", self.mostSignificantDiscreteValue),
|
||||
]
|
||||
|
||||
return analysisList
|
||||
|
@@ -198,6 +201,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
analysisList = [
|
||||
("InundationMax", self.moderatedMax, [6]),
|
||||
("StormSurgeThreat", self.rankedDiscreteValue),
|
||||
("StormSurgeThreat", self.mostSignificantDiscreteValue),
|
||||
]
|
||||
|
||||
return analysisList
|
||||
|
@@ -223,7 +227,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
if self._ImpactsAnticipated:
|
||||
includedImpacts = sorted(self._IncludedImpacts, key=self._impactsKeyFunction)
|
||||
for ((_, sectionName), _) in includedImpacts:
|
||||
print "SARAH: adding section", sectionName
|
||||
print "SARAH: adding section = '%s'" % (sectionName)
|
||||
partsList.append(sectionName)
|
||||
|
||||
partsList.append('preparednessSection')
|
||||
|
@@ -334,7 +338,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there is only one impact across the entire CWA, and it is the max
|
||||
if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None":
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " damage across " + self._cwa() + "."
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " damage across " + self._cwa_descriptor() + "."
|
||||
# Handle the case where the impacts are not the same across the entire CWA
|
||||
else:
|
||||
sectionDict['variedImpacts'] = True
|
||||
|
@ -344,14 +348,27 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there are additional areas
|
||||
if impactRange != impactMax:
|
||||
sectionDict['additionalImpactRange'].append("Prepare for " +
|
||||
impactRange +
|
||||
" damage across " +
|
||||
self._frame("ENTER AREA DESCRIPTION") + ".")
|
||||
|
||||
curPhrase = "Prepare for %s damage across %s." % \
|
||||
(impactRange, self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
# If there is no impact across more than one half the area, include a statement for that as well
|
||||
if inputThreatDominant == "None":
|
||||
sectionDict['additionalImpactRange'].append("Elsewhere across " + self._cwa() + ", little to no impact is anticipated.")
|
||||
|
||||
curPhrase = "Elsewhere across " + self._cwa_descriptor() + \
|
||||
", little to no impact is anticipated."
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
productDict['windSection'] = sectionDict
|
||||
|
||||
|
@@ -384,7 +401,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
sectionDict['impactRange'] = "Prepare for " + \
|
||||
lifeThreatening + impactMax + \
|
||||
" damage in surge prone areas of " + self._cwa() + ", with the greatest impacts " + \
|
||||
" damage in surge prone areas of " + self._cwa_descriptor() + ", with the greatest impacts " + \
|
||||
self._frame("ENTER AREA DESCRIPTION") + "."
|
||||
|
||||
sectionDict['impactLib'] = self._getPotentialImpactsStatements("Storm Surge", self._impactCategoryToThreatLevel(impactMax))
|
||||
|
@@ -414,19 +431,39 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there are additional life-threatening surge areas
|
||||
if impactRange != impactMax and impactRange != impactMin:
|
||||
sectionDict['additionalImpactRange'].append("Brace for " +
|
||||
lifeThreatening + impactRange +
|
||||
" damage " + self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
curPhrase = "Brace for %s%s damage across %s." % \
|
||||
(lifeThreatening, impactRange, self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
# If there are additional areas
|
||||
if impactRangeRest != impactMax:
|
||||
sectionDict['additionalImpactRange'].append("Prepare for " +
|
||||
impactRangeRest +
|
||||
" damage from storm surge " + self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
curPhrase = "Prepare for %s damage from storm surge across %s." % \
|
||||
(impactRangeRest, self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
# If there is no impact across more than one half the area, include a statement for that as well
|
||||
if inputThreatDominant == "None":
|
||||
sectionDict['additionalImpactRange'].append("Elsewhere across " + self._cwa() + ", little to no impact is anticipated.")
|
||||
|
||||
curPhrase = "Elsewhere across " + self._cwa_descriptor() + \
|
||||
", little to no impact is anticipated."
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
productDict['surgeSection'] = sectionDict
|
||||
|
||||
|
@@ -443,6 +480,9 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
impactRange = self._samplingDict['FloodingRainThreat']['impactRange']
|
||||
inputThreatDominant = self._samplingDict['FloodingRainThreat']['inputThreatDominant']
|
||||
|
||||
self.debug_print("In _floodingRainSection", 1)
|
||||
self.debug_print("_samplingDict = %s" % (repr(self._samplingDict['FloodingRainThreat'])), 1)
|
||||
|
||||
# Test the simplest case first
|
||||
if impactMin == "none" and impactMax == "none":
|
||||
sectionDict['impactRange'] = impactRange
|
||||
|
@@ -452,7 +492,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there is only one impact across the entire CWA, and it is the max
|
||||
if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None":
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " flooding across " + self._cwa() + "."
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " flooding across " + self._cwa_descriptor() + "."
|
||||
# Handle the case where the impacts are not the same across the entire CWA
|
||||
else:
|
||||
sectionDict['variedImpacts'] = True
|
||||
|
@@ -462,14 +502,27 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there are additional areas
|
||||
if impactRange != impactMax:
|
||||
sectionDict['additionalImpactRange'].append("Prepare for " +
|
||||
impactRange +
|
||||
" flooding impacts " +
|
||||
self._frame("ENTER AREA DESCRIPTION") + ".")
|
||||
|
||||
curPhrase = "Prepare for %s flooding impacts across %s." % \
|
||||
(impactRange, self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
# If there is no impact across more than one half the area, include a statement for that as well
|
||||
if inputThreatDominant == "None":
|
||||
sectionDict['additionalImpactRange'].append("Elsewhere across " + self._cwa() + ", little to no impact is anticipated.")
|
||||
|
||||
curPhrase = "Elsewhere across " + self._cwa_descriptor() + \
|
||||
", little to no impact is anticipated."
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
productDict['floodingRainSection'] = sectionDict
|
||||
|
||||
|
@@ -517,7 +570,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there is only one impact across the entire CWA, and it is the max
|
||||
if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None":
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " damage across " + self._cwa() + "."
|
||||
sectionDict['impactRange'] = "Prepare for " + impactMax + " damage across " + self._cwa_descriptor() + "."
|
||||
# Handle the case where the impacts are not the same across the entire CWA
|
||||
else:
|
||||
sectionDict['variedImpacts'] = True
|
||||
|
@@ -527,14 +580,27 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
|
||||
# If there are additional areas
|
||||
if impactRange != impactMax:
|
||||
sectionDict['additionalImpactRange'].append("Prepare for " +
|
||||
impactRange +
|
||||
" damage " +
|
||||
self._frame("ENTER AREA DESCRIPTION") + ".")
|
||||
|
||||
curPhrase = "Prepare for %s damage across %s." % \
|
||||
(impactRange, self._frame("ENTER AREA DESCRIPTION"))
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
# If there is no impact across more than one half the area, include a statement for that as well
|
||||
if inputThreatDominant == "None":
|
||||
sectionDict['additionalImpactRange'].append("Elsewhere across " + self._cwa() + ", little to no impact is anticipated.")
|
||||
|
||||
curPhrase = "Elsewhere across " + self._cwa_descriptor() + \
|
||||
", little to no impact is anticipated."
|
||||
|
||||
# If this phrase is not already part of the additional impacts
|
||||
if curPhrase not in sectionDict['additionalImpactRange']:
|
||||
|
||||
# Add it now
|
||||
sectionDict['additionalImpactRange'].append(curPhrase)
|
||||
|
||||
productDict['tornadoSection'] = sectionDict
|
||||
|
||||
|
@ -565,7 +631,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
actionsDict['title'] = "Other Preparedness Information"
|
||||
|
||||
import TCVDictionary
|
||||
actionsDict['actions'] = TCVDictionary.OtherPreparednessActions
|
||||
actionsDict['actions'] = TCVDictionary.OtherPreparednessActions[self._GeneralOnsetTime]
|
||||
|
||||
productDict['otherPreparednessActions'] = actionsDict
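
The indexed lookup above implies that TCVDictionary.OtherPreparednessActions is now keyed by the GeneralOnsetTime phase chosen in the GUI ("check plans", "complete preparations", "hunker down", "recovery") instead of being a single flat list. A rough sketch of that assumed shape (placeholder text only; the real entries live in TCVDictionary.py):

# Assumed structure only -- the real statements are defined in TCVDictionary.
OtherPreparednessActions = {
    "check plans":           ["Now is the time to check your emergency plan and supplies..."],
    "complete preparations": ["Urgently complete preparations to protect life and property..."],
    "hunker down":           ["Remain sheltered in place until the hazardous weather passes..."],
    "recovery":              ["Do not venture outside until officials say it is safe to do so..."],
}

# actionsDict['actions'] = OtherPreparednessActions[self._GeneralOnsetTime]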

@@ -581,15 +647,15 @@ class TextProduct(HLSTCV_Common.TextProduct):
    def _nextUpdate(self, productDict, productSegmentGroup, productSegment):
        if self._NextUpdate == "LastIssuance" or not self._ImpactsAnticipated:
            productDict['nextUpdate'] = "As it pertains to this event...this will be the last local statement issued by the National Weather Service in " + \
                self._cwa() + \
                self._wfoCityState + \
                " regarding the effects of tropical cyclone hazards upon the area."
        elif self._NextUpdate == "Conditions":
            productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \
                self._cwa() + \
                self._wfoCityState + \
                " as conditions warrant."
        elif self._NextUpdate == "Enter":
            productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \
                self._cwa() + \
                self._wfoCityState + \
                " around " + self._NextUpdate_entry.strip() + ", or sooner if conditions warrant."

    def _getPotentialImpactsStatements(self, elementName, maxThreat):

@@ -621,6 +687,10 @@ class TextProduct(HLSTCV_Common.TextProduct):
        if error is not None:
            return error

        # Determine time ranges
        self._initializeTimeVariables(argDict)
        self._determineTimeRanges(argDict)

        error = self._initializeStormInformation()
        if error is not None:
            return error

@@ -629,36 +699,20 @@ class TextProduct(HLSTCV_Common.TextProduct):
        if self._stormName is None or self._stormName.strip() == "":
            return "Could not determine the storm name"

        self._loadLastTwoAdvisories()
        if self._previousAdvisory is None:
            return "A TCV must be transmitted before an HLS can be run"

        self._initializeHeadlines()

        self._initializeHazardsTable(argDict)

        self._determineHazardStates()

        # Determine time ranges
        self._initializeTimeVariables(argDict)
        self._determineTimeRanges(argDict)

        if self._ImpactsAnticipated:
            # Sample the data
            self._initializeSamplingDict()

            previousAdvisory = self._loadAdvisory("previous")
            if previousAdvisory is not None:
                self._sampleTCVAdvisory(previousAdvisory)
            else:
                self._segmentList = self._determineSegments()
                print "Segment Information: ", self._segmentList, "\n\n"
                if len(self._segmentList) == 0:
                    return "NO HAZARDS TO REPORT"

                self._initializeAdvisories()
                self._sampleTCVData(argDict)
                for segment in self._segmentList:
                    self._initializeSegmentZoneData(segment)
                    self._getTCVStats(argDict, segment, self._editAreaDict, self._timeRangeList)

                self._sampleTCVAdvisory(self._currentAdvisory)

            self._sampleHLSData(argDict)

            for threatName in ['WindThreat', 'StormSurgeThreat', 'FloodingRainThreat', 'TornadoThreat']:

@@ -672,16 +726,14 @@ class TextProduct(HLSTCV_Common.TextProduct):

    def _determineHazardStates(self):
        hazardTable = self._argDict["hazards"]
        hazardsList = hazardTable.getHazardList(self._allAreas())
        self._currentHazardsList = []
        self._changesHazardsList = []
        for hazard in hazardsList:
            if hazard['act'] == 'CON':
                self._currentHazardsList.append(hazard)
            else:
                self._changesHazardsList.append(hazard)

        for hazard in self._previousAdvisory["HazardsForHLS"]:
            print "SARAH DEBUG Hazard: %s" % (repr(hazard))
            if hazard['act'] != 'CON':
                self._changesHazardsList.append(hazard)
            self._currentHazardsList.append(hazard)
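
The loop above keeps continuing hazards (act == 'CON') separate from changed ones, then folds in every hazard recorded by the last TCV so the HLS works from the same hazard set. A tiny standalone sketch of the split (illustrative only; the records shown are hypothetical):

# Illustrative only: partition hazard records by their VTEC action code.
hazards = [
    {'phen': 'HU', 'sig': 'W', 'act': 'CON'},
    {'phen': 'TR', 'sig': 'A', 'act': 'NEW'},
]
currentHazards = [h for h in hazards if h['act'] == 'CON']
changedHazards = [h for h in hazards if h['act'] != 'CON']
# currentHazards -> the continuing HU.W record; changedHazards -> the new TR.A record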

    ###############################################################
    ### Sampling and Statistics related methods

@@ -698,18 +750,24 @@ class TextProduct(HLSTCV_Common.TextProduct):
                                  self._cwa())

        for period in range(len(statList)):

            self.debug_print("=" * 100, 1)
            self.debug_print("In _sampleHLSData for period %s (%s)" % \
                             (period, repr(self._timeRangeList[period][0])), 1)

            statDict = statList[period]
            for threatName in ['WindThreat', 'FloodingRainThreat', 'TornadoThreat']:
                self._sampleThreatGrid(threatName, statDict)
                self._sampleRankedDiscreteValue(threatName, statDict)
                self._sampleMostSignificantDiscreteValue(threatName, statDict)

            qpfToFfgRatio = self._getStatValue(statDict, "QPFtoFFGRatio", "Max")
            decidingField = self._samplingDict['FloodingRainThreat']['decidingField']
            if decidingField is None or qpfToFfgRatio > decidingField:
                self._samplingDict['FloodingRainThreat']['decidingField'] = qpfToFfgRatio

        print "SARAH: WindThreat =", self._samplingDict['WindThreat']['inputThreatDominant']
        print "SARAH: FloodingRainThreat =", self._samplingDict['FloodingRainThreat']['inputThreatDominant']
        print "SARAH: TornadoThreat =", self._samplingDict['TornadoThreat']['inputThreatDominant']
        print "SARAH: WindThreat = %s" % (self._samplingDict['WindThreat']['inputThreatDominant'])
        print "SARAH: FloodingRainThreat = %s" % (self._samplingDict['FloodingRainThreat']['inputThreatDominant'])
        print "SARAH: TornadoThreat = %s" % (self._samplingDict['TornadoThreat']['inputThreatDominant'])

@@ -744,14 +802,15 @@ class TextProduct(HLSTCV_Common.TextProduct):

        for period in range(len(statList)):
            statDict = statList[period]
            self._sampleThreatGrid('StormSurgeThreat', statDict)
            self._sampleRankedDiscreteValue('StormSurgeThreat', statDict)
            self._sampleMostSignificantDiscreteValue('StormSurgeThreat', statDict)

            inundationMax = self._getStatValue(statDict, "InundationMax", "Max")
            decidingField = self._samplingDict['StormSurgeThreat']['decidingField']
            if decidingField is None or inundationMax > decidingField:
                self._samplingDict['StormSurgeThreat']['decidingField'] = inundationMax

        print "SARAH: StormSurgeThreat =", self._samplingDict['StormSurgeThreat']['inputThreatDominant']
        print "SARAH: StormSurgeThreat = %s" % (self._samplingDict['StormSurgeThreat']['inputThreatDominant'])

    def _createWholeDomainEditArea(self, argDict):
        editAreaUtils = EditAreaUtils.EditAreaUtils()

@@ -765,16 +824,44 @@ class TextProduct(HLSTCV_Common.TextProduct):
        refData = ReferenceData(gridLoc, refID, grid2Dbit)
        editAreaUtils.saveEditAreas([refData])

    def _sampleThreatGrid(self, threatName, statDict):
        rankedThreatLevels = self.getStats(statDict, threatName)
        print "SARAH: sampling", threatName
        print "SARAH: sampleData: rankedThreatLevels =", rankedThreatLevels
    def _sampleMostSignificantDiscreteValue(self, threatName, statDict):
        print "SARAH: _sampleMostSignificantDiscreteValue for %s" % (threatName)
        threatLevel = self.getStats(statDict, threatName + "__mostSignificantDiscreteValue")
        print "SARAH: threatLevel =", threatLevel
        if threatLevel is not None:
            inputThreatLow = self._samplingDict[threatName]['inputThreatLow']
            print "SARAH: current inputThreatLow =", inputThreatLow
            if inputThreatLow is None:
                self._samplingDict[threatName]['inputThreatLow'] = threatLevel
            else:
                self._samplingDict[threatName]['inputThreatLow'] = self._getLowestThreat(threatName,
                                                                                         threatLevel,
                                                                                         inputThreatLow)
            print "SARAH: new inputThreatLow =", self._samplingDict[threatName]['inputThreatLow']

            inputThreatHigh = self._samplingDict[threatName]['inputThreatHigh']
            print "SARAH: current inputThreatHigh =", inputThreatHigh
            self._samplingDict[threatName]['inputThreatHigh'] = self._getHighestThreat(threatName,
                                                                                       threatLevel,
                                                                                       inputThreatHigh)
            print "SARAH: new inputThreatHigh =", self._samplingDict[threatName]['inputThreatHigh']

    def _sampleRankedDiscreteValue(self, threatName, statDict):
        print "-" * 60
        print "_sampleRankedDiscreteValue statDict = %s" % (repr(statDict))
        rankedThreatLevels = self.getStats(statDict, threatName + "__rankedDiscreteValue")
        print "SARAH: sampling %s" % (threatName)
        print "SARAH: sampleData: rankedThreatLevels = %s" % (repr(rankedThreatLevels))
        if rankedThreatLevels is not None:
            dominantThreatLevel = self._getDominantThreatLevel(threatName, rankedThreatLevels)
            print "SARAH: dominantThreatLevel =", dominantThreatLevel

            currentDominantThreatLevel = self._samplingDict[threatName]['inputThreatDominant']
            print "SARAH: currentDominantThreatLevel =", currentDominantThreatLevel
            self._samplingDict[threatName]['inputThreatDominant'] = self._getHighestThreat(threatName,
                                                                                           dominantThreatLevel,
                                                                                           currentDominantThreatLevel)
            print "SARAH: new dominant =", self._samplingDict[threatName]['inputThreatDominant']

    def _getDominantThreatLevel(self, threatName, rankedThreatLevels):
        dominantLevelWithHighestRank = None

@@ -805,6 +892,20 @@ class TextProduct(HLSTCV_Common.TextProduct):
        else:
            return threatLevel1

    def _getLowestThreat(self, threatName, threatLevel1, threatLevel2):
        keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(None, None, None)
        keyOrder = keyOrderDict[threatName]

        level1Index = keyOrder.index(threatLevel1)
        level2Index = keyOrder.index(threatLevel2)

        if level1Index < level2Index:
            return threatLevel1
        elif level1Index == level2Index:
            return threatLevel1
        else:
            return threatLevel2
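
_getLowestThreat mirrors the existing _getHighestThreat: both look up the threat's ordered key list via mostSignificantDiscrete_keyOrder_dict and compare positions in that list, so the same comparison works for any discrete element. A self-contained sketch of the index-based comparison (illustrative only; the hard-coded order stands in for the real key-order lookup):

# Illustrative only: compare discrete threat levels by their position in a key order.
keyOrder = ["None", "Elevated", "Mod", "High", "Extreme"]

def lowestThreat(level1, level2, order=keyOrder):
    # The level appearing earlier in the order is the lower threat
    return level1 if order.index(level1) <= order.index(level2) else level2

def highestThreat(level1, level2, order=keyOrder):
    # The level appearing later in the order is the higher threat
    return level1 if order.index(level1) >= order.index(level2) else level2

# lowestThreat("High", "Mod")   -> "Mod"
# highestThreat("High", "Mod")  -> "High"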

    def _initializeVariables(self, argDict):
        # Get variables
        error = self._getVariables(argDict)

@@ -860,15 +961,16 @@ class TextProduct(HLSTCV_Common.TextProduct):
    def _sampleTCVAdvisory(self, advisory):
        print "SARAH: sampling TCV advisory!"
        for zone in advisory["ZoneData"]:
            print "Looking at zone", zone
            print "-" * 60
            print "Looking at zone %s" % (zone)
            for key in advisory["ZoneData"][zone]:
                if "Threat" not in key:
                    continue

                print "Looking at key", key
                print "Looking at key '%s'" % (key)

                threatLevel = advisory["ZoneData"][zone][key]
                print "Threat level =", threatLevel
                print " Threat level = %s" % (threatLevel)
                if self._samplingDict[key]['inputThreatLow'] is None:
                    self._samplingDict[key]['inputThreatLow'] = threatLevel
                if self._samplingDict[key]['inputThreatHigh'] is None:

@@ -883,13 +985,13 @@ class TextProduct(HLSTCV_Common.TextProduct):
                    if threatOrder.index(threatLevel) > threatOrder.index(highThreat):
                        highThreat = threatLevel

                print "low threat =", lowThreat
                print "high threat =", highThreat
                print " low threat = %s" % (lowThreat)
                print " high threat = %s" % (highThreat)

                self._samplingDict[key]['inputThreatLow'] = lowThreat
                self._samplingDict[key]['inputThreatHigh'] = highThreat

        print "Sampling dict =", self._samplingDict
        print "Sampling dict = %s" % (repr(self._samplingDict))

    def _setHazardImpactCategories(self, threatName):
        inputThreatLow = self._samplingDict[threatName]['inputThreatLow']

@@ -898,6 +1000,9 @@ class TextProduct(HLSTCV_Common.TextProduct):
        decidingField = self._samplingDict[threatName]['decidingField']
        catastrophicThreshold = self._samplingDict[threatName]['catastrophicThreshold']

        print "-" * 60
        print "MATT DEBUG: _setHazardImpactCategories for %s" % (threatName)

        impactMin = None
        impactMax = None
        impactRange = None

@@ -926,22 +1031,25 @@ class TextProduct(HLSTCV_Common.TextProduct):
            else:
                impactMax = "devastating"
                impactRangeMax = "extensive"
        elif inputThreatLow == "High":
        elif inputThreatHigh == "High":
            impactMax = "extensive"
            impactRangeMax = "significant"
        elif inputThreatLow == "Mod":
        elif inputThreatHigh == "Mod":
            impactMax = "significant"
            impactRangeMax = "limited"
        elif inputThreatLow == "Elevated":
        elif inputThreatHigh == "Elevated":
            impactMax = "limited"
            impactRangeMax = "none"
        else:
            impactMax = "none"
            impactRangeMax = "none"

        print "MATT DEBUG: impactMin = '%s' impactMax = '%s' impactRangeMax = '%s'" % \
            (impactMin, impactMax, impactRangeMax)

        # Determine dominant impact category for rest of CWA - No impact
        if impactMin == "none" and impactMax == "none":
            impactRange = "No impacts are anticipated at this time across " + self._cwa() + "."
            impactRange = "No impacts are anticipated at this time across " + self._cwa_descriptor() + "."
        # Otherwise, at least some impact will be experienced across the CWA
        else:
            # Do not permit the lowest category to be "None", if the highest category is also not "None"
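
The branch chain above turns the highest sampled threat level into the impact wording used in the product text: Extreme maps to devastating (or catastrophic once decidingField reaches catastrophicThreshold), High to extensive, Mod to significant, Elevated to limited, and anything else to none, with impactRangeMax one step lower in each case. The same mapping written out as a table for reference (illustrative only, not part of this commit):

# Illustrative summary of the threat-level to impact-wording mapping above.
impactWordForThreat = {
    #  threat        (impactMax,     impactRangeMax)
    "Extreme":  ("devastating",  "extensive"),   # "catastrophic" past the threshold
    "High":     ("extensive",    "significant"),
    "Mod":      ("significant",  "limited"),
    "Elevated": ("limited",      "none"),
    "None":     ("none",         "none"),
}
# impactMax, impactRangeMax = impactWordForThreat.get(inputThreatHigh, ("none", "none"))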

@@ -968,17 +1076,28 @@ class TextProduct(HLSTCV_Common.TextProduct):
        # See if there is a headline in this text
        headlineSearch = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text)

        # If we could not find original headlines, try to use 'new' HLS style
        if headlineSearch is None:
            headlineSearch = re.findall("(?ism)^\*\*.+?\*\* *\n", text)

        self.debug_print("headlineSearch = %s" % (headlineSearch))

        # If we found a headline
        if len(headlineSearch) > 0:

            # Remove the first and last ellipses - if they exist
            headlineSearch[0] = re.sub("^\.\.\.", "", headlineSearch[0])
            headlineSearch[0] = re.sub("\.\.\.$", "", headlineSearch[0])

            # # Remove the first and last '**' - if they exist
            # headlineSearch[0] = headlineSearch[0].sub("**", "").strip()

            # Return the first cleaned-up headline string we found
            return self._cleanText(headlineSearch[0])

        # Otherwise, return an indicator there is no headline in this text
        else:
            return '' # Changed to an null string instead of None
            return "" # Changed to an null string instead of None
                      # (MHB 04/08/2009)
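
The search above handles both the legacy ...HEADLINE... style and the newer **HEADLINE** style; note that re.findall always returns a list, so an empty list rather than None is what indicates that no headline was found. A standalone sketch of the same extraction (illustrative only, not part of this commit):

# Illustrative only: pull the first headline out of product text in either style.
import re

def findHeadline(text):
    # Legacy style: ...HEADLINE... on its own line
    matches = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text)
    if not matches:
        # Newer HLS style: **HEADLINE** on its own line
        matches = re.findall("(?ism)^\*\*(.+?)\*\* *\n", text)
    if not matches:
        return ""
    return matches[0].strip(".* ")

# findHeadline("...HURRICANE WARNING IN EFFECT... \nRemainder of product\n")
#   returns "HURRICANE WARNING IN EFFECT"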

    def _determineHazards(self, segments):

@@ -1071,18 +1190,18 @@ class TextProduct(HLSTCV_Common.TextProduct):
        self._stormIntensityTrend = "Storm Intensity " + stormDict.get("StormIntensity","")

        print "SARAH: BEGIN STORM INFORMATION"
        print "storm dict =", stormDict
        print "storm name =", self._stormName
        print "type =", self._stormType
        print "type name =", self._stormTypeName
        print "time =", self._stormTime
        print "lat =", self._stormLat
        print "lon =", self._stormLon
        print "location =", self._stormLocation
        print "reference =", self._stormReference
        print "references =", self._stormLocalReferences
        print "movement trend =", self._stormMovementTrend
        print "intensity trend =", self._stormIntensityTrend
        print "storm dict = %s" % (stormDict)
        print "storm name = %s" % (self._stormName)
        print "type = %s" % (self._stormType)
        print "type name = %s" % (self._stormTypeName)
        print "time = %s" % (self._stormTime)
        print "lat = %s" % (self._stormLat)
        print "lon = %s" % (self._stormLon)
        print "location = %s" % (str(self._stormLocation))
        print "reference = %s" % (self._stormReference)
        print "references = %s" % (self._stormLocalReferences)
        print "movement trend = %s" % (self._stormMovementTrend)
        print "intensity trend = %s" % (self._stormIntensityTrend)
        print "SARAH: END STORM INFORMATION"

    def _grabStormInfo(self, tcp):

@@ -1758,7 +1877,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
                removedParts.append(part)

        for part in removedParts:
            print "SARAH: Removing part =", part
            print "SARAH: Removing part = %s" % (part)
            partsList.remove(part)

    def _noOpParts(self):

@@ -1805,7 +1924,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
            upgPhenSig = record['phen'] + "." + record['sig']
            newRecord = self._findNEWAssociatedWithUPG(upgPhenSig, vtecRecords)
            record['new_record'] = newRecord
            print "SARAH: vtecRecord =", record
            print "SARAH: vtecRecord = %s" % (repr(record))
        segment_vtecRecords_tuples.append((segment, vtecRecords))

        productSegmentGroup = {

@@ -1888,7 +2007,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
    def _initializeHazardsTable(self, argDict):
        import VTECMessageType
        productID = self._pil[0:3]
        vtecMode = VTECMessageType.getVTECMessageType(productID)
        vtecMode = VTECMessageType.getVTECMessageType(productID.upper())
        argDict["vtecMode"] = vtecMode

        self._setVTECActiveTable(argDict)

@@ -1903,6 +2022,11 @@ class TextProduct(HLSTCV_Common.TextProduct):
        dataMgr = argDict["dataMgr"]
        gfeMode = dataMgr.getOpMode().name()

        print "*" *100
        print "gfeMode = '%s'" % (gfeMode)
        print "*" *100

        if gfeMode == "PRACTICE":
            argDict["vtecActiveTable"] = "PRACTICE"
        else:

@@ -1960,7 +2084,7 @@ class TextProduct(HLSTCV_Common.TextProduct):
                 ("Tornadoes", 'tornadoSection'),
                 ("Other Coastal Hazards", 'coastalHazardsSection')
                 ],
             "default": ["Wind", "Surge", "Flooding Rain", "Tornadoes", "Other Coastal Hazards"],
             "default": ["Wind", "Surge", "Flooding Rain", "Tornadoes"],
             },
            {
             "name":"LocalReferencePoints",

@@ -1971,14 +2095,14 @@ class TextProduct(HLSTCV_Common.TextProduct):
             "default": self._localReferencePoints_defaults(),
             },
            {
             "name": "MainHeadline",
             "label": "Step 5. Input Main Headline (required)",
             "name":"GeneralOnsetTime",
             "label": "Step 5. General Time to Onset",
             "options": [
                 ("Enter Unique Headline (below)", "Enter"),
                 ("Use Previous HLS Headline", "UsePrev"),
                 ("Use Latest TCP Headline", "UseTCP"),
                 ("Watch", 'check plans'),
                 ("Warning", 'complete preparations'),
                 ("Conditions/Ongoing", 'hunker down'),
                 ("Recovery", 'recovery'),
                 ],
             "entryField": " ",
             },
            {
             "name": "NextUpdate",

@@ -1991,6 +2115,16 @@ class TextProduct(HLSTCV_Common.TextProduct):
             "default": "Shortly",
             "entryField": " e.g. 6 AM EDT",
             },
            {
             "name": "MainHeadline",
             "label": "Step 7. Input Main Headline (required)",
             "options": [
                 ("Enter Unique Headline (below)", "Enter"),
                 ("Use Previous HLS Headline", "UsePrev"),
                 ("Use Latest TCP Headline", "UseTCP"),
                 ],
             "entryField": " ",
             },
            ]

    def _displayGUI(self, infoDict=None):

@@ -2043,10 +2177,12 @@ class Overview_Dialog(HLSTCV_Common.Common_Dialog):
                boxNum = 0
                buttonSide=Tkinter.TOP
                frameSide = Tkinter.LEFT
            elif index in [3,4]:
            elif index in [3,4,5]:
                boxNum = 1
                buttonSide=Tkinter.LEFT
                frameSide=Tkinter.TOP
                # buttonSide=Tkinter.LEFT
                # frameSide=Tkinter.TOP
                buttonSide=Tkinter.TOP
                frameSide=Tkinter.LEFT
            else:
                boxNum = 2
                buttonSide=Tkinter.TOP

@@ -2126,7 +2262,7 @@ class Overview_Dialog(HLSTCV_Common.Common_Dialog):
        frame = Tkinter.Frame(master)
        buttonList = self._parent._GUI1_configDict().get("buttonList", [])
        for button, label in buttonList:
            if button == "Next":
            if button == "Run":
                command = self.okCB
            else: # Cancel
                command = self.cancelCB

@@ -2157,7 +2293,7 @@ class Overview_Dialog(HLSTCV_Common.Common_Dialog):
                    checkList.append((options[i], svar.get()))
                else:
                    if ivarList[i].get():
                        print "SARAH: adding option =", options[i]
                        print "SARAH: adding option = %s" % (repr(options[i]))
                        checkList.append(options[i])
                value = checkList
            self._setVarDict(name, value)

@@ -2195,7 +2331,7 @@ class LegacyFormatter():
        @return text -- product string
        '''
        text = ''
        print "SARAH: productParts =", productParts
        print "SARAH: productParts = %s" % (productParts)
        for part in productParts:
            valtype = type(part)
            if valtype is str:

@@ -2203,11 +2339,11 @@ class LegacyFormatter():
            elif valtype is tuple:
                name = part[0]
                infoDicts = part[1]
                print "SARAH: name =", str(name)
                print "SARAH: infoDicts =", infoDicts
                print "SARAH: name = %s" % (str(name))
                print "SARAH: infoDicts = %s" % (repr(infoDicts))
                newtext = self.processSubParts(productDict.get(name), infoDicts)
                print "SARAH: newtext type =", type(newtext)
                print "SARAH: newtext =", repr(newtext)
                print "SARAH: newtext type = %s" % (type(newtext))
                print "SARAH: newtext = %s" % (repr(newtext))
                text += newtext
                continue
            elif valtype is list:

@@ -2291,8 +2427,8 @@ class LegacyFormatter():
            text += '&&\n'
        elif name not in self._noOpParts():
            textStr = productDict.get(name)
            print "SARAH: name =", name
            print "SARAH: textStr =", textStr
            print "SARAH: name = %s" % (name)
            print "SARAH: textStr = '%s'" % (textStr)
            if textStr:
                text += textStr + '\n'
        return text

@@ -2355,7 +2491,7 @@ class LegacyFormatter():
            text += self._textProduct.indentText("**" + headline + "**\n",
                                                 maxWidth=self._textProduct._lineLength)

        text = self._textProduct._frame(text) + "\n"
        text = self._textProduct._frame(text) + "\n\n"

        return text

@@ -2456,13 +2592,32 @@ class LegacyFormatter():
            text += "\n"

        additionalImpactRangeText = ""
        curAdditionalImpactText = ""
        count = 1
        for additionalImpact in sectionDict['additionalImpactRange']:
            additionalImpactRangeText += additionalImpact + " "

            curAdditionalImpactText += \
                self._textProduct.indentText(additionalImpact,
                                             maxWidth=self._textProduct._lineLength)

            if count != len(sectionDict['additionalImpactRange']) and \
               len(curAdditionalImpactText) > 0:
                curAdditionalImpactText += "\n"

            # If this additional impact is not already included in the output
            if additionalImpactRangeText.find(curAdditionalImpactText) == -1:

                # Add this additional impact text
                print "Adding current impact."
                additionalImpactRangeText += curAdditionalImpactText
            count += 1

        # Remove the trailing space
        additionalImpactRangeText = additionalImpactRangeText[:-1]
        # additionalImpactRangeText = additionalImpactRangeText[:-1]

        text += self._textProduct.indentText(additionalImpactRangeText, maxWidth=self._textProduct._lineLength)
        # text += self._textProduct.indentText(additionalImpactRangeText, maxWidth=self._textProduct._lineLength)

        text += additionalImpactRangeText

        text += "\n"
        return text

@@ -2476,10 +2631,10 @@ class LegacyFormatter():
        """
        text = ''
        for i in range(len(subParts)):
            print "SARAH: subpart subParts[i] =", subParts[i]
            print "SARAH: subpart infoDicts[i] =", infoDicts[i]
            print "SARAH: subpart subParts[i] = %s" % (subParts[i])
            print "SARAH: subpart infoDicts[i] = %s" % (infoDicts[i])
            newtext = self._processProductParts(subParts[i], infoDicts[i].get('partsList'))
            print "SARAH: subpart newtext type =", type(newtext)
            print "SARAH: subpart newtext =", repr(newtext)
            print "SARAH: subpart newtext type = %s" % (type(newtext))
            print "SARAH: subpart newtext = '%s'" % (repr(newtext))
            text += newtext
        return text
File diff suppressed because it is too large