Merge tag 'OB_16.1.2-15' into asm_16.1.2
16.1.2-15 Former-commit-id: 600fc0a61960c03b76611589bf0d1e9cd59d2189
commit 406b33a0ad
4 changed files with 192 additions and 158 deletions
@@ -5,7 +5,7 @@
 # TCV_Dictionary
 # TCV_Dictionary file
 # Author: GFE Installation Script
-# Last Modified: Feb 13, 2015
+# Last Modified: Jan 26, 2016
 # ----------------------------------------------------------------------------
 # Needed to prevent an error from the SmartTool module
 WeatherElementEdited = None
@@ -16,7 +16,7 @@ ThreatStatements = {
 "check plans": {
 "planning": "Emergency planning should include a reasonable threat for major hurricane force wind greater than 110 MPH of equivalent Category 3 intensity or higher.",
 "preparation": "To be safe, aggressively prepare for the potential of devastating to catastrophic wind impacts. Efforts should now be underway to secure all properties.",
-"action": "Extremely Dangerous and life threatening wind is possible. Failure to adequately shelter may result in serious injury, loss of life, or immense human suffering.",
+"action": "Extremely dangerous and life threatening wind is possible. Failure to adequately shelter may result in serious injury, loss of life, or immense human suffering.",
 },
 "complete preparations": {
 "planning": "Emergency plans should include a reasonable threat for major hurricane force wind greater than 110 MPH of equivalent Category 3 intensity or higher.",

@@ -30,7 +30,8 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.viz.mpe.util.DailyQcUtils.Station;

 /**
- * TODO Add Description
+ * This routine will estimate 6 hourly periods when 24 hour rain exists. Based on:
+ * ohd/pproc_lib/src/GageQCEngine/TEXT/estimate_daily_stations.c in AWIPS I.
 *
 * <pre>
 *
@@ -38,6 +39,8 @@ import com.raytheon.viz.mpe.util.DailyQcUtils.Station;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Mar 10, 2009 snaples Initial creation
+* Jan 11, 2016 5173 bkowal Do not estimate a station that has been forced
+* good. Eliminated warnings.
 *
 * </pre>
 *
@@ -55,8 +58,8 @@ public class EstDailyStations {
 public void estimate_daily_stations(int j,
 ArrayList<Station> precip_stations, int numPstations) {

-int dqc_neig = dqc.mpe_dqc_max_precip_neighbors;
-int isom = dqc.isom;
+int dqc_neig = DailyQcUtils.mpe_dqc_max_precip_neighbors;
+int isom = DailyQcUtils.isom;
 int isohyets_used = dqc.isohyets_used;
 int method = dqc.method;
 int m, k, i, l, ii;
@@ -83,18 +86,18 @@ public class EstDailyStations {
 int details = dqc.mpe_td_details_set;
 int mpe_td_new_algorithm_set = dqc.mpe_td_new_algorithm_set;

-if (dqc.pdata[j].data_time == null) {
+if (DailyQcUtils.pdata[j].data_time == null) {
 return;
 }
 Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
-cal.setTime(dqc.pdata[j].data_time);
+cal.setTime(DailyQcUtils.pdata[j].data_time);

 /* this routine will estimate 6 hourly periods when 24 hour rain exists */

 for (m = 0; m < max_stations; m++) {
 /* dont estimate missing 24 hour stations */
-if (dqc.pdata[j].stn[m].frain[4].data < 0
-|| dqc.pdata[j].stn[m].frain[4].qual == 4) {
+if (DailyQcUtils.pdata[j].stn[m].frain[4].data < 0
+|| DailyQcUtils.pdata[j].stn[m].frain[4].qual == 4) {
 continue;
 }

@@ -102,17 +105,17 @@ public class EstDailyStations {

 for (k = 0; k < 4; k++) {

-if (dqc.pdata[j].stn[m].frain[k].data >= 0
-&& dqc.pdata[j].stn[m].frain[k].qual == 2) {
+if (DailyQcUtils.pdata[j].stn[m].frain[k].data >= 0
+&& DailyQcUtils.pdata[j].stn[m].frain[k].qual == 2) {
 continue;
 }

-if (dqc.pdata[j].stn[m].frain[k].qual == 1) {
+if (DailyQcUtils.pdata[j].stn[m].frain[k].qual == 1) {
 break;
 }

-if (dqc.pdata[j].stn[m].rrain[k].data < 0
-&& dqc.pdata[j].stn[m].frain[k].qual != 2) {
+if (DailyQcUtils.pdata[j].stn[m].rrain[k].data < 0
+&& DailyQcUtils.pdata[j].stn[m].frain[k].qual != 2) {
 break;
 }

@@ -124,8 +127,9 @@ public class EstDailyStations {

 /* dont estimate stations forced good, bad or estimated */

-if (dqc.pdata[j].stn[m].frain[4].qual == 1
-|| dqc.pdata[j].stn[m].frain[4].qual == 5) {
+if (DailyQcUtils.pdata[j].stn[m].frain[4].qual == 0
+|| DailyQcUtils.pdata[j].stn[m].frain[4].qual == 1
+|| DailyQcUtils.pdata[j].stn[m].frain[4].qual == 5) {
 continue;
 }

@@ -154,16 +158,16 @@ public class EstDailyStations {

 /* dont estimate unless good or forced good */

-if (dqc.pdata[j].stn[i].frain[k].qual != 0
-&& dqc.pdata[j].stn[i].frain[k].qual != 8
-&& dqc.pdata[j].stn[i].frain[k].qual != 3
-&& dqc.pdata[j].stn[i].frain[k].qual != 2) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].qual != 0
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 8
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 3
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 2) {
 continue;
 }

 /* dont use missing stations */

-if (dqc.pdata[j].stn[i].frain[k].data < 0) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].data < 0) {
 continue;
 }

@@ -186,10 +190,10 @@ public class EstDailyStations {
 testdist = 1 / testdist;

 if (method == 2 && isoh > 0 && isoh1 > 0) {
-padj = dqc.pdata[j].stn[i].frain[k].data
+padj = DailyQcUtils.pdata[j].stn[i].frain[k].data
 * isoh1 / isoh;
 } else {
-padj = dqc.pdata[j].stn[i].frain[k].data;
+padj = DailyQcUtils.pdata[j].stn[i].frain[k].data;
 }

 fdist = testdist + fdist;
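The weighting in this loop is a plain inverse-distance scheme with an optional isohyet (climatological precipitation) adjustment when `method == 2`. The following Python sketch restates that arithmetic outside the AWIPS code base; the function name, the tuple layout, and the missing-data handling are illustrative assumptions, not part of EstDailyStations.

```python
def estimate_from_neighbors(neighbors, method=2):
    """Inverse-distance estimate of a 6-hour value from neighboring gages.

    neighbors: list of (value, distance, isoh, isoh1) tuples, where isoh is the
    neighbor's climatological value and isoh1 the target station's.
    Mirrors the weighting above: weight = 1/distance, and when method == 2 and
    both isohyets are positive the neighbor value is scaled by isoh1 / isoh.
    """
    fdist = 0.0
    fdata = 0.0
    for value, distance, isoh, isoh1 in neighbors:
        if value < 0 or distance <= 0:
            continue  # skip missing data and degenerate distances
        weight = 1.0 / distance
        if method == 2 and isoh > 0 and isoh1 > 0:
            padj = value * isoh1 / isoh
        else:
            padj = value
        fdist += weight
        fdata += padj * weight
    if fdist == 0.0:
        return None  # no usable neighbors
    return fdata / fdist

# Two nearby gages, the closer one dominating the estimate:
print(estimate_from_neighbors([(0.40, 8.0, 3.2, 2.9), (0.10, 15.0, 3.0, 2.9)]))
```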
@@ -229,14 +233,14 @@ public class EstDailyStations {
 continue;
 }

-if (dqc.pdata[j].stn[i].frain[k].qual != 0
-&& dqc.pdata[j].stn[i].frain[k].qual != 8
-&& dqc.pdata[j].stn[i].frain[k].qual != 3
-&& dqc.pdata[j].stn[i].frain[k].qual != 2) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].qual != 0
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 8
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 3
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 2) {
 continue;
 }

-if (dqc.pdata[j].stn[i].frain[k].data < 0) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].data < 0) {
 continue;
 }

@@ -257,10 +261,10 @@ public class EstDailyStations {
 testdist = 1 / testdist;

 if (method == 2 && isoh > 0 && isoh1 > 0) {
-padj = dqc.pdata[j].stn[i].frain[k].data
+padj = DailyQcUtils.pdata[j].stn[i].frain[k].data
 * isoh1 / isoh;
 } else {
-padj = dqc.pdata[j].stn[i].frain[k].data;
+padj = DailyQcUtils.pdata[j].stn[i].frain[k].data;
 }

 fdist = testdist + fdist;
@@ -293,27 +297,26 @@ public class EstDailyStations {
 if (i == m) {
 continue;
 }
-if (dqc.pdata[j].stn[i].frain[k].qual != 0
-&& dqc.pdata[j].stn[i].frain[k].qual != 8
-&& dqc.pdata[j].stn[i].frain[k].qual != 3
-&& dqc.pdata[j].stn[i].frain[k].qual != 2) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].qual != 0
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 8
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 3
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 2) {
 continue;
 }

-if (dqc.pdata[j].stn[i].frain[k].data < 0) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].data < 0) {
 continue;
 }

 buf = String
 .format(" %s(%f,%f)",
 precip_stations.get(i).hb5,
-dqc.pdata[j].stn[i].frain[k].data,
+DailyQcUtils.pdata[j].stn[i].frain[k].data,
 precip_stations.get(i).isoh[isom]);

 dqc.td_fpwr.write(buf);
 dqc.td_fpwr.newLine();
 }
-// fvalue[k] = fdata / fdist;

 } catch (IOException e) {
 statusHandler.handle(Priority.PROBLEM,
@@ -340,8 +343,8 @@ public class EstDailyStations {
 dqc.td_fpwr.write(buf);
 dqc.td_fpwr.newLine();
 } catch (IOException e) {
-// TODO Auto-generated catch block
-e.printStackTrace();
+statusHandler.handle(Priority.PROBLEM,
+e.getLocalizedMessage(), e);
 }
 }

@@ -352,14 +355,14 @@ public class EstDailyStations {
 continue;
 }

-if (dqc.pdata[j].stn[i].frain[k].qual != 0
-&& dqc.pdata[j].stn[i].frain[k].qual != 8
-&& dqc.pdata[j].stn[i].frain[k].qual != 3
-&& dqc.pdata[j].stn[i].frain[k].qual != 2) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].qual != 0
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 8
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 3
+&& DailyQcUtils.pdata[j].stn[i].frain[k].qual != 2) {
 continue;
 }

-if (dqc.pdata[j].stn[i].frain[k].data < 0) {
+if (DailyQcUtils.pdata[j].stn[i].frain[k].data < 0) {
 continue;
 }

@@ -385,7 +388,7 @@ public class EstDailyStations {
 .format(" %s(%f,%f)",
 precip_stations
 .get(closest_good_gage_index).hb5,
-dqc.pdata[j].stn[closest_good_gage_index].frain[k].data,
+DailyQcUtils.pdata[j].stn[closest_good_gage_index].frain[k].data,
 precip_stations
 .get(closest_good_gage_index).isoh[isom]);

@@ -393,8 +396,8 @@ public class EstDailyStations {
 dqc.td_fpwr.write(buf);
 dqc.td_fpwr.newLine();
 } catch (IOException e) {
-// TODO Auto-generated catch block
-e.printStackTrace();
+statusHandler.handle(Priority.PROBLEM,
+e.getLocalizedMessage(), e);
 }
 }
 }
@@ -405,7 +408,7 @@ public class EstDailyStations {
 fvalue[k] = -9999;
 } else {
 if (closest_good_gage_index != -9999) {
-fvalue[k] = dqc.pdata[j].stn[closest_good_gage_index].frain[k].data;
+fvalue[k] = DailyQcUtils.pdata[j].stn[closest_good_gage_index].frain[k].data;
 } else {
 fvalue[k] = -9999;
 }
@@ -418,8 +421,8 @@ public class EstDailyStations {

 for (k = 0; k < 4; k++) {

-dqc.pdata[j].stn[m].frain[k].qual = 6;
-dqc.pdata[j].stn[m].frain[k].data = dqc.pdata[j].stn[m].frain[4].data / 4;
+DailyQcUtils.pdata[j].stn[m].frain[k].qual = 6;
+DailyQcUtils.pdata[j].stn[m].frain[k].data = DailyQcUtils.pdata[j].stn[m].frain[4].data / 4;

 }

@@ -433,10 +436,10 @@ public class EstDailyStations {

 for (k = 0; k < 4; k++) {

-if ((dqc.pdata[j].stn[m].rrain[k].data >= 0 && dqc.pdata[j].stn[m].frain[k].qual != 1)
-|| (dqc.pdata[j].stn[m].frain[k].qual == 2)) {
+if ((DailyQcUtils.pdata[j].stn[m].rrain[k].data >= 0 && DailyQcUtils.pdata[j].stn[m].frain[k].qual != 1)
+|| (DailyQcUtils.pdata[j].stn[m].frain[k].qual == 2)) {
 stotal = stotal
-+ dqc.pdata[j].stn[m].frain[k].data;
++ DailyQcUtils.pdata[j].stn[m].frain[k].data;
 } else {

 num_missing++;
@@ -447,7 +450,7 @@ public class EstDailyStations {

 }

-stotal = dqc.pdata[j].stn[m].frain[4].data - stotal;
+stotal = DailyQcUtils.pdata[j].stn[m].frain[4].data - stotal;

 if (stotal < 0) {
 stotal = 0;
@@ -461,22 +464,22 @@ public class EstDailyStations {

 for (k = 0; k < 4; k++) {

-if ((dqc.pdata[j].stn[m].rrain[k].data >= 0 && dqc.pdata[j].stn[m].frain[k].qual != 1)
-|| (dqc.pdata[j].stn[m].frain[k].qual == 2)) {
+if ((DailyQcUtils.pdata[j].stn[m].rrain[k].data >= 0 && DailyQcUtils.pdata[j].stn[m].frain[k].qual != 1)
+|| (DailyQcUtils.pdata[j].stn[m].frain[k].qual == 2)) {
 continue;
 }

 if (ftotal != 0) {

-dqc.pdata[j].stn[m].frain[k].data = (float) (fvalue[k] * fmult);
-dqc.pdata[j].stn[m].frain[k].qual = 6;
+DailyQcUtils.pdata[j].stn[m].frain[k].data = (float) (fvalue[k] * fmult);
+DailyQcUtils.pdata[j].stn[m].frain[k].qual = 6;

 }

 else {

-dqc.pdata[j].stn[m].frain[k].data = (float) (stotal / num_missing);
-dqc.pdata[j].stn[m].frain[k].qual = 6;
+DailyQcUtils.pdata[j].stn[m].frain[k].data = (float) (stotal / num_missing);
+DailyQcUtils.pdata[j].stn[m].frain[k].qual = 6;

 }

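The surrounding method reconciles the estimated 6-hour values with the accepted 24-hour total: periods with good observations keep their data, estimated periods either take a scaled share of the remaining total (the fvalue[k] * fmult branch) or an even split of what is left (stotal / num_missing). A compact Python sketch of the even-split bookkeeping follows; the function and argument names are illustrative only.

```python
def distribute_daily_total(total_24h, six_hour, estimated):
    """Spread a 24-hour rain total across four 6-hour periods.

    total_24h: the accepted 24-hour value.
    six_hour:  list of four values; known-good entries are kept as-is.
    estimated: list of four booleans marking periods that must be estimated.

    Known periods are subtracted from the total first; the remainder is then
    split evenly across the estimated periods, mirroring the fallback branch
    above. The proportional branch would scale a first-guess pattern instead.
    """
    known = sum(v for v, est in zip(six_hour, estimated) if not est)
    remainder = max(total_24h - known, 0.0)
    num_missing = sum(estimated)
    result = list(six_hour)
    for k in range(4):
        if estimated[k]:
            result[k] = remainder / num_missing if num_missing else 0.0
    return result

# 1.2" daily total, first two periods observed: the rest split the remainder.
print(distribute_daily_total(1.2, [0.5, 0.3, 0.0, 0.0], [False, False, True, True]))
```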
@@ -1,4 +1,4 @@
-# Version 2015.8.27-0
+# Version 2016.01.26-0

 import GenericHazards
 import string, time, os, re, types, copy, LogStream, collections
@@ -549,21 +549,16 @@ class TextProduct(HLSTCV_Common.TextProduct):
 productDict['stormInformation'] = stormInfoDict

 def _situationOverview(self, productDict, productSegmentGroup, productSegment):
-overviewSectionTitle = ".Situation Overview...\n"
-
+# Use generic text for the situation overview
+productDict['situationOverview'] = self._frame("Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc.")
+
 # Get the WRKHLS product that has the situation overview we want
-wrkhlsProduct = self.getPreviousProduct("WRKHLS")
-
-# Try finding the situation overview
-overviewSearch = re.search("(?ism).*^%s(.+?)^\." % (overviewSectionTitle), wrkhlsProduct)
+wrkhlsProduct = self.getPreviousProduct("WRKHLS").strip()

 # If we found the overview
-if overviewSearch is not None:
-# Clean it up
-productDict['situationOverview'] = self._cleanText(overviewSearch.group(1).strip())
-else:
-# Use generic text for the situation overview
-productDict['situationOverview'] = self._frame("Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc.")
+if len(wrkhlsProduct) > 0:
+# Frame the imported overview and use it instead of the generic text
+productDict['situationOverview'] = self._frame(wrkhlsProduct)

 def _windSection(self, productDict, productSegmentGroup, productSegment):
 sectionDict = dict()
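The revised `_situationOverview` no longer scrapes a titled section out of the WRKHLS text with a regular expression; it frames the whole stripped product when it is non-empty and otherwise keeps the generic framed placeholder. A standalone sketch of that fallback pattern, with stand-in callables (`get_previous_product`, `frame`) rather than the real GFE helpers:

```python
def situation_overview(get_previous_product, frame):
    """Return framed overview text: imported WRKHLS content if present, else generic."""
    # Start with the generic, forecaster-editable placeholder.
    overview = frame("Succinctly describe the expected evolution of the event "
                     "for the cwa; which hazards are of greater (or lesser) "
                     "concern, forecast focus, etc.")

    # Prefer previously composed WRKHLS text when any exists.
    wrkhls = get_previous_product("WRKHLS").strip()
    if wrkhls:
        overview = frame(wrkhls)
    return overview

# Example with trivial stand-ins for the product reader and the framing helper:
print(situation_overview(lambda pil: "  Storm remains offshore. ",
                         lambda text: "|* %s *|" % text))
```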
@@ -2754,4 +2749,3 @@ class LegacyFormatter():
 self._textProduct.debug_print("subpart newtext = '%s'" % (self._pp.pformat(newtext)))
 text += newtext
 return text
-
@@ -1,4 +1,4 @@
-# Version 2015.11.19-0
+# Version 2016.01.26-0

 import GenericHazards
 import JsonSupport
@@ -469,14 +469,14 @@ class TextProduct(HLSTCV_Common.TextProduct):
 if len(self._segmentList) == 0:
 return "No hazards to report"

-# Determine time ranges
-self._determineTimeRanges(argDict)
-
 # Make sure we have all of the necessary grids before continuing
 error = self._performGridChecks(argDict)
 if error is not None:
 return error

+# Determine time ranges
+self._determineTimeRanges(argDict)
+
 # Sample the data
 self._sampleData(argDict)

@@ -556,10 +556,25 @@ class TextProduct(HLSTCV_Common.TextProduct):
 ifpClient = argDict["ifpClient"]
 dbId = argDict["databaseID"]
 parmId = ParmID(weatherElement, dbId)
-gridTimes = ifpClient.getGridInventory(parmId)
+times = ifpClient.getGridInventory(parmId)

 self.debug_print("Element being tested: %s" % (self._pp.pformat(weatherElement)), 1)
 self.debug_print("Expected number of grids: %s" % (self._pp.pformat(expectedNumGrids)), 1)

+gridTimes = []
+for index in range(len(times)):
+gridTime = TimeRange.TimeRange(times[index])
+
+if (gridTime.endTime() <= self._timeRange.startTime() or
+gridTime.startTime() >= self._timeRange.endTime()):
+
+prettyStartTime = self._pp.pformat(str(gridTime.startTime()))
+prettyEndTime = self._pp.pformat(str(gridTime.endTime()))
+self.debug_print("skipping grid %s (%s - %s): outside of time range"
+% (index, prettyStartTime, prettyEndTime), 1)
+else:
+gridTimes.append(gridTime)
+
+self.debug_print("Length of grid times: %s" % (self._pp.pformat(len(gridTimes))), 1)

 return len(gridTimes) == expectedNumGrids
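The new grid check converts each inventory entry to a TimeRange and keeps only those that overlap the product's time range before comparing the count against `expectedNumGrids`. The same overlap filter in plain Python, using (start, end) tuples instead of the GFE TimeRange class; all names here are illustrative:

```python
def grids_in_range(grid_times, product_start, product_end):
    """Keep only grids that overlap [product_start, product_end).

    grid_times is a list of (start, end) pairs in any comparable time unit
    (for example, epoch seconds). A grid is dropped when it ends at or before
    the product start, or starts at or after the product end -- the same test
    as the skip branch above.
    """
    kept = []
    for start, end in grid_times:
        if end <= product_start or start >= product_end:
            continue  # entirely outside the product window
        kept.append((start, end))
    return kept

# Example: three 6-hour grids against a 12-hour product window.
print(grids_in_range([(0, 6), (6, 12), (12, 18)], 0, 12))   # -> [(0, 6), (6, 12)]
```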
@@ -592,11 +607,28 @@ class TextProduct(HLSTCV_Common.TextProduct):
 gridTimes = sorted(gridTimes, key= lambda gridTime: gridTime.startTime())

 totalHours = 0
-previousEndTime = gridTimes[0].startTime()
+previousEndTime = None
 for gridTime in gridTimes:
 self.debug_print("previous end time: %s" % (self._pp.pformat(str(previousEndTime))), 1)
 self.debug_print("current start time: %s" % (self._pp.pformat(str(gridTime.startTime()))), 1)

+if gridTime.endTime() <= self._timeRange.startTime():
+prettyEndTime = self._pp.pformat(str(gridTime.endTime()))
+prettyStartTime = self._pp.pformat(str(self._timeRange.startTime()))
+self.debug_print("skipping: grid end time (%s) before time range start time (%s)"
+% (prettyEndTime, prettyStartTime), 1)
+continue
+
+if gridTime.startTime() >= self._timeRange.endTime():
+prettyStartTime = self._pp.pformat(str(gridTime.startTime()))
+prettyEndTime = self._pp.pformat(str(self._timeRange.endTime()))
+self.debug_print("done: grid start time (%s) after time range end time (%s)"
+% (prettyStartTime, prettyEndTime), 1)
+break
+
+if previousEndTime is None:
+previousEndTime = gridTime.startTime()
+
 if previousEndTime != gridTime.startTime():
 # Not continuous
 return False
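With `previousEndTime` now seeded lazily from the first grid that actually falls inside the product window, the continuity test only compares grids that matter. A small sketch of the same gap check on (start, end) pairs; the names and the tuple representation are assumptions for illustration:

```python
def inventory_is_continuous(grid_times, product_start, product_end):
    """Return True when the grids inside the window abut each other with no gaps."""
    previous_end = None
    for start, end in sorted(grid_times):
        if end <= product_start:
            continue              # before the window: ignore
        if start >= product_end:
            break                 # past the window: done
        if previous_end is None:
            previous_end = start  # first relevant grid seeds the chain
        if start != previous_end:
            return False          # gap (or overlap) between consecutive grids
        previous_end = end
    return True

# 12-18Z grid is missing, so the inventory is not continuous:
print(inventory_is_continuous([(0, 6), (6, 12), (18, 24)], 0, 24))  # -> False
```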
@@ -674,10 +706,6 @@ class TextProduct(HLSTCV_Common.TextProduct):

 self.debug_print("vtecRecord = %s" % (self._pp.pformat(vtecRecord)), 1)

-if vtecRecord["phen"] == "SS":
-# Temporary? Change the vtec mode for SS hazards to be experimental
-vstr = vstr[0] + 'X' + vstr[2:]
-
 self.debug_print("final vstr = %s" % vstr, 1)
 records.append(vstr)
 segmentDict['vtecRecords'] = records
@@ -1385,7 +1413,8 @@ class SectionCommon():
 elif self._isThreatIncreasing(shorterTermTrendDifference, longerTermTrendDifference):
 self._textProduct.debug_print("threat is increasing", 1)
 threatTrendValue = "INCREASING"
-elif currentThreat == "Extreme" and \
+# NOTE: Modified so more threat levels can be classified as increasing when forecast has increased
+elif currentThreat in ["Mod", "High", "Extreme"] and \
 self._isMagnitudeIncreasing(forecastKey, magnitudeIncreaseThreshold):
 self._textProduct.debug_print("Increasing based on magnitude", 1)
 threatTrendValue = "INCREASING"
@@ -1587,7 +1616,7 @@ class SectionCommon():
 self._setProductPartValue(segmentDict, 'potentialImpactsSummary', summary)

 def _getPotentialImpactsSummaryText(self, maxThreat):
-if not self.isThreatNoneForEntireStorm:
+if self.isThreatNoneForEntireStorm:
 return "Potential Impacts: Little to None"
 if self._tr is not None and self._sectionHeaderName in ["Wind", "Storm Surge"]:
 if self._tr == "hunker down":
@@ -1855,11 +1884,11 @@ class WindSection(SectionCommon):
 # Dictionary representing wind thresholds in kts
 # for category 1, 2, 3, 4 or 5 hurricanes.
 return {
-'CAT 5 Hurricane': (157, 999),
-'CAT 4 Hurricane': (130, 157),
-'CAT 3 Hurricane': (111, 130),
-'CAT 2 Hurricane': ( 96, 111),
-'CAT 1 Hurricane': ( 74, 96),
+'Cat 5 Hurricane': (157, 999),
+'Cat 4 Hurricane': (130, 157),
+'Cat 3 Hurricane': (111, 130),
+'Cat 2 Hurricane': ( 96, 111),
+'Cat 1 Hurricane': ( 74, 96),
 'Strong Tropical Storm': ( 58, 73),
 'Tropical Storm': ( 39, 58),
 }
@@ -2108,7 +2137,7 @@ class FloodingRainSection(SectionCommon):
 words = self._rainRange(int(self._stats._sumAccum + 0.5))

 # If we have previous rainfall
-if self._stats._prevAccum not in [0.0, None]:
+if self._stats._prevAccum not in [0.0, None] and (int(self._stats._sumAccum + 0.5)) != 0:
 words = "Additional " + words
 self._setProductPartValue(segmentDict, 'peakRain', "Peak Rainfall Amounts: " + words)

@@ -2116,7 +2145,9 @@ class FloodingRainSection(SectionCommon):
 minAccum = 0
 maxAccum = 0

-if sumAccum == 0:
+if sumAccum == 0 and self._stats._prevAccum not in [0.0, None]:
 return "No additional significant rainfall forecast"
+elif sumAccum == 0 and self._stats._prevAccum in [0.0, None]:
+return "No significant rainfall forecast"
 elif sumAccum == 1:
 return "around 1 inch"
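Taken together, the two rainfall fixes make the peak-rainfall line distinguish "no additional rainfall" (the storm already produced rain) from "no rainfall" (nothing has fallen and nothing is forecast), and only prepend "Additional" when a non-zero amount is still expected. A condensed sketch of that decision table; the real `_rainRange` produces range wording, which is simplified to a single string here:

```python
def peak_rain_words(sum_accum, prev_accum):
    """Compose the 'Peak Rainfall Amounts' wording from forecast and prior totals."""
    rounded = int(sum_accum + 0.5)
    had_previous_rain = prev_accum not in [0.0, None]

    if rounded == 0 and had_previous_rain:
        return "No additional significant rainfall forecast"
    elif rounded == 0:
        return "No significant rainfall forecast"

    words = "around %d inch%s" % (rounded, "" if rounded == 1 else "es")
    if had_previous_rain:
        words = "Additional " + words
    return words

print(peak_rain_words(0.2, 1.5))   # -> No additional significant rainfall forecast
print(peak_rain_words(2.6, None))  # -> around 3 inches
```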
@@ -2187,14 +2218,6 @@ class FloodingRainSection(SectionCommon):
 if len(subsectionDict) > 0:
 self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict)

-def _specialImpactsStatements(self):
-return {"hunker down": ["Potential impacts from flooding rain are still unfolding.",
-"The extent of realized impacts will depend on actual rainfall amounts as received at particular locations.",
-],
-"recovery": ["For additional information on impacts being caused by flooding rain, refer to the local hazardous weather outlook or hurricane local statement.",
-],
-}
-
 def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment):
 if not self._textProduct._WSPGridsAvailable:
 self._setProductPartValue(segmentDict, 'potentialImpactsSummary',
@@ -2315,14 +2338,6 @@ class TornadoSection(SectionCommon):
 if len(subsectionDict) > 0:
 self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict)

-def _specialImpactsStatements(self):
-return {"hunker down": ["Potential impacts from tropical tornadoes are still unfolding.",
-"The extent of realized impacts will depend on the severity of actual tornado occurrence as experienced at particular locations.",
-],
-"recovery": ["For additional information on impacts being caused by tropical tornadoes, refer to the local hazardous weather outlook or hurricane local statement.",
-],
-}
-
 def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment):
 if not self._textProduct._WSPGridsAvailable:
 self._setProductPartValue(segmentDict, 'potentialImpactsSummary',
@@ -2827,16 +2842,13 @@ class StormSurgeSectionStats(SectionCommonStats):
 def _setStats(self, statList, timeRangeList):
 phishStartTime = None
 phishEndTime = None
-possibleStop = 0
-possibleStart = 0

 # If this is an inland area, just move on
 if statList == "InlandArea":
 return

 self._textProduct.debug_print("*"*100, 1)
-self._textProduct.debug_print("phishStartTime = %s phishEndTime = %s possibleStop = %d possibleStart = %d" %
-(str(phishStartTime), str(phishEndTime), possibleStop, possibleStart), 1)
+self._textProduct.debug_print("Setting Surge Section stats for %s" % self._segment, 1)

 statDict = statList[0]
 self._inundationMax = self._textProduct._getStatValue(statDict, "InundationMax", "Max")
@@ -2844,54 +2856,80 @@ class StormSurgeSectionStats(SectionCommonStats):
 self._inundationMax = round(self._inundationMax)
 self._textProduct.debug_print("self._inundationMax = %s" % (self._inundationMax), 1)

 self._textProduct.debug_print("%s" % (self._textProduct._pp.pformat(statList)), 1)
 self._textProduct.debug_print("length of statList = %s" % (len(statList)), 1)
 for period in range(len(statList)):
 tr, _ = timeRangeList[period]
 statDict = statList[period]
+self._textProduct.debug_print("-"*50, 1)
+self._textProduct.debug_print("tr = %s" % (self._textProduct._pp.pformat(tr)), 1)
+self._textProduct.debug_print("statDict = %s" % (self._textProduct._pp.pformat(statDict)), 1)
+

 curPhish = self._textProduct._getStatValue(statDict, "InundationTiming", "Max")
-self._textProduct.debug_print("tr = %s" % (self._textProduct._pp.pformat(tr)), 1)
-self._textProduct.debug_print("curPhish = '%s' possibleStop = %d possibleStart = %d" %
-(str(curPhish), possibleStop, possibleStart), 1)
+self._textProduct.debug_print("curPhish = '%s'" % (str(curPhish)), 1)
+self._textProduct.debug_print("phishStartTime = %s phishEndTime = %s" %
+(str(phishStartTime), str(phishEndTime)), 1)

 if (curPhish is None) or (curPhish == 'None'):
 self._textProduct.debug_print("Done: Reached end of grids (curPhish was None)", 1)
 break

-if self._inundationMax >= 3:
-if curPhish >= 1:
-if possibleStop != 0:
-possibleStop = 0
-possibleStart += 1
-if phishStartTime is None:
-phishStartTime = tr.startTime()
-elif curPhish < 1 and possibleStart < 2:
-possibleStart = 0
-phishStartTime = None
-elif phishStartTime is not None and curPhish is not None: # Only checking if valid grid
-possibleStop += 1
-if possibleStop < 2:
-phishEndTime = tr.startTime()
-else: # possibleStop == 2
-break
-else: # curPhish is None so out of grids
-break
-
+# For start time:
+# If inundationMax >= 3:
+# Looking for 2 consecutive grids with a surge height >= 1
+# Start will be the start time of the FIRST of the 2 consecutive grids
+# If 1 <= inundationMax < 3:
+# Looking for 1 grid with a surge height >= 1
+# Start will be the start time of this grid
+#
+# For end time:
+# Looking for 2 consecutive grids with a surge height < 1
+# End will be the start time of the FIRST of the 2 consecutive grids
+
+# If we have another period after this one, we may need to look at the two
+# consecutive periods for start and end time conditions
+isLastPeriod = True
+if period < len(statList) - 1:
+isLastPeriod = False
+nextTr, _ = timeRangeList[period+1]
+nextStatDict = statList[period+1]
+nextPhish = self._textProduct._getStatValue(nextStatDict, "InundationTiming", "Max")
+
+self._textProduct.debug_print("nextTr = %s" % (self._textProduct._pp.pformat(nextTr)), 1)
+self._textProduct.debug_print("nextStatDict = %s" % (self._textProduct._pp.pformat(nextStatDict)), 1)
+self._textProduct.debug_print("nextPhish = '%s'" % (str(nextPhish)), 1)
+
+# Set what the condition is for determining the start time
+if (self._inundationMax >= 3) and (not isLastPeriod):
+startCondition = (curPhish >= 1) and (nextPhish >= 1)
+self._textProduct.debug_print("startCondition looking at 2 periods", 1)
 elif 1 <= self._inundationMax < 3:
-if curPhish >= 1:
-if possibleStop != 0:
-possibleStop = 0
-if phishStartTime is None:
-phishStartTime = tr.startTime()
-elif phishStartTime is not None and curPhish is not None: # Only checking if valid grid
-possibleStop += 1
-if possibleStop < 2:
-phishEndTime = tr.startTime()
-else: # possibleStop == 2
-break
-else: # curPhish is None so out of grids
-break
+startCondition = curPhish >= 1
+self._textProduct.debug_print("startCondition looking at 1 period", 1)
+else:
+startCondition = False
+self._textProduct.debug_print("no startCondition, done", 1)
+break
+
+# Set what the condition is for determining the end time
+if not isLastPeriod:
+endCondition = (curPhish < 1) and (nextPhish < 1)
+self._textProduct.debug_print("endCondition looking at 2 periods", 1)
+else:
+endCondition = False
+self._textProduct.debug_print("this is the last period, no endCondition possible", 1)
+
+if startCondition and (phishStartTime is None):
+phishStartTime = tr.startTime()
+elif endCondition and (phishStartTime is not None) and (phishEndTime is None):
+phishEndTime = tr.startTime()
+self._textProduct.debug_print("final phishStartTime = %s final phishEndTime = %s" %
+(str(phishStartTime), str(phishEndTime)), 1)
+break
+
+self._textProduct.debug_print("new phishStartTime = %s new phishEndTime = %s" %
+(str(phishStartTime), str(phishEndTime)), 1)


 self._windowSurge = "Window of concern: "
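The rewritten loop replaces the possibleStop/possibleStart counters with explicit start and end conditions that may look one period ahead: with a high inundation forecast (InundationMax of 3 or more) the onset needs two consecutive periods at or above 1, with a lower forecast one period is enough, and the window closes on the first of two consecutive periods back below 1. A self-contained Python sketch of that scan over a list of per-period surge values; the names are illustrative and the real code tracks TimeRanges rather than indices:

```python
def surge_window(phish, inundation_max):
    """Return (start_index, end_index) of the surge window; either may be None.

    phish: per-period InundationTiming maxima, in time order.
    inundation_max >= 3: onset requires two consecutive periods >= 1;
    1 <= inundation_max < 3: a single period >= 1 is enough;
    the end is the first of two consecutive periods back below 1.
    """
    start = end = None
    for i, cur in enumerate(phish):
        if cur is None:
            break  # out of grids
        is_last = (i == len(phish) - 1)
        nxt = phish[i + 1] if not is_last else None

        if inundation_max >= 3 and not is_last:
            start_condition = cur >= 1 and nxt is not None and nxt >= 1
        elif 1 <= inundation_max < 3:
            start_condition = cur >= 1
        else:
            break  # nothing trackable, or last period with the two-grid rule

        end_condition = (not is_last) and nxt is not None and cur < 1 and nxt < 1

        if start_condition and start is None:
            start = i
        elif end_condition and start is not None and end is None:
            end = i
            break
    return start, end

print(surge_window([0, 2, 3, 2, 0, 0, 0], inundation_max=4))  # -> (1, 4)
```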
@@ -2907,7 +2945,7 @@ class StormSurgeSectionStats(SectionCommonStats):

 self._textProduct.debug_print("surge startTime = %s self._onsetSurgeHour = %s " %
 (self._textProduct._pp.pformat(startTime), self._onsetSurgeHour), 1)
-if phishEndTime is not None and possibleStop >= 2:
+if phishEndTime is not None:
 self._endSurgeHour = self._calculateHourOffset(phishEndTime)
 endTime = AbsTime(self._textProduct._issueTime_secs + self._endSurgeHour*60*60)
 windowPeriod = self._textProduct.makeTimeRange(startTime, endTime)
@@ -2917,18 +2955,12 @@ class StormSurgeSectionStats(SectionCommonStats):

 startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod)

-
-# Modified to handle case where last grid is zero but did not have two grids
-if phishEndTime is None or possibleStop == 1:
+if phishEndTime is None:
 self._windowSurge += "Begins " + startTimeDescriptor
 elif phishStartTime == phishEndTime:
 self._windowSurge += startTimeDescriptor
 else:
 endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, useEndTime = True)
-
 if self._onsetSurgeHour > 12:
-# self._windowSurge += startTimeDescriptor +\
-# " through " +\
-# endTimeDescriptor
 self._windowSurge += startTimeDescriptor +\
 " until " +\
 endTimeDescriptor
@@ -2936,17 +2968,22 @@ class StormSurgeSectionStats(SectionCommonStats):
 self._windowSurge += "through " + endTimeDescriptor

 if self._inundationMax is not None:
-# Round so we don't store values like 1.600000023841858
+# inundationMax is already rounded but should be stored as an int and not a float
 self._currentAdvisory["StormSurgeForecast"] = int(self._inundationMax)

 self._textProduct.debug_print("+"*60, 1)
-self._textProduct.debug_print("In StormSurgeSectionStats._setStats", 1)
+self._textProduct.debug_print("Done in StormSurgeSectionStats._setStats:", 1)
 self._textProduct.debug_print("self._inundationMax = '%s'" %
 (self._inundationMax), 1)
+self._textProduct.debug_print("self._onsetSurgeHour = '%s'" %
+(self._onsetSurgeHour), 1)
+self._textProduct.debug_print("self._endSurgeHour = '%s'" %
+(self._endSurgeHour), 1)
 self._textProduct.debug_print("self._windowSurge = '%s'" %
 (self._windowSurge), 1)
 self._textProduct.debug_print("self._maxThreat = '%s'" %
 (self._maxThreat), 1)
+self._textProduct.debug_print("+"*60, 1)


 class FloodingRainSectionStats(SectionCommonStats):