Merge tag 'OB_16.2.1-6' into omaha_16.2.1
16.2.1-6 Former-commit-id: 29a43c58b69086667c47e86c4a0bb1c849ad4367
This commit is contained in:
commit
dfcc52e766
194 changed files with 589999 additions and 33946 deletions
|
@ -20,6 +20,7 @@
|
|||
package com.raytheon.rcm.server.dataarchive;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
|
@ -682,14 +683,14 @@ public class DataArchiveEndpoint extends RadarEventAdapter {
|
|||
if (pdb.isBzip2Compressed()) {
|
||||
int uncompressedSize = pdb.getUncompressedSize();
|
||||
byte[] uncompressed;
|
||||
try {
|
||||
InputStream ins = new ByteArrayInputStream(msg, 120,
|
||||
msg.length - 120);
|
||||
ins = new BZip2InputStream(ins, false);
|
||||
try (DataInputStream di = new DataInputStream(
|
||||
new BZip2InputStream(
|
||||
new ByteArrayInputStream(msg,
|
||||
120, msg.length - 120), false))) {
|
||||
// ByteArrayOutputStream outs = new
|
||||
// ByteArrayOutputStream(uncompressedSize);
|
||||
uncompressed = new byte[uncompressedSize];
|
||||
ins.read(uncompressed);
|
||||
di.readFully(uncompressed);
|
||||
} catch (IOException e) {
|
||||
Log.errorf("Error decompressing product: %s", e);
|
||||
return msg;
|
||||
|
|
|
@ -94,7 +94,7 @@ if [ $? -ne 0 ]; then
|
|||
fi
|
||||
export apps_dir=${HYDRO_APPS_DIR}
|
||||
|
||||
SWITCHES=()
|
||||
SWITCHES=($SWITCHES)
|
||||
TESTCHECK="$TMCP_HOME/bin/getTestMode"
|
||||
if [ -x ${TESTCHECK} ]; then
|
||||
echo "Calling getTestMode()"
|
||||
|
|
Binary file not shown.
|
@ -1,17 +1,9 @@
|
|||
#!/bin/sh
|
||||
export DISPLAY=":0.0"
|
||||
export FXA_HOME=/awips2/cave/caveEnvironment
|
||||
export TMCP_HOME=/awips2/cave/caveEnvironment
|
||||
|
||||
$FXA_HOME/bin/MonitorTestMode >& /dev/null &
|
||||
|
||||
# Need to make sure we determine what TMCP_HOME should
|
||||
# be if it is not set.
|
||||
dir=${0%/*}
|
||||
if [ "$dir" = "$0" ]; then
|
||||
dir="."
|
||||
fi
|
||||
cd "$dir/.."
|
||||
|
||||
if [ ! -n "${TMCP_HOME}" ]
|
||||
then
|
||||
export TMCP_HOME=.
|
||||
fi
|
||||
|
||||
DISPLAY=$1:0.0; export DISPLAY
|
||||
$TMCP_HOME/bin/MonitorTestMode &
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@ -5,6 +5,10 @@ path_to_script=`readlink -f $0`
|
|||
RUN_FROM_DIR=`dirname ${path_to_script}`
|
||||
BASE_ENV_DIR=`dirname ${RUN_FROM_DIR}`
|
||||
|
||||
#DR 18113 rehost. /awips/fxa/... Has kicked the bit-bucket.
|
||||
export TMCP_HOME=/awips2/cave/caveEnvironment
|
||||
export FXA_HOME=/awips2/cave/caveEnvironment
|
||||
|
||||
if [ ! -n "${TMCP_HOME}" ]
|
||||
then
|
||||
echo -e "\e[1;31mTMCP_HOME is not set.\e[m"
|
||||
|
@ -37,19 +41,24 @@ then
|
|||
then
|
||||
echo -e "\e[1;31mFXA_WARNGEN_PRODUCT_ID is not set.\e[m"
|
||||
echo -e "\e[0;32mSetting FXA_WARNGEN_PRODUCT_ID to '${TMP_HOST_NUMBER}'.\e[m"
|
||||
export FXA_WARNGEN_PRODUCT_ID=${TMP_HOST_NUMBER}
|
||||
export FXA_WARNGEN_PRODUCT_ID=${TMP_HOST_NUMBER}
|
||||
else
|
||||
echo "FXA_WARNGEN_PRODUCT_ID is '${FXA_WARNGEN_PRODUCT_ID}'."
|
||||
fi
|
||||
else
|
||||
echo -e "\e[1;31mPartner host \""${ALT_HOST}"\" is unreachable by network!\e[m"
|
||||
echo ${ALT_HOME}
|
||||
echo
|
||||
fi
|
||||
|
||||
export LD_LIBRARY_PATH=$TMCP_HOME/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# for TMCP logs
|
||||
if [ ! -d $HOME/tmcpLogs ]; then
|
||||
mkdir $HOME/tmcpLogs
|
||||
if [ ! -d $HOME/caveData/tmcpLogs ]; then
|
||||
mkdir -p $HOME/caveData/tmcpLogs
|
||||
fi
|
||||
export LOG_DIR=$HOME/caveData/tmcpLogs
|
||||
|
||||
export LOG_DIR=$HOME/tmcpLogs
|
||||
|
||||
$TMCP_HOME/bin/tmcp
|
||||
|
||||
|
|
Binary file not shown.
|
@ -1,22 +1,10 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Need to make sure we determine what TMCP_HOME should
|
||||
# be if it is not set because this script may be ran
|
||||
# remotely under certain conditions.
|
||||
dir=${0%/*}
|
||||
if [ "$dir" = "$0" ]; then
|
||||
dir="."
|
||||
fi
|
||||
cd "$dir/.."
|
||||
|
||||
if [ ! -n "${TMCP_HOME}" ]
|
||||
then
|
||||
export TMCP_HOME=.
|
||||
fi
|
||||
|
||||
export DISPLAY=:0.0
|
||||
export FXA_HOME=/awips2/cave/caveEnvironment
|
||||
if [ $6 = "kde" ]
|
||||
then
|
||||
kstart --alldesktops $TMCP_HOME/bin/showBanner $2 $3 $4 $5 &
|
||||
kstart --alldesktops $FXA_HOME/bin/showBanner $2 $3 $4 $5 &
|
||||
else
|
||||
$TMCP_HOME/bin/showBanner $2 $3 $4 $5 &
|
||||
$FXA_HOME/bin/showBanner $2 $3 $4 $5 &
|
||||
fi
|
||||
|
||||
|
|
Binary file not shown.
BIN
cave/build/static/linux/cave/caveEnvironment/bin/tmbRemoteCheck
Normal file → Executable file
BIN
cave/build/static/linux/cave/caveEnvironment/bin/tmbRemoteCheck
Normal file → Executable file
Binary file not shown.
Binary file not shown.
BIN
cave/build/static/linux/cave/caveEnvironment/bin/tmcp
Normal file → Executable file
BIN
cave/build/static/linux/cave/caveEnvironment/bin/tmcp
Normal file → Executable file
Binary file not shown.
|
@ -7,6 +7,7 @@
|
|||
blink and color items followed the A1 configuring master.gcf file.
|
||||
01/31/2012 dyninaj DR14427 added category NDFD
|
||||
11/30/2012 jzeng DR14016 update GFE configuration
|
||||
11/04/2015 pwu DR18174 AlertViz message corrections
|
||||
-->
|
||||
<alertConfiguration name="DEFAULT">
|
||||
<globalConfiguration height="37" width="-1" yPosition="-1" xPosition="-1" logLength="10" audioDuration="30" blinkDuration="5" expandedPopup="false" categoryShown="false" sourceKeyShown="false" priorityShown="false" mode="H2"/>
|
||||
|
@ -307,14 +308,14 @@
|
|||
<metadata omit="false" imageFile="SS.png"/>
|
||||
</configurationMonitor>
|
||||
</source>
|
||||
<source locked="true" name="NWRWAVES" longName="NWRWAVES">
|
||||
<source longName="NWRWAVES" name="NWRWAVES" locked="true">
|
||||
<configurationItem>
|
||||
<metadata foreground="#000000" background="#12ffff" pythonEnabled="false" log="true" priority="SIGNIFICANT" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#7e7e7e" pythonEnabled="false" log="false" priority="EVENTB" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#ffffff" pythonEnabled="false" log="false" priority="VERBOSE" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#ff0000" pythonEnabled="false" log="false" priority="PROBLEM" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#ff0000" background="#000000" pythonEnabled="false" log="true" priority="CRITICAL" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#00ff00" pythonEnabled="false" log="false" priority="EVENTA" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#ff9d00" pythonEnabled="false" log="true" priority="SIGNIFICANT" popup="true" blink="false" text="true" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#ffffff" pythonEnabled="false" log="false" priority="VERBOSE" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#ff0000" pythonEnabled="false" log="true" priority="CRITICAL" popup="true" blink="false" text="true" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#feff00" pythonEnabled="false" log="true" priority="PROBLEM" popup="true" blink="false" text="true" audioEnabled="false"/>
|
||||
<metadata foreground="#000000" background="#7e7e7e" pythonEnabled="false" log="false" priority="EVENTB" popup="false" blink="false" text="false" audioEnabled="false"/>
|
||||
</configurationItem>
|
||||
<configurationMonitor>
|
||||
<metadata omit="false"/>
|
||||
|
|
|
@ -185,7 +185,7 @@
|
|||
isTopOfTheHour="false">
|
||||
<binOffset posOffset="1800" negOffset="1800"
|
||||
virtualOffset="0" />
|
||||
<metadataMap>
|
||||
<metadataMap>
|
||||
<mapping key="pluginName">
|
||||
<constraint constraintValue="fssobs"
|
||||
constraintType="EQUALS" />
|
||||
|
@ -194,10 +194,6 @@
|
|||
<constraint constraintValue="1006,1003"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
<mapping key="location.stationId">
|
||||
<constraint constraintValue="${stations}"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
</metadataMap>
|
||||
<alertParser xsi:type="plotAlertParser" />
|
||||
</resourceData>
|
||||
|
@ -256,15 +252,10 @@
|
|||
<constraint constraintValue="fssobs"
|
||||
constraintType="EQUALS" />
|
||||
</mapping>
|
||||
|
||||
<mapping key="reportType">
|
||||
<constraint constraintValue="1007"
|
||||
constraintType="EQUALS" />
|
||||
</mapping>
|
||||
<mapping key="location.stationId">
|
||||
<constraint constraintValue="${stations}"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
</metadataMap>
|
||||
<binOffset virtualOffset="0"
|
||||
posOffset="1800" negOffset="1800" />
|
||||
|
|
|
@ -197,14 +197,11 @@
|
|||
<constraint constraintValue="fssobs"
|
||||
constraintType="EQUALS" />
|
||||
</mapping>
|
||||
|
||||
<mapping key="reportType">
|
||||
<constraint constraintValue="1006,1003"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
<mapping key="location.stationId">
|
||||
<constraint constraintValue="${stations}"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
|
||||
</metadataMap>
|
||||
<alertParser xsi:type="plotAlertParser" />
|
||||
|
@ -227,7 +224,6 @@
|
|||
isRequeryNecessaryOnTimeMatch="true"
|
||||
isTopOfTheHour="false">
|
||||
<metadataMap>
|
||||
|
||||
<mapping key="pluginName">
|
||||
<constraint constraintValue="fssobs"
|
||||
constraintType="EQUALS" />
|
||||
|
@ -240,7 +236,6 @@
|
|||
<constraint constraintValue="${stations}"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
|
||||
</metadataMap>
|
||||
</resourceData>
|
||||
</resource>
|
||||
|
@ -272,10 +267,6 @@
|
|||
<constraint constraintValue="1007"
|
||||
constraintType="EQUALS" />
|
||||
</mapping>
|
||||
<mapping key="location.stationId">
|
||||
<constraint constraintValue="${stations}"
|
||||
constraintType="IN" />
|
||||
</mapping>
|
||||
|
||||
</metadataMap>
|
||||
<binOffset virtualOffset="0"
|
||||
|
|
|
@ -24,6 +24,7 @@ import com.raytheon.uf.viz.monitor.data.ObReport;
|
|||
* May 15, 2012 14510 zhao Modified generateObReport()
|
||||
* Jan 06, 2014 2653 skorolev Included SNOW data into ObReport.
|
||||
* Sep 20, 2015 3873 skorolev Added IsStationary and getReportType.
|
||||
* Dec 02, 2015 3873 dhladky Fixed missing parameters.
|
||||
*
|
||||
*
|
||||
* </pre>
|
||||
|
@ -48,6 +49,7 @@ public class GenerateFSSObReport {
|
|||
// Generate the observation report.
|
||||
ObReport obReport = new ObReport();
|
||||
FSSObsRecord fssData = (FSSObsRecord) report;
|
||||
|
||||
try {
|
||||
obReport.setObservationTime(fssData.getTimeObs().getTime());
|
||||
obReport.setRefHour(fssData.getRefHour().getTime());
|
||||
|
@ -59,7 +61,7 @@ public class GenerateFSSObReport {
|
|||
obReport.setTimesFromFssobDataURI(report.getDataURI());
|
||||
}
|
||||
obReport.setPlatformId(fssData.getPlatformId());
|
||||
obReport.setStationary(fssData.getIsStationary());
|
||||
obReport.setStationary(fssData.isStationary());
|
||||
obReport.setLatitude((float) fssData.getLatitude());
|
||||
obReport.setLongitude((float) fssData.getLongitude());
|
||||
// Table data:
|
||||
|
@ -117,19 +119,31 @@ public class GenerateFSSObReport {
|
|||
return obReport;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the type of the report. ReportType Enumeration
|
||||
*/
|
||||
private static ReportType getReportType(String reportType) {
|
||||
if (reportType == null) {
|
||||
reportType = "";
|
||||
}
|
||||
switch (reportType) {
|
||||
case "1003":
|
||||
case "1004":
|
||||
case "1005":
|
||||
case "1006":
|
||||
case "1007":
|
||||
case "1003":
|
||||
return ReportType.SYNOPTIC_SHIP;
|
||||
case "1004":
|
||||
return ReportType.SYNOPTIC_CMAN;
|
||||
case "1005":
|
||||
return ReportType.SYNOPTIC_MOORED_BUOY;
|
||||
case "1006":
|
||||
return ReportType.DRIFTING_BUOY;
|
||||
case "1007":
|
||||
return ReportType.MARITIME;
|
||||
// TODO:MESONET
|
||||
default:
|
||||
case "SPECI":
|
||||
return ReportType.SPECI;
|
||||
case "METAR":
|
||||
return ReportType.METAR;
|
||||
case "MESONET":
|
||||
return ReportType.MESONET;
|
||||
default:
|
||||
return ReportType.METAR;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,11 +19,9 @@
|
|||
**/
|
||||
package com.raytheon.uf.viz.monitor;
|
||||
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
|
@ -66,11 +64,10 @@ public class ProcessObsJob extends Job {
|
|||
protected static final int PROGRESS_FACTOR = 1;
|
||||
|
||||
/** how many hours do FSSObs go back we wish to load here **/
|
||||
public static final int HOUR_BACK = 24;
|
||||
|
||||
public static final int HOUR_BACK = 12;
|
||||
|
||||
private ObsMonitor obsMonitor;
|
||||
|
||||
|
||||
public ProcessObsJob(ObsMonitor obsMonitor) {
|
||||
super("Obs Load Process");
|
||||
this.setSystem(false);
|
||||
|
@ -79,22 +76,23 @@ public class ProcessObsJob extends Job {
|
|||
}
|
||||
|
||||
public IStatus run(IProgressMonitor monitor) {
|
||||
|
||||
|
||||
try {
|
||||
|
||||
|
||||
long backTime = TimeUtil.newCalendar().getTimeInMillis();
|
||||
Date time = new Date(backTime - (HOUR_BACK * TimeUtil.MILLIS_PER_HOUR));
|
||||
|
||||
Date time = new Date(backTime
|
||||
- (HOUR_BACK * TimeUtil.MILLIS_PER_HOUR));
|
||||
|
||||
Map<String, RequestConstraint> vals = new HashMap<String, RequestConstraint>();
|
||||
vals.put("dataTime.refTime", new RequestConstraint(
|
||||
TimeUtil.formatToSqlTimestamp(time),
|
||||
ConstraintType.GREATER_THAN_EQUALS));
|
||||
vals.put("dataTime.refTime",
|
||||
new RequestConstraint(TimeUtil.formatToSqlTimestamp(time),
|
||||
ConstraintType.GREATER_THAN_EQUALS));
|
||||
|
||||
long startPoint = System.currentTimeMillis();
|
||||
FSSObsRecord[] recs = obsMonitor.requestFSSObs(vals);
|
||||
long endPoint = System.currentTimeMillis();
|
||||
SubMonitor smonitor = SubMonitor.convert(monitor, "Loading "+recs.length+" observations...",
|
||||
recs.length);
|
||||
SubMonitor smonitor = SubMonitor.convert(monitor, "Loading "
|
||||
+ recs.length + " observations...", recs.length);
|
||||
smonitor.beginTask(null, recs.length);
|
||||
statusHandler.info("Point Data Request, took: "
|
||||
+ (endPoint - startPoint) + " ms");
|
||||
|
@ -107,13 +105,15 @@ public class ProcessObsJob extends Job {
|
|||
long start = System.currentTimeMillis();
|
||||
doOb(rec, smonitor.newChild(PROGRESS_FACTOR));
|
||||
long end = System.currentTimeMillis();
|
||||
statusHandler.info("Processed "
|
||||
+ rec.getIdentifier()
|
||||
statusHandler.info("Processed " + rec.getIdentifier()
|
||||
+ " in " + (end - start) + " ms");
|
||||
}
|
||||
}
|
||||
|
||||
statusHandler.info("Processed " + recs.length + " FSSObs records.");
|
||||
// fire event to trigger re-paint
|
||||
obsMonitor.fireMonitorEvent();
|
||||
|
||||
} catch (DataCubeException e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"No data in database at startup.");
|
||||
|
@ -122,7 +122,7 @@ public class ProcessObsJob extends Job {
|
|||
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Processes the Ob
|
||||
*
|
||||
|
|
|
@ -61,6 +61,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Sep 25 2015 3873 skorolev Corrected addReport for moving platforms.
|
||||
* Oct 19 2015 3841 skorolev Added try to saveConfigXml
|
||||
* Nov 12 2015 3841 dhladky Augmented Slav's fix for moving platforms.
|
||||
* Dec 02 2015 3873 dhladky Pulled 3841 changes to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -204,12 +205,12 @@ public class ObHourReports {
|
|||
+ report.getPlatformId() + " Zone: " + zone, e);
|
||||
}
|
||||
}
|
||||
// Update configuration file.
|
||||
try {
|
||||
configMgr.saveConfigXml();
|
||||
} catch (LocalizationException | SerializationException e) {
|
||||
statusHandler.handle(Priority.PROBLEM, "Unable to save "
|
||||
+ configMgr.getConfigFileName(), e);
|
||||
} catch (LocalizationException e) {
|
||||
statusHandler.handle(Priority.PROBLEM, "Problem saving Localization file!", e);
|
||||
} catch (SerializationException e) {
|
||||
statusHandler.handle(Priority.PROBLEM, "Problem serializaing Localization File!", e);
|
||||
}
|
||||
return shipZones;
|
||||
}
|
||||
|
@ -397,4 +398,4 @@ public class ObHourReports {
|
|||
private static double toRad(double value) {
|
||||
return value * Math.PI / 180;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,6 +60,7 @@ import com.raytheon.uf.viz.monitor.thresholds.AbstractThresholdMgr;
|
|||
* Sep 04, 2014 3220 skorolev Updated getStationTableData method.
|
||||
* Sep 25, 2015 3873 skorolev Added multiHrsTabData.
|
||||
* Nov 12, 2015 3841 dhladky Augmented Slav's update fix.
|
||||
* Dec 02 2015 3873 dhladky Pulled 3841 changes to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -126,6 +127,7 @@ public class ObMultiHrsReports {
|
|||
*/
|
||||
public void addReport(ObReport report) {
|
||||
Date nominalTime = report.getRefHour();
|
||||
|
||||
/**
|
||||
* DR #8723: if wind speed is zero, wind direction should be N/A, not 0
|
||||
*/
|
||||
|
@ -166,11 +168,15 @@ public class ObMultiHrsReports {
|
|||
// update multiHrsReports with new data
|
||||
obHourReports = multiHrsReports.get(nominalTime);
|
||||
}
|
||||
obHourReports.addReport(report);
|
||||
// update data cache
|
||||
multiHrsReports.put(nominalTime, obHourReports);
|
||||
TableData tblData = obHourReports.getZoneTableData();
|
||||
multiHrsTabData.put(nominalTime, tblData);
|
||||
|
||||
if (report != null && obHourReports != null) {
|
||||
obHourReports.addReport(report);
|
||||
// update data cache
|
||||
multiHrsReports.put(nominalTime, obHourReports);
|
||||
TableData tblData = obHourReports.getZoneTableData();
|
||||
multiHrsTabData.put(nominalTime, tblData);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -56,7 +56,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* Nov 20, 2012 1297 skorolev Changes for non-blocking dialog.
|
||||
* Apr 23, 2014 3054 skorolev Added MESONET handling.
|
||||
* Apr 28, 2014 3086 skorolev Removed local getAreaConfigMgr method.
|
||||
* Aug 17, 2015 3841 skorolev Corrected handleAddNewStation method.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -68,7 +68,7 @@ public class AddNewStationDlg extends CaveSWTDialog {
|
|||
.getHandler(AddNewStationDlg.class);
|
||||
|
||||
/** Application name. */
|
||||
private final AppName appName;
|
||||
private AppName appName;
|
||||
|
||||
/** METAR radio button. */
|
||||
private Button metarRdo;
|
||||
|
@ -86,10 +86,10 @@ public class AddNewStationDlg extends CaveSWTDialog {
|
|||
private Text stationTF;
|
||||
|
||||
/** Zone */
|
||||
private final String area;
|
||||
private String area;
|
||||
|
||||
/** Call back interface */
|
||||
private final MonitoringAreaConfigDlg macDlg;
|
||||
private MonitoringAreaConfigDlg macDlg;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
|
@ -261,11 +261,8 @@ public class AddNewStationDlg extends CaveSWTDialog {
|
|||
+ "' is already in your Monitoring Area or among your Additional Stations.");
|
||||
return;
|
||||
}
|
||||
|
||||
macDlg.addNewStationAction(stn);
|
||||
// add station to area configuration
|
||||
macDlg.getInstance().addNewStation(area, stn, type, true);
|
||||
// update stations in the area configuration
|
||||
macDlg.getInstance().addNewStation(area, stn, type, false);
|
||||
macDlg.getInstance().getStations().add(stn);
|
||||
}
|
||||
|
||||
|
|
|
@ -53,6 +53,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* Feb 10, 2015 3886 skorolev Added fixed width for dialog.
|
||||
* Aug 17, 2015 3841 skorolev Corrected handleAddNewAction method.
|
||||
* Nov 12, 2015 3841 dhladky Augmented Slav's fix for moving platforms.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -89,16 +90,16 @@ public class AddNewZoneDlg extends CaveSWTDialog {
|
|||
private static final char Z = 'Z';
|
||||
|
||||
/** Upper Latitude Boundary **/
|
||||
private static double upLatBound = 90.0;
|
||||
public static double upLatBound = 90.0;
|
||||
|
||||
/** Lower Latitude Boundary **/
|
||||
private static double lowLatBound = -90.0;
|
||||
public static double lowLatBound = -90.0;
|
||||
|
||||
/** Upper Longitude Boundary **/
|
||||
private static double upLonBound = 180.0;
|
||||
public static double upLonBound = 180.0;
|
||||
|
||||
/** Lower Longitude Boundary **/
|
||||
private static double lowLonBound = -180.0;
|
||||
public static double lowLonBound = -180.0;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
|
|
|
@ -55,6 +55,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* Apr 28, 2014 3086 skorolev Removed local getAreaConfigMgr method.
|
||||
* Aug 17, 2015 3841 skorolev Corrected deleteSelected method.
|
||||
* Nov 12, 2015 3841 dhladky Augmented Slav's work.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
|
|
@ -61,6 +61,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* Nov 10, 2014 3741 skorolev Fixed configXML issue.
|
||||
* Aug 17, 2015 3841 skorolev Made editable a content of ID field.
|
||||
* Nov 12, 2015 3841 dhladky Augmented Slav's update fixes.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -426,7 +427,7 @@ public class EditNewZoneDlg extends CaveSWTDialog {
|
|||
}
|
||||
double lat = Double.parseDouble(latStr);
|
||||
double lon = Double.parseDouble(lonStr);
|
||||
if (lat > 90.0 || lat < -90.0 || lon > 180.0 || lon < -180.0) {
|
||||
if (lat > AddNewZoneDlg.upLatBound || lat < AddNewZoneDlg.lowLatBound || lon > AddNewZoneDlg.upLonBound || lon < AddNewZoneDlg.lowLonBound) {
|
||||
macDlg.latLonErrorMsg(latStr, lonStr);
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -87,6 +87,7 @@ import com.raytheon.viz.ui.dialogs.ICloseCallback;
|
|||
* Sep 18, 2015 3873 skorolev Added formIsValid method.
|
||||
* Oct 19, 2015 3841 skorolev Corrected formIsValid messages.
|
||||
* Nov 12, 2015 3841 dhladky Augmented Slav's fix for moving platforms.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
|
|
@ -101,6 +101,7 @@ import com.vividsolutions.jts.io.ParseException;
|
|||
* Nov 03, 2014 3741 skorolev Updated zoom procedures.
|
||||
* Sep 25, 2015 3873 skorolev Added center definition for moving platforms.
|
||||
* Nov 09, 2015 3841 dhladky Update all tables when zones/stations are updated.
|
||||
* Dec 02, 2015 3873 dhladky Pulled 3841 to 16.1.1.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
|
|
@ -66,15 +66,17 @@ import com.raytheon.viz.ui.personalities.awips.CAVE;
|
|||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Aug 4, 2011 njensen Initial creation
|
||||
* Apr 23, 2013 1939 randerso Return null from initializeSerialization
|
||||
* Nov 14, 2013 2361 njensen Remove initializeSerialization()
|
||||
* Nov 06, 2014 3356 njensen Always initialize ILocalizationAdapter
|
||||
* in case cache preference is not enabled
|
||||
* Feb 23, 2015 4164 dlovely Call AlertViz initialize.
|
||||
* Jun 26, 2015 4474 bsteffen Register the PathManager as an OSGi service.
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------- -------- --------- --------------------------------------------
|
||||
* Aug 04, 2011 10477 njensen Initial creation
|
||||
* Apr 23, 2013 1939 randerso Return null from initializeSerialization
|
||||
* Nov 14, 2013 2361 njensen Remove initializeSerialization()
|
||||
* Nov 06, 2014 3356 njensen Always initialize ILocalizationAdapter in
|
||||
* case cache preference is not enabled
|
||||
* Feb 23, 2015 4164 dlovely Call AlertViz initialize.
|
||||
* Jun 26, 2015 4474 bsteffen Register the PathManager as an OSGi service.
|
||||
* Dec 04, 2015 5169 bsteffen Allow ProductAlertObserver to send messages
|
||||
* to the AutoUpdater
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -215,11 +217,7 @@ public class ThinClientComponent extends CAVE implements IThinClientComponent {
|
|||
@Override
|
||||
protected void initializeObservers() {
|
||||
ThinClientNotificationManagerJob.getInstance();
|
||||
IPreferenceStore store = Activator.getDefault().getPreferenceStore();
|
||||
if (store.getBoolean(ThinClientPreferenceConstants.P_DISABLE_JMS) == false) {
|
||||
// JMS Enabled, register product alerts
|
||||
registerProductAlerts();
|
||||
}
|
||||
registerProductAlerts();
|
||||
initializeAlertViz();
|
||||
}
|
||||
|
||||
|
|
|
@ -28,7 +28,6 @@ import com.raytheon.uf.viz.core.alerts.AlertMessage;
|
|||
import com.raytheon.uf.viz.thinclient.Activator;
|
||||
import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants;
|
||||
import com.raytheon.uf.viz.thinclient.refresh.TimedRefresher.RefreshTimerTask;
|
||||
import com.raytheon.viz.alerts.jobs.AutoUpdater;
|
||||
import com.raytheon.viz.alerts.observers.ProductAlertObserver;
|
||||
|
||||
/**
|
||||
|
@ -40,10 +39,12 @@ import com.raytheon.viz.alerts.observers.ProductAlertObserver;
|
|||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Nov 10, 2011 mschenke Initial creation
|
||||
* Feb 21, 2014 DR 16744 D. Friedman Update all alert observers
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------- -------- ---------- -------------------------------------------
|
||||
* Nov 10, 2011 7393 mschenke Initial creation
|
||||
* Feb 21, 2014 16744 dfriedman Update all alert observers
|
||||
* Dec 04, 2015 5169 bsteffen Allow ProductAlertObserver to send messages
|
||||
* to the AutoUpdater
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -71,8 +72,6 @@ public class DataRefreshTask implements RefreshTimerTask {
|
|||
s.add(am.dataURI);
|
||||
}
|
||||
ProductAlertObserver.processDataURIAlerts(s);
|
||||
|
||||
new AutoUpdater().alertArrived(alerts);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import java.util.Set;
|
|||
import java.util.TimeZone;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
|
||||
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
|
||||
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
|
||||
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
|
||||
|
@ -51,18 +52,20 @@ import com.raytheon.viz.grid.inv.RadarUpdater;
|
|||
import com.raytheon.viz.grid.util.RadarAdapter;
|
||||
|
||||
/**
|
||||
* TODO Add Description
|
||||
* Replacement for {@link DataUpdateTree} which will perform updates by querying
|
||||
* the server for updates for any tree items.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Dec 13, 2011 bsteffen Initial creation
|
||||
* Feb 21, 2014 DR 16744 D. Friedman Add radar/grid updates
|
||||
* Apr 1, 2014 DR 17220 D. Friedman Handle uninitialized grid inventory
|
||||
* Dec 15, 2014 3923 bsteffen Retrieve pdo for grid instead of dataURI.
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------- -------- ---------- ------------------------------------------
|
||||
* Dec 13, 2011 bsteffen Initial creation
|
||||
* Feb 21, 2014 16744 dfriedman Add radar/grid updates
|
||||
* Apr 01, 2014 17220 dfriedman Handle uninitialized grid inventory
|
||||
* Dec 15, 2014 3923 bsteffen Retrieve pdo for grid instead of dataURI.
|
||||
* Dec 04, 2015 5169 bsteffen Do not send duplicate grid updates.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -107,6 +110,14 @@ public class ThinClientDataUpdateTree extends DataUpdateTree {
|
|||
continue;
|
||||
}
|
||||
Map<String, RequestConstraint> metadata = pair.metadata;
|
||||
RequestConstraint pluginConstraint = metadata
|
||||
.get(PluginDataObject.PLUGIN_NAME_ID);
|
||||
if (pluginConstraint != null
|
||||
&& (pluginConstraint.evaluate(GridConstants.GRID) || pluginConstraint
|
||||
.evaluate("radar"))) {
|
||||
/* Grid and radar do their updates differently. */
|
||||
continue;
|
||||
}
|
||||
metadata = new HashMap<String, RequestConstraint>(metadata);
|
||||
metadata.put("insertTime", new RequestConstraint(time,
|
||||
ConstraintType.GREATER_THAN));
|
||||
|
@ -176,7 +187,8 @@ public class ThinClientDataUpdateTree extends DataUpdateTree {
|
|||
private void getGridUpdates(String time, Set<AlertMessage> messages) {
|
||||
Map<String, RequestConstraint> newQuery = new HashMap<String, RequestConstraint>();
|
||||
DbQueryRequest dbRequest = new DbQueryRequest();
|
||||
newQuery.put("pluginName", new RequestConstraint("grid"));
|
||||
newQuery.put(PluginDataObject.PLUGIN_NAME_ID, new RequestConstraint(
|
||||
GridConstants.GRID));
|
||||
newQuery.put("insertTime", new RequestConstraint(time,
|
||||
ConstraintType.GREATER_THAN));
|
||||
dbRequest.setConstraints(newQuery);
|
||||
|
|
|
@ -89,6 +89,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* 10/15/2012 1229 rferrel Changes for non-blocking HelpUsageDlg.
|
||||
* 16 Aug 2013 #2256 lvenable Fixed image and cursor memory leaks.
|
||||
* 19Mar2014 #2925 lvenable Added dispose checks for runAsync.
|
||||
* 12/22/2015 18342 zhao Modified code for 'jnt' in objReceived()
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -886,7 +887,7 @@ public class CigVisDistributionDlg extends CaveSWTDialog implements
|
|||
int flightCat = (Integer) list.get(3);
|
||||
float cig = (Float) list.get(4);
|
||||
float vis = (Float) list.get(5);
|
||||
float jnt = Math.min(cig, vis);
|
||||
float jnt = (Float) list.get(6);
|
||||
|
||||
data.set(month, hour, windDir, flightCat, vis, cig, jnt);
|
||||
} else {
|
||||
|
|
|
@ -34,6 +34,7 @@
|
|||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# Jul 07, 2015 16907 zhao Modified to work with new ids- files
|
||||
# Dec 22, 2015 18341 zhao Modified __writeHDFData to avoid 'bad' input
|
||||
|
||||
import sys
|
||||
sys.argv = [__name__]
|
||||
|
@ -465,6 +466,8 @@ required NCDC data.
|
|||
shape = f_col.descr._v_colObjects[col].shape[0] - 1
|
||||
#datum = datum + [self.__get_msng(f_col.type)]*(f_col.shape[0]-len(datum))
|
||||
datum = datum + [self.__get_msng(f_col.type)] * (f_col.descr._v_colObjects[col].shape[0] - len(datum))
|
||||
if len(numpy.array(datum)) != len(row[col]):
|
||||
continue
|
||||
row[col] = numpy.array(datum).astype(f_col.type)
|
||||
except Exception, e:
|
||||
self.__updateMonitor(str(e) + '\n')
|
||||
|
|
|
@ -43,6 +43,10 @@
|
|||
# Generates ceiling/visibility distribution by month, hour and wind direction
|
||||
# George Trojan, SAIC/MDL, December 2005
|
||||
# last update: 03/14/06
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- -----------------------------------
|
||||
# Dec 22, 2015 18342 zhao Modified _process() to also pass 'jnt_count'
|
||||
|
||||
|
||||
import logging, os, time, cPickle
|
||||
import Avn, ClimLib
|
||||
|
@ -174,7 +178,7 @@ def get_data(table, queue):
|
|||
for windDir in range(num_wind_dir):
|
||||
for flightCat in range(num_cat+1):
|
||||
sendObj = [month, hour, windDir, flightCat, float(cig_count[month][hour][windDir][flightCat]),
|
||||
float(vis_count[month][hour][windDir][flightCat])]#, float(jnt_count[month][hour][windDir][flightCat])]
|
||||
float(vis_count[month][hour][windDir][flightCat]), float(jnt_count[month][hour][windDir][flightCat])]
|
||||
#print "sendObj", sendObj
|
||||
queue.put(sendObj)
|
||||
queue.put("done")
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -66,6 +66,7 @@ Import-Package: com.raytheon.uf.common.dissemination,
|
|||
com.raytheon.uf.viz.python.swt,
|
||||
com.raytheon.uf.viz.python.swt.widgets,
|
||||
com.raytheon.uf.viz.ui.menus.widgets.tearoff,
|
||||
com.raytheon.viz.core.gl,
|
||||
com.raytheon.viz.texteditor.util,
|
||||
org.apache.commons.logging,
|
||||
org.eclipse.jdt.ui;resolution:=optional,
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!-- Created by config_local_shapefiles of config_awips2.sh on Tue Jul 28
|
||||
19:26:09 GMT 2015 -->
|
||||
<bundle>
|
||||
<displayList>
|
||||
<displays xsi:type="mapRenderableDisplay"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<descriptor xsi:type="mapDescriptor">
|
||||
<resource>
|
||||
<loadProperties>
|
||||
<capabilities>
|
||||
<capability xsi:type="colorableCapability"
|
||||
colorAsString="#9b9b9b" />
|
||||
<capability xsi:type="outlineCapability"
|
||||
lineStyle="SOLID" outlineOn="true"
|
||||
outlineWidth="1" />
|
||||
<capability xsi:type="pointCapability"
|
||||
pointStyle="NONE" />
|
||||
</capabilities>
|
||||
<resourceType>PLAN_VIEW</resourceType>
|
||||
</loadProperties>
|
||||
<properties isSystemResource="false"
|
||||
isBlinking="false" isMapLayer="true" isHoverOn="false"
|
||||
isVisible="true">
|
||||
<pdProps maxDisplayWidth="100000000"
|
||||
minDisplayWidth="0" />
|
||||
</properties>
|
||||
<resourceData xsi:type="dbMapResourceData">
|
||||
<table>mapdata.nhadomain</table>
|
||||
<mapName>NHAdomain</mapName>
|
||||
</resourceData>
|
||||
</resource>
|
||||
</descriptor>
|
||||
</displays>
|
||||
</displayList>
|
||||
</bundle>
|
|
@ -0,0 +1,36 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<!-- Created by config_local_shapefiles of config_awips2.sh on Wed Jul 29
|
||||
19:20:36 GMT 2015 -->
|
||||
<bundle>
|
||||
<displayList>
|
||||
<displays xsi:type="mapRenderableDisplay"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<descriptor xsi:type="mapDescriptor">
|
||||
<resource>
|
||||
<loadProperties>
|
||||
<capabilities>
|
||||
<capability xsi:type="colorableCapability"
|
||||
colorAsString="#9b9b9b" />
|
||||
<capability xsi:type="outlineCapability"
|
||||
lineStyle="SOLID" outlineOn="true"
|
||||
outlineWidth="1" />
|
||||
<capability xsi:type="pointCapability"
|
||||
pointStyle="NONE" />
|
||||
</capabilities>
|
||||
<resourceType>PLAN_VIEW</resourceType>
|
||||
</loadProperties>
|
||||
<properties isSystemResource="false"
|
||||
isBlinking="false" isMapLayer="true" isHoverOn="false"
|
||||
isVisible="true">
|
||||
<pdProps maxDisplayWidth="100000000"
|
||||
minDisplayWidth="0" />
|
||||
</properties>
|
||||
<resourceData xsi:type="dbMapResourceData">
|
||||
<table>mapdata.stormsurgeww</table>
|
||||
<mapName>StormSurgeWW</mapName>
|
||||
</resourceData>
|
||||
</resource>
|
||||
</descriptor>
|
||||
</displays>
|
||||
</displayList>
|
||||
</bundle>
|
|
@ -1,9 +1,3 @@
|
|||
#--------------------------------------------------------------------------
|
||||
# SVN: $Revision$ - $Date$
|
||||
#
|
||||
# Converted with gfePorter R3342 on Oct 01, 2013 18:43 GMT
|
||||
# Not tested. Remove these 2 lines when ported and tested.
|
||||
#
|
||||
# ----------------------------------------------------------------------------
|
||||
# This software is in the public domain, furnished "as is", without technical
|
||||
# support, and with no warranty, express or implied, as to its usefulness for
|
||||
|
@ -12,7 +6,7 @@
|
|||
# CopyNHCProposed
|
||||
#
|
||||
# Author: T LeFebvre/P. Santos
|
||||
# Last Modified: Sept 18, 2014
|
||||
# Last Modified: Dec 10, 2015 for 16.1.2
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# The MenuItems list defines the GFE menu item(s) under which the
|
||||
|
@ -54,27 +48,13 @@ class Procedure (SmartScript.SmartScript):
|
|||
# This method will work in either an AWIPS I or AWIPS II environment.
|
||||
def makeTimeRange(self, start=0, end=0):
|
||||
|
||||
try: # AWIPS 1 code
|
||||
import TimeRange
|
||||
import AbsTime
|
||||
import AFPS
|
||||
if start == 0 and end == 0:
|
||||
return TimeRange.TimeRange.allTimes()
|
||||
if start == 0 and end == 0:
|
||||
return TimeRange.allTimes()
|
||||
|
||||
startTime = AbsTime.AbsTime(start)
|
||||
endTime = AbsTime.AbsTime(end)
|
||||
startTime = AbsTime.AbsTime(start)
|
||||
endTime = AbsTime.AbsTime(end)
|
||||
|
||||
tr = TimeRange.TimeRange(startTime, endTime)
|
||||
|
||||
except: # AWIPS 2 code
|
||||
import TimeRange, AbsTime
|
||||
if start == 0 and end == 0:
|
||||
return TimeRange.allTimes()
|
||||
|
||||
startTime = AbsTime.AbsTime(start)
|
||||
endTime = AbsTime.AbsTime(end)
|
||||
|
||||
tr = TimeRange.TimeRange(startTime, endTime)
|
||||
tr = TimeRange.TimeRange(startTime, endTime)
|
||||
|
||||
return tr
|
||||
|
||||
|
@ -116,7 +96,6 @@ class Procedure (SmartScript.SmartScript):
|
|||
return
|
||||
|
||||
weNames = ["ProposedSS"]
|
||||
#weNames = ["ProposedSS"]
|
||||
|
||||
# Remove any pre-existing grids first
|
||||
for weName in weNames:
|
||||
|
@ -141,9 +120,15 @@ class Procedure (SmartScript.SmartScript):
|
|||
continue
|
||||
|
||||
gridTR = trList[-1] # only interested in the latest grid
|
||||
|
||||
|
||||
iscGrid, iscKeys = self.getGrids("ISC", iscWeName, "SFC", gridTR)
|
||||
|
||||
start = gridTR.endTime().unixTime() - (48 * 3600)
|
||||
end = gridTR.endTime().unixTime()
|
||||
createTR = self.makeTimeRange(start, end)
|
||||
|
||||
self.createGrid("Fcst", weName, "DISCRETE", (iscGrid, iscKeys),
|
||||
timeRange, discreteKeys=iscKeys, discreteOverlap=0,
|
||||
createTR, discreteKeys=iscKeys, discreteOverlap=0,
|
||||
#trList, discreteKeys=iscKeys, discreteOverlap=0,
|
||||
discreteAuxDataLength=0, defaultColorTable="StormSurgeHazards")
|
||||
|
||||
|
|
150
cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py
Executable file → Normal file
150
cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py
Executable file → Normal file
|
@ -13,7 +13,10 @@
|
|||
# in getQPFGrid method. Also in AWIPS 2 FFG is gridded and called just FFG.
|
||||
# This is fixed in getRFCFFGModels method.
|
||||
#
|
||||
# LeFevbre/Santos: This is the version being turned in for baseline as of 10/20/2014
|
||||
# LeFevbre/Santos: This is the version being turned in for baseline in 16.1.2 as of 12/7/2015. It includes fixes
|
||||
#for new ERP data changes that took place in Summer of 2015 and better handling of grid points where there
|
||||
#is no FFG guidance available.
|
||||
|
||||
#
|
||||
#Search for COMMENTS to see any local config step you might need to take.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
@ -88,10 +91,12 @@ class Procedure (SmartScript.SmartScript):
|
|||
# get the current time, truncates to the last six hour value.
|
||||
# returns a timeRange with this startTime until 72 hrs from this time
|
||||
|
||||
def make72hrTimeRange(self):
|
||||
cTime = int(self._gmtime().unixTime()/ (3600 * 6)) * (3600 * 6)
|
||||
startTime = AbsTime.AbsTime(cTime)
|
||||
end = cTime + (3600 * 24 * 3)
|
||||
def make72hrTimeRange(self, startTime):
|
||||
|
||||
# Make the end time 3 days from the startTime
|
||||
end = startTime + (72 * 3600)
|
||||
# Convert them to AbsTimes
|
||||
startTime = AbsTime.AbsTime(startTime)
|
||||
endTime = AbsTime.AbsTime(end)
|
||||
|
||||
timeRange = TimeRange.TimeRange(startTime, endTime)
|
||||
|
@ -118,11 +123,11 @@ class Procedure (SmartScript.SmartScript):
|
|||
availParms = self.availableParms()
|
||||
|
||||
for pName, level, dbID in availParms:
|
||||
if dbID.modelName().find(modelName) > -1:
|
||||
if pName.find(weName) > -1:
|
||||
if level.find(weLevel) > -1:
|
||||
if modelName in dbID.modelName():
|
||||
if weName in pName:
|
||||
if weLevel in level:
|
||||
if dbID.modelIdentifier() not in modelList:
|
||||
modelList.append(dbID.modelIdentifier())
|
||||
modelList.append(dbID)
|
||||
return modelList
|
||||
|
||||
# A small algorithm to determine the day number
|
||||
|
@ -137,23 +142,33 @@ class Procedure (SmartScript.SmartScript):
|
|||
return 3
|
||||
|
||||
return 0
|
||||
|
||||
def getModelTime(self, modelName):
|
||||
|
||||
timeStr = modelName[-13:]
|
||||
|
||||
year = int(timeStr[0:4])
|
||||
month = int(timeStr[4:6])
|
||||
day = int(timeStr[6:8])
|
||||
hour = int(timeStr[9:11])
|
||||
|
||||
absTime = AbsTime.absTimeYMD(year, month, day, hour, 0, 0)
|
||||
|
||||
return absTime.unixTime()
|
||||
|
||||
def baseModelTime(self, modelTime):
|
||||
|
||||
oneDay = 3600 * 24
|
||||
halfDay = 3600 * 12
|
||||
baseTime = (int((modelTime + halfDay) / oneDay) * oneDay) - halfDay
|
||||
|
||||
return baseTime
|
||||
|
||||
def getLatestERPAnchorTime(self):
|
||||
ERPModelName = "HPCERP"
|
||||
ERPVarName = "ppffg" # This variable commented out
|
||||
ERPLevel = "SFC"
|
||||
# get the list of all available models. They come sorted latest to oldest.
|
||||
modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel)
|
||||
|
||||
if len(modelList) > 0:
|
||||
anchorTime = self.baseModelTime(modelList[0].modelTime().unixTime())
|
||||
return anchorTime
|
||||
|
||||
self.statusBarMsg("No ERP Guidance found.", "S")
|
||||
return None
|
||||
|
||||
def getERPGrids(self):
|
||||
ERPModelName = "HPCERP"
|
||||
ERPVarName = "ppffg"
|
||||
|
||||
ERPVarName = "ppffg"
|
||||
ERPLevel = "SFC"
|
||||
|
||||
# make a dict and fill with grids with the default value.
|
||||
|
@ -162,28 +177,57 @@ class Procedure (SmartScript.SmartScript):
|
|||
for i in range(1, 4):
|
||||
gridDict[i] = None
|
||||
|
||||
# get the list of all available models
|
||||
# get the list of all available models. They come sorted latest to oldest.
|
||||
modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel)
|
||||
modelList.sort() # sort oldest to latest
|
||||
|
||||
# for each available model, fetch all the grids
|
||||
|
||||
# Debug output. Remove after testing
|
||||
# print "ERP models available:"
|
||||
# for m in modelList:
|
||||
# print m, "baseTime:", time.asctime(time.gmtime(self.baseModelTime(m.modelTime().unixTime())))
|
||||
|
||||
# Calculate the nominal time we're searching for based on the latest model
|
||||
anchorTime = self.baseModelTime(modelList[0].modelTime().unixTime())
|
||||
|
||||
# Debug output. Remove after testing
|
||||
#print "anchorTime:", time.asctime(time.gmtime(anchorTime))
|
||||
|
||||
# keep only the most recent grids
|
||||
for model in modelList:
|
||||
|
||||
# Only process models with a base time matching the latest base time. Ignore all others.
|
||||
# This will cause the tool to abort if all the current grids are not yet in.
|
||||
if self.baseModelTime(model.modelTime().unixTime()) != anchorTime:
|
||||
# Debug output. remove after testing
|
||||
print "Ignoring model at:", time.asctime(time.gmtime(model.modelTime().unixTime()))
|
||||
continue
|
||||
|
||||
trList = self.getWEInventory(model, ERPVarName)
|
||||
modelTime = self.getModelTime(model)
|
||||
modelTime = model.modelTime()
|
||||
|
||||
for tr in trList:
|
||||
dayNum = self.determineDay(modelTime,
|
||||
tr.startTime().unixTime())
|
||||
grid = self.getGrids(model, ERPVarName, ERPLevel, tr,
|
||||
mode="First")
|
||||
gridDict[dayNum] = grid
|
||||
dayNum = self.determineDay(anchorTime, tr.startTime().unixTime())
|
||||
grid = self.getGrids(model, ERPVarName, ERPLevel, tr, mode="First")
|
||||
if gridDict[dayNum] is None:
|
||||
gridDict[dayNum] = grid
|
||||
print "modelTime and dayNum are: ", modelTime, dayNum
|
||||
|
||||
# Check to see if we found all the grids we need
|
||||
allGridsFound = True
|
||||
for dayNum in range(1, 4):
|
||||
if gridDict[dayNum] is None:
|
||||
allGridsFound = False
|
||||
|
||||
# We found all the grids, return them
|
||||
if allGridsFound:
|
||||
return gridDict
|
||||
|
||||
# After processing all the models, make sure we found all the grids we need.
|
||||
for i in range(1, 4):
|
||||
if gridDict[i] == None:
|
||||
errorStr = "Day" + str(i) + " grid not found in AWIPS database."
|
||||
self.statusBarMsg(errorStr, "S")
|
||||
|
||||
return None
|
||||
|
||||
return gridDict
|
||||
|
||||
# Use this method for testing if you have no luck getting products
|
||||
|
@ -242,7 +286,7 @@ class Procedure (SmartScript.SmartScript):
|
|||
|
||||
for model in modelList:
|
||||
# WARNING!!! This check should be more specific to the DBID string.
|
||||
if model.find(rfcName) > -1:
|
||||
if model.modelIdentifier().find(rfcName) > -1:
|
||||
return model
|
||||
|
||||
return None
|
||||
|
@ -457,10 +501,12 @@ class Procedure (SmartScript.SmartScript):
|
|||
] # low ------ QPF/FFG ratio -------->high
|
||||
|
||||
# COMMENTS: The list of FFG products that contain FFG data for your WFO
|
||||
# The following is set up for testing only. Please change these
|
||||
# entries for your particular office.
|
||||
# productList = ["FFGSHV", "FFGALR"]
|
||||
productList = ["ATLFFGMFL", "ATLFFGTBW", "ATLFFGMLB"]
|
||||
# The following is an example for Miami. Default list is emply. You must
|
||||
# populate it with your CWA FFG guidance.
|
||||
#productList = ["ATLFFGMFL", "ATLFFGTBW", "ATLFFGMLB","ATLFFGKEY"]
|
||||
productList = []
|
||||
if len(productList) == 0:
|
||||
self.statusBarMsg("You have not configured Text FFG in Procedure. Create a site level copy, and configure your text FFG Guidance. Search for COMMENTS in the procedure.", "S")
|
||||
|
||||
### END CONFIGURATION SECTION #################################
|
||||
|
||||
|
@ -480,8 +526,13 @@ class Procedure (SmartScript.SmartScript):
|
|||
threatMatrix[i][j] = "Elevated"
|
||||
|
||||
|
||||
# make a 72 hour timeRange and a list of 6 hour timeRanges
|
||||
timeRange = self.make72hrTimeRange()
|
||||
# Find the nominal start time when we will be making grids
|
||||
anchorTime = self.getLatestERPAnchorTime()
|
||||
|
||||
# make a 72 hour timeRange and a list of 6 hour timeRanges based on the anchorTime
|
||||
timeRange = self.make72hrTimeRange(anchorTime)
|
||||
|
||||
print "Anchor TimeRange:", timeRange
|
||||
|
||||
trList = self.makeTimeRangeList(timeRange, 6)
|
||||
|
||||
|
@ -491,11 +542,17 @@ class Procedure (SmartScript.SmartScript):
|
|||
#print "Getting FFG Grid Now: "
|
||||
ffgGrid = self.getRFCFlashFloodGrid(productList,varDict)
|
||||
#print "GOT FFG Grid"
|
||||
ffgGrid[less(ffgGrid, 0.0)] = 0.0
|
||||
|
||||
# calculate the areas where the FFG is missing. We will fill these values with None eventually
|
||||
missingFFGMask = less_equal(ffgGrid, 0.0)
|
||||
|
||||
# get the ERP grids and stuff them in six hour time blocks to match
|
||||
# the cummulative QPF grids will create later
|
||||
erpGridDict = self.getERPGrids()
|
||||
|
||||
if erpGridDict is None: # We're in that window where we should wait.
|
||||
self.statusBarMsg("The current ERP guidance is not yet completely available. Please re-run this tool at a later time.", "S")
|
||||
return
|
||||
|
||||
for i in range(len(trList)):
|
||||
|
||||
|
@ -518,6 +575,10 @@ class Procedure (SmartScript.SmartScript):
|
|||
tempffgGrid = where(equal(ffgGrid, 0.0), float32(1000.0), ffgGrid)
|
||||
ratioGrid = qpfGrid / tempffgGrid
|
||||
ratioGrid[equal(ffgGrid, 0.0)] = 0.0
|
||||
|
||||
# Clip the ratioGrid to 8.0 to prevent problems when displaying
|
||||
ratioGrid.clip(0.0, 8.0, ratioGrid)
|
||||
|
||||
self.createGrid("Fcst", "ERP", "SCALAR", erpGrid, tr,
|
||||
minAllowedValue = -1, maxAllowedValue=100,
|
||||
precision=2)
|
||||
|
@ -543,7 +604,12 @@ class Procedure (SmartScript.SmartScript):
|
|||
mask = logical_and(ratioMask, erpMask)
|
||||
keyIndex = self.getIndex(threatMatrix[r][e], threatKeys)
|
||||
floodThreat[mask] = keyIndex
|
||||
|
||||
# Now set the values we found missing to the None key
|
||||
noneIndex = self.getIndex("None", threatKeys)
|
||||
floodThreat[missingFFGMask] = noneIndex
|
||||
|
||||
# Create the grid
|
||||
self.createGrid("Fcst", "FloodThreat", "DISCRETE",
|
||||
(floodThreat, threatKeys), tr,
|
||||
discreteKeys=threatKeys,
|
||||
|
|
|
@ -19,9 +19,12 @@
|
|||
# Sept 18, 2014: Added code to pull grids from NHC via ISC if PHISH not
|
||||
# Available on time. Left inactive (commented out) for the moment until that can be fully tested later
|
||||
# in 2014 or in 2015.
|
||||
#
|
||||
# Last Modified: May 22, 2015 (LEFebvre/Santos): Added option to create null grids and manual grids when
|
||||
# May 22, 2015 (LEFebvre/Santos): Added option to create null grids and manual grids when
|
||||
# PSURGE not available. Added checks for current guidance for PHISH and ISC options.
|
||||
#
|
||||
# Last Modified: LeFebvre/Santos, July 27, 2015: Expanded Manual options to include Replace and Add options.
|
||||
# This allows sites to specify manually different threat levels across different edit areas and time ranges.
|
||||
# See 2015HTIUserGuide for details.
|
||||
#
|
||||
# ----------------------------------------------------------------------------
|
||||
# The MenuItems list defines the GFE menu item(s) under which the
|
||||
|
@ -46,9 +49,9 @@ VariableList = [("DEFAULT: Typical. Should only be changed in coordination with
|
|||
"Higher (40% Exceedance; for well-behaved systems within 6 hours of the event)",
|
||||
"Highest (50% Exceedance; for well-behaved systems at time of the event)"]),
|
||||
("Grid Smoothing?", "Yes", "radio", ["Yes","No"]),
|
||||
("Make grids from \nPHISH, ISC, or Manually?", "PHISH", "radio", ["PHISH", "ISC", "Manually"]),
|
||||
("Make grids from \nPHISH, ISC, or Manually?", "PHISH", "radio", ["PHISH", "ISC", "Manually Replace", "Manually Add"]),
|
||||
("Manual Inundation settings:", "", "label"),
|
||||
("Inundation Height:", 1.0, "scale", [0.0, 2.5], 0.5),
|
||||
("Inundation Height:", 1.0, "scale", [0.0, 2.0], 1.0),
|
||||
("Start Hour for Inundation Timing", 0, "scale", [0.0, 72.0], 6.0),
|
||||
("End Hour for Inundation Timing", 6, "scale", [0.0, 78.0], 6.0),
|
||||
]
|
||||
|
@ -56,7 +59,7 @@ VariableList = [("DEFAULT: Typical. Should only be changed in coordination with
|
|||
class Procedure (SmartScript.SmartScript):
|
||||
def __init__(self, dbss):
|
||||
SmartScript.SmartScript.__init__(self, dbss)
|
||||
|
||||
|
||||
def getWEInventory(self, modelName, WEName, level):
|
||||
allTimes = TimeRange.allTimes().toJavaObj()
|
||||
gridInfo = self.getGridInfo(modelName, WEName, level, allTimes)
|
||||
|
@ -69,11 +72,11 @@ class Procedure (SmartScript.SmartScript):
|
|||
trList.append(tr)
|
||||
|
||||
return trList
|
||||
|
||||
|
||||
def baseGuidanceTime(self):
|
||||
startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600)
|
||||
print "BASETIME IS: ", startTime
|
||||
|
||||
# print "BASETIME IS: ", startTime
|
||||
|
||||
return startTime
|
||||
|
||||
|
||||
|
@ -162,22 +165,22 @@ class Procedure (SmartScript.SmartScript):
|
|||
for tr in trList:
|
||||
grid = self.getGrids(dbName, weName, level, tr, mode="Max")
|
||||
|
||||
|
||||
|
||||
surgeVal = grid.copy()
|
||||
mask = surgeVal > -100
|
||||
grid = np.where(mask,surgeVal*3.28, np.float32(-80.0))
|
||||
|
||||
return grid # convert meters to feet
|
||||
|
||||
def makePhishGrid(self, pctStr, level, smoothThreatGrid):
|
||||
|
||||
|
||||
def makePhishGrid(self, pctStr, level, smoothThreatGrid, mutableID):
|
||||
|
||||
siteID = self.getSiteID()
|
||||
dbName = siteID + "_D2D_TPCSurgeProb"
|
||||
|
||||
|
||||
weName = "Surge" + pctStr + "Pctincr"
|
||||
#print "Attempting to retrieve: ", weName, level
|
||||
trList = self.getWEInventory(dbName, weName, level)
|
||||
|
||||
|
||||
if len(trList) == 0:
|
||||
self.statusBarMsg("No grids available for model:" + dbName, "S")
|
||||
return None
|
||||
|
@ -193,7 +196,7 @@ class Procedure (SmartScript.SmartScript):
|
|||
n = n + 1
|
||||
end = tr.startTime().unixTime()
|
||||
tr6 = TimeRange.TimeRange(AbsTime.AbsTime(start),
|
||||
AbsTime.AbsTime(end))
|
||||
AbsTime.AbsTime(end))
|
||||
phishGrid = self.getGrids(dbName, weName, level, tr)
|
||||
#
|
||||
# For consistency we need to add smoothing here too as we do in execute.
|
||||
|
@ -203,11 +206,11 @@ class Procedure (SmartScript.SmartScript):
|
|||
continue
|
||||
|
||||
if smoothThreatGrid is "Yes":
|
||||
phishGrid = np.where(np.greater(phishGrid, 0.0), self.smoothGrid(phishGrid,3), phishGrid)
|
||||
|
||||
phishGrid = np.where(np.greater(phishGrid, 0.0), self.smoothGrid(phishGrid,3), phishGrid)
|
||||
|
||||
grid = np.where(phishGrid>-100,phishGrid*3.28, np.float32(-80.0))
|
||||
self.createGrid("Fcst", "InundationTiming", "SCALAR", grid, tr6, precision=1)
|
||||
|
||||
self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr6, precision=1)
|
||||
|
||||
return
|
||||
|
||||
#**************************************************************************************
|
||||
|
@ -438,7 +441,7 @@ class Procedure (SmartScript.SmartScript):
|
|||
for tr in trList:
|
||||
grid = self.getGrids("ISC", iscWeName, "SFC", tr)
|
||||
if iscWeName == "InundationTimingnc":
|
||||
self.createGrid("Fcst", weName, "SCALAR", grid, tr, precision=2)
|
||||
self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2)
|
||||
elif iscWeName == "InundationMaxnc":
|
||||
surgePctGrid = grid
|
||||
elif iscWeName == "SurgeHtPlusTideMSLnc":
|
||||
|
@ -478,6 +481,8 @@ class Procedure (SmartScript.SmartScript):
|
|||
|
||||
def execute(self, varDict, editArea):
|
||||
|
||||
mutableID = self.mutableID()
|
||||
|
||||
# List of elements
|
||||
# See if we should copy from ISC. If so, do the copy and exit
|
||||
smoothThreatGrid = varDict["Grid Smoothing?"]
|
||||
|
@ -534,19 +539,20 @@ class Procedure (SmartScript.SmartScript):
|
|||
surgePctGridNAVD + navdtomhhw, np.float32(-80.0)) # MHHW Grid
|
||||
surgeDiffMLLWMHHW = np.where(np.greater(surgePctGridMLLW,-80.0) & np.greater(surgePctGridMHHW, -80.0), \
|
||||
surgePctGridMLLW-surgePctGridMHHW, np.float32(-80.0)) # Diff Grid Between MLLW and MHHW
|
||||
|
||||
self.makePhishGrid(pctStr, "FHAG0", smoothThreatGrid)
|
||||
|
||||
|
||||
self.makePhishGrid(pctStr, "FHAG0", smoothThreatGrid, mutableID)
|
||||
|
||||
elif makeOption == "ISC":
|
||||
|
||||
|
||||
elementList = ["InundationMax","InundationTiming", "SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW",
|
||||
"SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"]
|
||||
surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD = self.copyISCGridstoFcst(elementList)
|
||||
if surgePctGrid is None or surgePctGridMSL is None or surgePctGridMLLW is None or \
|
||||
surgePctGridMHHW is None or surgePctGridNAVD is None:
|
||||
return
|
||||
|
||||
elif makeOption == "Manually":
|
||||
|
||||
elif makeOption == "Manually Replace" or makeOption == "Manually Add":
|
||||
|
||||
inundationHeight = float(varDict["Inundation Height:"])
|
||||
inunStartHour = float(varDict["Start Hour for Inundation Timing"])
|
||||
inunEndHour = float(varDict["End Hour for Inundation Timing"])
|
||||
|
@ -560,30 +566,43 @@ class Procedure (SmartScript.SmartScript):
|
|||
if not modifyMask.any():
|
||||
self.statusBarMsg("Please define an area that intersects the StormSurgeEditArea to assign the inundation values.", "S")
|
||||
return # Calculate the intersection of the SSEditArea and selected editAre
|
||||
|
||||
|
||||
if inunStartHour >= inunEndHour:
|
||||
self.statusBarMsg("Please define the end hour after the start hour.", "S")
|
||||
return
|
||||
|
||||
timeRange = TimeRange.allTimes()
|
||||
self.deleteCmd(["InundationTiming"], timeRange)
|
||||
# make the InundationMax grid
|
||||
|
||||
print "Making timing grids"
|
||||
surgePctGrid = self.empty()
|
||||
|
||||
# make the InundationMax grid
|
||||
surgePctGrid[modifyMask] = inundationHeight
|
||||
# Make the timing grids
|
||||
baseTime = self.baseGuidanceTime()
|
||||
# trList = self.makeTimingTRs(baseTime)
|
||||
trList, timingGrids = self.getTimingGrids()
|
||||
print "TRLIST IS: ", trList
|
||||
# print "TRLIST IS: ", trList
|
||||
|
||||
if makeOption != "Manually Add":
|
||||
timeRange = TimeRange.allTimes()
|
||||
self.deleteCmd(["InundationTiming"], timeRange)
|
||||
|
||||
for i in range(len(trList)):
|
||||
# only modify grid in the specified time range
|
||||
start = trList[i].startTime().unixTime()
|
||||
end = trList[i].endTime().unixTime()
|
||||
|
||||
# If we're adding, replace the empty timing grids with the existing grids
|
||||
if makeOption == "Manually Add":
|
||||
try:
|
||||
tGrid = self.getGrids(mutableID, "InundationTiming", "SFC", trList[i])
|
||||
timingGrids[i] = tGrid
|
||||
except:
|
||||
print "Timing grid not found at:", trList[i]
|
||||
|
||||
if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour:
|
||||
timingGrids[i][modifyMask] = inundationHeight # populate where needed
|
||||
|
||||
self.createGrid("Fcst", "InundationTiming", "SCALAR", timingGrids[i], trList[i])
|
||||
self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i])
|
||||
|
||||
threatWEName = "StormSurgeThreat"
|
||||
|
||||
|
@ -613,7 +632,8 @@ class Procedure (SmartScript.SmartScript):
|
|||
#print "threshDict[keyMap[key]]: ", keyMap[key], threshDict[keyMap[key]]
|
||||
|
||||
# make a timeRange - 6 hours long
|
||||
elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW","SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"]
|
||||
elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW",
|
||||
"SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"]
|
||||
|
||||
# make a new timeRange that will be used to create new grids
|
||||
timeRange = self.makeNewTimeRange(6)
|
||||
|
@ -624,26 +644,46 @@ class Procedure (SmartScript.SmartScript):
|
|||
startTime = AbsTime.AbsTime(cTime - 24*3600)
|
||||
endTime = startTime + 240*3600
|
||||
deleteTimeRange = TimeRange.TimeRange(startTime, endTime)
|
||||
|
||||
|
||||
# Don't remove the StormSurgeThreat grid if we're adding to the current one.
|
||||
if varDict["Make grids from \nPHISH, ISC, or Manually?"] == "Manually Add":
|
||||
elementList.remove("StormSurgeThreat")
|
||||
elementList.remove("InundationMax")
|
||||
|
||||
for elem in elementList:
|
||||
self.deleteCmd([elem], deleteTimeRange)
|
||||
|
||||
# display the D2D grid for debugging purposes only
|
||||
self.createGrid("Fcst", "InundationMax", "SCALAR", surgePctGrid,
|
||||
timeRange, precision=2)
|
||||
|
||||
if makeOption != "Manually":
|
||||
self.createGrid("Fcst", "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL,
|
||||
if makeOption != "Manually Replace" and makeOption != "Manually Add":
|
||||
self.createGrid(mutableID, "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL,
|
||||
timeRange, precision=2)
|
||||
self.createGrid("Fcst", "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW,
|
||||
self.createGrid(mutableID, "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW,
|
||||
timeRange, precision=2)
|
||||
self.createGrid("Fcst", "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD,
|
||||
self.createGrid(mutableID, "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD,
|
||||
timeRange, precision=2)
|
||||
self.createGrid("Fcst", "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW,
|
||||
self.createGrid(mutableID, "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW,
|
||||
timeRange, precision=2)
|
||||
|
||||
# make a grid of zeros. This will be the CoastalThreat grid
|
||||
# Make the grid. Start with the existing grid if we have one otherwise zeros
|
||||
coastalThreat = self.empty()
|
||||
|
||||
# Fetch the old grids if we're adding
|
||||
if varDict["Make grids from \nPHISH, ISC, or Manually?"] == "Manually Add":
|
||||
imTRList = self.getWEInventory(mutableID, "InundationMax", "SFC")
|
||||
print "InnundationTFList:", imTRList
|
||||
if len(imTRList) > 0:
|
||||
print "got pctSurgegrid"
|
||||
imTR = imTRList[0]
|
||||
surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR)
|
||||
surgePctGrid[modifyMask] = inundationHeight
|
||||
|
||||
coastalTRList = self.getWEInventory(mutableID, threatWEName, "SFC")
|
||||
if len(coastalTRList) > 0:
|
||||
coastalTR = coastalTRList[0]
|
||||
coastalThreat, keys = self.getGrids(mutableID, threatWEName, "SFC", coastalTR)
|
||||
|
||||
self.createGrid(mutableID, "InundationMax", "SCALAR", surgePctGrid, timeRange, precision=2)
|
||||
|
||||
|
||||
# Yet another list to define the order in which we set grid values
|
||||
# This order must be ranked lowest to highest
|
||||
|
@ -655,10 +695,11 @@ class Procedure (SmartScript.SmartScript):
|
|||
#print "THRESHOLD FOR KEY IS: ", key, threshDict[key]
|
||||
thresh = threshDict[key]
|
||||
keyIndex = self.getIndex(key, threatKeys)
|
||||
coastalThreat[ssea & np.greater_equal(surgePctGrid, thresh)] = keyIndex
|
||||
coastalMask = ssea & np.greater_equal(surgePctGrid, thresh)
|
||||
coastalThreat[coastalMask] = keyIndex
|
||||
|
||||
# create the CoastalThreat Grid
|
||||
self.createGrid("Fcst", threatWEName, "DISCRETE",
|
||||
self.createGrid(mutableID, threatWEName, "DISCRETE",
|
||||
(coastalThreat, threatKeys), timeRange,
|
||||
discreteKeys=threatKeys,
|
||||
discreteOverlap=0,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Version 2015.2.10-1
|
||||
# Version 2015.11.18-0
|
||||
|
||||
import GenericHazards
|
||||
import JsonSupport
|
||||
|
@ -157,7 +157,7 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
dict["pwsD64"] = (0, 5)
|
||||
dict["pwsN64"] = (0, 5)
|
||||
dict["InundationMax"] = (0, 5)
|
||||
dict["InundationTiming"] = (0, 5)
|
||||
dict["InundationTiming"] = (0, 3)
|
||||
return dict
|
||||
|
||||
###############################################################
|
||||
|
@ -620,6 +620,17 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
# wording with rainfall for the TCV
|
||||
self._extraSampleTimeRange = self.makeTimeRange(startTime-12*3600,
|
||||
startTime)
|
||||
|
||||
# Create a special time range list broken into 6 hour chunks for surge
|
||||
startTime6Hour = self._calculateStartTime(time.gmtime(self._issueTime_secs), use6Hour=True)
|
||||
timeRange6Hour = self.makeTimeRange(startTime6Hour, startTime6Hour+120*3600)
|
||||
subRanges = self.divideRange(timeRange6Hour, self._resolution(use6Hour=True))
|
||||
trList = []
|
||||
for index, tr in enumerate(subRanges):
|
||||
self.debug_print("In _determineTimeRanges (6 hour) -> tr = %s" %
|
||||
(self._pp.pformat(tr)), 1)
|
||||
trList.append((tr, "Label"))
|
||||
self._timeRangeList6Hour = trList
|
||||
|
||||
# Determine the time range list according to the resolution
|
||||
subRanges = self.divideRange(self._timeRange, self._resolution())
|
||||
|
@ -661,14 +672,14 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
(self._pp.pformat(self._periodList)), 1)
|
||||
self._timeRangeList = trList
|
||||
|
||||
def _calculateStartTime(self, localCreationTime):
|
||||
def _calculateStartTime(self, localCreationTime, use6Hour=False):
|
||||
year = localCreationTime[0]
|
||||
month = localCreationTime[1]
|
||||
day = localCreationTime[2]
|
||||
hour = localCreationTime[3]
|
||||
|
||||
# Define a variable to control which resolution we want
|
||||
resolution = self._resolution() # 6 is also a valid option
|
||||
resolution = self._resolution(use6Hour) # 6 is also a valid option
|
||||
|
||||
# If we are more than halfway though a block we would want
|
||||
if hour % resolution > resolution / 2:
|
||||
|
@ -691,8 +702,11 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
|
||||
return startTime
|
||||
|
||||
def _resolution(self):
|
||||
return 3
|
||||
def _resolution(self, use6Hour=False):
|
||||
if use6Hour:
|
||||
return 6
|
||||
else:
|
||||
return 3
|
||||
|
||||
def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False,
|
||||
resolution=3):
|
||||
|
@ -811,11 +825,14 @@ class TextProduct(GenericHazards.TextProduct):
|
|||
# This pattern will handle multiple word names
|
||||
# (including certain special characters).
|
||||
# This is for the NHC format.
|
||||
mndSearch = re.search("(?im)^.*?(?P<stormType>HURRICANE|(SUB|POST.?)?TROPICAL " +
|
||||
"(STORM|DEPRESSION|CYCLONE)|(SUPER )?TYPHOON|" +
|
||||
"REMNANTS OF) (?P<stormName>[A-Z0-9\-\(\) ]+?)" +
|
||||
"(?P<advisoryType>SPECIAL |INTERMEDIATE )?ADVISORY " +
|
||||
"NUMBER[ ]+(?P<advisoryNumber>[A-Z0-9]+)[ ]*", tcp)
|
||||
mndSearch = re.search("(?im)^.*?(?P<stormType>HURRICANE|" +
|
||||
"(POTENTIAL|SUB|POST.?)" +
|
||||
"?TROPICAL (STORM|DEPRESSION|CYCLONE)|" +
|
||||
"(SUPER )?TYPHOON|REMNANTS OF) " +
|
||||
"(?P<stormName>[A-Z0-9\-\(\) ]+?)" +
|
||||
"(?P<advisoryType>SPECIAL |INTERMEDIATE )" +
|
||||
"?ADVISORY NUMBER[ ]+" +
|
||||
"(?P<advisoryNumber>[A-Z0-9]+)[ ]*", tcp)
|
||||
|
||||
if mndSearch is not None:
|
||||
self._stormType = mndSearch.group("stormType").strip()
|
||||
|
|
|
@ -0,0 +1,986 @@
|
|||
# ----------------------------------------------------------------------------
|
||||
# This software is in the public domain, furnished "as is", without technical
|
||||
# support, and with no warranty, express or implied, as to its usefulness for
|
||||
# any purpose.
|
||||
#
|
||||
# GridManipulation - Version 2.1
|
||||
#
|
||||
# Author: Matthew H. Belk Created: 10/02/2010
|
||||
# WFO Taunton MA Last Modified: 12/03/2015
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
import SmartScript
|
||||
import types, re
|
||||
import LogStream
|
||||
|
||||
import numpy as np
|
||||
import TimeRange, AbsTime
|
||||
|
||||
class GridManipulation(SmartScript.SmartScript):
|
||||
|
||||
    def __init__(self, dbss):
        # Delegate all initialization to the SmartScript base class; this
        # mixin adds only the GM_* utility methods and keeps no extra state.
        SmartScript.SmartScript.__init__(self, dbss)
|
||||
|
||||
############################################################################
|
||||
# (originally from CheckTandTd by Tom LeFebvre).
|
||||
|
||||
##
|
||||
# Get the list of time ranges at the grid whose element name is WEName
|
||||
# contains grids. The model and level of the weather element are assumed
|
||||
# to be "Fcst" and "SFC" respectively, but can be identified differently.
|
||||
#
|
||||
# @param WEName: Name of a weather element
|
||||
# @type WEName: string
|
||||
# @param model: Name of model to inventory (default = "Fcst")
|
||||
# @type model: string
|
||||
# @param level: Level of a weather element to inventory (default = "SFC")
|
||||
# @type level: string
|
||||
# @return: time ranges at which WEName has data.
|
||||
# @rtype: Python list of Python TimeRange objects
|
||||
def GM_getWEInventory(self, WEName, dbase="Fcst", level="SFC",
|
||||
timeRange=TimeRange.allTimes()):
|
||||
"""Return a list of time ranges with available data for a field from
|
||||
a specific database and level.
|
||||
Args:
|
||||
string WEName: name of field to inventory
|
||||
string dbase: name of database to search (default = 'Fcst')
|
||||
string level: level of data to inventory (default = 'SFC')
|
||||
Returns:
|
||||
Python list of Python time range objects
|
||||
"""
|
||||
|
||||
# print "Getting inventory of -> '%s' from '%s' at '%s'" % \
|
||||
# (WEName, dbase, level)
|
||||
|
||||
trList = []
|
||||
# getGridInfo will just die if the modelName or weName is not valid
|
||||
# so wrap it in a try block and return [] if it fails
|
||||
try:
|
||||
gridInfo = self.getGridInfo(dbase, WEName, level, timeRange)
|
||||
except:
|
||||
return trList
|
||||
|
||||
trList = [g.gridTime() for g in gridInfo
|
||||
if timeRange.overlaps(g.gridTime())]
|
||||
return trList
|
||||
|
||||
|
||||
##
|
||||
# Get time ranges locked by other workstations for the weather element named
|
||||
# weName. The model for weName is taken from this object's mutableID() method;
|
||||
# the level defaults to "SFC", but can be identified for a different level.
|
||||
# @param weName: Name of a weather element.
|
||||
# @type weName: string
|
||||
# @param level: Level of a weather element to inventory (default = "SFC")
|
||||
# @type level: string
|
||||
# @return: time ranges locked by others
|
||||
# @rtype: Python list of TimeRanges; if asJava is True, these are Java
|
||||
# TimeRanges, otherwise they are Python TimeRanges.
|
||||
def GM_getParmLocksByOthers(self, weName, level="SFC"):
|
||||
"""Return a list of time ranges of locked data within the current
|
||||
mutable database (typically 'Fcst').
|
||||
Args:
|
||||
string WEName: name of field to inventory
|
||||
string level: level of data to inventory (default = 'SFC')
|
||||
Returns:
|
||||
Python list of Python time range objects
|
||||
"""
|
||||
|
||||
# returns list of time ranges locked by others for this weather element
|
||||
parm = self.getParm(self.mutableID(), weName, level)
|
||||
if parm is None:
|
||||
return []
|
||||
|
||||
lockTable = parm.getLockTable()
|
||||
locksByOthers = lockTable.lockedByOther()
|
||||
trList = []
|
||||
|
||||
for lock in locksByOthers.toArray():
|
||||
print lock
|
||||
|
||||
start = lock.getStart().getTime() / 1000
|
||||
end = lock.getEnd().getTime() / 1000
|
||||
tr = self.GM_makeTimeRange(start, end)
|
||||
|
||||
trList.append(tr)
|
||||
|
||||
return trList
|
||||
|
||||
|
||||
##
|
||||
# Filter trList, returning only the time ranges that overlap timeRange.
|
||||
# @param timeRange: the time range to test against
|
||||
# @type timeRange: a Python TimeRange
|
||||
# @param trList: the list of time ranges to filter
|
||||
# @type trList: Python list of Python TimeRanges
|
||||
# @param closest: toggle to find time ranges closest to selected time range (False = overlap only)
|
||||
# @type closest: integer
|
||||
# @return: The time ranges in trList that overlap timeRange.
|
||||
# @rtype: a Python list of Python time ranges
|
||||
    def GM_overlappingTRs(self, timeRange, trList, closest=False):
        """Filter trList, returning the time ranges that overlap timeRange.

        Args:
            TimeRange timeRange: a Python time range object to test against
            list trList: list of Python time range objects to filter
            boolean closest: if True and nothing in trList overlaps
                             timeRange, instead return the two time ranges
                             whose start times are closest to the start and
                             end of timeRange. If False (default), only
                             overlapping time ranges are returned.

        Returns:
            Python list of Python time range objects, sorted by start time.
        """

        # Get ready to return updated list of times
        newTRList = []

        # Get ready to track certain times
        beforeTime = None  # start-time offset closest to selected range start
        afterTime = None   # start-time offset closest to selected range end
        beforeTR = None    # time range closest to selected range start
        afterTR = None     # time range closest to selected range end

        # Get start and end time of selected time range
        selectStartTime = timeRange.startTime()
        selectEndTime = timeRange.endTime()

        #=======================================================================
        # Examine each time range in the list

        for tr in trList:

            # If this time range overlaps the selected range, keep it
            if timeRange.overlaps(tr):

                # Add it to the list
                newTRList.append(tr)

            # Otherwise, if we should find the closest time ranges
            elif closest:

                # Get the start time of this time range
                startTime = tr.startTime()

                # Offsets from this grid's start to the start and end of the
                # selected time range (negative = grid starts earlier)
                diffStartTime = (startTime - selectStartTime)
                diffEndTime = (startTime - selectEndTime)
                # print "\t", diffStartTime, diffEndTime

                # If start time of this grid is the closest so far to the
                # start time of the selected time range, or it's the first one
                if beforeTime is None or \
                   ((diffStartTime < 0 and diffStartTime >= beforeTime) or
                    (diffStartTime >= 0 and diffStartTime < beforeTime)):

                    # Mark this grid as the closest to the selected start time
                    beforeTime = diffStartTime
                    beforeTR = tr

                    # print "beforeTime =", beforeTime, beforeTR

                # If start time of this grid is the closest so far to the
                # end time of the selected time range, or it's the first one
                if afterTime is None or \
                   (diffEndTime >= 0 and diffEndTime <= abs(afterTime)):

                    # Mark this grid as the closest to the selected end time
                    afterTime = diffEndTime
                    afterTR = tr

                    # print "afterTime =", afterTime, afterTR

        # print "newTRList = ", newTRList, beforeTR, afterTR

        # If we don't have any grids in the list and we should determine the
        # closest grid time ranges to the selected time range
        if len(newTRList) == 0 and closest:

            # NOTE(review): beforeTR/afterTR may still be None here (e.g.
            # empty trList, or no grid at/after the selected end), producing
            # [None, None] / [tr, None] — confirm callers tolerate that.
            newTRList = [beforeTR, afterTR]

        # Ensure time ranges are sorted when we return them
        # (cmp-style sort argument: Python 2 only)
        newTRList.sort(self.GM_trSortMethod)

        # Finally, return our completed list
        return newTRList
|
||||
|
||||
|
||||
##
|
||||
# method so that timeRanges will be sorted earliest to latest
|
||||
# @param first: The first time range to compare
|
||||
# @type first: Python TimeRange
|
||||
# @param last: The second time range to compare
|
||||
# @type last: Python TimeRange
|
||||
# @return: -1 if first starts before last, 1 if first starts after last,
|
||||
# and 0 if first and last start at the same time.
|
||||
# @rtype: integer
|
||||
def GM_trSortMethod(self, first, last):
|
||||
"""Sorts Python time ranges into ascending order.
|
||||
Args:
|
||||
TimeRange first: a Python time range object
|
||||
TimeRange last: a Python time range object
|
||||
Returns:
|
||||
An integer indicating the ascending order of the compared time
|
||||
range objects.
|
||||
"""
|
||||
if first.startTime() < last.startTime():
|
||||
return -1
|
||||
elif first.startTime() == last.startTime():
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
|
||||
##
|
||||
# Concatenate TRList1 and TRList2 and sort by starting times.
|
||||
# @param TRList1: time ranges of the minT grid
|
||||
# @type TRList1: Python list of Python TimeRange objects.
|
||||
# @param TRList2: time ranges of the maxT grid
|
||||
# @type TRList2: Python list of Python TimeRange objects.
|
||||
# @return: The combined and sorted collection.
|
||||
# @rtype: Python list of Python TimeRange objects
|
||||
def GM_mergeTRLists(self, TRList1, TRList2):
|
||||
"""Merges and sorts Python time range lists into ascending order.
|
||||
Args:
|
||||
TimeRange TRList1: a Python time range object
|
||||
TimeRange TRList2: a Python time range object
|
||||
Returns:
|
||||
A merged and sorted list of Python time range objects.
|
||||
"""
|
||||
|
||||
# Merge the lists
|
||||
combined = set(TRList1) | set(TRList2)
|
||||
|
||||
# Sort the resulting time range list in ascending order
|
||||
newList = sorted(combined, self.GM_trSortMethod)
|
||||
|
||||
# Return the merged and sorted list
|
||||
return newList
|
||||
|
||||
#
|
||||
############################################################################
|
||||
|
||||
############################################################################
|
||||
# Other utility methods originally provided by Tom LeFebvre (GSD)
|
||||
|
||||
##
|
||||
# Gets the maximum possible time range.
|
||||
# @return: The maximum possible Python time range.
|
||||
def GM_makeMaxTimeRange(self):
|
||||
return TimeRange.allTimes()
|
||||
|
||||
|
||||
##
|
||||
# Insert a message into the log files.
|
||||
# @param string: message to insert into log file
|
||||
# @type string: string
|
||||
# @return: nothing
|
||||
# @rtype: nothing
|
||||
def GM_logToolUse(self, string):
|
||||
"""Inserts an entry into the log files.
|
||||
Args:
|
||||
string string: message to be inserted into the log files
|
||||
Returns:
|
||||
Nothing
|
||||
"""
|
||||
|
||||
gtime = self._gmtime().timetuple()
|
||||
ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(gtime[0], gtime[1], gtime[2],
|
||||
gtime[3], gtime[4], gtime[5])
|
||||
|
||||
# Insert this message into the logs
|
||||
LogStream.logEvent("%s| %s" % (ts, string))
|
||||
|
||||
|
||||
##
|
||||
# Creates a time range
|
||||
# @param start: start of time range in seconds since the epoch began
|
||||
# @type start: double
|
||||
# @param end: end of time range in seconds since the epoch began
|
||||
# @type end: double
|
||||
# @return: time range
|
||||
# @rtype: time range object appropriate for AWIPS version
|
||||
def GM_makeTimeRange(self, start, end):
|
||||
"""Creates a time range.
|
||||
Args:
|
||||
double start - start of time range in seconds since the epoch began
|
||||
double end - end of time range in seconds since the epoch began
|
||||
Returns:
|
||||
Time range appropriate for AWIPS version
|
||||
"""
|
||||
|
||||
startTime = AbsTime.AbsTime(start)
|
||||
endTime = AbsTime.AbsTime(end)
|
||||
|
||||
return TimeRange.TimeRange(startTime, endTime)
|
||||
|
||||
|
||||
##
|
||||
# Creates a list time range objects to process
|
||||
# @param executeTR: time range to use in creating list of time steps
|
||||
# @type executeTR: time range object appropriate to AWIPS version
|
||||
# @param interpHours: time step to use in hours (default = 1)
|
||||
# @type end: integer
|
||||
# @return: time range objects
|
||||
# @rtype: Python list of time range objects
|
||||
def GM_makeTimeRangeList(self, executeTR, interpHours=1):
|
||||
"""Creates a list of time range objects from specified time range.
|
||||
Args:
|
||||
executeTR - time range object appropriate to AWIPS version
|
||||
integer interpHours - number of hours between each time step
|
||||
(default = 1)
|
||||
Returns:
|
||||
Python list of time range appropriate for AWIPS version
|
||||
"""
|
||||
|
||||
start = executeTR.startTime().unixTime()
|
||||
end = executeTR.endTime().unixTime()
|
||||
|
||||
trList = []
|
||||
for t in range(start, end, 3600*interpHours):
|
||||
|
||||
tr = self.GM_makeTimeRange(t, t + 3600)
|
||||
trList.append(tr)
|
||||
return trList
|
||||
|
||||
|
||||
##
|
||||
# Searches a grid inventory for the first available time ranges before and
|
||||
# after the target time range and returns those objects
|
||||
# @param modelInventory: list of available data times for a particular model
|
||||
# @type modelInventory: Python list
|
||||
# @param targetTR: time range to use as basis for search
|
||||
# @type targetTR: time range object
|
||||
# @return: time ranges of available data before and after target time range
|
||||
# @rtype: time range objects appropriate for AWIPS version or None for missing data
|
||||
def GM_getPrevNextModelTimes(self, modelInventory, targetTR):
|
||||
"""Creates a time range.
|
||||
Args:
|
||||
list modelInventory - list of available data times for a model
|
||||
time range targetTR - time range to use as basis for search
|
||||
Returns:
|
||||
Previous and next time range objects appropriate for AWIPS version,
|
||||
or None for missing data
|
||||
"""
|
||||
|
||||
# If we have a model inventory
|
||||
if len(modelInventory) == 0:
|
||||
print "Model Inventory is empty"
|
||||
return None, None
|
||||
|
||||
# Convert target time range object into number of seconds since epoch
|
||||
targetTRsecs = targetTR.startTime().unixTime()
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Make sure we're in range
|
||||
|
||||
# Target time range is before all available model data
|
||||
if targetTRsecs < modelInventory[0].startTime().unixTime():
|
||||
return None, None
|
||||
|
||||
# Target time range is after all available model data
|
||||
if targetTRsecs > modelInventory[-1].startTime().unixTime():
|
||||
return None, None
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Search the model inventory
|
||||
|
||||
for i in range(len(modelInventory)):
|
||||
|
||||
# If we found the first availble model time ranges on both sides
|
||||
# of the target time range
|
||||
if modelInventory[i].startTime().unixTime() < targetTRsecs and \
|
||||
modelInventory[i + 1].startTime().unixTime() > targetTRsecs:
|
||||
|
||||
# Return these time range objects
|
||||
return modelInventory[i], modelInventory[i+1]
|
||||
|
||||
# If we made it this far, indicate we could not find appropriate
|
||||
# time range objects
|
||||
return None, None
|
||||
|
||||
|
||||
##
|
||||
# Interpolates a sounding at the specified time range, if needed.
|
||||
# Otherwise, will use a cached sounding if appropriate.
|
||||
# within the target time range and returns those objects
|
||||
# @param model: model to use to grab cube
|
||||
# @type model: string
|
||||
# @param weName: weather element name to get cube data for
|
||||
# @type weName: string
|
||||
# @param levels: list of levels to use in constructing cube
|
||||
# @type levels: Python list
|
||||
# @param timeRange: time range to use as basis for search
|
||||
# @type timeRange: time range object
|
||||
# @param modelInventory: list of available data times for a particular model
|
||||
# @type modelInventory: Python list
|
||||
# @return: cube of geopotential height and cube of specified field
|
||||
# @rtype: Python tuple of numpy cube data
|
||||
    def GM_interpolateSounding(self, model, weName, levels, timeRange,
                               modelInventory):
        """Time-interpolate a sounding cube to timeRange from the two
        inventory times that bracket it.

        Args:
            string model: model to use to grab the cube
            string weName: weather element name to build the cube for
            list levels: levels to use in constructing the cube
            timeRange: time range to interpolate to
            list modelInventory: available data times for the model

        Returns:
            (geopotential-height cube, field cube) tuple, or None when the
            target time cannot be bracketed by the inventory.
        """

        prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory,
                                                       timeRange)
        if prevTR is None or nextTR is None:
            return None

        # NOTE(review): noDataError=0 presumably makes makeNumericSounding
        # return None instead of raising when data is missing; the
        # arithmetic below would then fail — confirm upstream guarantees.
        prevGHCube, prevCube = self.makeNumericSounding(model, weName, levels,
                                                        prevTR, noDataError=0)
        nextGHCube, nextCube = self.makeNumericSounding(model, weName, levels,
                                                        nextTR, noDataError=0)
        # calculate weights for a time-weighted average: the closer bracket
        # in time gets the larger weight (weights sum to 1)
        t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime()
        t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime()
        prevWt = float(t2) / float(t1 + t2)
        nextWt = float(t1) / float(t1 + t2)

        # Heights interpolate linearly in all cases
        interpGHCube = (prevGHCube * prevWt) + (nextGHCube * nextWt)

        # If this is a cube of scalars (element name does not contain "wind")
        if re.search("(?i)wind", weName) is None:
            interpCube = (prevCube * prevWt) + (nextCube * nextWt)
        else:

            # Break up the wind into u and v components so direction
            # interpolates correctly across 360/0 degrees
            (prevU, prevV) = self.MagDirToUV(prevCube[0], prevCube[1])
            (nextU, nextV) = self.MagDirToUV(nextCube[0], nextCube[1])

            # Interpolate the wind components
            interpU = (prevU * prevWt) + (nextU * nextWt)
            interpV = (prevV * prevWt) + (nextV * nextWt)

            # Now compute the final wind magnitude and direction
            interpCube = self.UVToMagDir(interpU, interpV)

        return interpGHCube, interpCube
|
||||
|
||||
|
||||
##
|
||||
# Interpolates a grid field at the specified time range, if needed.
|
||||
# Otherwise, will use a cached sounding if appropriate.
|
||||
# within the target time range and returns those objects
|
||||
# @param model: model to use to grab field
|
||||
# @type model: string
|
||||
# @param weName: weather element name to get cube data for
|
||||
# @type weName: string
|
||||
# @param level: level of data to interpolate
|
||||
# @type level: string
|
||||
# @param timeRange: time range to use as basis for search
|
||||
# @type timeRange: time range object
|
||||
# @param modelInventory: list of available data times for a particular model
|
||||
# @type modelInventory: Python list
|
||||
# @return: grid of specified field
|
||||
# @rtype: numpy grid data
|
||||
    def GM_interpolateGrid(self, model, weName, level, timeRange,
                           modelInventory):
        """Time-interpolate a single grid to timeRange from the two
        inventory times that bracket it.

        Args:
            string model: model to use to grab the field
            string weName: weather element name to interpolate
            string level: level of data to interpolate
            timeRange: time range to interpolate to
            list modelInventory: available data times for the model

        Returns:
            numpy grid of the interpolated field, or None when the target
            time cannot be bracketed by the inventory.
        """
        prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory,
                                                       timeRange)

        if prevTR is None or nextTR is None:
            return None

        # NOTE(review): noDataError=0 presumably makes getGrids return None
        # instead of raising on missing data; the arithmetic below would
        # then fail — confirm upstream guarantees.
        prevGrid = self.getGrids(model, weName, level, prevTR, noDataError=0)
        nextGrid = self.getGrids(model, weName, level, nextTR, noDataError=0)

        # calculate weights for a time-weighted average: the closer bracket
        # in time gets the larger weight (weights sum to 1)
        t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime()
        t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime()
        prevWt = t2 / float(t1 + t2)
        nextWt = t1 / float(t1 + t2)

        # If this is a grid of scalars (element name does not contain "wind")
        if re.search("(?i)wind", weName) is None:
            finalGrid = (prevGrid * prevWt) + (nextGrid * nextWt)
        else:

            # Break up the wind into u and v components so direction
            # interpolates correctly across 360/0 degrees
            (prevU, prevV) = self.MagDirToUV(prevGrid[0], prevGrid[1])
            (nextU, nextV) = self.MagDirToUV(nextGrid[0], nextGrid[1])

            # Interpolate the wind components
            interpU = (prevU * prevWt) + (nextU * nextWt)
            interpV = (prevV * prevWt) + (nextV * nextWt)

            # Now compute the final wind magnitude and direction
            finalGrid = self.UVToMagDir(interpU, interpV)

        return finalGrid
|
||||
|
||||
#
|
||||
############################################################################
|
||||
|
||||
|
||||
############################################################################
|
||||
# Define a method to manipulate grid times
|
||||
############################################################################
|
||||
|
||||
##
|
||||
# Produces a list of Python time ranges
|
||||
# @param dataDict: time ranges of available data
|
||||
# @type dataDict: Python dictionary keyed by database
|
||||
# @param dataLocks: time ranges which are locked by others
|
||||
# @type dataLocks: Python list of time ranges which are locked by others
|
||||
# @param interpHours: requested time step in hours
|
||||
# @type interpHours: integer
|
||||
# @return: list of Python time range objects
|
||||
# @rtype: Python list
|
||||
def GM_makeNewTRlist(self, dataDict, dataLocks, interpHours=3):
|
||||
"""Produces a list of Python time ranges.
|
||||
Args:
|
||||
dataDict: Python dictionary of time ranges of available data keyed by database
|
||||
dataLocks: Python list of time ranges which are locked by others
|
||||
interpHours: requested time step in hours
|
||||
Returns:
|
||||
Python list of Python time range objects
|
||||
"""
|
||||
|
||||
#=======================================================================
|
||||
# Make a new list of time ranges to iterate over
|
||||
|
||||
newTRlist = []
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Look at all the models we have data for
|
||||
|
||||
for model in dataDict.keys():
|
||||
#-------------------------------------------------------------------
|
||||
# Start with all time steps from this model
|
||||
|
||||
for tr in dataDict[model].keys():
|
||||
#print "TR:", dir(tr)
|
||||
|
||||
pyStart = self._gmtime(tr.startTime().unixTime())
|
||||
startHour = pyStart.tm_hour
|
||||
|
||||
# print "HOUR:", startHour
|
||||
#---------------------------------------------------------------
|
||||
# If this time range is not already locked by someone else, and
|
||||
# it is one we would want to have but do not have yet, and it
|
||||
# is one we have data for from this model
|
||||
|
||||
# print "newTRlist:", newTRlist, "type:", type(newTRlist)
|
||||
# print "dataLocks:", dataLocks, "type:", type(dataLocks)
|
||||
|
||||
if tr not in newTRlist and tr not in dataLocks and \
|
||||
(startHour % interpHours) == 0 and \
|
||||
dataDict[model][tr] is not None:
|
||||
|
||||
# Add this time range to the new time range list
|
||||
newTRlist.append(tr)
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Sort new model time range list by time
|
||||
|
||||
newTRlist.sort(self.GM_trSortMethod)
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Return completed consolidated time range list
|
||||
|
||||
return newTRlist
|
||||
|
||||
|
||||
############################################################################
|
||||
# Define a method to adjust time range which will be deleted - this is so
|
||||
# only grids for which we have data from selected model will be deleted
|
||||
############################################################################
|
||||
|
||||
##
|
||||
# Produces a new time range which can be used to delete data
|
||||
# @param timeRange: initial selected time range
|
||||
# @type timeRange: Python time range object
|
||||
# @param TRList: time ranges with available data
|
||||
# @type TRlist: Python list of time ranges
|
||||
# @param adjustTR: number of hours to delete before and after available data to make for easier interpolation
|
||||
# @type adjustTR: integer
|
||||
# @return: list of Python time range objects
|
||||
# @rtype: Python list
|
||||
def GM_adjustDeleteTimeRange(self, timeRange, TRlist, adjustTR=0):
|
||||
"""Adjusts a time range for purposes of deleting grids. The intent is
|
||||
to make it easier to interpolate between old and new data.
|
||||
Args:
|
||||
timeRange: Python time range object representing selected time
|
||||
ranage
|
||||
TRlist: Python list of Python time range objects where data is
|
||||
available
|
||||
integer adjustTR: number of hours to delete on either side of
|
||||
available data to make for easier interpolation
|
||||
Returns:
|
||||
a TimeRange object spanning adjusted time range
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Get ready to set new limits of the time range
|
||||
|
||||
newStart = None
|
||||
newEnd = None
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Look through the time ranges we have for model data
|
||||
|
||||
for tr in TRlist:
|
||||
|
||||
# If this grid is in the selected time range
|
||||
if timeRange.overlaps(tr):
|
||||
|
||||
# If we have not yet determined a start time
|
||||
if newStart is None:
|
||||
|
||||
# Define the new start time
|
||||
newStart = tr.startTime().unixTime() - adjustTR*3600.0
|
||||
|
||||
# If we have not yet determined an end time
|
||||
if tr.endTime().unixTime() > newEnd:
|
||||
|
||||
# Define the new end time
|
||||
newEnd = tr.endTime().unixTime() + adjustTR*3600.0
|
||||
|
||||
## print '+'*90
|
||||
## print newStart, newEnd
|
||||
## print TimeRange.TimeRange(AbsTime.AbsTime(newStart), AbsTime.AbsTime(newEnd))
|
||||
|
||||
#-----------------------------------------------------------------------
|
||||
# Return adjusted time range - if we did adjust it
|
||||
|
||||
if newStart is not None and newEnd is not None:
|
||||
|
||||
return TimeRange.TimeRange(AbsTime.AbsTime(newStart),
|
||||
AbsTime.AbsTime(newEnd))
|
||||
|
||||
# Otherwise, return the original time range
|
||||
else:
|
||||
return timeRange
|
||||
|
||||
|
||||
############################################################################
|
||||
# Define a method to linearly interpolate data
|
||||
############################################################################
|
||||
|
||||
##
|
||||
# Produces an updated Python dictionary with interpolated data where needed
|
||||
# @param dataDict: data for a specific time, can be mixed (e.g. gh, t, p)
|
||||
# @type dataDict: Python dictionary keyed by Python TimeRange object
|
||||
# @param TRList: list of times for
|
||||
# @type TRlist: Python list of time ranges
|
||||
# @param adjustTR: number of hours to delete before and after available data to make for easier interpolation
|
||||
# @type adjustTR: integer
|
||||
# @return: list of Python time range objects
|
||||
# @rtype: Python list
|
||||
def GM_interpolateData(self, dataDict, TRlist, interpHours=3,
                       vector=[], singleLevel=[]):
    """Fill temporal gaps in dataDict via linear interpolation.

    For each pair of consecutive time ranges in TRlist whose start times
    are more than interpHours apart, new entries are added to dataDict
    every interpHours hours.  Each new entry is keyed by a 1-hour
    TimeRange and holds data linearly interpolated (in time) between the
    two bracketing entries.

    dataDict    -- dictionary keyed by TimeRange; each value is a tuple
                   with one element per field
    TRlist      -- chronologically ordered list of TimeRange keys for
                   which data already exists in dataDict
    interpHours -- desired number of hours between data time steps
    vector      -- field indices treated as (mag, dir) vector pairs
    singleLevel -- field indices holding single-level (2-D) grids; all
                   other fields are treated as cubes indexed by level
    Returns the (mutated) dataDict including the interpolated entries.

    NOTE(review): the mutable default arguments are only read, never
    mutated, so the shared-default pitfall does not apply here.  The
    exec-based bookkeeping below builds one temporary variable per field
    (data0, data1, ...); statement order is significant throughout.
    """

    #-----------------------------------------------------------------------
    # Determine the structure (i.e. how many fields are present) of the
    # data dictionary

    try:
        numFields = len(dataDict[TRlist[0]])
    except:
        # Either TRlist is empty or its first key is missing from
        # dataDict -- nothing can be interpolated.
        print "No data to interpolate!"
        return dataDict

    #-----------------------------------------------------------------------
    # Cycle through each time period we already have

    for index in range(len(TRlist) - 1):

        # print "\tindex = ", index

        #-------------------------------------------------------------------
        # Define a list to hold the times we need to create soundings for
        # NOTE(review): makeList is assigned but never used below.

        makeList = []

        #-------------------------------------------------------------------
        # Get the time range of the current and next soundings we have
        # NOTE(review): 'next' shadows the builtin of the same name;
        # harmless here since the builtin is not used in this scope.

        current = TRlist[index]
        next = TRlist[index + 1]
        # print '*'*80
        # print current, next

        #-------------------------------------------------------------------
        # Get the starting times of each sounding time range (epoch seconds)

        currentStart = current.startTime().unixTime()
        nextStart = next.startTime().unixTime()

        #-------------------------------------------------------------------
        # See how far apart these soundings are in time (seconds)

        diffTime = nextStart - currentStart
        # print diffTime, interpHours*3600

        #-------------------------------------------------------------------
        # If gap between data time steps are more than what we need

        if int(diffTime) > interpHours*3600:

            #--------------------------------------------------------------
            # Keep track of seconds we are between data time steps

            curTime = float(interpHours*3600)

            #---------------------------------------------------------------
            # Make a new time range every interpHours hours
            # print '\t', int(currentStart + curTime), int(nextStart)

            while int(currentStart + curTime) < int(nextStart):

                #-----------------------------------------------------------
                # Compute linear interpolation weight (0 at 'current',
                # approaching 1 at 'next')

                weight = curTime / diffTime
                # print "weight = ", weight

                #-----------------------------------------------------------
                # Make a new 1-hour TimeRange object for this new time step

                newTR = TimeRange.TimeRange(
                    AbsTime.AbsTime(currentStart + curTime),
                    AbsTime.AbsTime(currentStart + curTime + 3600)
                )

                #-----------------------------------------------------------
                # Define an empty string to hold all interpolated data
                # which should be placed within the final data structure
                # for this time (a comma-separated list of the per-field
                # temporary variable names, later eval'd via exec)

                finalData = ""

                #===========================================================
                # Interpolate data for each field at this time step

                for field in range(numFields):

                    # Create a final data structure for interpolated data
                    exec "data%d = []" % (field)

                    # If this field is a vector, make component data
                    # structures
                    if field in vector:
                        exec "data%dU = []" % (field)
                        exec "data%dV = []" % (field)

                    #-------------------------------------------------------
                    # Get data from the current and next time steps we have

                    try:
                        curData = dataDict[current][field]
                    except:
                        # No point in continuing with this time step
                        msg = "Could not get 'current' data -> %s" % \
                              (repr(current))
                        self.statusBarMsg(msg, "R")
                        continue        # move on

                    try:
                        nextData = dataDict[next][field]
                    except:
                        # No point in continuing with this time step
                        msg = "Could not get 'next' data -> %s" % \
                              (repr(next))
                        self.statusBarMsg(msg, "R")
                        continue        # move on

                    #-------------------------------------------------------
                    # If this field is a vector, separate it into its'
                    # u and v components so interpolation is done on
                    # components rather than on (mag, dir) directly

                    if field in vector:

                        (curU, curV) = self.MagDirToUV(curData[0],
                                                       curData[1])

                        (nextU, nextV) = self.MagDirToUV(nextData[0],
                                                         nextData[1])

                    #=======================================================
                    # If this field is a single level

                    if field in singleLevel:

                        if not vector:
                            data = (curData + (nextData - curData) * weight)
                        else:
                            u = (curU + (nextU - curU) * weight)
                            v = (curV + (nextV - curV) * weight)

                        #---------------------------------------------------
                        # Get the newly interpolated grids (unwrap a
                        # one-element list if that is what we were given)

                        if not vector:

                            if type(data) == types.ListType:
                                dataGrid = data[0]
                            else:
                                dataGrid = data

                        else:
                            if type(u) == types.ListType:
                                uGrid = u[0]
                            else:
                                uGrid = u

                            if type(v) == types.ListType:
                                vGrid = v[0]
                            else:
                                vGrid = v

                        #---------------------------------------------------
                        # Add current level into the new data structure

                        if not vector:
                            exec "data%d = array(dataGrid)" % (field)
                        else:
                            exec "data%dU = array(uGrid)" % (field)
                            exec "data%dV = array(vGrid)" % (field)

                    #=======================================================
                    # Otherwise, cycle through each level in the sounding

                    else:

                        for level in xrange(curData.shape[0]):

                            #-----------------------------------------------
                            # Construct sounding values for this level

                            if not vector:
                                data = (curData[level] +
                                        (nextData[level] - curData[level]) *
                                        weight)
                            else:
                                u = (curU[level] +
                                     (nextU[level] - curU[level]) * weight)

                                v = (curV[level] +
                                     (nextV[level] - curV[level]) * weight)

                            #-----------------------------------------------
                            # Get the newly interpolated grids (unwrap a
                            # one-element list if that is what we were given)

                            if not vector:

                                if type(data) == types.ListType:
                                    dataGrid = data[0]
                                else:
                                    dataGrid = data

                            else:
                                if type(u) == types.ListType:
                                    uGrid = u[0]
                                else:
                                    uGrid = u

                                if type(v) == types.ListType:
                                    vGrid = v[0]
                                else:
                                    vGrid = v

                            #-----------------------------------------------
                            # Add current level into the new sounding

                            if not vector:
                                exec "data%d = data%d + [dataGrid]" % \
                                     (field, field)
                            else:
                                exec "data%dU = data%dU + [uGrid]" % \
                                     (field, field)
                                exec "data%dV = data%dV + [vGrid]" % \
                                     (field, field)

                        #---------------------------------------------------
                        # Finish off the new cube for this time

                        if not vector:
                            exec "data%d = array(data%d)" % (field, field)
                        else:
                            exec "data%dU = array(data%dU)" % (field, field)
                            exec "data%dV = array(data%dV)" % (field, field)

                    #=======================================================
                    # If this is a vector field, reconstruct vector from
                    # the components

                    if vector:
                        exec "data%d = self.UVToMagDir(data%dU, data%dV)" %\
                             (field, field, field)

                    #=======================================================
                    # Add current interpolated data for this time step to
                    # the final data structure

                    exec "finalData += 'data%d'" % (field)

                    if field < (numFields - 1):
                        finalData += ", "

                #-----------------------------------------------------------
                # Add this interpolated data to data structure -- finalData
                # is a string like "data0, data1", so this stores a tuple
                # of the per-field temporaries under the new TimeRange key

                exec "dataDict[newTR] = (%s)" % (finalData)

                msg = "Created data for -> %s" % (repr(newTR))
                self.statusBarMsg(msg, "R")

                #-----------------------------------------------------------
                # Move on to next desired time step

                curTime += float(interpHours)*3600.0

    #-----------------------------------------------------------------------
    # Return the completed data dictionary

    return dataDict
|
||||
|
||||
|
||||
############################################################################
|
||||
# Define a method to smooth data
|
||||
############################################################################
|
||||
|
||||
##
|
||||
# Produces a smoother version of a numpy grid.
|
||||
# @param grid: numpy grid to be smoothed
|
||||
# @type grid: numpy array
|
||||
# @param factor: factor to control level of smoothing
|
||||
# @type factor: Python integer
|
||||
# @return: smoother grid object
|
||||
# @rtype: numpy array
|
||||
def GM_smoothGrid(self, grid, factor=3):
    """Return a smoothed copy of a 2-D numpy grid.

    Each output cell is the average of the input cells within a
    factor x factor box centered on it; the box is clipped at the grid
    edges, so border cells average over fewer neighbors.  This is
    essentially the NumericSmooth example smart tool customized for
    our purposes.

    @param grid: 2-D numpy array to be smoothed
    @param factor: box width controlling the level of smoothing;
                   factors of less than 3 are useless or dangerous,
                   so the grid is returned unchanged in that case
    @return: smoothed float64 numpy array (or the original grid object
             when factor < 3)
    """
    # factors of less than 3 are useless or dangerous
    if factor < 3:
        return grid

    # Half-width of the averaging box.  Use floor division so the
    # result is an int usable with range() under Python 2 and 3 alike
    # (plain "/" would yield a float under Python 3).
    half = int(factor) // 2

    sg = np.zeros(grid.shape, np.float64)     # running sum of shifted grids
    count = np.zeros(grid.shape, np.float64)  # contributors per cell

    for y in range(-half, half + 1):
        for x in range(-half, half + 1):
            # Build matching target/source slice pairs so that the grid
            # shifted by (y, x) is accumulated into sg, clipped at edges.
            if y < 0:
                yTargetSlice = slice(-y, None, None)
                ySrcSlice = slice(0, y, None)
            if y == 0:
                yTargetSlice = slice(0, None, None)
                ySrcSlice = slice(0, None, None)
            if y > 0:
                yTargetSlice = slice(0, -y, None)
                ySrcSlice = slice(y, None, None)
            if x < 0:
                xTargetSlice = slice(-x, None, None)
                xSrcSlice = slice(0, x, None)
            if x == 0:
                xTargetSlice = slice(0, None, None)
                xSrcSlice = slice(0, None, None)
            if x > 0:
                xTargetSlice = slice(0, -x, None)
                xSrcSlice = slice(x, None, None)

            # Index with tuples, not lists: indexing an ndarray with a
            # list of slices is deprecated and removed in modern numpy.
            target = (yTargetSlice, xTargetSlice)
            src = (ySrcSlice, xSrcSlice)
            sg[target] = sg[target] + grid[src]
            count[target] += 1

    # count is >= 1 everywhere (the (0, 0) shift touches every cell),
    # so this division is always safe.
    return sg / count
|
|
@ -178,17 +178,17 @@ ThreatStatements = {
|
|||
},
|
||||
"High": {
|
||||
"check plans": {
|
||||
"planning": "Emergency planning should include a reasonable threat for major storm surge flooding of 6 to 9 feet above ground.",
|
||||
"planning": "Emergency planning should include a reasonable threat for major storm surge flooding of greater than 6 feet above ground.",
|
||||
"preparation": "To be safe, aggressively prepare for the potential of extensive storm surge flooding impacts. Evacuation efforts should now be underway.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed evacuation orders may result in serious injury, significant loss of life, or human suffering. Leave if evacuation orders are given for your area. Consider voluntary evacuation if recommended. Poor decisions may result in being cut off or needlessly risk lives.",
|
||||
},
|
||||
"complete preparations": {
|
||||
"planning": "Emergency plans should include a reasonable threat for major storm surge flooding of 6 to 9 feet above ground.",
|
||||
"planning": "Emergency plans should include a reasonable threat for major storm surge flooding of greater than 6 feet above ground.",
|
||||
"preparation": "To be safe, aggressively prepare for the potential of extensive storm surge flooding impacts. Evacuation efforts should now be brought to completion. Evacuations must be complete before driving conditions become unsafe.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed evacuation orders may result in serious injury, significant loss of life, or human suffering. Leave if evacuation orders are given for your area. Consider voluntary evacuation if recommended. Poor decisions may result in being cut off or needlessly risk lives.",
|
||||
},
|
||||
"hunker down": {
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for major storm surge flooding of 6 to 9 feet above ground.",
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for major storm surge flooding of greater than 6 feet above ground.",
|
||||
"preparation": "To be safe, evacuees should be located within prescribed shelters and well away from deadly storm surge flooding capable of extensive impacts.",
|
||||
"action": "Life threatening inundation is possible. Those who failed to heed evacuation orders risk serious injury, significant loss of life, or human suffering.",
|
||||
},
|
||||
|
@ -198,24 +198,24 @@ ThreatStatements = {
|
|||
"action": "Failure to exercise due safety may result in additional injury or loss of life. If you have a life-threatening emergency dial 9 1 1.",
|
||||
},
|
||||
"default": {
|
||||
"planning": "Emergency considerations should include a reasonable threat for major storm surge flooding of 6 to 9 feet above ground.",
|
||||
"planning": "Emergency considerations should include a reasonable threat for major storm surge flooding of greater than 6 feet above ground.",
|
||||
"preparation": "Be safe and aggressively guard against the potential of extensive storm surge flooding impacts.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed official instructions may result in serious injury, significant loss of life, or human suffering. Poor decisions may result in being cut off or needlessly risk lives.",
|
||||
},
|
||||
},
|
||||
"Mod": {
|
||||
"check plans": {
|
||||
"planning": "Emergency planning should include a reasonable threat for dangerous storm surge flooding of 3 to 6 feet above ground.",
|
||||
"planning": "Emergency planning should include a reasonable threat for dangerous storm surge flooding of greater than 3 feet above ground.",
|
||||
"preparation": "To be safe, earnestly prepare for the potential of significant storm surge flooding impacts. Evacuation efforts should now be underway.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed evacuation orders may result in serious injury or loss of life. Leave if evacuation orders are given for your area. Consider voluntary evacuation if recommended. Poor decisions may needlessly risk lives.",
|
||||
},
|
||||
"complete preparations": {
|
||||
"planning": "Emergency plans should include a reasonable threat for dangerous storm surge flooding of 3 to 6 feet above ground.",
|
||||
"planning": "Emergency plans should include a reasonable threat for dangerous storm surge flooding of greater than 3 feet above ground.",
|
||||
"preparation": "To be safe, earnestly prepare for the potential of significant storm surge flooding impacts. Evacuation efforts should now be brought to completion. Evacuations must be complete before driving conditions become unsafe.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed evacuation orders may result in serious injury or loss of life. Leave if evacuation orders are given for your area. Consider voluntary evacuation if recommended. Poor decisions may needlessly risk lives.",
|
||||
},
|
||||
"hunker down": {
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for dangerous storm surge flooding of 3 to 6 feet above ground.",
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for dangerous storm surge flooding of greater than 3 feet above ground.",
|
||||
"preparation": "To be safe, evacuees should be located within prescribed shelters and well away from storm surge flooding capable of significant impacts.",
|
||||
"action": "Life threatening inundation is possible. Those who failed to heed evacuation orders risk serious injury or loss of life.",
|
||||
},
|
||||
|
@ -225,24 +225,24 @@ ThreatStatements = {
|
|||
"action": "Failure to exercise due safety may result in additional injury or loss of life. If you have a life-threatening emergency dial 9 1 1.",
|
||||
},
|
||||
"default": {
|
||||
"planning": "Emergency considerations should include a reasonable threat for dangerous storm surge flooding of 3 to 6 feet above ground.",
|
||||
"planning": "Emergency considerations should include a reasonable threat for dangerous storm surge flooding of greater than 3 feet above ground.",
|
||||
"preparation": "Be safe and earnestly guard against the potential of significant storm surge flooding impacts.",
|
||||
"action": "Life threatening inundation is possible. Failure to heed official instructions may result in serious injury or loss of life. Poor decisions may needlessly risk lives.",
|
||||
},
|
||||
},
|
||||
"Elevated": {
|
||||
"check plans": {
|
||||
"planning": "Emergency planning should include a reasonable threat for peak storm surge flooding of 1 to 3 feet above ground.",
|
||||
"planning": "Emergency planning should include a reasonable threat for peak storm surge flooding of greater than 1 foot above ground.",
|
||||
"preparation": "To be safe, prepare for the potential of limited storm surge flooding impacts. Efforts should now be underway.",
|
||||
"action": "Localized inundation is possible. Follow the instructions of local officials. Consider voluntary evacuation if recommended. Leave if evacuation orders are issued.",
|
||||
},
|
||||
"complete preparations": {
|
||||
"planning": "Emergency plans should include a reasonable threat for peak storm surge flooding of 1 to 3 feet above ground.",
|
||||
"planning": "Emergency plans should include a reasonable threat for peak storm surge flooding of greater than 1 foot above ground.",
|
||||
"preparation": "To be safe, prepare for the potential of limited storm surge flooding impacts. Efforts should now be brought to completion before conditions deteriorate.",
|
||||
"action": "Localized inundation is possible. Follow the instructions of local officials. Consider voluntary evacuation if recommended. Leave immediately if evacuation orders are issued.",
|
||||
},
|
||||
"hunker down": {
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for peak storm surge flooding of 1 to 3 feet above ground.",
|
||||
"planning": "Emergency considerations should posture for a reasonable threat for peak storm surge flooding of greater than 1 foot above ground.",
|
||||
"preparation": "To be safe, stay away from storm surge flooding capable of limited impacts.",
|
||||
"action": "Localized inundation is possible. Continue to follow the instructions of local officials.",
|
||||
},
|
||||
|
@ -252,7 +252,7 @@ ThreatStatements = {
|
|||
"action": "Exercise due safety.",
|
||||
},
|
||||
"default": {
|
||||
"planning": "Emergency considerations should include a reasonable threat for peak storm surge flooding of 1 to 3 feet above ground.",
|
||||
"planning": "Emergency considerations should include a reasonable threat for peak storm surge flooding of greater than 1 foot above ground.",
|
||||
"preparation": "Be safe and guard against the potential of limited storm surge flooding impacts.",
|
||||
"action": "Localized inundation is possible. Follow the instructions of local officials.",
|
||||
},
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
# ------------ ---------- ----------- --------------------------
|
||||
# Sep 01, 2014 3572 randerso Fix getTopo
|
||||
# Apr 23, 2015 4259 njensen Updated for new JEP API
|
||||
# Dec 2, 2015 18356 yteng Fix typo in __getitem__
|
||||
#
|
||||
########################################################################
|
||||
import DatabaseID, AbsTime, JUtil
|
||||
|
@ -64,7 +65,7 @@ class DBSSWE:
|
|||
if type(result) is numpy.ndarray and result.dtype == numpy.int8:
|
||||
# discrete or weather
|
||||
dkeys = JUtil.javaObjToPyVal(slice.getKeyList())
|
||||
result = [result, keys]
|
||||
result = [result, dkeys]
|
||||
return result
|
||||
return None
|
||||
|
||||
|
|
|
@ -176,6 +176,10 @@ import com.raytheon.viz.ui.simulatedtime.SimulatedTimeOperations;
|
|||
* 09/15/2015 4858 dgilling Disable store/transmit in DRT mode.
|
||||
* 10/14/2015 4959 dgilling Support new function signature for wordWrap.
|
||||
*
|
||||
* 10/26/2015 18244 lshi fixed NullPointerException (pds, updateIssueExpireTimes)
|
||||
* 12/14/2015 18367 ryu Disable finalization of ETN when product is stored to text database.
|
||||
* 12/16/2015 18410 lshi For corrected products, both WMO time and MND time should
|
||||
* match the current time
|
||||
* </pre>
|
||||
*
|
||||
* @author lvenable
|
||||
|
@ -1148,7 +1152,8 @@ public class ProductEditorComp extends Composite implements
|
|||
// prevent the launching of another dialog until the modal dialog is
|
||||
// closed.
|
||||
StoreTransmitDlg storeDlg = new StoreTransmitDlg(parent.getShell(),
|
||||
showStore, this, transmissionCB, pid, !textComp.isCorMode());
|
||||
showStore, this, transmissionCB, pid,
|
||||
!textComp.isCorMode() && (action == Action.TRANSMIT));
|
||||
storeDlg.open();
|
||||
}
|
||||
}
|
||||
|
@ -1955,7 +1960,7 @@ public class ProductEditorComp extends Composite implements
|
|||
this.expireDate = cal.getTime();
|
||||
dateTimeLbl.setText(expireLabelFmt.format(expireDate));
|
||||
|
||||
if (!dead) { // && !editorCorrectionMode) { // && !spellDialog) {
|
||||
if (!dead) {
|
||||
changeTimes();
|
||||
}
|
||||
}
|
||||
|
@ -1974,28 +1979,26 @@ public class ProductEditorComp extends Composite implements
|
|||
// else it will continue on.
|
||||
|
||||
if (textComp != null) {
|
||||
// Update Issue time
|
||||
try {
|
||||
textComp.startUpdate();
|
||||
ProductDataStruct pds = textComp.getProductDataStruct();
|
||||
if (!textComp.isCorMode()) {
|
||||
if (pds != null) {
|
||||
TextIndexPoints pit = pds.getPIT();
|
||||
if (pit != null) {
|
||||
String time = purgeTimeFmt.format(now);
|
||||
textComp.replaceText(pit, time);
|
||||
}
|
||||
if (pds != null) {
|
||||
// update WMO time
|
||||
//if (!textComp.isCorMode()) { ## uncomment this if want to keep WMO time original
|
||||
TextIndexPoints pit = pds.getPIT();
|
||||
if (pit != null) {
|
||||
String time = purgeTimeFmt.format(now);
|
||||
textComp.replaceText(pit, time);
|
||||
}
|
||||
}
|
||||
|
||||
// Update MND time
|
||||
TextIndexPoints tip = pds.getMndMap().get("nwstime");
|
||||
if (tip != null) {
|
||||
SimpleDateFormat fmt = new SimpleDateFormat(longLocalFmtStr);
|
||||
fmt.setTimeZone(localTimeZone);
|
||||
String issueTime = fmt.format(now).toUpperCase();
|
||||
// }
|
||||
|
||||
// Update MND time
|
||||
TextIndexPoints tip = pds.getMndMap().get("nwstime");
|
||||
if (tip != null) {
|
||||
SimpleDateFormat fmt = new SimpleDateFormat(
|
||||
longLocalFmtStr);
|
||||
fmt.setTimeZone(localTimeZone);
|
||||
String issueTime = fmt.format(now).toUpperCase();
|
||||
textComp.replaceText(tip, issueTime);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -91,6 +91,10 @@ import com.raytheon.viz.gfe.textformatter.TextFmtParserUtil;
|
|||
* 08/06/2015 13753 lshi use isSystemTextChange instead of isUpdateTime
|
||||
* 14 OCT 2015 4959 dgilling Use WordWrapperPythonExecutor to get
|
||||
* python calls off UI thread.
|
||||
* 12/04/2015 13753 lshi revert 13753
|
||||
* 12/22/2015 18428 lshi Issuing a Correction of a corrected product via an existing
|
||||
* Product Editor in GFE throws and error and unlocks text,
|
||||
* wordWrap
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -1281,7 +1285,7 @@ public class StyledTextComp extends Composite {
|
|||
* <ol>
|
||||
* <li value=0>The index in the old content of the first character</li>
|
||||
* <li>The index in the old content of the last character</li>
|
||||
* <li>The length of the replacement text</li>
|
||||
* <li>The length of the replacement text</li>
|
||||
* </ol>
|
||||
*/
|
||||
public int[] wordWrap(int cursorIndex, int width) {
|
||||
|
@ -1447,7 +1451,9 @@ public class StyledTextComp extends Composite {
|
|||
post = post.replaceAll("^\\s*", "");
|
||||
|
||||
String text = pre + rchar + post;
|
||||
st.replaceTextRange(startIndex, (1 + endIndex) - startIndex, text);
|
||||
if (startIndex > 0) {
|
||||
st.replaceTextRange(startIndex, (1 + endIndex) - startIndex, text);
|
||||
}
|
||||
int newCaretOffset = startIndex + pre.length();
|
||||
st.setCaretOffset(newCaretOffset);
|
||||
|
||||
|
|
|
@ -51,6 +51,7 @@
|
|||
<field key="Cig" displayTypes="IMAGE"/>
|
||||
<field key="SLDP" displayTypes="IMAGE"/>
|
||||
<field key="TPFI" displayTypes="IMAGE"/>
|
||||
<field key="CAT" displayTypes="IMAGE"/>
|
||||
<field key="TIPD" displayTypes="IMAGE"/>
|
||||
<field key="ICI" displayTypes="IMAGE"/>
|
||||
<field key="ICIP" displayTypes="IMAGE"/>
|
||||
|
|
|
@ -136,6 +136,7 @@ import com.vividsolutions.jts.index.strtree.STRtree;
|
|||
* Jun 26, 2015 17386 xwei Fixed : HydroView crashes in when Refresh Data after loading saved display files
|
||||
* Jul 06, 2015 4215 mpduff Correct the fact that user's cannot click and view time series.
|
||||
* Oct 05, 2015 17978 lbousaidi Enable TimeStep GUI to display multiple values and Parameter Codes for a given lid
|
||||
* Dec 05, 2015 18357 xwei Fixed error in opening Timeseries for Timesteps
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -225,8 +226,6 @@ public class MultiPointResource extends
|
|||
|
||||
private STRtree strTree = new STRtree();
|
||||
|
||||
private STRtree strTreeTimeStep = new STRtree();
|
||||
|
||||
private IFont font;
|
||||
|
||||
private int fontSize;
|
||||
|
@ -1498,13 +1497,14 @@ public class MultiPointResource extends
|
|||
if (pcOptions.getQueryMode() == 1){
|
||||
|
||||
dataMapTimeStep.clear();
|
||||
strTreeTimeStep = new STRtree();
|
||||
|
||||
}else{
|
||||
|
||||
dataMap.clear();
|
||||
strTree = new STRtree();
|
||||
|
||||
}
|
||||
|
||||
strTree = new STRtree();
|
||||
}
|
||||
|
||||
private class TimeSeriesLaunchAction extends AbstractRightClickAction {
|
||||
|
|
|
@ -133,6 +133,8 @@ import com.raytheon.viz.ui.simulatedtime.SimulatedTimeOperations;
|
|||
* Jul 21, 2015 4500 rjpeter Use Number in blind cast.
|
||||
* Oct 13, 2015 4933 rferrel Refactored to use selected variables.
|
||||
* Oct 27, 2015 4900 mduff Don't transmit SHEF files if in DRT.
|
||||
* Nov 06, 2015 17846 lbousaidi change the query so that after QC, the quality_code
|
||||
* is reset from Bad to Good.
|
||||
* </pre>
|
||||
*
|
||||
* @author lvenable
|
||||
|
@ -1866,10 +1868,6 @@ public class TabularTimeSeriesDlg extends CaveSWTDialog implements
|
|||
|
||||
String tablename = DbUtils.getTableName(pe, ts);
|
||||
|
||||
String sql = "update " + tablename + " set value = "
|
||||
+ HydroConstants.MISSING_VALUE
|
||||
+ ", revision= 1, shef_qual_code = 'M' " + ", postingtime= '"
|
||||
+ HydroConstants.DATE_FORMAT.format(postTime) + "' ";
|
||||
|
||||
DataRecord dr = new DataRecord();
|
||||
int[] selectionIndices = bottomListControl.getSelectionIndices();
|
||||
|
@ -1905,6 +1903,11 @@ public class TabularTimeSeriesDlg extends CaveSWTDialog implements
|
|||
|
||||
dr.setRevision((short) 1);
|
||||
|
||||
String sql = "update " + tablename + " set value = "
|
||||
+ HydroConstants.MISSING_VALUE + ", revision= 1, shef_qual_code = 'M' , quality_code= '"
|
||||
+ dr.getQualityCode() + "' " + ", postingtime= '"
|
||||
+ HydroConstants.DATE_FORMAT.format(postTime) + "' ";
|
||||
|
||||
/* code to update an observation to MISSING */
|
||||
if (ts.toUpperCase().startsWith("R")
|
||||
|| ts.toUpperCase().startsWith("P")) {
|
||||
|
|
|
@ -75,6 +75,8 @@ import com.raytheon.viz.hydrocommon.util.DbUtils;
|
|||
* Jul 21, 2015 4500 rjpeter Use Number in blind cast.
|
||||
* Aug 05, 2015 4486 rjpeter Changed Timestamp to Date.
|
||||
* Aug 18, 2015 4793 rjpeter Use Number in blind cast.
|
||||
* Nov 06, 2015 17846 lbousaidi modify edit and insertRejectedData so that quality_code.
|
||||
* is reset from Bad to Good after data QC.
|
||||
* </pre>
|
||||
*
|
||||
* @author dhladky
|
||||
|
@ -99,6 +101,12 @@ public class TimeSeriesDataManager extends HydroDataManager {
|
|||
|
||||
private static SimpleDateFormat dateFormat;
|
||||
|
||||
/** Quality control value for manual "Good" */
|
||||
private final int QC_MANUAL_PASSED = 121;
|
||||
|
||||
/** Quality control value for manual "Bad". */
|
||||
private final int QC_MANUAL_FAILED = 123;
|
||||
|
||||
/**
|
||||
* Map holding the location id and display class.
|
||||
*/
|
||||
|
@ -954,6 +962,9 @@ public class TimeSeriesDataManager extends HydroDataManager {
|
|||
qualityCode = new Integer(dr.getQualityCode());
|
||||
}
|
||||
|
||||
dr.setQualityCode(TimeSeriesUtil.setQcCode(QC_MANUAL_PASSED,
|
||||
dr.getQualityCode()));
|
||||
|
||||
sb.append("insert into rejecteddata(lid, pe, dur, ts, extremum, ");
|
||||
sb.append("probability, validtime, basistime, postingtime, value, ");
|
||||
sb.append("revision, shef_qual_code, product_id, producttime, quality_code, ");
|
||||
|
@ -993,7 +1004,7 @@ public class TimeSeriesDataManager extends HydroDataManager {
|
|||
sb.append("'" + productID + "', ");
|
||||
sb.append("'" + HydroConstants.DATE_FORMAT.format(productTime)
|
||||
+ "', ");
|
||||
sb.append(qualityCode + ", ");
|
||||
sb.append(dr.getQualityCode() + ", ");
|
||||
sb.append("'M', ");
|
||||
sb.append("'" + LocalizationManager.getInstance().getCurrentUser()
|
||||
+ "');");
|
||||
|
@ -1150,11 +1161,14 @@ public class TimeSeriesDataManager extends HydroDataManager {
|
|||
for (int i = 0; i < editList.size(); i++) {
|
||||
ForecastData data = editList.get(i);
|
||||
String tablename = DbUtils.getTableName(data.getPe(), data.getTs());
|
||||
|
||||
//set the QC to GOOD when you set data to missing.
|
||||
data.setQualityCode(TimeSeriesUtil.setQcCode(QC_MANUAL_PASSED,
|
||||
data.getQualityCode()));
|
||||
SqlBuilder sql = new SqlBuilder(tablename);
|
||||
sql.setSqlType(SqlBuilder.UPDATE);
|
||||
sql.addDouble("value", data.getValue());
|
||||
sql.addString("shef_qual_code", "M");
|
||||
sql.addInt("quality_code", data.getQualityCode());
|
||||
sql.addInt("revision", 1);
|
||||
sql.addString("postingTime", HydroConstants.DATE_FORMAT.format(now));
|
||||
if (data.getProductTime() == null) {
|
||||
|
|
|
@ -30,7 +30,8 @@ import java.util.List;
|
|||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Sep 15, 2009 2772 mpduff Initial creation.
|
||||
* Sep 15, 2009 2772 mpduff Initial creation.
|
||||
* Dec 18, 2015 5217 mduff Changed Long to Integer.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -40,13 +41,13 @@ import java.util.List;
|
|||
|
||||
public class HrapBinList {
|
||||
/** the rows */
|
||||
private List<Long> rows = new ArrayList<Long>();
|
||||
private List<Integer> rows = new ArrayList<>();
|
||||
|
||||
/** The beginning columns */
|
||||
private List<Long> beginCols = new ArrayList<Long>();
|
||||
private List<Integer> beginCols = new ArrayList<>();
|
||||
|
||||
/** The ending columns */
|
||||
private List<Long> endCols = new ArrayList<Long>();
|
||||
private List<Integer> endCols = new ArrayList<>();
|
||||
|
||||
/** The number of rows */
|
||||
private long numRows;
|
||||
|
@ -60,7 +61,7 @@ public class HrapBinList {
|
|||
/**
|
||||
* @return the rows
|
||||
*/
|
||||
public List<Long> getRows() {
|
||||
public List<Integer> getRows() {
|
||||
return rows;
|
||||
}
|
||||
|
||||
|
@ -68,14 +69,14 @@ public class HrapBinList {
|
|||
* @param rows
|
||||
* the rows to set
|
||||
*/
|
||||
public void setRows(List<Long> rows) {
|
||||
public void setRows(List<Integer> rows) {
|
||||
this.rows = rows;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the beginCols
|
||||
*/
|
||||
public List<Long> getBeginCols() {
|
||||
public List<Integer> getBeginCols() {
|
||||
return beginCols;
|
||||
}
|
||||
|
||||
|
@ -83,14 +84,14 @@ public class HrapBinList {
|
|||
* @param beginCols
|
||||
* the beginCols to set
|
||||
*/
|
||||
public void setBeginCols(List<Long> beginCols) {
|
||||
public void setBeginCols(List<Integer> beginCols) {
|
||||
this.beginCols = beginCols;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the endCols
|
||||
*/
|
||||
public List<Long> getEndCols() {
|
||||
public List<Integer> getEndCols() {
|
||||
return endCols;
|
||||
}
|
||||
|
||||
|
@ -98,7 +99,7 @@ public class HrapBinList {
|
|||
* @param endCols
|
||||
* the endCols to set
|
||||
*/
|
||||
public void setEndCols(List<Long> endCols) {
|
||||
public void setEndCols(List<Integer> endCols) {
|
||||
this.endCols = endCols;
|
||||
}
|
||||
|
||||
|
@ -146,4 +147,10 @@ public class HrapBinList {
|
|||
public void setArea(double area) {
|
||||
this.area = area;
|
||||
}
|
||||
|
||||
public void addData(double row, double startCol, double endCol) {
|
||||
this.rows.add((int) row);
|
||||
this.beginCols.add((int) startCol);
|
||||
this.endCols.add((int) endCol);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,221 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.viz.hydrobase.data;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.raytheon.viz.hydrocommon.util.HrapUtil;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.GeometryFactory;
|
||||
import com.vividsolutions.jts.geom.LinearRing;
|
||||
import com.vividsolutions.jts.geom.Point;
|
||||
import com.vividsolutions.jts.geom.Polygon;
|
||||
|
||||
/**
|
||||
* Process the spatial work for loading basins, zones, etc. for hydro.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Dec 18, 2015 5217 mpduff Initial creation
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author mpduff
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class HydroGeoProcessor {
|
||||
public static GeometryFactory factory = new GeometryFactory();
|
||||
|
||||
public HydroGeoProcessor() {
|
||||
}
|
||||
|
||||
public HrapBinList getHrapBinList(GeoAreaData geoData) {
|
||||
List<Coordinate> coords = getPointsFromArea(geoData);
|
||||
Coordinate[] minMaxXY = getMinMaxXY(coords);
|
||||
|
||||
LinearRing lr = factory.createLinearRing(coords
|
||||
.toArray(new Coordinate[0]));
|
||||
Polygon poly = factory.createPolygon(lr, null);
|
||||
|
||||
Coordinate minC = minMaxXY[0];
|
||||
Coordinate maxC = minMaxXY[1];
|
||||
|
||||
Coordinate hrapMin = HrapUtil.latLonToHrap(minC);
|
||||
Coordinate hrapMax = HrapUtil.latLonToHrap(maxC);
|
||||
|
||||
double wfoMinX = hrapMin.x;
|
||||
double wfoMinY = hrapMin.y;
|
||||
double wfoMaxX = hrapMax.x;
|
||||
double wfoMaxY = hrapMax.y;
|
||||
|
||||
double maxRow = Math.floor(wfoMaxY);
|
||||
double maxCol = Math.floor(wfoMaxX);
|
||||
double minRow = Math.floor(wfoMinY);
|
||||
double minCol = Math.floor(wfoMinX);
|
||||
|
||||
/* expand the box to make sure polygon has been covered */
|
||||
minRow -= 2;
|
||||
minCol -= 2;
|
||||
maxRow += 2;
|
||||
maxCol += 2;
|
||||
|
||||
int rowCtr = 0;
|
||||
double rowNum = 0;
|
||||
double startCol = 0;
|
||||
double endCol = 0;
|
||||
int binCtr = 0;
|
||||
double area = 0;
|
||||
|
||||
HrapBinList binList = new HrapBinList();
|
||||
|
||||
for (double r = minRow + 0.5; r <= maxRow; r++) { // row
|
||||
rowNum = r;
|
||||
startCol = -1;
|
||||
|
||||
for (double c = minCol + 0.5; c <= maxCol; c++) {
|
||||
Coordinate coord = new Coordinate(c, r);
|
||||
Coordinate gridCell = HrapUtil.hrapToLatLon(coord);
|
||||
Point p = factory.createPoint(gridCell);
|
||||
if (poly.intersects(p)) { // inside
|
||||
endCol = c;
|
||||
binCtr++;
|
||||
if (startCol == -1) {
|
||||
// First cell in the row
|
||||
startCol = c;
|
||||
rowCtr++;
|
||||
}
|
||||
area += HrapUtil.getHrapBinArea(coord);
|
||||
}
|
||||
}
|
||||
|
||||
if (startCol != -1) {
|
||||
binList.addData(rowNum, startCol, endCol);
|
||||
binList.setNumBins(binCtr);
|
||||
binList.setNumRows(rowCtr);
|
||||
binList.setArea(area);
|
||||
}
|
||||
}
|
||||
|
||||
return binList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the min and max corner points.
|
||||
*
|
||||
* @param coords
|
||||
* List of all coordinates
|
||||
*
|
||||
* @return Array of min and max Coordinates
|
||||
*/
|
||||
private Coordinate[] getMinMaxXY(List<Coordinate> coords) {
|
||||
double minX = 9999;
|
||||
double minY = 9999;
|
||||
double maxX = -9999;
|
||||
double maxY = -9999;
|
||||
|
||||
for (Coordinate c : coords) {
|
||||
if (c.x > maxX) {
|
||||
maxX = c.x;
|
||||
} else if (c.x < minX) {
|
||||
minX = c.x;
|
||||
}
|
||||
|
||||
if (c.y > maxY) {
|
||||
maxY = c.y;
|
||||
} else if (c.y < minY) {
|
||||
minY = c.y;
|
||||
}
|
||||
}
|
||||
|
||||
Coordinate[] minMaxCoords = new Coordinate[2];
|
||||
Coordinate min = new Coordinate(minX, minY);
|
||||
Coordinate max = new Coordinate(maxX, maxY);
|
||||
minMaxCoords[0] = min;
|
||||
minMaxCoords[1] = max;
|
||||
|
||||
return minMaxCoords;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an array of points from the information pointed to by the GeoArea
|
||||
* pointer. Ensures that (1) the last point is the same as the first and (2)
|
||||
* that if n points in a row are the same in the database, only one point is
|
||||
* propagated to the points array
|
||||
*
|
||||
* @param data
|
||||
* The GeoAreaData object
|
||||
*/
|
||||
private List<Coordinate> getPointsFromArea(GeoAreaData data) {
|
||||
ArrayList<Coordinate> points = new ArrayList<Coordinate>();
|
||||
|
||||
/* init the first point */
|
||||
Coordinate coord = new Coordinate(data.getLon()[0], data.getLat()[0]);
|
||||
points.add(coord);
|
||||
double[] lat = data.getLat();
|
||||
double[] lon = data.getLon();
|
||||
|
||||
/*
|
||||
* for each input point from the database, starting with the second
|
||||
* point
|
||||
*/
|
||||
for (int i = 1; i < data.getNumberPoints(); i++) {
|
||||
|
||||
/* if input points are different */
|
||||
if ((lat[i] != lat[i - 1]) || (lon[i] != lon[i - 1])) {
|
||||
coord = new Coordinate(lon[i], lat[i]);
|
||||
points.add(coord);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* if the first point and the last point are not the same, add a final
|
||||
* point that is the same as the first
|
||||
*/
|
||||
if (!pointsEqual(points.get(0), points.get(points.size() - 1))) {
|
||||
coord = new Coordinate(lon[0], lat[0]);
|
||||
points.add(coord);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks two Points for equality.
|
||||
*
|
||||
* @param p1
|
||||
* Point 1
|
||||
* @param p2
|
||||
* Point 2
|
||||
* @return true if points are equal
|
||||
*/
|
||||
private boolean pointsEqual(Coordinate p1, Coordinate p2) {
|
||||
if ((p1.x == p2.x) && (p1.y == p2.y)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
|
@ -57,14 +57,13 @@ import com.raytheon.uf.viz.core.exception.VizException;
|
|||
import com.raytheon.viz.hydrobase.data.GeoAreaData;
|
||||
import com.raytheon.viz.hydrobase.data.GeoDataManager;
|
||||
import com.raytheon.viz.hydrobase.data.HrapBinList;
|
||||
import com.raytheon.viz.hydrobase.data.LineSegment;
|
||||
import com.raytheon.viz.hydrobase.data.HydroGeoProcessor;
|
||||
import com.raytheon.viz.hydrocommon.HydroConstants;
|
||||
import com.raytheon.viz.hydrocommon.HydroConstants.ArealTypeSelection;
|
||||
import com.raytheon.viz.hydrocommon.texteditor.TextEditorDlg;
|
||||
import com.raytheon.viz.hydrocommon.whfslib.GeoUtil;
|
||||
import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
||||
import com.raytheon.viz.ui.dialogs.ICloseCallback;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
/**
|
||||
* This class displays the Areal Definitions dialog.
|
||||
|
@ -79,6 +78,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* 16 Jul 2013 2088 rferrel Changes for non-blocking TextEditorDlg.
|
||||
* 29 June 2015 14630 xwei Fixed : Not able to import basins.dat with apostrophe and incorrect data posted
|
||||
* 30 June 2015 17360 xwei Fixed : basins.dat import failed if the first line does not have Lat Lon
|
||||
* Dec 18, 2015 5217 mduff Changes to fix importing geo files.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -656,8 +656,8 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
private void importGeoArea() {
|
||||
GeoDataManager dman = GeoDataManager.getInstance();
|
||||
Date now = Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime();
|
||||
long start = now.getTime();
|
||||
openLogFile();
|
||||
log("=====================================");
|
||||
log("Starting import of "
|
||||
+ HydroConstants.GEOAREA_DATANAMES[selectedType.ordinal()]
|
||||
+ " on " + HydroConstants.DATE_FORMAT.format(now));
|
||||
|
@ -674,15 +674,19 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
|
||||
// Open the file for reading
|
||||
File f = getAreaFilename();
|
||||
BufferedReader in = null;
|
||||
try {
|
||||
BufferedReader in = new BufferedReader(new FileReader(f));
|
||||
in = new BufferedReader(new FileReader(f));
|
||||
String line = null;
|
||||
while ((line = in.readLine()) != null) {
|
||||
if (line.length() == 0) {
|
||||
// Skip any blank lines
|
||||
continue;
|
||||
}
|
||||
saveDataBlock = true;
|
||||
linenum++;
|
||||
|
||||
// Process the header line
|
||||
|
||||
/*
|
||||
* extract each of the attributes from the header block for the
|
||||
* subsequent set of points
|
||||
|
@ -694,7 +698,7 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
/* Remove any excess whitespace. */
|
||||
str = str.trim();
|
||||
str = str.replaceAll("\\s{2,}", " ");
|
||||
str = str.replaceAll("'", "''");
|
||||
str = str.replaceAll("'", "''");
|
||||
|
||||
String[] parts = str.split(" ");
|
||||
int numParts = parts.length;
|
||||
|
@ -704,10 +708,11 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
* point is found then assume this line has two lat/lon values
|
||||
* at the end.
|
||||
*/
|
||||
if (numParts == 6 && parts[numParts - 2].contains(".")
|
||||
if (numParts > 2 && parts[numParts - 2].contains(".")
|
||||
&& parts[numParts - 1].contains(".")) {
|
||||
intLat = Double.parseDouble(parts[numParts - 2]);
|
||||
intLon = Double.parseDouble(parts[numParts - 1]);
|
||||
// * -1 for hydro perspective
|
||||
intLon = Double.parseDouble(parts[numParts - 1]) * -1;
|
||||
|
||||
if ((intLat < 0) || (intLat > 90)) {
|
||||
log("ERROR: invalid interior " + intLat
|
||||
|
@ -723,16 +728,16 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
}
|
||||
|
||||
int shiftNum = 0;
|
||||
if ( numParts == 4 ){
|
||||
shiftNum = 2;
|
||||
if (numParts == 4) {
|
||||
shiftNum = 2;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* get the number of lat-lon pairs that follow, from the end of
|
||||
* the line
|
||||
*/
|
||||
nPts = Integer.parseInt(parts[numParts - 3 + shiftNum]);
|
||||
|
||||
nPts = Integer.parseInt(parts[numParts - 3 + shiftNum]);
|
||||
|
||||
double[] lonPoints = new double[nPts];
|
||||
double[] latPoints = new double[nPts];
|
||||
|
||||
|
@ -740,7 +745,8 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
* get the stream order, which is not always specified, from the
|
||||
* field preceding the num of lat-lon pairs
|
||||
*/
|
||||
int streamOrder = Integer.parseInt(parts[numParts - 4 + shiftNum]);
|
||||
int streamOrder = Integer.parseInt(parts[numParts - 4
|
||||
+ shiftNum]);
|
||||
|
||||
if ((streamOrder < -1) || (streamOrder > 50)) {
|
||||
log("WARNING: Error reading stream order in line "
|
||||
|
@ -757,7 +763,6 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
}
|
||||
|
||||
name.trimToSize();
|
||||
|
||||
String nameString = null;
|
||||
if (name.length() > LOC_AREANAME_LEN) {
|
||||
log(String
|
||||
|
@ -798,8 +803,8 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
saveDataBlock = false;
|
||||
} else {
|
||||
double lat = Double.parseDouble(latlon[0]);
|
||||
double lon = Double.parseDouble(latlon[1]);
|
||||
|
||||
double lon = Double.parseDouble(latlon[1]) * -1;
|
||||
|
||||
/* Test the bounds of the longitude value. */
|
||||
if ((lon < -180) || (lon > 180)) {
|
||||
log("ERROR reading or invalid lon for id "
|
||||
|
@ -837,10 +842,22 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
geoData.setSaveDataBlock(saveDataBlock);
|
||||
geoDataList.add(geoData);
|
||||
} // end while ((line = in.readLine()) != null)
|
||||
|
||||
in.close();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error(
|
||||
"Error reading "
|
||||
+ HydroConstants.GEOAREA_DATANAMES[listCbo
|
||||
.getSelectionIndex()] + ".dat file", e);
|
||||
log("Error reading "
|
||||
+ HydroConstants.GEOAREA_DATANAMES[listCbo
|
||||
.getSelectionIndex()] + ".dat file");
|
||||
} finally {
|
||||
if (in != null) {
|
||||
try {
|
||||
in.close();
|
||||
} catch (IOException e) {
|
||||
// no op
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (geoDataList.size() <= 0) {
|
||||
|
@ -865,7 +882,10 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
|
||||
log("DELETE " + numSegDel);
|
||||
} catch (VizException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error(
|
||||
"Could not delete rows corresponding to GeoArea type "
|
||||
+ selectedType.toString()
|
||||
+ " from the LineSegs database table.", e);
|
||||
log("Could not delete rows corresponding to GeoArea type");
|
||||
log(selectedType.toString()
|
||||
+ " from the LineSegs database table.");
|
||||
|
@ -885,7 +905,10 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
|
||||
log("DELETE " + numGeoDel);
|
||||
} catch (VizException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error(
|
||||
"Could not delete rows from the GeoArea table "
|
||||
+ "corresponding to boundary_type "
|
||||
+ selectedType.toString(), e);
|
||||
log("Could not delete rows from the GeoArea table");
|
||||
log("corresponding to boundary_type " + selectedType.toString());
|
||||
closeLogFile();
|
||||
|
@ -901,7 +924,9 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
throw new VizException();
|
||||
}
|
||||
} catch (VizException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error(
|
||||
"ERROR: Database write failed for "
|
||||
+ data.getAreaId(), e);
|
||||
log("ERROR: Database write failed for "
|
||||
+ data.getAreaId());
|
||||
}
|
||||
|
@ -913,13 +938,14 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
}
|
||||
|
||||
// Load the linesegs table
|
||||
HydroGeoProcessor proc = new HydroGeoProcessor();
|
||||
|
||||
if (selectedType != ArealTypeSelection.RESERVOIRS) {
|
||||
for (GeoAreaData data : geoDataList) {
|
||||
/* do the main processing */
|
||||
HrapBinList binList = getHrapBinListForArea(data);
|
||||
HrapBinList binList = proc.getHrapBinList(data);
|
||||
log("Processing area " + data.getAreaId() + ":"
|
||||
+ " Writing " + binList.getNumRows() + "rows");
|
||||
+ " Writing " + binList.getNumRows() + " rows");
|
||||
dman.putLineSegs(data.getAreaId(), binList);
|
||||
}
|
||||
}
|
||||
|
@ -927,76 +953,10 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
|
||||
now = Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime();
|
||||
log("Import completed on " + HydroConstants.DATE_FORMAT.format(now));
|
||||
|
||||
log("Import time: " + ((now.getTime() - start) / 1000) + " seconds");
|
||||
closeLogFile();
|
||||
}
|
||||
|
||||
/**
|
||||
* This function uses areaId to search the database for the polygon
|
||||
* associated with the area. Then it fills the HrapBinList structure by
|
||||
* finding all the HRAP bins whose centers are inside the area.
|
||||
*
|
||||
* @param data
|
||||
* The GeoAreaData
|
||||
* @return The HrapBinList
|
||||
*/
|
||||
private HrapBinList getHrapBinListForArea(GeoAreaData data) {
|
||||
HrapBinList binList = new HrapBinList();
|
||||
|
||||
ArrayList<Coordinate> points = getPointsFromArea(data);
|
||||
|
||||
java.util.List<LineSegment> segments = LineSegmentUtil
|
||||
.getSegmentsFromPoints(points);
|
||||
|
||||
binList = LineSegmentUtil.getHrapBinListFromSegments(segments);
|
||||
|
||||
return binList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an array of points from the information pointed to by the GeoArea
|
||||
* pointer. Ensures that (1) the last point is the same as the first and (2)
|
||||
* that if n points in a row are the same in the database, only one point is
|
||||
* propagated to the points array
|
||||
*
|
||||
* @param data
|
||||
* The GeoAreaData object
|
||||
*/
|
||||
private ArrayList<Coordinate> getPointsFromArea(GeoAreaData data) {
|
||||
ArrayList<Coordinate> points = new ArrayList<Coordinate>();
|
||||
|
||||
/* init the first point */
|
||||
Coordinate coord = new Coordinate(data.getLon()[0], data.getLat()[0]);
|
||||
points.add(coord);
|
||||
double[] lat = data.getLat();
|
||||
double[] lon = data.getLon();
|
||||
|
||||
/*
|
||||
* for each input point from the database, starting with the second
|
||||
* point
|
||||
*/
|
||||
for (int i = 1; i < data.getNumberPoints(); i++) {
|
||||
|
||||
/* if input points are different */
|
||||
if ((lat[i] != lat[i - 1]) || (lon[i] != lon[i - 1])) {
|
||||
coord = new Coordinate(lon[i], lat[i]);
|
||||
points.add(coord);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* if the first point and the last point are not the same, add a final
|
||||
* point that is the same as the first
|
||||
*/
|
||||
if (!LineSegmentUtil.pointsEqual(points.get(0),
|
||||
points.get(points.size() - 1))) {
|
||||
coord = new Coordinate(lon[0], lat[0]);
|
||||
points.add(coord);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open the log file
|
||||
*/
|
||||
|
@ -1023,7 +983,7 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
}
|
||||
logFileOpen = false;
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error("Error closing log file", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1042,7 +1002,7 @@ public class ArealDefinitionsDlg extends CaveSWTDialog {
|
|||
try {
|
||||
logger.write(stmt + "\n");
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error("Error writing to log file", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,527 +0,0 @@
|
|||
/**
|
||||
* This software was developed and / or modified by Raytheon Company,
|
||||
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
*
|
||||
* U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
* This software product contains export-restricted data whose
|
||||
* export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
* to non-U.S. persons whether in the United States or abroad requires
|
||||
* an export license or other authorization.
|
||||
*
|
||||
* Contractor Name: Raytheon Company
|
||||
* Contractor Address: 6825 Pine Street, Suite 340
|
||||
* Mail Stop B8
|
||||
* Omaha, NE 68106
|
||||
* 402.291.0100
|
||||
*
|
||||
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
* further licensing information.
|
||||
**/
|
||||
package com.raytheon.viz.hydrobase.dialogs;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import com.raytheon.viz.hydrobase.data.HrapBinList;
|
||||
import com.raytheon.viz.hydrobase.data.LineSegment;
|
||||
import com.raytheon.viz.hydrocommon.HydroConstants;
|
||||
import com.raytheon.viz.hydrocommon.util.HrapUtil;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
/**
|
||||
* Line Segment Utility Class.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Sep 15, 2009 2772 mpduff Initial creation
|
||||
* Apr 16, 2013 1790 rferrel Code clean up for non-blocking dialogs.
|
||||
* June 29, 2015 14630 xwei Fixed : Not able to import basins.dat with apostrophe and incorrect data posted
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author mpduff
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
public class LineSegmentUtil {
|
||||
// private static final int LATLON_NORTHWEST_CORNER = 0;
|
||||
|
||||
private static final int LATLON_NORTHEAST_CORNER = 1;
|
||||
|
||||
private static final int LATLON_SOUTHEAST_CORNER = 2;
|
||||
|
||||
private static final int LATLON_SOUTHWEST_CORNER = 3;
|
||||
|
||||
private static final double MIN_DOUBLE_ERROR = 0.00000001;
|
||||
|
||||
/**
|
||||
* Constructs an array from segments from an array of n points. the i th and
|
||||
* i+1 th points are used to initialize n-1 segments.
|
||||
*
|
||||
* @param points
|
||||
* List of Point objects
|
||||
*/
|
||||
public static List<LineSegment> getSegmentsFromPoints(
|
||||
List<Coordinate> points) {
|
||||
List<LineSegment> lineSegments = new ArrayList<LineSegment>();
|
||||
|
||||
for (int i = 0; i < points.size() - 1; i++) {
|
||||
LineSegment segment = initLineSegment(points.get(i),
|
||||
points.get(i + 1));
|
||||
lineSegments.add(segment);
|
||||
}
|
||||
|
||||
return lineSegments;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks two Points for equality.
|
||||
*
|
||||
* @param p1
|
||||
* Point 1
|
||||
* @param p2
|
||||
* Point 2
|
||||
* @return true if points are equal
|
||||
*/
|
||||
public static boolean pointsEqual(Coordinate p1, Coordinate p2) {
|
||||
boolean equal = false;
|
||||
|
||||
if ((p1.x == p2.x) && (p1.y == p2.y)) {
|
||||
equal = true;
|
||||
}
|
||||
|
||||
return equal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given 2 points, initializes the LineSegment structure.
|
||||
*
|
||||
* Determines if the segment is a vertical line. Calculates slope and base
|
||||
* (if not vertical) Stores the x_value if it is vertical.
|
||||
*
|
||||
* Calculates the max and min x and y values of the segment.
|
||||
*
|
||||
* @param p1
|
||||
* Point 1
|
||||
* @param p2
|
||||
* Point 2
|
||||
* @return LineSegment object
|
||||
*/
|
||||
public static LineSegment initLineSegment(Coordinate p1, Coordinate p2) {
|
||||
LineSegment seg = new LineSegment();
|
||||
double rise;
|
||||
double run;
|
||||
|
||||
/* copy the points to the LineSegment structure */
|
||||
seg.setPoint1(p1);
|
||||
seg.setPoint2(p2);
|
||||
|
||||
/* calculate rise and run, set slope and base of line */
|
||||
rise = p2.y - p1.y;
|
||||
run = p2.x - p1.x;
|
||||
|
||||
if (run == 0) {
|
||||
seg.setVertical(true);
|
||||
seg.setXValue(p1.x);
|
||||
} else {
|
||||
seg.setVertical(false);
|
||||
seg.setXValue(p1.x);
|
||||
|
||||
seg.setSlope(rise / run);
|
||||
seg.setBase(p1.y - (seg.getSlope() * p1.x));
|
||||
}
|
||||
|
||||
/* set the max and min box */
|
||||
Coordinate min = new Coordinate();
|
||||
seg.setMin(min);
|
||||
seg.getMin().y = Math.min(seg.getPoint1().y, seg.getPoint2().y);
|
||||
seg.getMin().x = Math.min(seg.getPoint1().x, seg.getPoint2().x);
|
||||
|
||||
Coordinate max = new Coordinate();
|
||||
seg.setMax(max);
|
||||
seg.getMax().y = Math.max(seg.getPoint1().y, seg.getPoint2().y);
|
||||
seg.getMax().x = Math.max(seg.getPoint1().x, seg.getPoint2().x);
|
||||
|
||||
return seg;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an array of line segments, this function fills the HrapBinList
|
||||
* variable with the bins inside the area defined by the segments.
|
||||
*
|
||||
* Algorithm:
|
||||
*
|
||||
* Find the max and min lat-lon corners of the area. Convert the lat-lon to
|
||||
* HRAP coords. Widen the HRAP box to search, to make sure it starts outside
|
||||
* the polygon.
|
||||
*
|
||||
* For each row For each column
|
||||
*
|
||||
* Draw a segment from the previous column. If that segment intersects one
|
||||
* of the polygon's segments, then add the number of intersections to the
|
||||
* total number of intersections for the row. If the row intersections is
|
||||
* odd, then the hrap bin is in the polygon, otherwise it is outside.
|
||||
*
|
||||
* Compute area for each bin that is within the polygon. Add it to total for
|
||||
* area.
|
||||
*
|
||||
* @param segments
|
||||
* List of LineSegment objects
|
||||
*/
|
||||
public static HrapBinList getHrapBinListFromSegments(
|
||||
List<LineSegment> segments) {
|
||||
HrapBinList binList = new HrapBinList();
|
||||
Coordinate maxLatLon;
|
||||
Coordinate minLatLon;
|
||||
Coordinate startLatLon;
|
||||
Coordinate endLatLon;
|
||||
List<Coordinate> points = new ArrayList<Coordinate>();
|
||||
double r;
|
||||
double c;
|
||||
double maxCol;
|
||||
double minCol;
|
||||
double maxRow;
|
||||
double minRow;
|
||||
double singleBinArea = 0;
|
||||
boolean inside = false;
|
||||
int index = 0;
|
||||
int numIntersections = 0;
|
||||
LineSegment segment;
|
||||
|
||||
/* init HrapBinList */
|
||||
binList.setNumBins(0);
|
||||
binList.setNumRows(0);
|
||||
binList.setArea(0);
|
||||
|
||||
/* get the max and min points of the segments */
|
||||
maxLatLon = LineSegmentUtil.getMaxXY(segments);
|
||||
minLatLon = LineSegmentUtil.getMinXY(segments);
|
||||
|
||||
/*
|
||||
* Determine the HRAP box that will completely enclose the latitude /
|
||||
* longitude box defined by the max lat/lon and the min lat/lon pairs
|
||||
* retrieved above.
|
||||
*/
|
||||
Coordinate hrap = HrapUtil.latLonToHrap(new Coordinate(maxLatLon.x, maxLatLon.y));
|
||||
minRow = hrap.y;
|
||||
maxRow = hrap.y;
|
||||
minCol = hrap.x;
|
||||
maxCol = hrap.x;
|
||||
|
||||
for (int i = LATLON_NORTHEAST_CORNER; i <= LATLON_SOUTHWEST_CORNER; i++) {
|
||||
switch (i) {
|
||||
case LATLON_NORTHEAST_CORNER:
|
||||
hrap = HrapUtil.latLonToHrap(new Coordinate(minLatLon.x, maxLatLon.y));
|
||||
break;
|
||||
|
||||
case LATLON_SOUTHEAST_CORNER:
|
||||
hrap = HrapUtil.latLonToHrap(new Coordinate(minLatLon.x, minLatLon.y));
|
||||
break;
|
||||
|
||||
case LATLON_SOUTHWEST_CORNER:
|
||||
hrap = HrapUtil.latLonToHrap(new Coordinate(maxLatLon.x, minLatLon.y));
|
||||
break;
|
||||
|
||||
default:
|
||||
/* Execution should never reach this point. */
|
||||
break;
|
||||
}
|
||||
r = hrap.y;
|
||||
c = hrap.x;
|
||||
|
||||
if (c < minCol) {
|
||||
minCol = c;
|
||||
} else if (c > maxCol) {
|
||||
maxCol = c;
|
||||
} else if (r < minRow) {
|
||||
minRow = r;
|
||||
} else if (r > maxRow) {
|
||||
maxRow = r;
|
||||
}
|
||||
}
|
||||
|
||||
maxRow = Math.floor(maxRow);
|
||||
maxCol = Math.floor(maxCol);
|
||||
minRow = Math.floor(minRow);
|
||||
minCol = Math.floor(minCol);
|
||||
|
||||
/* expand the box to make sure polygon has been covered */
|
||||
minRow -= 2;
|
||||
minCol -= 2;
|
||||
maxRow += 2;
|
||||
maxCol += 2;
|
||||
|
||||
for (r = minRow + 0.5; r <= maxRow; r++) {
|
||||
inside = false;
|
||||
numIntersections = 0;
|
||||
|
||||
/* init the first lat lon point */
|
||||
startLatLon = new Coordinate(HrapUtil.hrapToLatLon(new Coordinate(
|
||||
minCol - 0.5, r)));
|
||||
|
||||
for (c = minCol + 0.5; c <= maxCol; c++) {
|
||||
/* get the lat lon coordinate from the hrap row and column */
|
||||
endLatLon = new Coordinate(
|
||||
HrapUtil.hrapToLatLon(new Coordinate(c, r)));
|
||||
|
||||
/* create a segment from start to end */
|
||||
segment = initLineSegment(startLatLon, endLatLon);
|
||||
|
||||
points = getIntersectionPoints(segment, segments);
|
||||
|
||||
index = (int) binList.getNumRows();
|
||||
|
||||
if (points.size() > 0) {
|
||||
numIntersections += points.size();
|
||||
}
|
||||
|
||||
/*
|
||||
* key of algorithm: if the number of intersections is odd, then
|
||||
* you are inside the polygon, else outside
|
||||
*/
|
||||
if ((numIntersections % 2) == 1) {
|
||||
/* if previous bin was inside */
|
||||
if (inside) {
|
||||
binList.getEndCols().set(index, (long) c);
|
||||
binList.setNumBins(binList.getNumBins() + 1);
|
||||
singleBinArea = HrapUtil.getHrapBinArea(new Coordinate(
|
||||
c, r));
|
||||
binList.setArea(binList.getArea() + singleBinArea);
|
||||
} else {
|
||||
/* previous bin was outside */
|
||||
if ( index >= binList.getRows().size() ){
|
||||
|
||||
binList.getRows().add((long) r);
|
||||
binList.getBeginCols().add((long) c);
|
||||
binList.getEndCols().add((long) c);
|
||||
|
||||
}else{
|
||||
|
||||
binList.getRows().set( index, (long) r );
|
||||
binList.getBeginCols().set( index, (long) c );
|
||||
binList.getEndCols().set( index, (long) c );
|
||||
|
||||
}
|
||||
|
||||
|
||||
binList.setNumBins(binList.getNumBins() + 1);
|
||||
singleBinArea = HrapUtil.getHrapBinArea(new Coordinate(
|
||||
c, r));
|
||||
binList.setArea(binList.getArea() + singleBinArea);
|
||||
|
||||
inside = true;
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
* else if previous bin was inside and, since this one is
|
||||
* not, increment the row counter
|
||||
*/
|
||||
/* intersections is even */
|
||||
if (inside) {
|
||||
inside = false;
|
||||
binList.setNumRows(binList.getNumRows() + 1);
|
||||
}
|
||||
}
|
||||
|
||||
startLatLon = endLatLon;
|
||||
}
|
||||
}
|
||||
|
||||
return binList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the max x and y values for a list of segments.
|
||||
*
|
||||
* @param segments
|
||||
* List<LineSegment>
|
||||
* @return Max Point of the segment list
|
||||
*/
|
||||
public static Coordinate getMaxXY(List<LineSegment> segments) {
|
||||
Coordinate p = segments.get(0).getMax();
|
||||
|
||||
for (int i = 1; i < segments.size(); i++) {
|
||||
p.x = Math.max(segments.get(i).getMax().x, p.x);
|
||||
p.y = Math.max(segments.get(i).getMax().y, p.y);
|
||||
}
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the min x and y values for a list of segments.
|
||||
*
|
||||
* @param segments
|
||||
* List<LineSegment>
|
||||
* @return Min Point of the segment list
|
||||
*/
|
||||
public static Coordinate getMinXY(List<LineSegment> segments) {
|
||||
Coordinate p = new Coordinate();
|
||||
|
||||
p.x = (segments.get(0).getMin().x);
|
||||
p.y = (segments.get(0).getMin().y);
|
||||
|
||||
for (int i = 1; i < segments.size(); i++) {
|
||||
p.x = Math.min(segments.get(i).getMin().x, p.x);
|
||||
p.y = Math.min(segments.get(i).getMin().y, p.y);
|
||||
}
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the points of intersection between one segment and an array of
|
||||
* segments
|
||||
*
|
||||
* @param segment
|
||||
* Single LineSegment
|
||||
* @param segments
|
||||
* List of LineSegments
|
||||
* @param numSegments
|
||||
* Number of segments
|
||||
*/
|
||||
public static List<Coordinate> getIntersectionPoints(LineSegment segment,
|
||||
List<LineSegment> segments) {
|
||||
Coordinate intersectPoint;
|
||||
List<Coordinate> points = new ArrayList<Coordinate>();
|
||||
|
||||
for (int i = 0; i < segments.size(); i++) {
|
||||
intersectPoint = getIntersectionOfSegments(segment, segments.get(i));
|
||||
|
||||
if (intersectPoint != null) {
|
||||
points.add(intersectPoint);
|
||||
}
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the point of intersection (if any) between two segments
|
||||
*
|
||||
* @param s1
|
||||
* LineSegment 1
|
||||
* @param s2
|
||||
* LineSegment 2
|
||||
* @return The intersection point, null if no intersection
|
||||
*/
|
||||
public static Coordinate getIntersectionOfSegments(LineSegment s1,
|
||||
LineSegment s2) {
|
||||
Coordinate p = new Coordinate(0, 0);
|
||||
double x;
|
||||
double y;
|
||||
double y1;
|
||||
double y2;
|
||||
|
||||
LineSegment tempVertical;
|
||||
LineSegment tempNonVertical;
|
||||
|
||||
boolean intersect = false;
|
||||
|
||||
if (s1.isVertical() && s2.isVertical()) {
|
||||
intersect = false;
|
||||
} else if (s1.isVertical() || s2.isVertical()) {
|
||||
/* if one is a vertical line and one is not */
|
||||
|
||||
/* assign to tempVertical and tempNonVertical */
|
||||
if (s1.isVertical()) {
|
||||
tempVertical = s1;
|
||||
tempNonVertical = s2;
|
||||
} else {
|
||||
tempVertical = s2;
|
||||
tempNonVertical = s1;
|
||||
}
|
||||
|
||||
/*
|
||||
* see if vertical segment is in the x range of the non vertical
|
||||
* segment
|
||||
*/
|
||||
if (isBetweenInclusive(tempVertical.getXValue(),
|
||||
tempNonVertical.getMin().x, tempNonVertical.getMax().x)) {
|
||||
y = evaluateLineSegmentAtX(tempNonVertical,
|
||||
tempVertical.getXValue());
|
||||
|
||||
if ((y != HydroConstants.MISSING_VALUE)
|
||||
&& isBetweenInclusive(y, tempVertical.getMin().y,
|
||||
tempVertical.getMax().y)) {
|
||||
p.x = (tempVertical.getXValue());
|
||||
p.y = (y);
|
||||
intersect = true;
|
||||
} else {
|
||||
intersect = false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
/* the lines are both non vertical */
|
||||
x = (s1.getBase() - s2.getBase()) / (s2.getSlope() - s1.getSlope());
|
||||
|
||||
y1 = evaluateLineSegmentAtX(s1, x);
|
||||
y2 = evaluateLineSegmentAtX(s2, x);
|
||||
|
||||
if ((y1 == HydroConstants.MISSING_VALUE)
|
||||
|| (y2 == HydroConstants.MISSING_VALUE)) {
|
||||
intersect = false;
|
||||
} else if ((Math.abs(y1 - y2) > MIN_DOUBLE_ERROR)) {
|
||||
intersect = false;
|
||||
} else {
|
||||
p.x = x;
|
||||
p.y = y1;
|
||||
intersect = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (intersect) {
|
||||
return p;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the Y value of a segment, given x. If the line is Vertical or X
|
||||
* is not in the domain, then the returned value is invalid.
|
||||
*
|
||||
* @param segment
|
||||
* The line segment
|
||||
* @param x
|
||||
* The x value
|
||||
* @return The y value
|
||||
*/
|
||||
public static double evaluateLineSegmentAtX(LineSegment segment, double x) {
|
||||
double y = 0;
|
||||
|
||||
if (segment.isVertical()) {
|
||||
y = segment.getMin().y;
|
||||
} else if (isBetweenInclusive(x, segment.getMin().x, segment.getMax().x)) {
|
||||
y = (segment.getSlope() * x) + segment.getBase();
|
||||
} else {
|
||||
y = HydroConstants.MISSING_VALUE;
|
||||
}
|
||||
|
||||
return y;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether (x is >= start) and (x >= end)
|
||||
*
|
||||
* @param x
|
||||
* The value to check
|
||||
* @param start
|
||||
* The start value
|
||||
* @param end
|
||||
* The end value
|
||||
* @return true if (x is >= start) and (x >= end)
|
||||
*/
|
||||
public static boolean isBetweenInclusive(double x, double start, double end) {
|
||||
boolean isBetween = false;
|
||||
|
||||
if ((x >= start) && (x <= end)) {
|
||||
isBetween = true;
|
||||
}
|
||||
|
||||
return isBetween;
|
||||
}
|
||||
}
|
|
@ -28,6 +28,8 @@ package com.raytheon.viz.hydrocommon.data;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Sep 14, 2015 17978 lbousaidi Initial Creation
|
||||
*
|
||||
* Dec 05, 2015 18357 xwei Fixed error in opening Timeseries for Timesteps
|
||||
* </pre>
|
||||
*
|
||||
* @author lbousaidi
|
||||
|
@ -74,6 +76,10 @@ public class GageDataTimeStep extends GageData {
|
|||
setValue( gage.getValue() );
|
||||
setThreatIndex( gage.getThreatIndex() );
|
||||
|
||||
setPe( gage.getPe() );
|
||||
setTs( gage.getTs() );
|
||||
setExtremum( gage.getExtremum() );
|
||||
|
||||
setP( gage );
|
||||
setV( gage );
|
||||
return;
|
||||
|
|
|
@ -31,9 +31,13 @@ import java.util.List;
|
|||
import java.util.TimeZone;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.Hourlypc;
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.HourlypcId;
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.Hourlypp;
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.HourlyppId;
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.IHourlyTS;
|
||||
import com.raytheon.uf.common.dataplugin.shef.tables.Ingestfilter;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.viz.core.catalog.DirectDbQuery;
|
||||
import com.raytheon.uf.viz.core.catalog.DirectDbQuery.QueryLanguage;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
|
@ -51,6 +55,10 @@ import com.raytheon.viz.hydrocommon.HydroConstants;
|
|||
* 11/19/2008 1662 grichard Updated loadPeRaw.
|
||||
* 11/24/2008 1662 grichard Added utility methods for raw precip.
|
||||
* 09/26/2012 15385 lbousaidi fixed duplicate entries in gage table.
|
||||
* 11/04/2015 5100 bkowal Fixes to handle records that spanned
|
||||
* hour 24 to hour 1.
|
||||
* 11/16/2015 5100 bkowal Generated a better query to handle the case when
|
||||
* the requested data spans two days.
|
||||
* </pre>
|
||||
*
|
||||
* @author grichard
|
||||
|
@ -112,6 +120,9 @@ public final class PrecipUtil {
|
|||
}
|
||||
}
|
||||
|
||||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(PrecipUtil.class);
|
||||
|
||||
public static enum PrecipPEmode {
|
||||
PrecipPEbest, PrecipPEPP, PrecipPEPC
|
||||
}
|
||||
|
@ -189,8 +200,8 @@ public final class PrecipUtil {
|
|||
*/
|
||||
public static final String SUM_PC_REPORTS = "sum_pc_reports";
|
||||
|
||||
static {
|
||||
sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||
static {
|
||||
sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
|
||||
}
|
||||
|
||||
|
@ -235,7 +246,7 @@ public final class PrecipUtil {
|
|||
String dur = "";
|
||||
|
||||
if ((typeSource != null) && !typeSource.isEmpty()) {
|
||||
ts_clause = build_ts_clause(typeSource);
|
||||
ts_clause = build_ts_clause(typeSource, "id.ts");
|
||||
if (ts_clause == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -350,14 +361,12 @@ public final class PrecipUtil {
|
|||
}
|
||||
|
||||
query.append(where.toString());
|
||||
// Echo the query string to the console.
|
||||
System.out.println("Query = " + query.toString());
|
||||
|
||||
try {
|
||||
retVal = (ArrayList<Object[]>) DirectDbQuery.executeQuery(
|
||||
query.toString(), HydroConstants.IHFS, QueryLanguage.SQL);
|
||||
} catch (VizException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error("Failed to retrieve the PE raw data.", e);
|
||||
}
|
||||
|
||||
return retVal;
|
||||
|
@ -864,11 +873,11 @@ public final class PrecipUtil {
|
|||
* @param ts
|
||||
* @return
|
||||
*/
|
||||
public String build_ts_clause(List<String> ts) {
|
||||
public String build_ts_clause(List<String> ts, String tsField) {
|
||||
if ((ts == null) || ts.isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
StringBuilder tsClause = new StringBuilder("id.ts ");
|
||||
StringBuilder tsClause = new StringBuilder(tsField.trim() + " ");
|
||||
|
||||
if (ts.get(0).startsWith("!")) {
|
||||
tsClause.append("not in ('");
|
||||
|
@ -1121,7 +1130,7 @@ public final class PrecipUtil {
|
|||
return MISSING_PRECIP;
|
||||
}
|
||||
|
||||
if (pc_timet.after(start_date)) {
|
||||
if (pc_timet.after(start_date) && end_hour != 1) {
|
||||
/*
|
||||
* An exact match for the start date could not be found. Set the
|
||||
* starting hour to 1.
|
||||
|
@ -1183,7 +1192,7 @@ public final class PrecipUtil {
|
|||
start_value = (short) MISSING_PRECIP;
|
||||
|
||||
while ((pStartPCIdx < hourlyPCList.size())
|
||||
&& ((pStartPCIdx != pEndPCIdx) || (start_hour < end_hour))) {
|
||||
&& ((pStartPCIdx != pEndPCIdx) || (start_hour < end_hour) || (start_hour == 24 && end_hour == 1))) {
|
||||
Hourlypc pStartPC = hourlyPCList.get(pStartPCIdx);
|
||||
start_value = get_hour_slot_value(pStartPC, start_hour);
|
||||
|
||||
|
@ -1218,7 +1227,7 @@ public final class PrecipUtil {
|
|||
end_value = (short) MISSING_PRECIP;
|
||||
|
||||
while ((pEndPCIdx > pStartPCIdx)
|
||||
|| ((pEndPCIdx == pStartPCIdx) && (end_hour > start_hour))) {
|
||||
|| ((pEndPCIdx == pStartPCIdx) && (end_hour > start_hour) || (start_hour == 24 && end_hour == 1))) {
|
||||
Hourlypc pEndPC = hourlyPCList.get(pEndPCIdx);
|
||||
hour_index = end_hour - 1;
|
||||
end_value = get_hour_slot_value(pEndPC, end_hour);
|
||||
|
@ -1534,60 +1543,80 @@ public final class PrecipUtil {
|
|||
return ts_group_count;
|
||||
}
|
||||
|
||||
/**
|
||||
* buildWhereClause
|
||||
*
|
||||
* @param query_begin_time
|
||||
* @param query_end_time
|
||||
* @param lid
|
||||
* @param ts
|
||||
* @return
|
||||
*/
|
||||
private String buildWhereClause(Date query_begin_time, Date query_end_time,
|
||||
String lid, List<String> ts) {
|
||||
/*
|
||||
* Need special logic to account for accumulation intervals which start
|
||||
* at 00Z. This is because the 00Z PC value is actually placed in the 24
|
||||
* hour slot of the previous day.
|
||||
*/
|
||||
private String buildHourlyHQL(Date query_begin_time, Date query_end_time,
|
||||
String lid, List<String> ts, final String entityName,
|
||||
String selectAdditional) {
|
||||
|
||||
final String orderBy = " ORDER BY b.id.lid ASC, b.id.ts ASC, b.id.obsdate ASC";
|
||||
|
||||
StringBuilder fromList = new StringBuilder(
|
||||
" b.id.lid, b.id.ts, b.id.obsdate, %s, %s, b.hour1, ");
|
||||
fromList.append("b.hour2, b.hour3, b.hour4, b.hour5, b.hour6, b.hour7, b.hour8, b.hour9, b.hour10, ");
|
||||
fromList.append("b.hour11, b.hour12, b.hour13, b.hour14, b.hour15, b.hour16, b.hour17, b.hour18, ");
|
||||
fromList.append("b.hour19, b.hour20, b.hour21, b.hour22, b.hour23, ");
|
||||
if (selectAdditional != null
|
||||
&& selectAdditional.trim().startsWith(", ") == false) {
|
||||
selectAdditional = ", " + selectAdditional;
|
||||
}
|
||||
|
||||
Calendar pTm = null;
|
||||
pTm = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
|
||||
pTm.setTime(query_begin_time);
|
||||
if (pTm.get(Calendar.HOUR_OF_DAY) == 0) {
|
||||
if (pTm.get(Calendar.HOUR_OF_DAY) == 0
|
||||
|| pTm.get(Calendar.HOUR_OF_DAY) == 1) {
|
||||
pTm.add(Calendar.DAY_OF_MONTH, -1);
|
||||
}
|
||||
/* Need to convert the query begin and end times into dates. */
|
||||
String beginstr = sdf.format(pTm.getTime());
|
||||
|
||||
pTm.setTime(query_end_time);
|
||||
String beginstr = sdf.format(pTm.getTime());
|
||||
|
||||
pTm.setTime(query_end_time);
|
||||
if (pTm.get(Calendar.HOUR_OF_DAY) == 0) {
|
||||
pTm.add(Calendar.DAY_OF_MONTH, -1);
|
||||
}
|
||||
|
||||
|
||||
String endstr = sdf.format(pTm.getTime());
|
||||
|
||||
/* consider according to whether type-source specified. */
|
||||
/* load data which is not missing value (-9999.0) */
|
||||
StringBuilder where = new StringBuilder("WHERE ");
|
||||
String where = null;
|
||||
String minuteOffsetStr = null;
|
||||
String hourlyQCStr = null;
|
||||
if (endstr.equals(beginstr)) {
|
||||
fromList.append("b.hour24 ");
|
||||
if (selectAdditional != null) {
|
||||
fromList.append(selectAdditional);
|
||||
}
|
||||
fromList.append(" FROM ").append(entityName).append(" b ");
|
||||
where = " b.id.obsdate = '" + beginstr + "'";
|
||||
minuteOffsetStr = "b.minuteOffset";
|
||||
hourlyQCStr = "b.hourlyQc";
|
||||
} else {
|
||||
fromList.append("a.hour24 ");
|
||||
if (selectAdditional != null) {
|
||||
fromList.append(selectAdditional);
|
||||
}
|
||||
fromList.append(" FROM ").append(entityName).append(" a, ")
|
||||
.append(entityName).append(" b ");
|
||||
where = " a.id.lid = b.id.lid AND a.id.ts = b.id.ts AND a.id.obsdate = '"
|
||||
+ beginstr + "' AND b.id.obsdate = '" + endstr + "'";
|
||||
minuteOffsetStr = "substring(b.minuteOffset, 1, 23) || substring(a.minuteOffset, 24, 24)";
|
||||
hourlyQCStr = "substring(b.hourlyQc, 1, 23) || substring(a.hourlyQc, 24, 24)";
|
||||
}
|
||||
|
||||
StringBuilder whereStr = new StringBuilder(where);
|
||||
if (lid != null) {
|
||||
where.append("id.lid = '");
|
||||
where.append(lid);
|
||||
where.append("' AND ");
|
||||
whereStr.append(" AND ");
|
||||
whereStr.append("id.lid = '");
|
||||
whereStr.append(lid);
|
||||
}
|
||||
|
||||
if ((ts != null) && (ts.size() > 0)) {
|
||||
where.append(build_ts_clause(ts));
|
||||
where.append(" AND ");
|
||||
whereStr.append(" AND ");
|
||||
whereStr.append(build_ts_clause(ts, "b.id.ts"));
|
||||
}
|
||||
|
||||
|
||||
where.append("id.obsdate between '");
|
||||
where.append(beginstr);
|
||||
where.append("' AND '");
|
||||
where.append(endstr);
|
||||
where.append("' ORDER BY id.lid ASC, id.ts ASC, id.obsdate ASC");
|
||||
return where.toString();
|
||||
return new StringBuilder("SELECT")
|
||||
.append(String.format(fromList.toString(), minuteOffsetStr,
|
||||
hourlyQCStr)).append("WHERE")
|
||||
.append(whereStr.toString()).append(orderBy).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1721,7 +1750,7 @@ public final class PrecipUtil {
|
|||
break;
|
||||
|
||||
case 24:
|
||||
|
||||
case 0:
|
||||
precip_value = pHourlyPP.getHour24();
|
||||
break;
|
||||
|
||||
|
@ -1765,17 +1794,49 @@ public final class PrecipUtil {
|
|||
public ArrayList<Hourlypc> load_PC_hourly(Date query_begin_time,
|
||||
Date query_end_time, String lid, List<String> ts) {
|
||||
|
||||
ArrayList<Hourlypc> pHourlyPC = null;
|
||||
final String fullQuery = this.buildHourlyHQL(query_begin_time,
|
||||
query_end_time, lid, ts, Hourlypc.class.getName(), null);
|
||||
List<Object[]> results = null;
|
||||
try {
|
||||
results = DirectDbQuery.executeQuery(fullQuery, "ihfs",
|
||||
QueryLanguage.HQL);
|
||||
} catch (VizException e) {
|
||||
statusHandler.error("Failed to retrieve the Hourly PC data.", e);
|
||||
// will return an empty list by the next if statement due to the
|
||||
// null results.
|
||||
}
|
||||
|
||||
String where = buildWhereClause(query_begin_time, query_end_time, lid,
|
||||
ts);
|
||||
if (results == null || results.isEmpty()) {
|
||||
return new ArrayList<>(1);
|
||||
}
|
||||
|
||||
/* get the data */
|
||||
pHourlyPC = IHFSDbGenerated.GetHourlyPC(where);
|
||||
System.out.println("SELECT * FROM HourlyPC " + where);
|
||||
System.out.println(pHourlyPC.size()
|
||||
+ " records retrieved from HourlyPC. ");
|
||||
return pHourlyPC;
|
||||
ArrayList<Hourlypc> hourlyPcRecords = new ArrayList<>(results.size());
|
||||
for (Object object : results) {
|
||||
Object[] dataValues = (Object[]) object;
|
||||
|
||||
/*
|
||||
* First few fields are needed to build an {@link HourlypcId}.
|
||||
*/
|
||||
HourlypcId id = new HourlypcId((String) dataValues[0],
|
||||
(String) dataValues[1], (Date) dataValues[2]);
|
||||
Hourlypc record = new Hourlypc(id, (String) dataValues[3],
|
||||
(String) dataValues[4], (Short) dataValues[5],
|
||||
(Short) dataValues[6], (Short) dataValues[7],
|
||||
(Short) dataValues[8], (Short) dataValues[9],
|
||||
(Short) dataValues[10], (Short) dataValues[11],
|
||||
(Short) dataValues[12], (Short) dataValues[13],
|
||||
(Short) dataValues[14], (Short) dataValues[15],
|
||||
(Short) dataValues[16], (Short) dataValues[17],
|
||||
(Short) dataValues[18], (Short) dataValues[19],
|
||||
(Short) dataValues[20], (Short) dataValues[21],
|
||||
(Short) dataValues[22], (Short) dataValues[23],
|
||||
(Short) dataValues[24], (Short) dataValues[25],
|
||||
(Short) dataValues[26], (Short) dataValues[27],
|
||||
(Short) dataValues[28]);
|
||||
hourlyPcRecords.add(record);
|
||||
}
|
||||
|
||||
return hourlyPcRecords;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1790,14 +1851,53 @@ public final class PrecipUtil {
|
|||
public ArrayList<Hourlypp> load_PP_hourly(Date query_begin_time,
|
||||
Date query_end_time, String lid, List<String> ts) {
|
||||
|
||||
ArrayList<Hourlypp> pHourlyPP = null;
|
||||
final String selectAdditional = ", b.sixhr06, b.sixhr12, b.sixhr18, b.sixhr24, b.sixhrqc, b.sixhroffset ";
|
||||
final String fullQuery = this.buildHourlyHQL(query_begin_time,
|
||||
query_end_time, lid, ts, Hourlypp.class.getName(),
|
||||
selectAdditional);
|
||||
List<Object[]> results = null;
|
||||
try {
|
||||
results = DirectDbQuery.executeQuery(fullQuery, "ihfs",
|
||||
QueryLanguage.HQL);
|
||||
} catch (VizException e) {
|
||||
statusHandler.error("Failed to retrieve the Hourly PP data.", e);
|
||||
// will return an empty list by the next if statement due to the
|
||||
// null results.
|
||||
}
|
||||
|
||||
String where = buildWhereClause(query_begin_time, query_end_time, lid,
|
||||
ts);
|
||||
if (results == null || results.isEmpty()) {
|
||||
return new ArrayList<>(1);
|
||||
}
|
||||
|
||||
/* get the data */
|
||||
pHourlyPP = IHFSDbGenerated.GetHourlyPP(where);
|
||||
ArrayList<Hourlypp> hourlyPpRecords = new ArrayList<>(results.size());
|
||||
for (Object object : results) {
|
||||
Object[] dataValues = (Object[]) object;
|
||||
|
||||
return pHourlyPP;
|
||||
/*
|
||||
* First few fields are needed to build an {@link HourlypcId}.
|
||||
*/
|
||||
HourlyppId id = new HourlyppId((String) dataValues[0],
|
||||
(String) dataValues[1], (Date) dataValues[2]);
|
||||
Hourlypp record = new Hourlypp(id, (String) dataValues[3],
|
||||
(String) dataValues[4], (Short) dataValues[5],
|
||||
(Short) dataValues[6], (Short) dataValues[7],
|
||||
(Short) dataValues[8], (Short) dataValues[9],
|
||||
(Short) dataValues[10], (Short) dataValues[11],
|
||||
(Short) dataValues[12], (Short) dataValues[13],
|
||||
(Short) dataValues[14], (Short) dataValues[15],
|
||||
(Short) dataValues[16], (Short) dataValues[17],
|
||||
(Short) dataValues[18], (Short) dataValues[19],
|
||||
(Short) dataValues[20], (Short) dataValues[21],
|
||||
(Short) dataValues[22], (Short) dataValues[23],
|
||||
(Short) dataValues[24], (Short) dataValues[25],
|
||||
(Short) dataValues[26], (Short) dataValues[27],
|
||||
(Short) dataValues[28], (Short) dataValues[29],
|
||||
(Short) dataValues[30], (Short) dataValues[31],
|
||||
(Short) dataValues[32], (String) dataValues[33],
|
||||
(String) dataValues[34]);
|
||||
hourlyPpRecords.add(record);
|
||||
}
|
||||
|
||||
return hourlyPpRecords;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,12 +35,10 @@ import javax.measure.converter.UnitConverter;
|
|||
import javax.measure.unit.Unit;
|
||||
|
||||
import org.eclipse.core.commands.Command;
|
||||
import org.eclipse.core.commands.ExecutionEvent;
|
||||
import org.eclipse.core.commands.ExecutionException;
|
||||
import org.eclipse.core.commands.State;
|
||||
import org.eclipse.jface.dialogs.MessageDialog;
|
||||
import org.eclipse.swt.graphics.RGB;
|
||||
import org.eclipse.swt.widgets.Event;
|
||||
import org.eclipse.swt.widgets.Shell;
|
||||
import org.eclipse.ui.IEditorPart;
|
||||
import org.eclipse.ui.PlatformUI;
|
||||
|
@ -48,7 +46,6 @@ import org.eclipse.ui.commands.ICommandService;
|
|||
import org.eclipse.ui.handlers.HandlerUtil;
|
||||
import org.eclipse.ui.handlers.RadioState;
|
||||
import org.eclipse.ui.handlers.RegistryToggleState;
|
||||
import org.eclipse.ui.operations.RedoActionHandler;
|
||||
|
||||
import com.raytheon.uf.common.colormap.ColorMap;
|
||||
import com.raytheon.uf.common.colormap.prefs.ColorMapParameters;
|
||||
|
@ -63,6 +60,7 @@ import com.raytheon.uf.common.time.DataTime;
|
|||
import com.raytheon.uf.common.util.FileUtil;
|
||||
import com.raytheon.uf.viz.core.IDisplayPane;
|
||||
import com.raytheon.uf.viz.core.IDisplayPaneContainer;
|
||||
import com.raytheon.uf.viz.core.IExtent;
|
||||
import com.raytheon.uf.viz.core.RGBColors;
|
||||
import com.raytheon.uf.viz.core.VizApp;
|
||||
import com.raytheon.uf.viz.core.datastructure.LoopProperties;
|
||||
|
@ -84,7 +82,6 @@ import com.raytheon.viz.mpe.MPECommandConstants;
|
|||
import com.raytheon.viz.mpe.MPEDateFormatter;
|
||||
import com.raytheon.viz.mpe.core.MPEDataManager;
|
||||
import com.raytheon.viz.mpe.core.MPEDataManager.MPERadarLoc;
|
||||
import com.raytheon.viz.mpe.ui.actions.ClearMPEData;
|
||||
import com.raytheon.viz.mpe.ui.dialogs.hourlyradar.RadarDataManager;
|
||||
import com.raytheon.viz.mpe.ui.displays.MPEMapRenderableDisplay;
|
||||
import com.raytheon.viz.mpe.ui.rsc.MPEFieldResource;
|
||||
|
@ -116,6 +113,7 @@ import com.raytheon.viz.ui.editor.IMultiPaneEditor;
|
|||
* Jul 8, 2015 16790 snaples Updated setCurrentEditDate to refresh resources when dateMap is stale.
|
||||
* Jul 29, 2015 17471 snaples Updated editTime to ensure that it always references "GMT" timezone.
|
||||
* Sep 29, 2015 16790 snaples Fixed issue with date not following the CAVE time when changed, and fixed time matching issue.
|
||||
* Dec 02, 2015 18104 snaples Fixed issue of not unzooming when using Pan/Zoom tools.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -314,6 +312,8 @@ public class MPEDisplayManager {
|
|||
private static GageColor gageColor;
|
||||
|
||||
private static GageMissingOptions gageMissing;
|
||||
|
||||
private static IExtent defaultExtent;
|
||||
|
||||
static {
|
||||
gageMissing = getCommandStateEnum(
|
||||
|
@ -480,6 +480,15 @@ public class MPEDisplayManager {
|
|||
@Override
|
||||
public void run() {
|
||||
MPEDisplayManager.this.toggleDisplayMode(DisplayMode.Image);
|
||||
if (defaultExtent == null) {
|
||||
IDisplayPaneContainer container = EditorUtil.getActiveVizContainer();
|
||||
if (container != null) {
|
||||
IDisplayPane pane = container.getActiveDisplayPane();
|
||||
if (pane != null) {
|
||||
setDefaultExtent(pane.getRenderableDisplay().getExtent());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -1295,4 +1304,12 @@ public class MPEDisplayManager {
|
|||
|
||||
return displayedAccumHrs;
|
||||
}
|
||||
|
||||
public static IExtent getDefaultExtent() {
|
||||
return defaultExtent;
|
||||
}
|
||||
|
||||
public static void setDefaultExtent(IExtent defaultExtent) {
|
||||
MPEDisplayManager.defaultExtent = defaultExtent;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package com.raytheon.viz.mpe.ui.actions;
|
||||
|
||||
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
|
||||
import com.raytheon.uf.common.ohd.AppsDefaults;
|
||||
import com.raytheon.viz.mpe.ui.MPEDisplayManager;
|
||||
import com.raytheon.viz.mpe.ui.dialogs.GroupEditStationsDialog;
|
||||
import com.raytheon.viz.mpe.ui.dialogs.QcPrecipOptionsDialog;
|
||||
|
@ -37,6 +38,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Jun 17, 2009 snaples Initial creation
|
||||
* May 06, 2011 #8994 jpiatt Added set precipitation value as zero
|
||||
* Sep 04, 2014 283 cgobs Fixed possible selection of filtered-out gages
|
||||
* Dec 2015 17388 ptilles Add test for mpe_dqc_6hr_24hr_set_bad token value
|
||||
* </pre>
|
||||
*
|
||||
* @author snaples
|
||||
|
@ -80,25 +82,23 @@ public class GroupEditPrecipStns {
|
|||
continue;
|
||||
}
|
||||
|
||||
//precip filter
|
||||
// precip filter
|
||||
if (DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[i].frain[time_pos].data < QcPrecipOptionsDialog
|
||||
.getPointFilterValue()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
//reverse precip filter
|
||||
// reverse precip filter
|
||||
if (DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[i].frain[time_pos].data > QcPrecipOptionsDialog
|
||||
.getPointFilterReverseValue()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
//elevation filter
|
||||
if (DailyQcUtils.precip_stations.get(i).elev < DailyQcUtils.elevation_filter_value)
|
||||
{
|
||||
|
||||
// elevation filter
|
||||
if (DailyQcUtils.precip_stations.get(i).elev < DailyQcUtils.elevation_filter_value) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* Retrieve the latitude and longitude of this station. */
|
||||
lat = DailyQcUtils.precip_stations.get(i).lat;
|
||||
lon = DailyQcUtils.precip_stations.get(i).lon;
|
||||
|
@ -198,17 +198,24 @@ public class GroupEditPrecipStns {
|
|||
}
|
||||
}
|
||||
|
||||
/* 6 hour data set bad set 24 hour bad too */
|
||||
/* 6 hour data set bad */
|
||||
/* check value of mpe_dqc_6hr_24hr_set_bad token */
|
||||
/* if token = ON, then change 24hr QC code to Bad for this station */
|
||||
/* if token = OFF, then do not change 24hr QC code for this station */
|
||||
|
||||
boolean mpe_dqc_6hr_24hr_set_bad = AppsDefaults.getInstance()
|
||||
.getBoolean("mpe_dqc_6hr_24hr_set_bad", true);
|
||||
|
||||
if (time_pos != 4
|
||||
&& GroupEditStationsDialog.group_qual == 1
|
||||
&& DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[isave].frain[4].qual != 5
|
||||
&& DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[isave].frain[4].qual != 4) {
|
||||
DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[isave].frain[4].qual = (short) GroupEditStationsDialog.group_qual;
|
||||
if (mpe_dqc_6hr_24hr_set_bad) {
|
||||
DailyQcUtils.pdata[DailyQcUtils.pcpn_day].stn[isave].frain[4].qual = (short) GroupEditStationsDialog.group_qual;
|
||||
}
|
||||
}
|
||||
|
||||
for (k = 0; k < 5; k++) {
|
||||
|
||||
if (k < 4) {
|
||||
time_pos = DailyQcUtils.pcpn_day * 4 + k;
|
||||
} else {
|
||||
|
|
|
@ -30,6 +30,7 @@ import com.raytheon.uf.viz.core.IDisplayPaneContainer;
|
|||
import com.raytheon.uf.viz.core.IExtent;
|
||||
import com.raytheon.uf.viz.core.drawables.IRenderableDisplay;
|
||||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.viz.mpe.ui.MPEDisplayManager;
|
||||
import com.raytheon.viz.ui.EditorUtil;
|
||||
import com.raytheon.viz.ui.perspectives.AbstractVizPerspectiveManager;
|
||||
import com.raytheon.viz.ui.perspectives.VizPerspectiveListener;
|
||||
|
@ -48,6 +49,7 @@ import com.raytheon.viz.ui.tools.ModalToolManager;
|
|||
* 11Apr2011 8738 jpiatt Initial Creation.
|
||||
* Jun 30, 2015 14317 snaples Fixed issue when toggling back from Areal Zoom,
|
||||
* not going to Pan mode.
|
||||
* Dec 02, 2015 18104 snaples Fixed issue of not unzooming when using Pan/Zoom tools.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -97,20 +99,28 @@ public class MPEZoomAction extends AbstractHandler {
|
|||
if (container != null) {
|
||||
pane = container.getActiveDisplayPane();
|
||||
if (pane != null) {
|
||||
unZoomedExtent = pane.getRenderableDisplay().getExtent();
|
||||
unZoomedExtent = pane.getRenderableDisplay().getExtent();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
mgr.activateToolSet(ZOOM_ID);
|
||||
break;
|
||||
|
||||
} else {
|
||||
|
||||
pane.getRenderableDisplay().setExtent(unZoomedExtent);
|
||||
pane.getDescriptor().getRenderableDisplay().refresh();
|
||||
if (display == null) {
|
||||
container = EditorUtil.getActiveVizContainer();
|
||||
if (container != null) {
|
||||
pane = container.getActiveDisplayPane();
|
||||
if (pane != null) {
|
||||
if (unZoomedExtent == null){
|
||||
unZoomedExtent = MPEDisplayManager.getDefaultExtent();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
pane.getDescriptor().getRenderableDisplay().setExtent(unZoomedExtent);
|
||||
pane.getDescriptor().getRenderableDisplay().refresh();
|
||||
mgr.activateToolSet(PAN_ID);
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
* ------------ ---------- ----------- --------------------------
|
||||
* Jan 7, 2015 16954 cgobs Fix for cv_use issue - using getFieldName() in certain parts.
|
||||
* Feb 4, 2015 17094 cgobs Fix for fieldType being too long for mapx_field_type column in RWResult table.
|
||||
* Nov 05, 2015 15045 snaples Added resourceChanged call at end of save method to read in any edits.
|
||||
* </pre>
|
||||
**/
|
||||
package com.raytheon.viz.mpe.ui.actions;
|
||||
|
@ -49,6 +50,7 @@ import com.raytheon.uf.common.mpe.util.XmrgFile.XmrgHeader;
|
|||
import com.raytheon.uf.common.ohd.AppsDefaults;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.SimulatedTime;
|
||||
import com.raytheon.uf.viz.core.rsc.IResourceDataChanged.ChangeType;
|
||||
import com.raytheon.viz.hydrocommon.whfslib.IHFSDbGenerated;
|
||||
import com.raytheon.viz.mpe.MPEDateFormatter;
|
||||
import com.raytheon.viz.mpe.core.MPEDataManager;
|
||||
|
@ -298,6 +300,7 @@ public class SaveBestEstimate {
|
|||
}
|
||||
}
|
||||
MPEDisplayManager.getCurrent().setSavedData(true);
|
||||
MPEDisplayManager.getCurrent().getDisplayedFieldResource().resourceChanged(ChangeType.DATA_UPDATE, editDate);
|
||||
}
|
||||
|
||||
private static void mpegui_save_image(BufferedImage bi, String format,
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -68,13 +68,15 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
|
|||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Sep 02, 2008 randerso Initial creation
|
||||
* May 01, 2013 15920 lbousaidi gages get updated after clicking on
|
||||
* Regenerate Hour Fields without closing 7x7 Gui.
|
||||
* Jun 05, 2013 15961 lbousaidi added routines for set Bad/set not bad buttons
|
||||
* to reflect the state of the gages.
|
||||
* Jul 02, 2013 2160 mpduff Changed to not call deprecated resource.getData() method.
|
||||
* Feb 2, 2014 16201 snaples Added saved data flag support
|
||||
* Sep 02, 2008 randerso Initial creation
|
||||
* May 01, 2013 15920 lbousaidi gages get updated after clicking on
|
||||
* Regenerate Hour Fields without closing 7x7 Gui.
|
||||
* Jun 05, 2013 15961 lbousaidi added routines for set Bad/set not bad buttons
|
||||
* to reflect the state of the gages.
|
||||
* Jul 02, 2013 2160 mpduff Changed to not call deprecated resource.getData() method.
|
||||
* Feb 2, 2014 16201 snaples Added saved data flag support
|
||||
* Oct 30, 2015 18106 snaples Changed order of populateGrid and modified updateGageData to
|
||||
* fix issue with gage data not being in sync with grid data.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -163,7 +165,9 @@ public class Display7x7Dialog extends CaveSWTDialog {
|
|||
super(parentShell, SWT.DIALOG_TRIM, CAVE.DO_NOT_BLOCK);
|
||||
setText("Display 7 X 7 Gage Editing Utility");
|
||||
mgr = MPEDisplayManager.getCurrent();
|
||||
selectedGage = null;
|
||||
selectedGage = data;
|
||||
gData = null;
|
||||
gData = MPEDataManager.getInstance().getEditedGage(selectedGage);
|
||||
ArrayList<String> bg = MPEDataManager.getInstance().readBadGageList();
|
||||
if (bg.size() > 0) {
|
||||
|
@ -183,7 +187,6 @@ public class Display7x7Dialog extends CaveSWTDialog {
|
|||
@Override
|
||||
protected void initializeComponents(Shell shell) {
|
||||
font = new Font(shell.getDisplay(), "Courier", 10, SWT.NORMAL);
|
||||
|
||||
undoEn = false;
|
||||
|
||||
if (gData != null) {
|
||||
|
@ -205,13 +208,13 @@ public class Display7x7Dialog extends CaveSWTDialog {
|
|||
extent = new Rectangle(xOrig, yOrig, ht, width);
|
||||
|
||||
populateGrid();
|
||||
|
||||
createProductListComp();
|
||||
createGageGridComp();
|
||||
createGageComp();
|
||||
create7x7GridComp();
|
||||
createScaleComp();
|
||||
createButtonBar();
|
||||
updateGageData(selectedGage);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -709,6 +712,7 @@ public class Display7x7Dialog extends CaveSWTDialog {
|
|||
}
|
||||
gridComp.addPaintListener(new PaintListener() {
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public void paintControl(PaintEvent e) {
|
||||
cvt = parameters.getDataToDisplayConverter();
|
||||
|
@ -823,7 +827,9 @@ public class Display7x7Dialog extends CaveSWTDialog {
|
|||
}
|
||||
|
||||
public void updateGageData(MPEGageData data) {
|
||||
selectedGage = null;
|
||||
selectedGage = data;
|
||||
gData = null;
|
||||
gData = MPEDataManager.getInstance().getEditedGage(selectedGage);
|
||||
|
||||
if (gData != null) {
|
||||
|
|
|
@ -87,6 +87,10 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Jun 18, 2015 14298,17388 ptilles Updated to fix problem with mpe_dqc_6hr_24hr_ste_bad token and problem
|
||||
* with changing a 6hr value in 24hr mode
|
||||
* Sep 11, 2015 17986 snaples Updated q45bnames array to correct order issue, with Screened and Questionable being reversed.
|
||||
* Dec 07, 2015 5171 bkowal Allow the user to change point quality to verified when the
|
||||
* 24-hour value is partial.
|
||||
*
|
||||
* Dec 10, 2015 18391 snaples Updated changeCustomFile to not remove grid when EditStations Apply is clicked.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -99,7 +103,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
private static final transient IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(EditPrecipStationsDialog.class);
|
||||
|
||||
|
||||
private DailyQcUtils dqc = DailyQcUtils.getInstance();
|
||||
|
||||
private Font font;
|
||||
|
@ -116,7 +120,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
private Label[] qualityCodeStatusLabelArray = new Label[5]; // chip
|
||||
|
||||
// private String[][] timefile = DailyQcUtils.timefile;
|
||||
// private String[][] timefile = DailyQcUtils.timefile;
|
||||
|
||||
private int time_pos = 0;
|
||||
|
||||
|
@ -209,8 +213,8 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
private String[] q2bnames = { "Manual", "Reset to Original" };
|
||||
|
||||
private String[] q45bnames = { "Verified", "Questionable", "Screened (Forced)",
|
||||
"Bad" };
|
||||
private String[] q45bnames = { "Verified", "Questionable",
|
||||
"Screened (Forced)", "Bad" };
|
||||
|
||||
private int initial_qual = F_MANUAL;
|
||||
|
||||
|
@ -242,7 +246,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
ArrayList<Station> precipStationList = DailyQcUtils.precip_stations;
|
||||
|
||||
// ReadPrecipStationList rp = new ReadPrecipStationList();
|
||||
// ReadPrecipStationList rp = new ReadPrecipStationList();
|
||||
|
||||
int max_stations = DailyQcUtils.precip_stations.size();
|
||||
|
||||
|
@ -255,9 +259,9 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
int[] allowedQualityCodes = dqc.func;
|
||||
|
||||
int pcpn_day = DailyQcUtils.pcpn_day;
|
||||
|
||||
|
||||
int mpe_dqc_6hr_24hr_flag = 1;
|
||||
|
||||
|
||||
Coordinate coord = new Coordinate();
|
||||
|
||||
boolean mpe_dqc_warningpopup_flag = false;
|
||||
|
@ -389,7 +393,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
reset_value = 0;
|
||||
initial_qual = frain.qual;
|
||||
new_qual = initial_qual;
|
||||
|
||||
|
||||
Rain srain = pdata[pcpn_day].stn[isave].srain[time_pos];
|
||||
|
||||
if (srain.data > -98) {
|
||||
|
@ -399,7 +403,6 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
// Updated to allow editing of time distributed station as in OB 9.x
|
||||
// if (initial_qual == 6) {
|
||||
//
|
||||
|
@ -485,7 +488,8 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
"Snow water change is %5.2f in.", srain.data));
|
||||
if (time_pos == HOURS_24 && srain.data >= 0) {
|
||||
snow = true;
|
||||
System.out.println("Snow water change is available for " + selectedStation.hb5);
|
||||
System.out.println("Snow water change is available for "
|
||||
+ selectedStation.hb5);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -493,8 +497,8 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
// only display in 24-hour mode
|
||||
if ((time_pos == HOURS_24) && (dqc.QPEaccum24hr != null)) {
|
||||
|
||||
double accumulatedAmount = get24HourPrecipTotal(
|
||||
dqc.QPEaccum24hr, selectedStation.hrap_x
|
||||
double accumulatedAmount = get24HourPrecipTotal(dqc.QPEaccum24hr,
|
||||
selectedStation.hrap_x
|
||||
- DailyQcUtils.getHrap_grid().hrap_minx,
|
||||
selectedStation.hrap_y
|
||||
- DailyQcUtils.getHrap_grid().hrap_miny);
|
||||
|
@ -764,7 +768,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
final Integer[] screenedArray = { F_SCREENED, F_BAD };
|
||||
final Integer[] badArray = { F_BAD, F_VERIFIED, F_SCREENED,
|
||||
F_QUESTIONABLE };
|
||||
final Integer[] partialArray = { F_BAD };
|
||||
final Integer[] partialArray = { F_VERIFIED, F_BAD };
|
||||
final Integer[] emptyArray = {};
|
||||
|
||||
// determine which array applies
|
||||
|
@ -889,12 +893,12 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
precipValueLabelArray[i].setText(dqc.timefile[2][i]);
|
||||
precipValueTextArray[i] = new Text(stnConComp, SWT.LEFT
|
||||
| SWT.BORDER | SWT.READ_ONLY);
|
||||
|
||||
|
||||
qualityCodeStatusLabelArray[i] = new Label(stnConComp, SWT.CENTER);
|
||||
|
||||
|
||||
int qualityCode = pdata[pcpn_day].stn[isave].frain[i].qual;
|
||||
String qualityText = getQualityTextFromCode(qualityCode);
|
||||
|
||||
|
||||
qualityCodeStatusLabelArray[i].setText(qualityText);
|
||||
|
||||
Rain frainI = pdata[pcpn_day].stn[isave].frain[i];
|
||||
|
@ -1016,20 +1020,15 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
protected void resetStationQuality(Integer data) {
|
||||
int k;
|
||||
|
||||
if (pcpn_time_step == 0)
|
||||
{
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcpn_time;
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
time_pos = HOURS_24;
|
||||
}
|
||||
|
||||
if (data == 1)
|
||||
{
|
||||
if (data == 1) {
|
||||
|
||||
for (k = 0; k < 5; k++)
|
||||
{
|
||||
for (k = 0; k < 5; k++) {
|
||||
|
||||
pdata[pcpn_day].stn[isave].frain[k].qual = pdata[pcpn_day].stn[isave].rrain[k].qual;
|
||||
|
||||
|
@ -1040,17 +1039,13 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
reset_value = 1;
|
||||
new_qual = pdata[pcpn_day].stn[isave].rrain[time_pos].qual;
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
reset_value = 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected void changeStationQuality(Integer data) {
|
||||
String header = "EditPrecipStationsDialog.changeStationQuality()";
|
||||
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcpn_time;
|
||||
} else {
|
||||
|
@ -1072,7 +1067,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
float val, fdif;
|
||||
String cstr;
|
||||
int k, p;
|
||||
// int[] pcp_in_use = dqc.pcp_in_use;
|
||||
// int[] pcp_in_use = dqc.pcp_in_use;
|
||||
Boolean bval = false;
|
||||
float rtotal;
|
||||
int m;
|
||||
|
@ -1087,28 +1082,25 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
custom.setReadable(true, false);
|
||||
custom.setWritable(true, false);
|
||||
|
||||
//token name: mpe_dqc_6hr_24hr_set_bad
|
||||
// token name: mpe_dqc_6hr_24hr_set_bad
|
||||
// token value = OFF
|
||||
// mpe_dqc_6hr_24hr_flag = 0
|
||||
// if user sets 6hr value to Bad, then 24hr value is unaffected
|
||||
|
||||
// mpe_dqc_6hr_24hr_flag = 0
|
||||
// if user sets 6hr value to Bad, then 24hr value is unaffected
|
||||
|
||||
// token value = ON
|
||||
// mpe_dqc_6hr_24hr_flag = 1
|
||||
// if user sets 6hr value to Bad, then 24hr value is set to Bad
|
||||
|
||||
// mpe_dqc_6hr_24hr_flag = 1
|
||||
// if user sets 6hr value to Bad, then 24hr value is set to Bad
|
||||
|
||||
String mpe_dqc_6hr_24hr_string = AppsDefaults.getInstance().getToken(
|
||||
"mpe_dqc_6hr_24hr_set_bad", "ON");
|
||||
|
||||
if (mpe_dqc_6hr_24hr_string.equalsIgnoreCase("OFF"))
|
||||
{
|
||||
if (mpe_dqc_6hr_24hr_string.equalsIgnoreCase("OFF")) {
|
||||
mpe_dqc_6hr_24hr_flag = 0;
|
||||
System.out.println("mpe_dqc_6hr_24hr_flag = 0 -- token = OFF");
|
||||
System.out.println("mpe_dqc_6hr_24hr_flag = 0 -- token = OFF");
|
||||
} else {
|
||||
System.out.println("mpe_dqc_6hr_24hr_flag = 1 -- token = ON");
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("mpe_dqc_6hr_24hr_flag = 1 -- token = ON");
|
||||
}
|
||||
|
||||
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcpn_time;
|
||||
} else {
|
||||
|
@ -1117,7 +1109,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
try {
|
||||
out = new BufferedWriter(new FileWriter(custom));
|
||||
|
||||
|
||||
for (i = 0; i < max_stations; i++) {
|
||||
Station station = precipStationList.get(i);
|
||||
String rec = String.format("%s %s %d %d\n", station.hb5,
|
||||
|
@ -1147,8 +1139,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
/* snotel path */
|
||||
|
||||
if (snow == true
|
||||
&& ((bval == true && pdata[pcpn_day].stn[isave].sflag[HOURS_24] == -1) || (bval == false && pdata[pcpn_day].stn[isave].sflag[HOURS_24] == 1)))
|
||||
{
|
||||
&& ((bval == true && pdata[pcpn_day].stn[isave].sflag[HOURS_24] == -1) || (bval == false && pdata[pcpn_day].stn[isave].sflag[HOURS_24] == 1))) {
|
||||
|
||||
pdata[pcpn_day].stn[isave].sflag[HOURS_24] = (short) -pdata[pcpn_day].stn[isave].sflag[HOURS_24];
|
||||
|
||||
|
@ -1165,14 +1156,12 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
else {
|
||||
boolean value_edit_flag = false;
|
||||
val = pdata[pcpn_day].stn[isave].frain[time_pos].data;
|
||||
p = -1;
|
||||
|
||||
for (k = 0; k < 5; k++)
|
||||
{
|
||||
|
||||
for (k = 0; k < 5; k++) {
|
||||
cstr = precipValueStringArray[k];
|
||||
val = Float.parseFloat(cstr);
|
||||
p = cstr.indexOf('M');
|
||||
|
@ -1180,10 +1169,12 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
/* use manually entered data */
|
||||
|
||||
fdif = Math.abs(val - pdata[pcpn_day].stn[isave].frain[k].data); // changed for DR 17388
|
||||
|
||||
if (fdif > .005 && p == -1 && reset_value == 0)
|
||||
{
|
||||
fdif = Math.abs(val - pdata[pcpn_day].stn[isave].frain[k].data); // changed
|
||||
// for
|
||||
// DR
|
||||
// 17388
|
||||
|
||||
if (fdif > .005 && p == -1 && reset_value == 0) {
|
||||
pdata[pcpn_day].stn[isave].frain[k].data = val;
|
||||
pdata[pcpn_day].stn[isave].frain[k].qual = F_MANUAL;
|
||||
pdata[pcpn_day].stn[isave].sflag[k] = -1;
|
||||
|
@ -1191,14 +1182,11 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
}
|
||||
}
|
||||
if (value_edit_flag == true && reset_value == 0)
|
||||
{
|
||||
if (value_edit_flag == true && reset_value == 0) {
|
||||
rtotal = 0;
|
||||
|
||||
for (m = 0; m < 4; m++)
|
||||
{
|
||||
if (pdata[pcpn_day].stn[isave].frain[m].data >= 0)
|
||||
{
|
||||
for (m = 0; m < 4; m++) {
|
||||
if (pdata[pcpn_day].stn[isave].frain[m].data >= 0) {
|
||||
rtotal = rtotal
|
||||
+ pdata[pcpn_day].stn[isave].frain[m].data;
|
||||
}
|
||||
|
@ -1209,33 +1197,31 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
* values to zero and set their QC codes to "Manual" as well.
|
||||
*/
|
||||
|
||||
if ((Math.abs(pdata[pcpn_day].stn[isave].frain[HOURS_24].data - 0.0) < 0.001) && (time_pos == HOURS_24))
|
||||
{
|
||||
for (m = 0; m < 4; m++)
|
||||
{
|
||||
if ((Math
|
||||
.abs(pdata[pcpn_day].stn[isave].frain[HOURS_24].data - 0.0) < 0.001)
|
||||
&& (time_pos == HOURS_24)) {
|
||||
for (m = 0; m < 4; m++) {
|
||||
pdata[pcpn_day].stn[isave].frain[m].data = 0;
|
||||
pdata[pcpn_day].stn[isave].frain[m].qual = F_MANUAL;
|
||||
}
|
||||
rtotal = 0;
|
||||
}
|
||||
|
||||
if (Math.abs(rtotal - pdata[pcpn_day].stn[isave].frain[HOURS_24].data) > .005)
|
||||
{
|
||||
if (Math.abs(rtotal
|
||||
- pdata[pcpn_day].stn[isave].frain[HOURS_24].data) > .005) {
|
||||
read_text();
|
||||
return;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
|
||||
pdata[pcpn_day].stn[isave].frain[time_pos].qual = (short) new_qual;
|
||||
|
||||
/* 24 hour data set bad/good then 6 hourly bad/good also */
|
||||
|
||||
if (new_qual == F_BAD && time_pos == HOURS_24 && pdata[pcpn_day].stn[isave].sflag[time_pos] == 1)
|
||||
{
|
||||
if (new_qual == F_BAD && time_pos == HOURS_24
|
||||
&& pdata[pcpn_day].stn[isave].sflag[time_pos] == 1) {
|
||||
|
||||
pdata[pcpn_day].stn[isave].frain[time_pos].data = pdata[pcpn_day].stn[isave].rrain[time_pos].data;
|
||||
|
||||
|
@ -1243,72 +1229,64 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
}
|
||||
|
||||
if (time_pos == HOURS_24 && (new_qual == F_BAD || new_qual == F_SCREENED
|
||||
|| new_qual == F_VERIFIED || new_qual == F_PARTIAL))
|
||||
{
|
||||
if (time_pos == HOURS_24
|
||||
&& (new_qual == F_BAD || new_qual == F_SCREENED
|
||||
|| new_qual == F_VERIFIED || new_qual == F_PARTIAL)) {
|
||||
|
||||
for (k = 0; k < 4; k++)
|
||||
{
|
||||
for (k = 0; k < 4; k++) {
|
||||
pdata[pcpn_day].stn[isave].frain[k].qual = (short) new_qual;
|
||||
}
|
||||
|
||||
}
|
||||
/*-------------------------------------------------------*/
|
||||
/*
|
||||
* if 6 hr QC code set Bad by user and token value = ON, then set 24hr QC code to Bad
|
||||
* following code also allows 24 hr partial data to be set to Bad
|
||||
* if 6 hr QC code set Bad by user and token value = ON, then
|
||||
* set 24hr QC code to Bad following code also allows 24 hr
|
||||
* partial data to be set to Bad
|
||||
*/
|
||||
|
||||
if (time_pos != HOURS_24 && new_qual == F_BAD
|
||||
if (time_pos != HOURS_24
|
||||
&& new_qual == F_BAD
|
||||
&& pdata[pcpn_day].stn[isave].frain[HOURS_24].qual != F_ESTIMATED
|
||||
&& pdata[pcpn_day].stn[isave].frain[HOURS_24].data >= 0)
|
||||
{
|
||||
if (mpe_dqc_6hr_24hr_flag == 1)
|
||||
{
|
||||
System.out.println(header + "6hr qual code set to Bad - 24hr qual code changed to Bad\n");
|
||||
&& pdata[pcpn_day].stn[isave].frain[HOURS_24].data >= 0) {
|
||||
if (mpe_dqc_6hr_24hr_flag == 1) {
|
||||
System.out
|
||||
.println(header
|
||||
+ "6hr qual code set to Bad - 24hr qual code changed to Bad\n");
|
||||
pdata[pcpn_day].stn[isave].frain[HOURS_24].qual = F_BAD;
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println(header + "6hr qual code set to Bad - 24hr qual code unchanged\n");
|
||||
} else {
|
||||
System.out
|
||||
.println(header
|
||||
+ "6hr qual code set to Bad - 24hr qual code unchanged\n");
|
||||
}
|
||||
}
|
||||
|
||||
if (pdata[pcpn_day].stn[isave].frain[HOURS_24].qual == F_BAD ||
|
||||
pdata[pcpn_day].stn[isave].frain[HOURS_24].data < 0)
|
||||
{
|
||||
if (tcmode == 0)
|
||||
{
|
||||
if (pdata[pcpn_day].stn[isave].frain[HOURS_24].qual == F_BAD
|
||||
|| pdata[pcpn_day].stn[isave].frain[HOURS_24].data < 0) {
|
||||
if (tcmode == 0) {
|
||||
pdata[pcpn_day].stn[isave].tcons = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
pdata[pcpn_day].stn[isave].tcons = -1;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
} // end if (value_edit_flag == true && reset_value == 0)
|
||||
|
||||
|
||||
} // end if (snow == true)
|
||||
|
||||
for (k = 0; k < 5; k++)
|
||||
{
|
||||
for (k = 0; k < 5; k++) {
|
||||
|
||||
if (k < 4)
|
||||
{
|
||||
if (k < 4) {
|
||||
time_pos = pcpn_day * 4 + k;
|
||||
}
|
||||
else
|
||||
{
|
||||
} else {
|
||||
time_pos = 40 + pcpn_day;
|
||||
}
|
||||
|
||||
if (pdata[pcpn_day].used[k] != 0)
|
||||
{
|
||||
if (pdata[pcpn_day].used[k] != 0) {
|
||||
pdata[pcpn_day].used[k] = 2;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
QcPrecipOptionsDialog.dataSet.clear();
|
||||
QcPrecipOptionsDialog.dataSet.addAll(QcPrecipOptionsDialog.dataType);
|
||||
for (k = 1; k < 7; k++) {
|
||||
|
@ -1318,7 +1296,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
String[] a = new String[QcPrecipOptionsDialog.dataSet.size()];
|
||||
QcPrecipOptionsDialog.setDataSetCombo(QcPrecipOptionsDialog.dataSet
|
||||
.toArray(a));
|
||||
|
||||
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcp_flag;
|
||||
} else {
|
||||
|
@ -1356,21 +1334,19 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
* if run DQC on partial time frame and pcpn_day=0
|
||||
*/
|
||||
|
||||
if (pcpn_day == 0 && (dqc.curHr00_06 == 1 || dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1))
|
||||
{
|
||||
// do nothing
|
||||
}
|
||||
else
|
||||
{
|
||||
if (pcpn_day == 0
|
||||
&& (dqc.curHr00_06 == 1 || dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1)) {
|
||||
// do nothing
|
||||
} else {
|
||||
|
||||
EstDailyStations eds = new EstDailyStations();
|
||||
eds.estimate_daily_stations(pcpn_day, precipStationList,
|
||||
max_stations);
|
||||
EstPartStations eps = new EstPartStations();
|
||||
eps.estimate_partial_stations(pcpn_day, precipStationList,
|
||||
max_stations);
|
||||
EstDailyStations eds = new EstDailyStations();
|
||||
eds.estimate_daily_stations(pcpn_day, precipStationList,
|
||||
max_stations);
|
||||
EstPartStations eps = new EstPartStations();
|
||||
eps.estimate_partial_stations(pcpn_day, precipStationList,
|
||||
max_stations);
|
||||
}
|
||||
|
||||
|
||||
QCStations qcs = new QCStations();
|
||||
qcs.quality_control_stations(pcpn_day, precipStationList, max_stations);
|
||||
|
||||
|
@ -1380,9 +1356,6 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
bv.restore_bad_values(pcpn_day, precipStationList, max_stations);
|
||||
|
||||
if (k == 1 || k == 3) grids_flag = 1;
|
||||
|
||||
OtherPrecipOptions op = new OtherPrecipOptions();
|
||||
op.send_expose();
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1452,8 +1425,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
* message
|
||||
*/
|
||||
if (pcpn_day == 0
|
||||
&& (dqc.curHr00_06 == 1
|
||||
|| dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1)) {
|
||||
&& (dqc.curHr00_06 == 1 || dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1)) {
|
||||
partial_day_flag = true;
|
||||
} else {
|
||||
partial_day_flag = false;
|
||||
|
@ -1499,7 +1471,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
// shell.dispose();
|
||||
}
|
||||
|
||||
this.open();//redraw this updated dialog
|
||||
this.open();// redraw this updated dialog
|
||||
}
|
||||
|
||||
protected void read_text() {
|
||||
|
@ -1515,31 +1487,26 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
|
||||
Rain frain24 = pdata[pcpn_day].stn[isave].frain[HOURS_24];
|
||||
|
||||
for (k = 0; k < 5; k++)
|
||||
{
|
||||
for (k = 0; k < 5; k++) {
|
||||
|
||||
Rain frain = pdata[pcpn_day].stn[isave].frain[k];
|
||||
cstr = precipValueStringArray[k];
|
||||
val = 0;
|
||||
p = cstr.indexOf('M');
|
||||
if (p == -1)
|
||||
{
|
||||
if (p == -1) {
|
||||
val = Float.parseFloat(cstr);
|
||||
}
|
||||
|
||||
fdif = Math.abs(val - frain.data);
|
||||
|
||||
if (p != -1)
|
||||
{
|
||||
if (p != -1) {
|
||||
pdata[pcpn_day].stn[isave].frain[k].data = -1;
|
||||
p = -1;
|
||||
}
|
||||
else if (fdif > .005 && p == -1)
|
||||
{
|
||||
} else if (fdif > .005 && p == -1) {
|
||||
pdata[pcpn_day].stn[isave].frain[k].data = val;
|
||||
pdata[pcpn_day].stn[isave].frain[k].qual = F_MANUAL;
|
||||
pdata[pcpn_day].stn[isave].sflag[k] = -1;
|
||||
|
||||
|
||||
}
|
||||
cstr = null;
|
||||
}
|
||||
|
@ -1614,8 +1581,7 @@ public class EditPrecipStationsDialog extends AbstractMPEDialog implements
|
|||
*/
|
||||
|
||||
if (pcpn_day == 0
|
||||
&& (dqc.curHr00_06 == 1
|
||||
|| dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1)) {
|
||||
&& (dqc.curHr00_06 == 1 || dqc.curHr06_12 == 1 || dqc.curHr18_00 == 1)) {
|
||||
|
||||
} else {
|
||||
EstDailyStations eds = new EstDailyStations();
|
||||
|
|
|
@ -41,12 +41,12 @@ import org.eclipse.swt.widgets.Group;
|
|||
import org.eclipse.swt.widgets.Label;
|
||||
import org.eclipse.swt.widgets.Shell;
|
||||
import org.eclipse.swt.widgets.Text;
|
||||
import org.opengis.referencing.FactoryException;
|
||||
import org.opengis.referencing.datum.PixelInCell;
|
||||
import org.opengis.referencing.operation.TransformException;
|
||||
|
||||
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
|
||||
import com.raytheon.uf.common.hydro.spatial.HRAP;
|
||||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.viz.app.launcher.handlers.AppLauncherHandler;
|
||||
import com.raytheon.viz.mpe.ui.MPEDisplayManager;
|
||||
import com.raytheon.viz.mpe.ui.actions.GetClimateSource;
|
||||
|
@ -68,6 +68,8 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Apr 13, 2009 snaples Initial creation
|
||||
* Dec 08, 2015 5179 bkowal Ensure the grid remains displayed when this dialog
|
||||
* is closed. Eliminate warnings and e print stack trace.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -77,8 +79,11 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
|
||||
public class EditTempStationsDialog extends AbstractMPEDialog {
|
||||
|
||||
private static final IUFStatusHandler statusHandler = UFStatus
|
||||
.getHandler(EditTempStationsDialog.class);
|
||||
|
||||
private DailyQcUtils dqc = DailyQcUtils.getInstance();
|
||||
|
||||
|
||||
private Font font;
|
||||
|
||||
private String[] eval = new String[6];
|
||||
|
@ -95,7 +100,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
private int pcpn_time_step = MPEDisplayManager.pcpn_time_step;
|
||||
|
||||
private int pcpn_time = dqc.pcpn_time;
|
||||
private int pcpn_time = DailyQcUtils.pcpn_time;
|
||||
|
||||
private StringBuilder tstnData = new StringBuilder();
|
||||
|
||||
|
@ -135,33 +140,21 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
String tClimateSource = null;
|
||||
|
||||
// Ts ts[] = DailyQcUtils.ts;
|
||||
int tsmax = DailyQcUtils.tsmax;
|
||||
|
||||
int tsmax = dqc.tsmax;
|
||||
|
||||
int isom = dqc.isom;
|
||||
int isom = DailyQcUtils.isom;
|
||||
|
||||
int win_x;
|
||||
|
||||
int win_y;
|
||||
|
||||
// int gage_char[] = DailyQcUtils.gage_char;
|
||||
|
||||
int method = dqc.method;
|
||||
|
||||
// int qflag[] = DailyQcUtils.qflag;
|
||||
|
||||
// int dflag[] = DailyQcUtils.dflag;
|
||||
|
||||
String mbuf;
|
||||
|
||||
int naflag;
|
||||
|
||||
// ArrayList<Station> station = DailyQcUtils.temperature_stations;
|
||||
|
||||
// ReadTemperatureStationList rt = new ReadTemperatureStationList();
|
||||
|
||||
int max_stations = dqc.temperature_stations.size();
|
||||
int max_stations = DailyQcUtils.temperature_stations.size();
|
||||
|
||||
int i, m, x, y;
|
||||
|
||||
|
@ -169,9 +162,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
int initial_pos;
|
||||
|
||||
// int[] func = DailyQcUtils.func;
|
||||
|
||||
int pcpn_day = dqc.pcpn_day;
|
||||
int pcpn_day = DailyQcUtils.pcpn_day;
|
||||
|
||||
Coordinate coord = new Coordinate();
|
||||
|
||||
|
@ -187,11 +178,11 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
try {
|
||||
coord = rcoord.asLatLon();
|
||||
} catch (Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
statusHandler
|
||||
.error("Failed to convert ReferencedCoordinate to Coordinate.",
|
||||
e);
|
||||
}
|
||||
}
|
||||
// Envelope env = new Envelope(coord);
|
||||
|
||||
AbstractVizPerspectiveManager mgr = VizPerspectiveListener
|
||||
.getCurrentPerspectiveManager();
|
||||
|
@ -271,31 +262,32 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
for (i = 0; i < max_stations; i++) {
|
||||
|
||||
if (dqc.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data == -999) {
|
||||
if (DailyQcUtils.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data == -999) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ((dqc.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data > QcTempOptionsDialog
|
||||
if ((DailyQcUtils.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data > QcTempOptionsDialog
|
||||
.getPointFilterReverseValue())
|
||||
&& (dqc.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data < 110.0)) {
|
||||
&& (DailyQcUtils.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data < 110.0)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ((dqc.temperature_stations.get(i).elev > 0)
|
||||
&& (dqc.temperature_stations.get(i).elev < dqc.elevation_filter_value)) {
|
||||
if ((DailyQcUtils.temperature_stations.get(i).elev > 0)
|
||||
&& (DailyQcUtils.temperature_stations.get(i).elev < DailyQcUtils.elevation_filter_value)) {
|
||||
continue;
|
||||
}
|
||||
if (dqc.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data < QcTempOptionsDialog
|
||||
if (DailyQcUtils.tdata[pcpn_day].tstn[i].tlevel2[time_pos].data < QcTempOptionsDialog
|
||||
.getPointFilterValue()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
lat = dqc.temperature_stations.get(i).lat;
|
||||
lon = dqc.temperature_stations.get(i).lon;
|
||||
lat = DailyQcUtils.temperature_stations.get(i).lat;
|
||||
lon = DailyQcUtils.temperature_stations.get(i).lon;
|
||||
|
||||
for (m = 0; m < tsmax; m++) {
|
||||
char kd = dqc.temperature_stations.get(i).parm.charAt(4);
|
||||
if ((kd == dqc.ts[m].abr.charAt(1) && dqc.dflag[m + 1] == 1)) {
|
||||
char kd = DailyQcUtils.temperature_stations.get(i).parm
|
||||
.charAt(4);
|
||||
if ((kd == DailyQcUtils.ts[m].abr.charAt(1) && DailyQcUtils.dflag[m + 1] == 1)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -305,8 +297,8 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
}
|
||||
|
||||
for (m = 0; m < 9; m++) {
|
||||
if (m == dqc.tdata[pcpn_day].tstn[i].tlevel2[time_pos].qual
|
||||
&& dqc.qflag[m] == 1) {
|
||||
if (m == DailyQcUtils.tdata[pcpn_day].tstn[i].tlevel2[time_pos].qual
|
||||
&& DailyQcUtils.qflag[m] == 1) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -325,8 +317,9 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
gridCell = rc.asGridCell(HRAP.getInstance().getGridGeometry(),
|
||||
PixelInCell.CELL_CORNER);
|
||||
} catch (Exception e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
statusHandler
|
||||
.error("Failed to convert ReferencedCoordinate to Coordinate.",
|
||||
e);
|
||||
}
|
||||
int x1 = (short) gridCell.x;
|
||||
int y1 = (short) gridCell.y;
|
||||
|
@ -334,16 +327,12 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
rc = new ReferencedCoordinate(coord);
|
||||
Coordinate hw = null;
|
||||
try {
|
||||
try {
|
||||
hw = rc.asGridCell(HRAP.getInstance().getGridGeometry(),
|
||||
PixelInCell.CELL_CORNER);
|
||||
} catch (TransformException e) {
|
||||
e.printStackTrace();
|
||||
} catch (FactoryException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
hw = rc.asGridCell(HRAP.getInstance().getGridGeometry(),
|
||||
PixelInCell.CELL_CORNER);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
statusHandler
|
||||
.error("Failed to convert ReferencedCoordinate to Coordinate.",
|
||||
e);
|
||||
}
|
||||
win_x = (int) hw.x;
|
||||
win_y = (int) hw.y;
|
||||
|
@ -362,42 +351,39 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
}
|
||||
|
||||
reset_value = 0;
|
||||
initial_qual = dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual;
|
||||
initial_qual = DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual;
|
||||
new_qual = initial_qual;
|
||||
|
||||
// Updated to allow editing of time distributed station as in OB 9.x
|
||||
// if (initial_qual == 6) {
|
||||
//
|
||||
// MessageDialog.openError(shell, "Error Time Distributed Station",
|
||||
// "You cannot quality control a time distributed station");
|
||||
// return;
|
||||
// }
|
||||
|
||||
tstnData.append(dqc.temperature_stations.get(isave).hb5);
|
||||
tstnData.append(DailyQcUtils.temperature_stations.get(isave).hb5);
|
||||
tstnData.append(" ");
|
||||
tstnData.append(dqc.temperature_stations.get(isave).parm);
|
||||
tstnData.append(DailyQcUtils.temperature_stations.get(isave).parm);
|
||||
tstnData.append("\n");
|
||||
tstnData.append(dqc.temperature_stations.get(isave).name);
|
||||
tstnData.append(DailyQcUtils.temperature_stations.get(isave).name);
|
||||
tstnData.append("\n");
|
||||
tstnData.append(String.format("%d", dqc.temperature_stations.get(isave).elev));
|
||||
tstnData.append(String.format("%d",
|
||||
DailyQcUtils.temperature_stations.get(isave).elev));
|
||||
tstnData.append(" ft ");
|
||||
tstnData.append("\n");
|
||||
tstnData.append(String.format("Lat: %5.2f Lon: %5.2f",
|
||||
dqc.temperature_stations.get(isave).lat, dqc.temperature_stations.get(isave).lon));
|
||||
DailyQcUtils.temperature_stations.get(isave).lat,
|
||||
DailyQcUtils.temperature_stations.get(isave).lon));
|
||||
tstnData.append("\n");
|
||||
if (dqc.temperature_stations.get(isave).max[isom] > -99) {
|
||||
if (DailyQcUtils.temperature_stations.get(isave).max[isom] > -99) {
|
||||
GetClimateSource gc = new GetClimateSource();
|
||||
tClimateSource = gc.getClimateSource(dqc.temperature_stations.get(isave).cparm);
|
||||
tClimateSource = gc
|
||||
.getClimateSource(DailyQcUtils.temperature_stations
|
||||
.get(isave).cparm);
|
||||
|
||||
tstnData.append(String.format(
|
||||
"monthly average high %5.1f low %5.1f source: %s\n",
|
||||
dqc.temperature_stations.get(isave).max[isom], dqc.temperature_stations.get(isave).min[isom],
|
||||
DailyQcUtils.temperature_stations.get(isave).max[isom],
|
||||
DailyQcUtils.temperature_stations.get(isave).min[isom],
|
||||
tClimateSource));
|
||||
}
|
||||
if (dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data > -50) {
|
||||
if (DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data > -50) {
|
||||
tstnData.append(String
|
||||
.format("estimate %d ",
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].estimate));
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].estimate));
|
||||
}
|
||||
|
||||
createTstationDataComp();
|
||||
|
@ -424,13 +410,13 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
hb5Lbl.setLayoutData(gd);
|
||||
|
||||
editVal = new Text(dataComp, SWT.LEFT | SWT.SINGLE | SWT.BORDER);
|
||||
if (dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data < -50) {
|
||||
if (DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data < -50) {
|
||||
mbuf = "M";
|
||||
editVal.setText(mbuf);
|
||||
} else {
|
||||
mbuf = String
|
||||
.format("%d",
|
||||
(int) dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data);
|
||||
(int) DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data);
|
||||
editVal.setText(mbuf.trim());
|
||||
|
||||
}
|
||||
|
@ -457,7 +443,6 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
stnQualGroup.setLayoutData(gd);
|
||||
|
||||
// Create a container to hold the label and the combo box.
|
||||
// GridData gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
|
||||
Composite stnQualComp = new Composite(stnQualGroup, SWT.NONE);
|
||||
GridLayout stnQualCompLayout = new GridLayout(2, true);
|
||||
stnQualCompLayout.marginWidth = 0;
|
||||
|
@ -466,7 +451,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
stnQualComp.setLayoutData(gd);
|
||||
|
||||
if (initial_qual < 0
|
||||
|| dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data < -500) {
|
||||
|| DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data < -500) {
|
||||
naflag = 1;
|
||||
} else {
|
||||
naflag = 0;
|
||||
|
@ -524,15 +509,17 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
*/
|
||||
private void createStnLocComp() {
|
||||
|
||||
if (dqc.temperature_stations.get(isave).xadd == -1 && dqc.temperature_stations.get(isave).yadd == -1) {
|
||||
if (DailyQcUtils.temperature_stations.get(isave).xadd == -1
|
||||
&& DailyQcUtils.temperature_stations.get(isave).yadd == -1) {
|
||||
initial_pos = 0;
|
||||
} else if (dqc.temperature_stations.get(isave).xadd == 0
|
||||
&& dqc.temperature_stations.get(isave).yadd == -1) {
|
||||
} else if (DailyQcUtils.temperature_stations.get(isave).xadd == 0
|
||||
&& DailyQcUtils.temperature_stations.get(isave).yadd == -1) {
|
||||
initial_pos = 2;
|
||||
} else if (dqc.temperature_stations.get(isave).xadd == -1
|
||||
&& dqc.temperature_stations.get(isave).yadd == 0) {
|
||||
} else if (DailyQcUtils.temperature_stations.get(isave).xadd == -1
|
||||
&& DailyQcUtils.temperature_stations.get(isave).yadd == 0) {
|
||||
initial_pos = 1;
|
||||
} else if (dqc.temperature_stations.get(isave).xadd == 0 && dqc.temperature_stations.get(isave).yadd == 0) {
|
||||
} else if (DailyQcUtils.temperature_stations.get(isave).xadd == 0
|
||||
&& DailyQcUtils.temperature_stations.get(isave).yadd == 0) {
|
||||
initial_pos = 3;
|
||||
}
|
||||
Group stnLocGroup = new Group(shell, SWT.NONE);
|
||||
|
@ -543,7 +530,6 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
stnLocGroup.setLayoutData(gd);
|
||||
|
||||
// Create a container to hold the label and the combo box.
|
||||
// GridData gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
|
||||
Composite stnLocComp = new Composite(stnLocGroup, SWT.NONE);
|
||||
GridLayout stnLocCompLayout = new GridLayout(2, true);
|
||||
stnLocCompLayout.marginWidth = 0;
|
||||
|
@ -585,7 +571,6 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
stnConGroup.setLayoutData(gd);
|
||||
|
||||
// Create a container to hold the label and the combo box.
|
||||
// GridData gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
|
||||
Composite stnConComp = new Composite(stnConGroup, SWT.NONE);
|
||||
GridLayout stnConCompLayout = new GridLayout(2, true);
|
||||
stnConCompLayout.marginWidth = 5;
|
||||
|
@ -600,13 +585,13 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
sc[i] = new Label(stnConComp, SWT.LEFT);
|
||||
sc[i].setText(dqc.ttimefile[dqc.dqcTimeStringIndex][i]);
|
||||
sv[i] = new Text(stnConComp, SWT.LEFT | SWT.BORDER);
|
||||
if (dqc.tdata[pcpn_day].tstn[isave].tlevel2[i].data < -99) {
|
||||
if (DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[i].data < -99) {
|
||||
muf = "M";
|
||||
sv[i].setText(muf);
|
||||
} else {
|
||||
muf = String
|
||||
.format("%d",
|
||||
(int) dqc.tdata[pcpn_day].tstn[isave].tlevel2[i].data);
|
||||
(int) DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[i].data);
|
||||
sv[i].setText(muf.trim());
|
||||
}
|
||||
eval[i] = sv[i].getText();
|
||||
|
@ -683,8 +668,9 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
@Override
|
||||
public void widgetSelected(SelectionEvent e) {
|
||||
AppLauncherHandler alh = new AppLauncherHandler();
|
||||
String lid = dqc.temperature_stations.get(isave).hb5;
|
||||
char[] dataType = dqc.temperature_stations.get(isave).parm.toCharArray();
|
||||
String lid = DailyQcUtils.temperature_stations.get(isave).hb5;
|
||||
char[] dataType = DailyQcUtils.temperature_stations.get(isave).parm
|
||||
.toCharArray();
|
||||
/*
|
||||
* For temperature, use the shef extremum code 'X' for the daily
|
||||
* maximum temperature, 'N' for the daily minimum temperature
|
||||
|
@ -705,12 +691,12 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
final String TSL_BUNDLE_LOC = "bundles/run-TimeSeriesLite.xml";
|
||||
try {
|
||||
System.out.println("Launching TSL " + lid + ", "
|
||||
+ dataType.toString());
|
||||
statusHandler.info("Launching TSL " + lid + ", "
|
||||
+ dataType.toString() + " ...");
|
||||
alh.execute(TSL_BUNDLE_LOC, lid, dataType.toString());
|
||||
} catch (ExecutionException ee) {
|
||||
// TODO Auto-generated catch block
|
||||
ee.printStackTrace();
|
||||
} catch (ExecutionException e1) {
|
||||
statusHandler.error("Failed to launch TSL " + lid + ", "
|
||||
+ dataType.toString() + ".", e1);
|
||||
}
|
||||
retval = 2;
|
||||
}
|
||||
|
@ -733,14 +719,14 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
for (k = 0; k < 6; k++) {
|
||||
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = dqc.tdata[pcpn_day].tstn[isave].tlevel1[k].qual;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel1[k].qual;
|
||||
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].data = dqc.tdata[pcpn_day].tstn[isave].tlevel1[k].data;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].data = DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel1[k].data;
|
||||
|
||||
}
|
||||
|
||||
reset_value = 1;
|
||||
new_qual = dqc.tdata[pcpn_day].tstn[isave].tlevel1[time_pos].qual;
|
||||
new_qual = DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel1[time_pos].qual;
|
||||
|
||||
} else {
|
||||
reset_value = 0;
|
||||
|
@ -749,7 +735,6 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
}
|
||||
|
||||
protected void changeStationQuality(Integer data) {
|
||||
// logMessage ("thru station_quality %d\n", (int) data);
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcpn_time;
|
||||
} else {
|
||||
|
@ -761,23 +746,23 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
protected void changeStationLocation(Integer data) {
|
||||
if (data == 0) {
|
||||
dqc.temperature_stations.get(isave).xadd = -1;
|
||||
dqc.temperature_stations.get(isave).yadd = -1;
|
||||
DailyQcUtils.temperature_stations.get(isave).xadd = -1;
|
||||
DailyQcUtils.temperature_stations.get(isave).yadd = -1;
|
||||
}
|
||||
|
||||
else if (data == 2) {
|
||||
dqc.temperature_stations.get(isave).xadd = 0;
|
||||
dqc.temperature_stations.get(isave).yadd = -1;
|
||||
DailyQcUtils.temperature_stations.get(isave).xadd = 0;
|
||||
DailyQcUtils.temperature_stations.get(isave).yadd = -1;
|
||||
}
|
||||
|
||||
else if (data == 1) {
|
||||
dqc.temperature_stations.get(isave).xadd = -1;
|
||||
dqc.temperature_stations.get(isave).yadd = 0;
|
||||
DailyQcUtils.temperature_stations.get(isave).xadd = -1;
|
||||
DailyQcUtils.temperature_stations.get(isave).yadd = 0;
|
||||
}
|
||||
|
||||
else if (data == 3) {
|
||||
dqc.temperature_stations.get(isave).xadd = 0;
|
||||
dqc.temperature_stations.get(isave).yadd = 0;
|
||||
DailyQcUtils.temperature_stations.get(isave).xadd = 0;
|
||||
DailyQcUtils.temperature_stations.get(isave).yadd = 0;
|
||||
}
|
||||
|
||||
return;
|
||||
|
@ -785,7 +770,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
protected void changeCustomFile(int data) {
|
||||
|
||||
String pathName = getStationListPath(dqc.currentQcArea);
|
||||
String pathName = getStationListPath(DailyQcUtils.currentQcArea);
|
||||
String tstation_list_custom_file = pathName + "_label_position";
|
||||
int i;
|
||||
int time_pos = 0;
|
||||
|
@ -793,13 +778,12 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
int idif;
|
||||
String cstr;
|
||||
int k, p;
|
||||
// int[] pcp_in_use = DailyQcUtils.pcp_in_use;
|
||||
Button rpbutton = QcTempOptionsDialog.renderGridsBtn;
|
||||
BufferedWriter out = null;
|
||||
int pcp_flag = dqc.pcp_flag;
|
||||
int grids_flag = dqc.grids_flag;
|
||||
int points_flag = dqc.points_flag;
|
||||
int map_flag = dqc.map_flag;
|
||||
int pcp_flag = DailyQcUtils.pcp_flag;
|
||||
int grids_flag = DailyQcUtils.grids_flag;
|
||||
int points_flag = DailyQcUtils.points_flag;
|
||||
int map_flag = DailyQcUtils.map_flag;
|
||||
|
||||
if (pcpn_time_step == 0) {
|
||||
time_pos = pcpn_time;
|
||||
|
@ -813,16 +797,17 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
out = new BufferedWriter(new FileWriter(tstation_list_custom_file));
|
||||
|
||||
for (i = 0; i < max_stations; i++) {
|
||||
String rec = String.format("%s %s %d %d\n", dqc.temperature_stations.get(i).hb5,
|
||||
dqc.temperature_stations.get(i).parm, dqc.temperature_stations.get(i).xadd,
|
||||
dqc.temperature_stations.get(i).yadd);
|
||||
String rec = String.format("%s %s %d %d\n",
|
||||
DailyQcUtils.temperature_stations.get(i).hb5,
|
||||
DailyQcUtils.temperature_stations.get(i).parm,
|
||||
DailyQcUtils.temperature_stations.get(i).xadd,
|
||||
DailyQcUtils.temperature_stations.get(i).yadd);
|
||||
out.write(rec);
|
||||
}
|
||||
out.close();
|
||||
} catch (IOException e) {
|
||||
System.out.println(String.format("Could not open file: %s\n",
|
||||
tstation_list_custom_file));
|
||||
e.printStackTrace();
|
||||
statusHandler.error("Failed to write file: "
|
||||
+ tstation_list_custom_file + ".", e);
|
||||
return;
|
||||
} finally {
|
||||
try {
|
||||
|
@ -830,7 +815,8 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
out.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
statusHandler.error("Failed to close file: "
|
||||
+ tstation_list_custom_file + ".", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -845,18 +831,18 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
idif = (int) Math
|
||||
.abs(val
|
||||
- dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data);
|
||||
- DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data);
|
||||
|
||||
if (idif > 1 && p == -1 && reset_value == 0) {
|
||||
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data = val;
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual = 2;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].data = val;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual = 2;
|
||||
|
||||
}
|
||||
|
||||
else {
|
||||
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual = (short) new_qual;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[time_pos].qual = (short) new_qual;
|
||||
|
||||
}
|
||||
|
||||
|
@ -876,22 +862,22 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
|
||||
idif = (int) Math
|
||||
.abs(val
|
||||
- dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].data);
|
||||
- DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].data);
|
||||
|
||||
if (p != -1) {
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].data = -99;
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = -99;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].data = -99;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = -99;
|
||||
} else {
|
||||
if (idif > 1) {
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].data = val;
|
||||
dqc.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = 2;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].data = val;
|
||||
DailyQcUtils.tdata[pcpn_day].tstn[isave].tlevel2[k].qual = 2;
|
||||
}
|
||||
}
|
||||
cstr = null;
|
||||
}
|
||||
}
|
||||
if (dqc.tdata[pcpn_day].used[time_pos] != 0) {
|
||||
dqc.tdata[pcpn_day].used[time_pos] = 2;
|
||||
if (DailyQcUtils.tdata[pcpn_day].used[time_pos] != 0) {
|
||||
DailyQcUtils.tdata[pcpn_day].used[time_pos] = 2;
|
||||
}
|
||||
|
||||
if (pcpn_time_step == 0) {
|
||||
|
@ -902,16 +888,14 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
time_pos = 200 + pcpn_day;
|
||||
}
|
||||
|
||||
dqc.pcp_in_use[time_pos] = -1;
|
||||
|
||||
for (k = 0; k < 4; k++) {
|
||||
|
||||
time_pos = 150 + pcpn_day * 4 + k;
|
||||
|
||||
dqc.pcp_in_use[time_pos] = -1;
|
||||
DailyQcUtils.pcp_in_use[time_pos] = -1;
|
||||
|
||||
if (dqc.tdata[pcpn_day].used[k] != 0) {
|
||||
dqc.tdata[pcpn_day].used[k] = 2;
|
||||
if (DailyQcUtils.tdata[pcpn_day].used[k] != 0) {
|
||||
DailyQcUtils.tdata[pcpn_day].used[k] = 2;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -930,7 +914,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
time_pos = 200 + pcpn_day;
|
||||
}
|
||||
|
||||
if (points_flag == 1 && dqc.pcp_in_use[time_pos] == -1) {
|
||||
if (points_flag == 1 && DailyQcUtils.pcp_in_use[time_pos] == -1) {
|
||||
k = 0;
|
||||
} else if (points_flag == 1 && grids_flag == -1 && map_flag == -1) {
|
||||
k = 0;
|
||||
|
@ -953,19 +937,16 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
BadTValues bv = new BadTValues();
|
||||
bv.update_bad_tvalues(pcpn_day);
|
||||
|
||||
// logMessage("estimate\n");
|
||||
|
||||
EstDailyTStations eds = new EstDailyTStations();
|
||||
eds.estimate_daily_tstations(pcpn_day, dqc.temperature_stations, max_stations);
|
||||
|
||||
// logMessage("qc\n");
|
||||
eds.estimate_daily_tstations(pcpn_day,
|
||||
DailyQcUtils.temperature_stations, max_stations);
|
||||
|
||||
QCTStations qcs = new QCTStations();
|
||||
qcs.quality_control_tstations(pcpn_day, dqc.temperature_stations, max_stations);
|
||||
qcs.quality_control_tstations(pcpn_day,
|
||||
DailyQcUtils.temperature_stations, max_stations);
|
||||
|
||||
// logMessage("restore\n");
|
||||
|
||||
bv.restore_bad_tvalues(pcpn_day, dqc.temperature_stations, max_stations);
|
||||
bv.restore_bad_tvalues(pcpn_day, DailyQcUtils.temperature_stations,
|
||||
max_stations);
|
||||
|
||||
OtherPrecipOptions op = new OtherPrecipOptions();
|
||||
op.send_expose();
|
||||
|
@ -974,7 +955,7 @@ public class EditTempStationsDialog extends AbstractMPEDialog {
|
|||
}
|
||||
|
||||
private String getStationListPath(String qcArea) {
|
||||
String station_dir = dqc.mpe_station_list_dir;
|
||||
String station_dir = DailyQcUtils.mpe_station_list_dir;
|
||||
String dir;
|
||||
|
||||
if (qcArea != null) {
|
||||
|
|
|
@ -68,6 +68,8 @@ import com.raytheon.viz.mpe.util.DailyQcUtils;
|
|||
* Mar 10, 2015 14575 snaples Added additional status flag.
|
||||
* Jul 9, 2015 14618 snaples Cleaned up code issues.
|
||||
* Sep 11, 2015 17988 snaples Fixed issue with wait cursor not showing when Rendering Grids.
|
||||
* Nov 18, 2015 18093 snaples Fixed problem with arrows being disabled when new
|
||||
* day rollover >18Z occurs.
|
||||
* </pre>
|
||||
*
|
||||
* @author snaples
|
||||
|
@ -332,20 +334,17 @@ public class QcPrecipOptionsDialog extends AbstractMPEDialog {
|
|||
* Initialize the dialog components.
|
||||
*/
|
||||
private void initializeComponents() {
|
||||
// pdata = dqc.pdata;
|
||||
DailyQcUtils.points_flag = 1;
|
||||
DailyQcUtils.grids_flag = -1;
|
||||
DailyQcUtils.map_flag = -1;
|
||||
DailyQcUtils.contour_flag = -1;
|
||||
if (DailyQcUtils.pdata == null || (DailyQcUtils.pdata.length <= 0)) {
|
||||
Date currDate = ChooseDataPeriodDialog.prevDate;
|
||||
Date currDate = ChooseDataPeriodDialog.getCurrentHydroEditDate();
|
||||
String QcArea = ChooseDataPeriodDialog.prevArea;
|
||||
int qcDays = MPEDisplayManager.getCurrent().getDqcDays();
|
||||
// checks to see if area or date has changed since last data load
|
||||
dqc_good = dqc.qcDataReload(currDate, QcArea, qcDays, false);
|
||||
// pdata = dqc.pdata;
|
||||
}
|
||||
// pcp_in_use = dqc.pcp_in_use;
|
||||
dataSet.clear();
|
||||
dataSet.addAll(dataType);
|
||||
|
||||
|
|
|
@ -65,6 +65,8 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Mar 05, 2014 17114 lbousaidi display PC data in gage table.
|
||||
* Sep 04, 2014 16699 cgobs Fixed 14.3.1 issue with reading MPE field data.
|
||||
* Oct 19, 2015 18090 lbousaidi fixed best estimate qpe display.
|
||||
* Nov 18, 2015 18093 snaples Added selectedGridIndex to maintain selected grid after table refresh.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author mpduff
|
||||
|
@ -199,6 +201,7 @@ public class GageTableDataManager {
|
|||
* The selected grid.
|
||||
*/
|
||||
private String selectedGrid = null;
|
||||
private int selectedGridIndex = 0;
|
||||
|
||||
static {
|
||||
sdf = new SimpleDateFormat("yyyyMMddHH");
|
||||
|
@ -1269,6 +1272,21 @@ public class GageTableDataManager {
|
|||
this.selectedGrid = selectedGrid;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the selectedGridIndex value
|
||||
*/
|
||||
public int getSelectedGridIndex() {
|
||||
return selectedGridIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param selectedGridIndex
|
||||
* the int value of the selected GridIndex
|
||||
*/
|
||||
public void setSelectedGridIndex(int selectedGridIndex) {
|
||||
this.selectedGridIndex = selectedGridIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the rows
|
||||
*/
|
||||
|
|
|
@ -106,9 +106,12 @@ import com.raytheon.viz.mpe.ui.dialogs.gagetable.xml.GageTableSortType;
|
|||
* Jan 28, 2014 16994 snaples Updated populateGridCombo to get correct filename prefix for matching up selection.
|
||||
* Feb 02, 2014 16201 snaples Added saved data flag support
|
||||
* Apr 16, 2014 3025 mpduff Fix sort method.
|
||||
*
|
||||
* Nov 18, 2015 18093 snaples Fixed GridComboListener to trigger table update when changing compare column.
|
||||
* Dec 02, 2015 18094 lbousaidi added the sorting method for multi column sorting.
|
||||
* Dec 07, 2015 18137 lbousaidi fixed sorting after editing gages.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
*
|
||||
* @author mpduff
|
||||
* @version 1.0
|
||||
*/
|
||||
|
@ -144,7 +147,7 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
/**
|
||||
* The grid selection combo box.
|
||||
*/
|
||||
private final JComboBox gridCombo = new JComboBox();
|
||||
private final JComboBox<String> gridCombo = new JComboBox<String>();
|
||||
|
||||
private final GridComboListener gridComboListener = new GridComboListener();
|
||||
|
||||
|
@ -219,6 +222,9 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
hrFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
|
||||
dateFormat = new SimpleDateFormat("MMM dd, yyyy");
|
||||
dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
|
||||
AppsDefaults appsDefaults = AppsDefaults.getInstance();
|
||||
|
||||
selectedGrid = appsDefaults.getToken("mpe_selected_grid_gagediff");
|
||||
|
||||
// Get a list of non-data column names
|
||||
for (String colName : GageTableConstants.BASE_COLUMNS) {
|
||||
|
@ -239,9 +245,6 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
displayManager = MPEDisplayManager.getCurrent();
|
||||
currentDate = displayManager.getCurrentEditDate();
|
||||
|
||||
AppsDefaults appsDefaults = AppsDefaults.getInstance();
|
||||
|
||||
selectedGrid = appsDefaults.getToken("mpe_selected_grid_gagediff");
|
||||
dataManager.setSelectedGrid(selectedGrid);
|
||||
|
||||
columnData = dataManager.getColumnDataList();
|
||||
|
@ -1022,8 +1025,11 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
}
|
||||
}
|
||||
}
|
||||
// setting the selected index ensures that when we refresh the combo box it displays the correct field
|
||||
dataManager.setSelectedGridIndex(gridComboSelection);
|
||||
tableModel.refreshTable();
|
||||
sortAllRowsBy(tableModel, sortColumnIndex, ascending);
|
||||
|
||||
gridCombo.setSelectedIndex(dataManager.getSelectedGridIndex());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1272,9 +1278,6 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
rowData);
|
||||
dataChanged = true;
|
||||
}
|
||||
// Update the grid combobox
|
||||
gridCombo.removeAllItems();
|
||||
populateGridCombo();
|
||||
|
||||
} else {
|
||||
Enumeration<TableColumn> colEnum = table.getColumnModel()
|
||||
|
@ -1337,10 +1340,26 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
*/
|
||||
private GageTableSortSettings setSortColumns(
|
||||
GageTableSortSettings settings, int index, boolean ascending) {
|
||||
settings.setSortCol4Index(settings.getSortCol3Index());
|
||||
settings.setSortCol3Index(settings.getSortCol2Index());
|
||||
settings.setSortCol2Index(settings.getSortCol1Index());
|
||||
settings.setSortCol1Index(index);
|
||||
|
||||
int aPos = getSortClickPosition( settings, index );
|
||||
if ( 4 == aPos || 0 == aPos ){
|
||||
|
||||
settings.setSortCol4Index(settings.getSortCol3Index());
|
||||
settings.setSortCol3Index(settings.getSortCol2Index());
|
||||
settings.setSortCol2Index(settings.getSortCol1Index());
|
||||
settings.setSortCol1Index(index);
|
||||
}else if ( 3 == aPos ){
|
||||
|
||||
settings.setSortCol3Index(settings.getSortCol2Index());
|
||||
settings.setSortCol2Index(settings.getSortCol1Index());
|
||||
settings.setSortCol1Index(index);
|
||||
|
||||
}else if ( 2 == aPos ){
|
||||
|
||||
settings.setSortCol2Index(settings.getSortCol1Index());
|
||||
settings.setSortCol1Index(index);
|
||||
|
||||
}
|
||||
|
||||
settings.setAscending4(settings.getAscending3());
|
||||
settings.setAscending3(settings.getAscending2());
|
||||
|
@ -1384,6 +1403,38 @@ public class GageTableDlg extends JFrame implements IEditTimeChangedListener {
|
|||
GageTableProductManager.getInstance().fireUpdateEvent(event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get click position for sorting
|
||||
*
|
||||
* @param settings
|
||||
* The GageTableColumnSettings
|
||||
* @param index
|
||||
* The selected column index
|
||||
* @return
|
||||
*
|
||||
*
|
||||
**/
|
||||
private int getSortClickPosition( GageTableSortSettings settings, int index ){
|
||||
|
||||
if ( index == settings.getSortCol1Index() ){
|
||||
return 1;
|
||||
}
|
||||
|
||||
if ( index == settings.getSortCol2Index() ){
|
||||
return 2;
|
||||
}
|
||||
|
||||
if ( index == settings.getSortCol3Index() ){
|
||||
return 3;
|
||||
}
|
||||
|
||||
if ( index == settings.getSortCol4Index() ){
|
||||
return 4;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
|
|
|
@ -34,6 +34,7 @@ import com.raytheon.viz.mpe.ui.DisplayFieldData;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jul 1, 2009 2685 mpduff Initial creation.
|
||||
* Nov 16, 2016 18098 lbousaidi Added RubberPolyData attribute persistent.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -86,6 +87,7 @@ public class RubberPolyData {
|
|||
this.precipValue = precipValue;
|
||||
this.editPoints = hrapPoints;
|
||||
this.visible = visible;
|
||||
this.persistent = persistent;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -44,7 +44,8 @@ import com.raytheon.viz.hydrocommon.whfslib.colorthreshold.NamedColorUseSet;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Jun 12, 2011 lvenable Initial creation
|
||||
*
|
||||
* Nov 2015 DR 18xxx pstilles fixed problem with qpe grid written to file
|
||||
* reported by OHRFC
|
||||
* </pre>
|
||||
*
|
||||
* @author lvenable
|
||||
|
@ -54,47 +55,45 @@ import com.raytheon.viz.hydrocommon.whfslib.colorthreshold.NamedColorUseSet;
|
|||
public class BestEstimate1HrQpeDlg extends BasePostAnalysisDlg {
|
||||
|
||||
private static final int HOURS_PER_DAY = 24;
|
||||
/**
|
||||
/**
|
||||
* File combo box.
|
||||
*/
|
||||
private Combo fileCbo = null;
|
||||
private String selectedFileName = null;
|
||||
|
||||
|
||||
private List<String> xmrgFileList = null;
|
||||
private PostAnalysisManager paMgr = null;
|
||||
private String qpeDirectory = null;
|
||||
private String adjustedDirectory = null;
|
||||
|
||||
|
||||
private String qpeDirectory = null;
|
||||
private String adjustedDirectory = null;
|
||||
|
||||
private double[][] biasRatioGrid = null;
|
||||
private double[][] disaggGrid = null;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param parentShell
|
||||
* Parent shell.
|
||||
*/
|
||||
public BestEstimate1HrQpeDlg(Shell parentShell, double[][] biasRatioGrid, double[][] disaggGrid) {
|
||||
public BestEstimate1HrQpeDlg(Shell parentShell, double[][] biasRatioGrid,
|
||||
double[][] disaggGrid) {
|
||||
super(parentShell);
|
||||
|
||||
setText("1hr Best Estimate QPE Fields");
|
||||
paMgr = new PostAnalysisManager();
|
||||
xmrgFileList = paMgr.getListOfAvailableXmrgFiles();
|
||||
|
||||
|
||||
this.biasRatioGrid = biasRatioGrid;
|
||||
this.disaggGrid = disaggGrid;
|
||||
|
||||
qpeDirectory = paMgr.getXmrgFileDirectory().getAbsolutePath();
|
||||
|
||||
File paFileDirectory = paMgr.getPostAnalysisFileDirectory();
|
||||
|
||||
if (paFileDirectory != null)
|
||||
{
|
||||
adjustedDirectory = paFileDirectory.getAbsolutePath();
|
||||
}
|
||||
|
||||
|
||||
qpeDirectory = paMgr.getXmrgFileDirectory().getAbsolutePath();
|
||||
|
||||
File paFileDirectory = paMgr.getPostAnalysisFileDirectory();
|
||||
|
||||
if (paFileDirectory != null) {
|
||||
adjustedDirectory = paFileDirectory.getAbsolutePath();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -133,70 +132,66 @@ public class BestEstimate1HrQpeDlg extends BasePostAnalysisDlg {
|
|||
*/
|
||||
@Override
|
||||
protected String[] getMapLabelNames() {
|
||||
String[] names = new String[] { "1hr Best Estimate QPE", "1hr Best Estimate QPE(Grid Bias Applied)" };
|
||||
String[] names = new String[] { "1hr Best Estimate QPE",
|
||||
"1hr Best Estimate QPE(Grid Bias Applied)" };
|
||||
return names;
|
||||
}
|
||||
|
||||
|
||||
private void loadAdjustAndSaveAllQPEFiles(String destinationDirectoryName)
|
||||
{
|
||||
for (String fileName: xmrgFileList)
|
||||
{
|
||||
loadAdjustAndSave(fileName, destinationDirectoryName);
|
||||
}
|
||||
|
||||
private void loadAdjustAndSaveAllQPEFiles(String destinationDirectoryName) {
|
||||
for (String fileName : xmrgFileList) {
|
||||
loadAdjustAndSave(fileName, destinationDirectoryName);
|
||||
}
|
||||
}
|
||||
|
||||
private void loadAdjustAndSave(String fileName, String destinationDirectoryName)
|
||||
{
|
||||
String header = "BestEstimate1HrQPEDlg.loadAdjustAndSave(): ";
|
||||
|
||||
String originalFilePath = qpeDirectory + '/' + fileName;
|
||||
String destinationFilePath = destinationDirectoryName + '/' + fileName;
|
||||
|
||||
double[][] adjustedGrid = paMgr.readGridData(originalFilePath, true, true);
|
||||
|
||||
if (adjustedGrid != null)
|
||||
{
|
||||
applyGridAdjustments(adjustedGrid, biasRatioGrid, disaggGrid);
|
||||
|
||||
float[] dataArray = paMgr.convertToSingleArray(adjustedGrid, false, true);
|
||||
short[] shortArray= paMgr.convertToShortArray(dataArray, 1.0f);
|
||||
private void loadAdjustAndSave(String fileName,
|
||||
String destinationDirectoryName) {
|
||||
String header = "BestEstimate1HrQPEDlg.loadAdjustAndSave(): ";
|
||||
|
||||
XmrgFile file = new XmrgFile();
|
||||
String originalFilePath = qpeDirectory + '/' + fileName;
|
||||
String destinationFilePath = destinationDirectoryName + '/' + fileName;
|
||||
|
||||
file.setData(shortArray);
|
||||
file.setHrapExtent(paMgr.getExtent());
|
||||
file.setHeader(paMgr.getXmrgHeader());
|
||||
double[][] adjustedGrid = paMgr.readGridData(originalFilePath, true,
|
||||
true);
|
||||
|
||||
if (adjustedGrid != null) {
|
||||
applyGridAdjustments(adjustedGrid, biasRatioGrid, disaggGrid);
|
||||
|
||||
float[] dataArray = paMgr.convertToSingleArray(adjustedGrid, false,
|
||||
false);
|
||||
short[] shortArray = paMgr.convertToShortArray(dataArray, 1.0f);
|
||||
|
||||
XmrgFile file = new XmrgFile();
|
||||
|
||||
file.setData(shortArray);
|
||||
file.setHrapExtent(paMgr.getExtent());
|
||||
file.setHeader(paMgr.getXmrgHeader());
|
||||
|
||||
try {
|
||||
file.save(destinationFilePath);
|
||||
System.out.println(header + "Saved xmrg file to "
|
||||
+ destinationFilePath);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
file.save(destinationFilePath);
|
||||
System.out.println(header + "Saved xmrg file to " + destinationFilePath);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Save separate action.
|
||||
*/
|
||||
private void saveSeparateAction() {
|
||||
|
||||
loadAdjustAndSaveAllQPEFiles(adjustedDirectory);
|
||||
|
||||
|
||||
loadAdjustAndSaveAllQPEFiles(adjustedDirectory);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Save overwrite action.
|
||||
*/
|
||||
private void saveOverwriteAction()
|
||||
{
|
||||
private void saveOverwriteAction() {
|
||||
|
||||
loadAdjustAndSaveAllQPEFiles(qpeDirectory);
|
||||
loadAdjustAndSaveAllQPEFiles(qpeDirectory);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -208,110 +203,86 @@ public class BestEstimate1HrQpeDlg extends BasePostAnalysisDlg {
|
|||
gd.widthHint = 300;
|
||||
fileCbo = new Combo(shell, SWT.DROP_DOWN | SWT.BORDER | SWT.READ_ONLY);
|
||||
fileCbo.setLayoutData(gd);
|
||||
|
||||
for (String fileName : xmrgFileList)
|
||||
{
|
||||
fileCbo.add(fileName);
|
||||
|
||||
for (String fileName : xmrgFileList) {
|
||||
fileCbo.add(fileName);
|
||||
}
|
||||
|
||||
fileCbo.addSelectionListener(new SelectionAdapter()
|
||||
{
|
||||
public void widgetSelected(SelectionEvent event) {
|
||||
|
||||
System.out.println("fileCbo.addSelectionListener().widgetSelected() ");
|
||||
|
||||
selectedFileName = fileCbo.getText();
|
||||
|
||||
|
||||
fileCbo.addSelectionListener(new SelectionAdapter() {
|
||||
public void widgetSelected(SelectionEvent event) {
|
||||
|
||||
selectedFileName = fileCbo.getText();
|
||||
|
||||
loadImage(event.data);
|
||||
}
|
||||
} );
|
||||
|
||||
//make an initial selection
|
||||
});
|
||||
|
||||
// make an initial selection
|
||||
fileCbo.select(0);
|
||||
selectedFileName = fileCbo.getText();
|
||||
loadImage(null);
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
private void loadImage(Object data) {
|
||||
|
||||
String header = "BestEstimate1HrQpeDir.loadImage(): ";
|
||||
|
||||
System.out.println(header + " method called.");
|
||||
|
||||
|
||||
String filePath1 = qpeDirectory + '/' + selectedFileName;
|
||||
// String filePath2 = adjustedDirectory + '/' + selectedFileName;
|
||||
|
||||
|
||||
System.out.println(header + " filePath1 = " + filePath1);
|
||||
// System.out.println(header + " filePath2 = " + filePath2);
|
||||
|
||||
//do this temporarily until I have enough good fake data
|
||||
// filePath1 = "/home/cgobs2/xmrg_data/MMOSAIC2011081901z";
|
||||
|
||||
setDataFileName1(filePath1);
|
||||
|
||||
// filePath2 = "/home/cgobs2/xmrg_data/MMOSAIC2011081901z"; //do this temporarily until I build the adjusted data
|
||||
|
||||
//save the first image as a grid (a 2D array) so that I can then hold the adjusted grid in memory and then adjust it
|
||||
double[][] adjustedGrid = paMgr.readGridData(filePath1, false, false);
|
||||
applyGridAdjustments(adjustedGrid, biasRatioGrid, disaggGrid);
|
||||
|
||||
float[] dataArray2 = paMgr.convertToSingleArray(adjustedGrid, false, false);
|
||||
setDataArray2(dataArray2);
|
||||
setExtent2(paMgr.getExtent());
|
||||
|
||||
long newEndTime = paMgr.getTimeFromFileName(selectedFileName);
|
||||
String newEndTimeString = "ending at " + getDateTimeStringFromLongTime(newEndTime);
|
||||
//refresh the ColorLegend
|
||||
|
||||
colorLegendMgr.setDateTimeStringForLegend(newEndTimeString);
|
||||
|
||||
//refresh the maps
|
||||
mapsComp.refresh();
|
||||
}
|
||||
|
||||
private void applyGridAdjustments(double[][] adjustedGrid,
|
||||
double[][] biasGrid,
|
||||
double[][] disaggGrid)
|
||||
{
|
||||
|
||||
try
|
||||
{
|
||||
|
||||
int rowCount = adjustedGrid.length;
|
||||
int colCount = adjustedGrid[0].length;
|
||||
|
||||
for (int row = 0; row < rowCount; row++)
|
||||
{
|
||||
for (int col = 0; col < colCount; col++)
|
||||
{
|
||||
double biasValue = biasGrid[col][row];
|
||||
double disaggValue = disaggGrid[col][row] / HOURS_PER_DAY;
|
||||
|
||||
if (biasValue > 0.0)
|
||||
{
|
||||
adjustedGrid[row][col] *= biasValue;
|
||||
}
|
||||
else if (disaggValue > 0.0)
|
||||
{
|
||||
adjustedGrid[row][col] = disaggValue;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
catch (Throwable t)
|
||||
{
|
||||
t.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
String header = "BestEstimate1HrQpeDir.loadImage(): ";
|
||||
|
||||
String filePath1 = qpeDirectory + '/' + selectedFileName;
|
||||
|
||||
setDataFileName1(filePath1);
|
||||
|
||||
// save the first image as a grid (a 2D array) so that can then hold
|
||||
// the adjusted grid in memory and then adjust it
|
||||
double[][] adjustedGrid = paMgr.readGridData(filePath1, false, false);
|
||||
applyGridAdjustments(adjustedGrid, biasRatioGrid, disaggGrid);
|
||||
|
||||
float[] dataArray2 = paMgr.convertToSingleArray(adjustedGrid, false,
|
||||
false);
|
||||
setDataArray2(dataArray2);
|
||||
setExtent2(paMgr.getExtent());
|
||||
|
||||
long newEndTime = paMgr.getTimeFromFileName(selectedFileName);
|
||||
String newEndTimeString = "ending at "
|
||||
+ getDateTimeStringFromLongTime(newEndTime);
|
||||
|
||||
// refresh the ColorLegend
|
||||
colorLegendMgr.setDateTimeStringForLegend(newEndTimeString);
|
||||
|
||||
// refresh the maps
|
||||
mapsComp.refresh();
|
||||
}
|
||||
|
||||
private void applyGridAdjustments(double[][] adjustedGrid,
|
||||
double[][] biasGrid, double[][] disaggGrid) {
|
||||
|
||||
try {
|
||||
|
||||
int rowCount = adjustedGrid.length;
|
||||
int colCount = adjustedGrid[0].length;
|
||||
|
||||
for (int row = 0; row < rowCount; row++) {
|
||||
for (int col = 0; col < colCount; col++) {
|
||||
double biasValue = biasGrid[col][row];
|
||||
double disaggValue = disaggGrid[col][row] / HOURS_PER_DAY;
|
||||
|
||||
if (biasValue > 0.0) {
|
||||
adjustedGrid[row][col] *= biasValue;
|
||||
|
||||
} else if (disaggValue > 0.0) {
|
||||
adjustedGrid[row][col] = disaggValue;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
t.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
*
|
||||
* @see com.raytheon.viz.mpe.ui.dialogs.postanalysis.BasePostAnalysisDlg#
|
||||
|
@ -322,21 +293,17 @@ public class BestEstimate1HrQpeDlg extends BasePostAnalysisDlg {
|
|||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedColorUseSet createNamedColorUseSet1() {
|
||||
|
||||
@Override
|
||||
protected NamedColorUseSet createNamedColorUseSet1() {
|
||||
|
||||
return PostAnalysisManager.getNamedColorUseSet("PRECIP_ACCUM");
|
||||
//NamedColorUseSet namedColorUseSet1 = TestDriver.getNamedColorUseSet1Hr();
|
||||
//return namedColorUseSet1;
|
||||
}
|
||||
return PostAnalysisManager.getNamedColorUseSet("PRECIP_ACCUM");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedColorUseSet createNamedColorUseSet2() {
|
||||
return PostAnalysisManager.getNamedColorUseSet("PRECIP_ACCUM");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedColorUseSet createNamedColorUseSet2() {
|
||||
return PostAnalysisManager.getNamedColorUseSet("PRECIP_ACCUM");
|
||||
//NamedColorUseSet namedColorUseSet2 = TestDriver.getNamedColorUseSet1Hr();
|
||||
//return namedColorUseSet2;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -83,7 +83,10 @@ import com.raytheon.viz.mpe.ui.rsc.MPEFieldResourceData.MPEFieldFrame;
|
|||
* properly when mapping to screen.
|
||||
* Mar 10, 2014 17059 snaples Added case for Prism data for unit conversion correction.
|
||||
* Mar 19, 2014 17109 snaples Removed code that added an hour to SATPRE, the base file reference time has been adjusted.
|
||||
*
|
||||
* Nov 05, 2015 18095 lbousaidi Fixed hour substitued for satellite field precip when drawing polygon.
|
||||
* Dec 04, 2015 5165/14513 mduff Set this resource on the display manager if not set in the display manager.
|
||||
* Dec 08, 2015 5180 bkowal Made the hour substitution special case precise.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -95,8 +98,10 @@ public class MPEFieldResource extends
|
|||
implements IPolygonEditsChangedListener {
|
||||
|
||||
private static final short MISSING_VALUE = -899;
|
||||
private static final int BIG_VALUE = 1000 ;
|
||||
private static final int RATIO_CONVERSION_FACTOR = 100;
|
||||
|
||||
private static final int BIG_VALUE = 1000;
|
||||
|
||||
private static final int RATIO_CONVERSION_FACTOR = 100;
|
||||
|
||||
private ContourPreferences contourPreferences;
|
||||
|
||||
|
@ -124,6 +129,12 @@ public class MPEFieldResource extends
|
|||
contourPreferences = createContourPreferences(getCapability(
|
||||
ColorMapCapability.class).getColorMapParameters());
|
||||
PolygonEditManager.registerListener(this);
|
||||
MPEDisplayManager displayManager = MPEDisplayManager
|
||||
.getInstance(descriptor.getRenderableDisplay());
|
||||
MPEFieldResource rsc = displayManager.getDisplayedFieldResource();
|
||||
if (rsc == null) {
|
||||
displayManager.setDisplayedResource(this);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -182,8 +193,21 @@ public class MPEFieldResource extends
|
|||
subData = dataMap.get(edit.getSubDrawSource());
|
||||
if (subData == null) {
|
||||
try {
|
||||
Date date = frame.getDate();
|
||||
/*
|
||||
* SATPRE MPE file time stamp is the start time of the
|
||||
* hour. i.e. a 12z -13z product has a time stamp of
|
||||
* 12z.
|
||||
*/
|
||||
Calendar cal = Calendar.getInstance();
|
||||
cal.setTime(date);
|
||||
if (edit.getSubDrawSource() == DisplayFieldData.satPre) {
|
||||
cal.add(Calendar.HOUR, -1);
|
||||
}
|
||||
|
||||
XmrgFile subFile = MPEDisplayManager.getXmrgFile(
|
||||
edit.getSubDrawSource(), frame.getDate());
|
||||
edit.getSubDrawSource(), cal.getTime());
|
||||
|
||||
subFile.load();
|
||||
subData = subFile.getData();
|
||||
dataMap.put(edit.getSubDrawSource(), subData);
|
||||
|
@ -330,242 +354,208 @@ public class MPEFieldResource extends
|
|||
timeToLoad.setTime(currTime.getRefTime());
|
||||
timeToLoad.add(Calendar.HOUR, -i);
|
||||
|
||||
|
||||
if (displayField==DisplayFieldData.satPre) {
|
||||
//SATPRE MPE file time stamp is the start time of the hour
|
||||
//i.e. a 12z -13z product has a time stamp of 12z.
|
||||
timeToLoad.add(Calendar.HOUR, -1);
|
||||
if (displayField == DisplayFieldData.satPre) {
|
||||
// SATPRE MPE file time stamp is the start time of the hour
|
||||
// i.e. a 12z -13z product has a time stamp of 12z.
|
||||
timeToLoad.add(Calendar.HOUR, -1);
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (displayField.isAComparisonField() )
|
||||
{
|
||||
ComparisonFields comparisonFields = displayField.getComparisonFields();
|
||||
DisplayFieldData field1 = comparisonFields.getField1();
|
||||
DisplayFieldData field2 = comparisonFields.getField2();
|
||||
|
||||
XmrgFile file1 = MPEDisplayManager.getXmrgFile(field1,
|
||||
timeToLoad.getTime());
|
||||
|
||||
XmrgFile file2 = MPEDisplayManager.getXmrgFile(field2,
|
||||
timeToLoad.getTime());
|
||||
|
||||
boolean isDifference = false;
|
||||
boolean isRatio = false;
|
||||
|
||||
if (displayField.equals(DisplayFieldData.precipDifferenceField))
|
||||
{
|
||||
isDifference = true;
|
||||
|
||||
}
|
||||
else if (displayField.equals(DisplayFieldData.precipRatioField))
|
||||
{
|
||||
isRatio = true;
|
||||
}
|
||||
|
||||
try {
|
||||
file1.load();
|
||||
file2.load();
|
||||
} catch (IOException e) {
|
||||
Activator.statusHandler.handle(
|
||||
Priority.INFO,
|
||||
"Error loading XMRG file for "
|
||||
+ field1 + " or " + field2
|
||||
+ " at time "
|
||||
+ MPEDateFormatter
|
||||
.format_MMM_dd_yyyy_HH(timeToLoad
|
||||
.getTime()), e);
|
||||
continue;
|
||||
}
|
||||
|
||||
Rectangle fileExtent = file1.getHrapExtent();
|
||||
short[] file1Data = file1.getData();
|
||||
short[] file2Data = file2.getData();
|
||||
|
||||
for (int y = 0; y < displayExtent.height; ++y) {
|
||||
for (int x = 0; x < displayExtent.width; ++x) {
|
||||
|
||||
int px = x + displayExtent.x;
|
||||
int py = y + displayExtent.y;
|
||||
if (px >= fileExtent.x
|
||||
&& px < (fileExtent.x + fileExtent.width)
|
||||
&& py >= fileExtent.y
|
||||
&& py < (fileExtent.y + fileExtent.height))
|
||||
{
|
||||
int frameIdx = y * displayExtent.width + x;
|
||||
int fx = px - fileExtent.x;
|
||||
int fy = py - fileExtent.y;
|
||||
int fileIdx = fy * fileExtent.width + fx;
|
||||
|
||||
short value1 = file1Data[fileIdx];
|
||||
short value2 = file2Data[fileIdx];
|
||||
|
||||
|
||||
short fi = 0;
|
||||
|
||||
if (isDifference)
|
||||
{
|
||||
short diffValue = calculateDifference(value1, value2);
|
||||
fi = diffValue;
|
||||
}
|
||||
else if (isRatio)
|
||||
{
|
||||
double ratio = calculateRatio(value1, value2);
|
||||
|
||||
if (ratio != MISSING_VALUE)
|
||||
{
|
||||
fi = (short) ( ratio * RATIO_CONVERSION_FACTOR );
|
||||
}
|
||||
else
|
||||
{
|
||||
fi = MISSING_VALUE;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//short fc = frameData[frameIdx];
|
||||
//fc is initial value of frameData[frameIdx],
|
||||
//it is used to help accumulate precip in a multi-hour accum situation
|
||||
frameData[frameIdx] = fi;
|
||||
if (displayField.isAComparisonField()) {
|
||||
ComparisonFields comparisonFields = displayField
|
||||
.getComparisonFields();
|
||||
DisplayFieldData field1 = comparisonFields.getField1();
|
||||
DisplayFieldData field2 = comparisonFields.getField2();
|
||||
|
||||
} //end if (px >=)
|
||||
} //end for x
|
||||
} //end for y
|
||||
|
||||
}
|
||||
else //is a non-comparison field
|
||||
XmrgFile file1 = MPEDisplayManager.getXmrgFile(field1,
|
||||
timeToLoad.getTime());
|
||||
|
||||
XmrgFile file2 = MPEDisplayManager.getXmrgFile(field2,
|
||||
timeToLoad.getTime());
|
||||
|
||||
boolean isDifference = false;
|
||||
boolean isRatio = false;
|
||||
|
||||
if (displayField.equals(DisplayFieldData.precipDifferenceField)) {
|
||||
isDifference = true;
|
||||
|
||||
} else if (displayField
|
||||
.equals(DisplayFieldData.precipRatioField)) {
|
||||
isRatio = true;
|
||||
}
|
||||
|
||||
try {
|
||||
file1.load();
|
||||
file2.load();
|
||||
} catch (IOException e) {
|
||||
Activator.statusHandler.handle(
|
||||
Priority.INFO,
|
||||
"Error loading XMRG file for "
|
||||
+ field1
|
||||
+ " or "
|
||||
+ field2
|
||||
+ " at time "
|
||||
+ MPEDateFormatter
|
||||
.format_MMM_dd_yyyy_HH(timeToLoad
|
||||
.getTime()), e);
|
||||
continue;
|
||||
}
|
||||
|
||||
Rectangle fileExtent = file1.getHrapExtent();
|
||||
short[] file1Data = file1.getData();
|
||||
short[] file2Data = file2.getData();
|
||||
|
||||
for (int y = 0; y < displayExtent.height; ++y) {
|
||||
for (int x = 0; x < displayExtent.width; ++x) {
|
||||
|
||||
int px = x + displayExtent.x;
|
||||
int py = y + displayExtent.y;
|
||||
if (px >= fileExtent.x
|
||||
&& px < (fileExtent.x + fileExtent.width)
|
||||
&& py >= fileExtent.y
|
||||
&& py < (fileExtent.y + fileExtent.height)) {
|
||||
int frameIdx = y * displayExtent.width + x;
|
||||
int fx = px - fileExtent.x;
|
||||
int fy = py - fileExtent.y;
|
||||
int fileIdx = fy * fileExtent.width + fx;
|
||||
|
||||
short value1 = file1Data[fileIdx];
|
||||
short value2 = file2Data[fileIdx];
|
||||
|
||||
short fi = 0;
|
||||
|
||||
if (isDifference) {
|
||||
short diffValue = calculateDifference(value1,
|
||||
value2);
|
||||
fi = diffValue;
|
||||
} else if (isRatio) {
|
||||
double ratio = calculateRatio(value1, value2);
|
||||
|
||||
if (ratio != MISSING_VALUE) {
|
||||
fi = (short) (ratio * RATIO_CONVERSION_FACTOR);
|
||||
} else {
|
||||
fi = MISSING_VALUE;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// short fc = frameData[frameIdx];
|
||||
// fc is initial value of frameData[frameIdx],
|
||||
// it is used to help accumulate precip in a
|
||||
// multi-hour accum situation
|
||||
frameData[frameIdx] = fi;
|
||||
|
||||
} // end if (px >=)
|
||||
} // end for x
|
||||
} // end for y
|
||||
|
||||
} else // is a non-comparison field
|
||||
{
|
||||
|
||||
XmrgFile file = MPEDisplayManager.getXmrgFile(displayField,
|
||||
timeToLoad.getTime());
|
||||
try {
|
||||
long fileLength = file.getFile().length();
|
||||
//System.out.printf("FileName = %s, length = %d\n", file.getFile().getPath(), fileLength);
|
||||
if (fileLength > 0)
|
||||
{
|
||||
file.load();
|
||||
}
|
||||
else //can't read the file since it is empty
|
||||
{
|
||||
continue;
|
||||
}
|
||||
} catch (IOException e) {
|
||||
Activator.statusHandler.handle(
|
||||
Priority.INFO,
|
||||
"Error loading XMRG file for "
|
||||
+ displayField
|
||||
+ " at time "
|
||||
+ MPEDateFormatter
|
||||
.format_MMM_dd_yyyy_HH(timeToLoad
|
||||
.getTime()), e);
|
||||
continue;
|
||||
}
|
||||
XmrgFile file = MPEDisplayManager.getXmrgFile(displayField,
|
||||
timeToLoad.getTime());
|
||||
try {
|
||||
long fileLength = file.getFile().length();
|
||||
// System.out.printf("FileName = %s, length = %d\n",
|
||||
// file.getFile().getPath(), fileLength);
|
||||
if (fileLength > 0) {
|
||||
file.load();
|
||||
} else // can't read the file since it is empty
|
||||
{
|
||||
continue;
|
||||
}
|
||||
} catch (IOException e) {
|
||||
Activator.statusHandler.handle(
|
||||
Priority.INFO,
|
||||
"Error loading XMRG file for "
|
||||
+ displayField
|
||||
+ " at time "
|
||||
+ MPEDateFormatter
|
||||
.format_MMM_dd_yyyy_HH(timeToLoad
|
||||
.getTime()), e);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
Rectangle fileExtent = file.getHrapExtent();
|
||||
short[] fileData = file.getData();
|
||||
for (int y = 0; y < displayExtent.height; ++y) {
|
||||
for (int x = 0; x < displayExtent.width; ++x) {
|
||||
int px = x + displayExtent.x;
|
||||
int py = y + displayExtent.y;
|
||||
if (px >= fileExtent.x
|
||||
&& px < (fileExtent.x + fileExtent.width)
|
||||
&& py >= fileExtent.y
|
||||
&& py < (fileExtent.y + fileExtent.height)) {
|
||||
int frameIdx = y * displayExtent.width + x;
|
||||
int fx = px - fileExtent.x;
|
||||
int fy = py - fileExtent.y;
|
||||
int fileIdx = fy * fileExtent.width + fx;
|
||||
short fi = fileData[fileIdx];
|
||||
short fc = frameData[frameIdx];
|
||||
|
||||
Rectangle fileExtent = file.getHrapExtent();
|
||||
short[] fileData = file.getData();
|
||||
for (int y = 0; y < displayExtent.height; ++y) {
|
||||
for (int x = 0; x < displayExtent.width; ++x) {
|
||||
int px = x + displayExtent.x;
|
||||
int py = y + displayExtent.y;
|
||||
if (px >= fileExtent.x
|
||||
&& px < (fileExtent.x + fileExtent.width)
|
||||
&& py >= fileExtent.y
|
||||
&& py < (fileExtent.y + fileExtent.height)) {
|
||||
int frameIdx = y * displayExtent.width + x;
|
||||
int fx = px - fileExtent.x;
|
||||
int fy = py - fileExtent.y;
|
||||
int fileIdx = fy * fileExtent.width + fx;
|
||||
short fi = fileData[fileIdx];
|
||||
short fc = frameData[frameIdx];
|
||||
|
||||
if (fc < 0 && fi >= 0)
|
||||
{
|
||||
//orig precip is missing, and this hour's value is valid (> = 0)
|
||||
// so set the value to the current hour's value
|
||||
frameData[frameIdx] = fi;
|
||||
}
|
||||
else if (fc >= 0 && fi > 0)
|
||||
{
|
||||
//some previous hour's precip has been recorded and this hour's value is valid (> = 0)
|
||||
//so accumulate
|
||||
frameData[frameIdx] += fi;
|
||||
}
|
||||
} //end if (px >=)
|
||||
} //end for x
|
||||
} //end for y
|
||||
} //end else is a non-comparison field
|
||||
|
||||
} //end for i
|
||||
if (fc < 0 && fi >= 0) {
|
||||
// orig precip is missing, and this hour's value
|
||||
// is valid (> = 0)
|
||||
// so set the value to the current hour's value
|
||||
frameData[frameIdx] = fi;
|
||||
} else if (fc >= 0 && fi > 0) {
|
||||
// some previous hour's precip has been recorded
|
||||
// and this hour's value is valid (> = 0)
|
||||
// so accumulate
|
||||
frameData[frameIdx] += fi;
|
||||
}
|
||||
} // end if (px >=)
|
||||
} // end for x
|
||||
} // end for y
|
||||
} // end else is a non-comparison field
|
||||
|
||||
} // end for i
|
||||
|
||||
return new MPEFieldFrame(currTime.getRefTime(), frameData,
|
||||
PolygonEditManager.getPolygonEdits(resourceData.getFieldData(),
|
||||
currTime.getRefTime()));
|
||||
}
|
||||
|
||||
private short calculateDifference(short value1, short value2)
|
||||
{
|
||||
|
||||
private short calculateDifference(short value1, short value2) {
|
||||
short result = 0;
|
||||
|
||||
if (( value1 >= 0) && (value2 >= 0) )
|
||||
{
|
||||
result = (short) (value1 - value2 );
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
if ((value1 >= 0) && (value2 >= 0)) {
|
||||
result = (short) (value1 - value2);
|
||||
} else {
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
return result;
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
private double calculateRatio(short numerator, short denominator)
|
||||
{
|
||||
double result = 0;
|
||||
|
||||
if (denominator > 0)
|
||||
{
|
||||
if (numerator >= 0)
|
||||
{
|
||||
result = numerator / denominator;
|
||||
}
|
||||
else
|
||||
{
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
}
|
||||
|
||||
else if (denominator == 0)
|
||||
{
|
||||
if (numerator == 0)
|
||||
{
|
||||
result = 1.0; //if no rain, they are in agreeement, so show this
|
||||
}
|
||||
else if (numerator > 0)
|
||||
{
|
||||
result = BIG_VALUE;
|
||||
}
|
||||
else // numerator is missing
|
||||
{
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
|
||||
private double calculateRatio(short numerator, short denominator) {
|
||||
double result = 0;
|
||||
|
||||
if (denominator > 0) {
|
||||
if (numerator >= 0) {
|
||||
result = numerator / denominator;
|
||||
} else {
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
}
|
||||
|
||||
else if (denominator == 0) {
|
||||
if (numerator == 0) {
|
||||
result = 1.0; // if no rain, they are in agreeement, so show
|
||||
// this
|
||||
} else if (numerator > 0) {
|
||||
result = BIG_VALUE;
|
||||
} else // numerator is missing
|
||||
{
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
} else {
|
||||
result = MISSING_VALUE;
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
@ -610,66 +600,63 @@ public class MPEFieldResource extends
|
|||
int length = data.length;
|
||||
short[] imageData = new short[length];
|
||||
switch (cvuse) {
|
||||
case Locbias:
|
||||
case LocbiasDP:
|
||||
case Height:
|
||||
case Index:
|
||||
case Locspan:
|
||||
case LocspanDP:
|
||||
case mintempPrism:
|
||||
case maxtempPrism:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case Prism:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value < 0) {
|
||||
imageData[i] = 0;
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case precipDifferenceField:
|
||||
case precipRatioField:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
|
||||
default :
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
} else if(value <= 0){
|
||||
imageData[i] = 1;
|
||||
} else if(value > 0 && value < 25){
|
||||
value = 10;
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
case Locbias:
|
||||
case LocbiasDP:
|
||||
case Height:
|
||||
case Index:
|
||||
case Locspan:
|
||||
case LocspanDP:
|
||||
case mintempPrism:
|
||||
case maxtempPrism:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case Prism:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value < 0) {
|
||||
imageData[i] = 0;
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case precipDifferenceField:
|
||||
case precipRatioField:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
for (int i = 0; i < length; ++i) {
|
||||
short value = data[i];
|
||||
if (value == MISSING_VALUE) {
|
||||
imageData[i] = 0;
|
||||
} else if (value <= 0) {
|
||||
imageData[i] = 1;
|
||||
} else if (value > 0 && value < 25) {
|
||||
value = 10;
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
} else {
|
||||
imageData[i] = (short) dataToImage.convert(value);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
return new GriddedImageDisplay2(ShortBuffer.wrap(imageData),
|
||||
gridGeometry, this);
|
||||
}
|
||||
|
|
|
@ -96,6 +96,8 @@ import com.vividsolutions.jts.geom.GeometryFactory;
|
|||
* Feb 14, 2013 1616 bsteffen Add option for interpolation of colormap
|
||||
* parameters, disable colormap interpolation
|
||||
* by default.
|
||||
* Nov 19, 2015 18105 lbousaidi Removed time and PrecipField legend on the right
|
||||
* side of cave to give space to basin info.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -897,16 +899,8 @@ public class MPELegendResource extends
|
|||
} else if (rsc.getStatus() != ResourceStatus.INITIALIZED) {
|
||||
continue;
|
||||
} else {
|
||||
legend.label = rsc.getName();
|
||||
legend.label="";
|
||||
legend.resource = resourcePair;
|
||||
if (rsc.isTimeAgnostic() == false) {
|
||||
DataTime date = frameInfo.getTimeForResource(rsc);
|
||||
String time = " No Data Available";
|
||||
if (date != null) {
|
||||
time = " - " + date.getLegendString();
|
||||
}
|
||||
legend.label += time;
|
||||
}
|
||||
}
|
||||
|
||||
if (!vis) {
|
||||
|
|
|
@ -66,6 +66,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
* Mar 10, 2015 14575 snaples Added additional status flags.
|
||||
* Oct 14, 2015 17977 snaples Fixed loadData to read station
|
||||
* lists when new area, which means it needs to read some tokens also.
|
||||
* Nov 25, 2015 17986 snaples Updated array func to adjust QC codes for update to dialogs.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -437,7 +438,7 @@ public class DailyQcUtils {
|
|||
|
||||
public static int new_area_flag = 0;
|
||||
|
||||
public int func[] = { 8, 0, 3, 1, 2 };
|
||||
public int func[] = { 8, 3, 0, 1, 2 };
|
||||
|
||||
public static int hrgt12z = -1;
|
||||
|
||||
|
|
|
@ -57,12 +57,13 @@ import com.raytheon.viz.radar.frame.RadarDataTime;
|
|||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Oct 8, 2009 bsteffen Initial creation
|
||||
* Nov 21, 2009 #3576 rjpeter Refactored use of DerivParamDesc.
|
||||
* May 13, 2015 4461 bsteffen Generate radar times from time queries.
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------- -------- --------- ----------------------------------------
|
||||
* Oct 08, 2009 bsteffen Initial creation
|
||||
* Nov 21, 2009 3576 rjpeter Refactored use of DerivParamDesc.
|
||||
* May 13, 2015 4461 bsteffen Generate radar times from time queries.
|
||||
* Nov 02, 2015 5071 bsteffen Fix NPE when time query of Unit Status
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -145,10 +146,18 @@ public class RadarDataCubeAdapter extends PointDataCubeAdapter {
|
|||
Number level = (Number) map.get(LEVEL_FIELD);
|
||||
radarTime.setLevelValue(level.doubleValue());
|
||||
Number elevation = (Number) map.get(ELEVATION_FIELD);
|
||||
radarTime.setElevationNumber(elevation.intValue());
|
||||
Number volume = (Number) map.get(VOLUME_FIELD);
|
||||
radarTime.setVolumeScanNumber(volume.intValue());
|
||||
time = radarTime;
|
||||
if (elevation == null) {
|
||||
/*
|
||||
* Certain products such as Unit Status do not apply to a
|
||||
* particular elevation.
|
||||
*/
|
||||
time.setLevelValue(level.doubleValue());
|
||||
} else {
|
||||
radarTime.setElevationNumber(elevation.intValue());
|
||||
Number volume = (Number) map.get(VOLUME_FIELD);
|
||||
radarTime.setVolumeScanNumber(volume.intValue());
|
||||
time = radarTime;
|
||||
}
|
||||
}
|
||||
// Best res requests need this because they span a time period
|
||||
if (time.getRefTime().before(
|
||||
|
|
|
@ -24,6 +24,9 @@ import java.nio.charset.Charset;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.eclipse.core.runtime.FileLocator;
|
||||
import org.eclipse.core.runtime.IPath;
|
||||
import org.eclipse.core.runtime.Path;
|
||||
import org.geotools.coverage.grid.GeneralGridEnvelope;
|
||||
import org.geotools.coverage.grid.GridGeometry2D;
|
||||
import org.geotools.geometry.DirectPosition2D;
|
||||
|
@ -33,6 +36,7 @@ import org.opengis.referencing.crs.ProjectedCRS;
|
|||
import org.opengis.referencing.datum.PixelInCell;
|
||||
import org.opengis.referencing.operation.MathTransform;
|
||||
import org.opengis.referencing.operation.TransformException;
|
||||
import org.osgi.framework.Bundle;
|
||||
|
||||
import com.raytheon.uf.common.geospatial.MapUtil;
|
||||
import com.raytheon.uf.common.pointdata.PointDataContainer;
|
||||
|
@ -41,9 +45,7 @@ import com.raytheon.uf.common.pointdata.PointDataView;
|
|||
import com.raytheon.uf.common.status.IUFStatusHandler;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.viz.core.status.StatusConstants;
|
||||
import com.raytheon.viz.redbook.Activator;
|
||||
import com.raytheon.viz.redbookua.rsc.RedbookUpperAirResource;
|
||||
|
||||
/**
|
||||
* Decoder for redbook upper air products.
|
||||
|
@ -54,6 +56,7 @@ import com.raytheon.viz.redbookua.rsc.RedbookUpperAirResource;
|
|||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ----------- --------------------------
|
||||
* Mar 24, 2010 1029 dfriedma Initial creation
|
||||
* Nov 17, 2015 5134 njensen Fixed loading pointDataDescription
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -289,20 +292,17 @@ public class RedbookUpperAirDecoder {
|
|||
|
||||
private static synchronized PointDataDescription getPointDataDescription() {
|
||||
if (pointDataDescription == null) {
|
||||
InputStream is = RedbookUpperAirResource.class
|
||||
.getResourceAsStream("/res/pointdata/redbookua.xml");
|
||||
if (is != null) {
|
||||
try {
|
||||
try {
|
||||
pointDataDescription = PointDataDescription
|
||||
.fromStream(is);
|
||||
} finally {
|
||||
is.close();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler.handle(Priority.PROBLEM,
|
||||
"Could load point data description", e);
|
||||
Bundle bundle = Activator.getDefault().getBundle();
|
||||
IPath path = new Path("/res/pointdata/redbookua.xml");
|
||||
try (InputStream is = FileLocator.openStream(bundle, path, false)) {
|
||||
if (is != null) {
|
||||
pointDataDescription = PointDataDescription.fromStream(is);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
statusHandler
|
||||
.handle(Priority.PROBLEM,
|
||||
"Couldn't load point data description for redbookua",
|
||||
e);
|
||||
}
|
||||
}
|
||||
return pointDataDescription;
|
||||
|
|
|
@ -356,8 +356,11 @@ import com.raytheon.viz.ui.simulatedtime.SimulatedTimeOperations;
|
|||
* simulated time.
|
||||
* Sep 30, 2015 4860 skorolev Corrected misspelling.
|
||||
* 07Oct2015 RM 18132 D. Friedman Exlucde certain phensigs from automatic ETN incrementing.
|
||||
* 11Dec2015 RM14752 mgamazaychikov Fix problems with wrapping in the impact section.
|
||||
* Nov 05, 2015 5039 rferrel Prevent wrapping text to a component name line and clean up of streams.
|
||||
* 19Nov2015 5141 randerso Replace commas with ellipses if product not enabled for
|
||||
* mixed case transmission
|
||||
* 10Dec2015 5206 randerso Replace commas with ellipses only in WarnGen products
|
||||
* 11Dec2015 RM14752 mgamazaychikov Fix problems with wrapping in the impact section.
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -389,6 +392,10 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
private static List<String> gfePils = Arrays.asList("WSW", "NPW", "HLS",
|
||||
"CFW", "WCN", "FFA", "MWW", "RFW");
|
||||
|
||||
private static final List<String> warngenPils = Arrays.asList("AWW", "EWW",
|
||||
"FFS", "FFW", "FLS", "FLW", "FRW", "MWS", "NOW", "SMW", "SPS",
|
||||
"SVR", "SVS", "TOR");
|
||||
|
||||
/**
|
||||
* Default list of VTEC phenomena significance codes for which the ETN
|
||||
* should not be changed when sending a NEW-action product.
|
||||
|
@ -4019,8 +4026,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
.getCaretOffset()));
|
||||
int caretOffsetOnLine = textEditor.getCaretOffset()
|
||||
- lineStartOffset;
|
||||
int numberOfSpaces = (textEditor.getTabs() - caretOffsetOnLine
|
||||
% textEditor.getTabs());
|
||||
int numberOfSpaces = (textEditor.getTabs() - (caretOffsetOnLine % textEditor
|
||||
.getTabs()));
|
||||
String spaces = "";
|
||||
for (int x = 0; x < numberOfSpaces; x++) {
|
||||
spaces += ' ';
|
||||
|
@ -4243,6 +4250,14 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
// section
|
||||
setCurrentHeaderAndBody();
|
||||
|
||||
// if product is a WarnGen product and is not enabled for mixed case
|
||||
// transmission, replace all commas with ellipses
|
||||
if (warngenPils.contains(product.getNnnid())
|
||||
&& !MixedCaseProductSupport.isMixedCase(product.getNnnid())) {
|
||||
textEditor.setText(textEditor.getText()
|
||||
.replaceAll(", {0,1}", "..."));
|
||||
}
|
||||
|
||||
// Mark the uneditable warning text
|
||||
if (markUneditableText(textEditor)) {
|
||||
// Add listener to monitor attempt to edit locked text
|
||||
|
@ -4382,6 +4397,17 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
if (originalText != null) {
|
||||
textEditor.setText(originalText);
|
||||
}
|
||||
|
||||
// if product is not enabled for mixed case transmission,
|
||||
// replace all commas with ellipses
|
||||
StdTextProduct product = TextDisplayModel.getInstance()
|
||||
.getStdTextProduct(token);
|
||||
if ((product != null)
|
||||
&& !MixedCaseProductSupport.isMixedCase(product.getNnnid())) {
|
||||
textEditor.setText(textEditor.getText()
|
||||
.replaceAll(", {0,1}", "..."));
|
||||
}
|
||||
|
||||
// Mark the uneditable warning text
|
||||
if (markUneditableText(textEditor)) {
|
||||
// Add listener to monitor attempt to edit locked text
|
||||
|
@ -5226,7 +5252,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
}
|
||||
boolean result = true;
|
||||
VtecObject vo = VtecUtil.parseMessage(prod.getProduct());
|
||||
if (vo != null && excludedPhenSigs != null
|
||||
if ((vo != null) && (excludedPhenSigs != null)
|
||||
&& excludedPhenSigs.contains(vo.getPhensig())) {
|
||||
result = false;
|
||||
}
|
||||
|
@ -6444,17 +6470,10 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
return;
|
||||
}
|
||||
|
||||
// textEditor.setWordWrap(false);
|
||||
|
||||
// Set the text editor's contents to the warning message.
|
||||
textEditor.removeVerifyListener(TextEditorDialog.this);
|
||||
textEditor.setText(w);
|
||||
//
|
||||
// // Mark the uneditable warning text
|
||||
// if (markUneditableText(textEditor)) {
|
||||
// // Add listener to monitor attempt to edit locked text
|
||||
// textEditor.addVerifyListener(TextEditorDialog.this);
|
||||
// }
|
||||
|
||||
showDialog();
|
||||
long t1 = System.currentTimeMillis();
|
||||
SimpleDateFormat sdf = new SimpleDateFormat(
|
||||
|
@ -6465,7 +6484,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
+ "ms to show dialog");
|
||||
enterEditor();
|
||||
|
||||
if (autoWrapMenuItem != null
|
||||
if ((autoWrapMenuItem != null)
|
||||
&& !autoWrapMenuItem.isDisposed()) {
|
||||
Menu menu = autoWrapMenuItem.getMenu();
|
||||
for (MenuItem item : menu.getItems()) {
|
||||
|
@ -7084,6 +7103,15 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
}
|
||||
|
||||
textEditor.append(textProduct);
|
||||
|
||||
// if product is a WarnGen product and is not enabled for mixed case
|
||||
// transmission, replace all commas with ellipses
|
||||
if (warngenPils.contains(product.getNnnid())
|
||||
&& !MixedCaseProductSupport.isMixedCase(product.getNnnid())) {
|
||||
textEditor.setText(textEditor.getText()
|
||||
.replaceAll(", {0,1}", "..."));
|
||||
}
|
||||
|
||||
markUneditableText(textEditor);
|
||||
|
||||
// Update text display model with the product that was
|
||||
|
@ -8112,8 +8140,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
|
|||
&& (allText.charAt(eol + 1) == '\n')) {
|
||||
deleteLen = 2;
|
||||
} else if (allText.charAt(eol) == '\n') {
|
||||
if (allText.charAt(eol - 1) == '.'
|
||||
&& allText.charAt(eol - 2) != '.') {
|
||||
if ((allText.charAt(eol - 1) == '.')
|
||||
&& (allText.charAt(eol - 2) != '.')) {
|
||||
// do not extend this line.
|
||||
return;
|
||||
} else {
|
||||
|
|
|
@ -24,7 +24,8 @@
|
|||
<contribute xsi:type="menuItem" menuText="Visibility" key="Vis" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Visibility Anl Uncertainty" key="Viserranl" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Turbulence" key="TURB" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Turbulence Index" key="TPFI" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Composite Clear Air Turbulence - GTG" key="TPFI" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Clear Air Turbulence - GTG" key="CAT" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Icing Probability" key="ICPRB" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Icing Potential" key="ICNG" indentText="false" />
|
||||
<contribute xsi:type="menuItem" menuText="Icing Severity" key="ICSEV" indentText="false" />
|
||||
|
|
|
@ -26,7 +26,9 @@ Require-Bundle: org.eclipse.ui,
|
|||
com.raytheon.uf.common.site,
|
||||
com.raytheon.viz.core.contours,
|
||||
com.raytheon.uf.viz.core.rsc,
|
||||
com.raytheon.uf.viz.d2d.ui
|
||||
com.raytheon.uf.viz.d2d.ui,
|
||||
org.eclipse.core.databinding;bundle-version="1.4.1",
|
||||
com.raytheon.uf.common.auth;bundle-version="1.14.0"
|
||||
Bundle-ActivationPolicy: lazy
|
||||
Export-Package: com.raytheon.viz.warngen,
|
||||
com.raytheon.viz.warngen.gis,
|
||||
|
|
|
@ -99,4 +99,13 @@
|
|||
recursive="true">
|
||||
</path>
|
||||
</extension>
|
||||
<extension
|
||||
point="com.raytheon.viz.ui.contextualMenu">
|
||||
<contextualMenu
|
||||
actionClass="com.raytheon.viz.warngen.gui.ShowExtensionAreaToggleAction"
|
||||
capabilityClass="com.raytheon.viz.warngen.gui.WarngenLayer"
|
||||
name="com.raytheon.viz.warngen.ShowExtensionAreaToggle"
|
||||
sortID="600">
|
||||
</contextualMenu>
|
||||
</extension>
|
||||
</plugin>
|
||||
|
|
|
@ -95,6 +95,8 @@ import com.vividsolutions.jts.precision.SimpleGeometryPrecisionReducer;
|
|||
* 05/07/2015 DR 17438 D. Friedman Clean up debug and performance logging.
|
||||
* 05/08/2015 DR 17310 D. Friedman Prevent reducePoints from generating invalid polygons.
|
||||
* 09/22/2015 DR 18033 Qinglu Lin Updated removeOverlaidLinesegments(), removed one computeSlope().
|
||||
* 12/09/2015 DR 18209 D. Friedman Support cwaStretch.
|
||||
* 12/21/2015 DCS 17942 D. Friedman Support extension area. Work around glitch in contour adjustment.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -135,9 +137,12 @@ public class PolygonUtil {
|
|||
}
|
||||
|
||||
public Polygon hatchWarningArea(Polygon origPolygon,
|
||||
Geometry origWarningArea, Polygon oldWarningPolygon)
|
||||
Geometry origWarningArea, Geometry extensionArea,
|
||||
Polygon oldWarningPolygon, boolean cwaStretch)
|
||||
throws VizException {
|
||||
float[][] contourAreaData = toFloatData(origWarningArea);
|
||||
if (extensionArea != null)
|
||||
toFloatData(extensionArea, contourAreaData);
|
||||
|
||||
/*
|
||||
* If we have an oldWarningPolygon, we can take a shortcut and see if
|
||||
|
@ -167,7 +172,7 @@ public class PolygonUtil {
|
|||
* that are used to generate origWarningArea.
|
||||
*/
|
||||
Geometry comparableIntersection = layer
|
||||
.buildIdealArea(origPolygon);
|
||||
.buildIdealArea(origPolygon, cwaStretch);
|
||||
float[][] interAreaData = toFloatData(comparableIntersection);
|
||||
if (areasEqual(interAreaData, contourAreaData)) {
|
||||
return polygonIntersection;
|
||||
|
@ -224,7 +229,7 @@ public class PolygonUtil {
|
|||
boolean showContour = false;
|
||||
if (contour != null && !showContour) {
|
||||
rval = awips1PointReduction(contour, origPolygon, origWarningArea,
|
||||
config, oldWarningPolygon);
|
||||
extensionArea, config, oldWarningPolygon, contourAreaData);
|
||||
if (rval == null) {
|
||||
return (Polygon) origPolygon.clone();
|
||||
}
|
||||
|
@ -245,14 +250,38 @@ public class PolygonUtil {
|
|||
* @return null if the original warningPolygon should be used
|
||||
*/
|
||||
private Polygon awips1PointReduction(Coordinate[] longest,
|
||||
Polygon warningPolygon, Geometry warningArea, FortConConfig config,
|
||||
Polygon oldWarningPolygon) throws VizException {
|
||||
Polygon warningPolygon, Geometry warningArea,
|
||||
Geometry extensionArea, FortConConfig config,
|
||||
Polygon oldWarningPolygon, float[][] warningAreaData)
|
||||
throws VizException {
|
||||
if (extensionArea != null) {
|
||||
/*
|
||||
* Attempt to avoid a glitch in the code below in which it chooses
|
||||
* an inappropriate side of the polygon on which to project an
|
||||
* unmatched contour point. The glitch is likely to occur when a
|
||||
* polygon point is outside the contour space, so clip the polygon
|
||||
* to it.
|
||||
*/
|
||||
Polygon wpc = WarngenLayer.convertGeom(warningPolygon, latLonToContour);
|
||||
GeometryFactory gf = new GeometryFactory();
|
||||
Coordinate[] coords = new Coordinate[5];
|
||||
coords[0] = new Coordinate(0, 0);
|
||||
coords[1] = new Coordinate(nx, 0);
|
||||
coords[2] = new Coordinate(nx, ny);
|
||||
coords[3] = new Coordinate(0, ny);
|
||||
coords[4] = new Coordinate(0, 0);
|
||||
Polygon clip = gf.createPolygon(gf.createLinearRing(coords), null);
|
||||
Geometry g = clip.intersection(wpc);
|
||||
if (g instanceof Polygon) {
|
||||
warningPolygon = WarngenLayer.convertGeom((Polygon) g, contourToLatLon);
|
||||
}
|
||||
}
|
||||
Coordinate[] vertices = warningPolygon.getCoordinates();
|
||||
vertices = Arrays.copyOf(vertices, vertices.length - 1);
|
||||
|
||||
// Extract data
|
||||
float[][] contourPolyData = toFloatData(warningPolygon);
|
||||
float[][] currentPolyData = toFloatData(warningArea);
|
||||
float[][] currentPolyData = warningAreaData;
|
||||
|
||||
// If same area is hatched, just use the current polygon.
|
||||
if (areasEqual(contourPolyData, currentPolyData)) {
|
||||
|
@ -1174,7 +1203,13 @@ public class PolygonUtil {
|
|||
}
|
||||
}
|
||||
|
||||
private float[][] toFloatData(Geometry warningArea) throws VizException {
|
||||
public float[][] toFloatData(Geometry warningArea) throws VizException {
|
||||
float[][] contourAreaData = new float[nx][ny];
|
||||
toFloatData(warningArea, contourAreaData);
|
||||
return contourAreaData;
|
||||
}
|
||||
|
||||
public void toFloatData(Geometry warningArea, float[][] contourAreaData) throws VizException {
|
||||
Geometry contoured = layer.convertGeom(warningArea, latLonToContour);
|
||||
List<Geometry> geomList = new ArrayList<Geometry>(
|
||||
contoured.getNumGeometries());
|
||||
|
@ -1188,7 +1223,6 @@ public class PolygonUtil {
|
|||
GeometryFactory gf = warningArea.getFactory();
|
||||
Point point = gf.createPoint(new Coordinate(0, 0));
|
||||
CoordinateSequence pointCS = point.getCoordinateSequence();
|
||||
float[][] contourAreaData = new float[nx][ny];
|
||||
|
||||
for (PreparedGeometry geom : prepped) {
|
||||
Envelope env = geom.getGeometry().getEnvelopeInternal();
|
||||
|
@ -1196,13 +1230,14 @@ public class PolygonUtil {
|
|||
int startY = (int) env.getMinY();
|
||||
int width = (int) env.getMaxX();
|
||||
int height = (int) env.getMaxY();
|
||||
if (startX < 0 || width > nx || startY < 0 || height > ny) {
|
||||
continue;
|
||||
}
|
||||
|
||||
startX = Math.max(0, startX - 1);
|
||||
startY = Math.max(0, startY - 1);
|
||||
width = Math.min(nx, width + 1);
|
||||
height = Math.min(ny, height + 1);
|
||||
if (width < 0 || startX >= nx || height < 0 || startY >= ny) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (int x = startX; x < width; ++x) {
|
||||
for (int y = startY; y < height; ++y) {
|
||||
|
@ -1215,7 +1250,6 @@ public class PolygonUtil {
|
|||
}
|
||||
}
|
||||
}
|
||||
return contourAreaData;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -0,0 +1,223 @@
|
|||
package com.raytheon.viz.warngen.gui;
|
||||
|
||||
import javax.measure.converter.UnitConverter;
|
||||
|
||||
import org.eclipse.core.databinding.observable.ChangeEvent;
|
||||
import org.eclipse.core.databinding.observable.IChangeListener;
|
||||
import org.eclipse.core.databinding.observable.value.WritableValue;
|
||||
import org.eclipse.swt.SWT;
|
||||
import org.eclipse.swt.events.ModifyEvent;
|
||||
import org.eclipse.swt.events.ModifyListener;
|
||||
import org.eclipse.swt.events.SelectionAdapter;
|
||||
import org.eclipse.swt.events.SelectionEvent;
|
||||
import org.eclipse.swt.layout.GridData;
|
||||
import org.eclipse.swt.layout.GridLayout;
|
||||
import org.eclipse.swt.widgets.Button;
|
||||
import org.eclipse.swt.widgets.Composite;
|
||||
import org.eclipse.swt.widgets.Label;
|
||||
import org.eclipse.swt.widgets.Text;
|
||||
|
||||
import com.raytheon.viz.warngen.gui.WarngenLayer.ExtensionAreaOptions;
|
||||
|
||||
/**
|
||||
* GUI for advanced WarnGen options.
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ------------ --------------------------
|
||||
* 12/21/2015 DCS 17942 D. Friedman Initial revision
|
||||
* </pre>
|
||||
*
|
||||
*/
|
||||
public class PolygonOptionsComposite extends Composite {
|
||||
private WarngenLayer warngenLayer;
|
||||
|
||||
private Button allowExtendedPolygonButton;
|
||||
private Text extensionDistanceText;
|
||||
private Text extensionSimplificationToleranceText;
|
||||
private Button visualizeExtensionButton;
|
||||
|
||||
private WritableValue observableOptions;
|
||||
private boolean ignoreControls;
|
||||
private boolean ignoreOptions;
|
||||
|
||||
public PolygonOptionsComposite(Composite parent, WarngenLayer warngenLayer) {
|
||||
super(parent, SWT.NONE);
|
||||
this.warngenLayer = warngenLayer;
|
||||
observableOptions = warngenLayer.getObservableExtensionAreaOptions();
|
||||
createControls();
|
||||
}
|
||||
|
||||
private void createControls() {
|
||||
GridLayout gl = new GridLayout();
|
||||
gl.numColumns = 2;
|
||||
setLayout(gl);
|
||||
|
||||
Label label;
|
||||
|
||||
GridData textGD = new GridData();
|
||||
textGD.horizontalAlignment = GridData.FILL;
|
||||
textGD.grabExcessHorizontalSpace = true;
|
||||
|
||||
GridData fillGD = new GridData();
|
||||
fillGD.horizontalAlignment = GridData.FILL;
|
||||
fillGD.grabExcessHorizontalSpace = true;
|
||||
fillGD.horizontalSpan = 2;
|
||||
|
||||
label = new Label(this, SWT.CENTER);
|
||||
label.setText("Extension Area Options");
|
||||
label.setLayoutData(fillGD);
|
||||
|
||||
allowExtendedPolygonButton = new Button(this, SWT.CHECK);
|
||||
allowExtendedPolygonButton.setText("Allow polygon to extend past valid hatching area");
|
||||
allowExtendedPolygonButton.setLayoutData(fillGD);
|
||||
allowExtendedPolygonButton.addSelectionListener(new SelectionAdapter() {
|
||||
@Override
|
||||
public void widgetSelected(SelectionEvent e) {
|
||||
if (ignoreControls) {
|
||||
return;
|
||||
}
|
||||
|
||||
ExtensionAreaOptions options = getExtensionAreaOptions().clone();
|
||||
options.setEnabled(allowExtendedPolygonButton.getSelection());
|
||||
setOptions(options);
|
||||
}
|
||||
});
|
||||
|
||||
visualizeExtensionButton = new Button(this, SWT.CHECK);
|
||||
visualizeExtensionButton.setText("Show extension area");
|
||||
visualizeExtensionButton.setLayoutData(fillGD);
|
||||
visualizeExtensionButton.setSelection(warngenLayer.isExtensionAreaVisible());
|
||||
visualizeExtensionButton.addSelectionListener(new SelectionAdapter() {
|
||||
@Override
|
||||
public void widgetSelected(SelectionEvent e) {
|
||||
if (ignoreControls) {
|
||||
return;
|
||||
}
|
||||
|
||||
warngenLayer.setExtensionAreaVisualized(visualizeExtensionButton.getSelection());
|
||||
}
|
||||
});
|
||||
warngenLayer.getObservableExtensionAreaVisible().addChangeListener(new IChangeListener() {
|
||||
@Override
|
||||
public void handleChange(ChangeEvent event) {
|
||||
visualizeExtensionButton.setSelection(warngenLayer.isExtensionAreaVisible());
|
||||
}
|
||||
});
|
||||
|
||||
label = new Label(this, SWT.LEFT);
|
||||
label.setText("Extension distance (mi)");
|
||||
extensionDistanceText = new Text(this, SWT.LEFT | SWT.SINGLE | SWT.BORDER);
|
||||
extensionDistanceText.setLayoutData(textGD);
|
||||
new DistanceController() {
|
||||
@Override
|
||||
void setValue(double value) {
|
||||
if (ignoreControls) {
|
||||
return;
|
||||
}
|
||||
|
||||
ExtensionAreaOptions options = getExtensionAreaOptions().clone();
|
||||
options.setDistance(value);
|
||||
setOptions(options);
|
||||
}
|
||||
}.setControl(extensionDistanceText);
|
||||
|
||||
label = new Label(this, SWT.LEFT);
|
||||
label.setText("Simplification tolerance (mi)");
|
||||
extensionSimplificationToleranceText = new Text(this, SWT.LEFT | SWT.SINGLE | SWT.BORDER);
|
||||
extensionSimplificationToleranceText.setLayoutData(textGD);
|
||||
new DistanceController() {
|
||||
@Override
|
||||
void setValue(double value) {
|
||||
if (ignoreControls) {
|
||||
return;
|
||||
}
|
||||
|
||||
ExtensionAreaOptions options = getExtensionAreaOptions().clone();
|
||||
options.setSimplificationTolerance(value);
|
||||
setOptions(options);
|
||||
}
|
||||
@Override
|
||||
public boolean validate(double value) {
|
||||
return value >= WarngenLayer.ExtensionAreaOptions.MINIMUM_SIMPLIFICATION_TOLERANCE;
|
||||
}
|
||||
}.setControl(extensionSimplificationToleranceText);
|
||||
|
||||
realizeExtensionAreaOptions();
|
||||
|
||||
observableOptions.addChangeListener(new IChangeListener() {
|
||||
@Override
|
||||
public void handleChange(ChangeEvent event) {
|
||||
if (! ignoreOptions) {
|
||||
realizeExtensionAreaOptions();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void setOptions(ExtensionAreaOptions options) {
|
||||
ignoreOptions = true;
|
||||
try {
|
||||
observableOptions.setValue(options);
|
||||
} finally {
|
||||
ignoreOptions = false;
|
||||
}
|
||||
}
|
||||
|
||||
private void realizeExtensionAreaOptions() {
|
||||
UnitConverter metersToMile = WarngenLayer.MILES_TO_METER.inverse();
|
||||
ExtensionAreaOptions options = getExtensionAreaOptions();
|
||||
|
||||
ignoreControls = true;
|
||||
try {
|
||||
allowExtendedPolygonButton.setSelection(options.isEnabled());
|
||||
extensionDistanceText.setText(Double.toString(
|
||||
metersToMile.convert(options.getDistance())));
|
||||
extensionSimplificationToleranceText.setText(Double.toString(
|
||||
metersToMile.convert(options.getSimplificationTolerance())));
|
||||
} finally {
|
||||
ignoreControls = false;
|
||||
}
|
||||
}
|
||||
|
||||
private ExtensionAreaOptions getExtensionAreaOptions() {
|
||||
return (ExtensionAreaOptions) observableOptions.getValue();
|
||||
}
|
||||
|
||||
private static abstract class DistanceController implements ModifyListener {
|
||||
Text text;
|
||||
public DistanceController() {
|
||||
}
|
||||
void setControl(Text text) {
|
||||
text.setTextLimit(10);
|
||||
this.text = text;
|
||||
text.addModifyListener(this);
|
||||
}
|
||||
@Override
|
||||
public void modifyText(ModifyEvent event) {
|
||||
boolean ok = false;
|
||||
double newValue = Double.NaN;
|
||||
String s = text.getText();
|
||||
s = s.trim();
|
||||
if (s.length() > 0) {
|
||||
try {
|
||||
newValue = WarngenLayer.MILES_TO_METER.convert(
|
||||
Double.parseDouble(s));
|
||||
} catch (RuntimeException e) {
|
||||
// ignore
|
||||
}
|
||||
ok = validate(newValue);
|
||||
text.setBackground(text.getDisplay().getSystemColor(
|
||||
ok ? SWT.COLOR_LIST_BACKGROUND : SWT.COLOR_RED));
|
||||
if (ok) {
|
||||
setValue(newValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
abstract void setValue(double value);
|
||||
public boolean validate(double value) { return ! Double.isNaN(value); }
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
package com.raytheon.viz.warngen.gui;
|
||||
|
||||
import org.eclipse.jface.action.IAction;
|
||||
|
||||
import com.raytheon.uf.viz.core.drawables.ResourcePair;
|
||||
import com.raytheon.uf.viz.core.rsc.AbstractVizResource;
|
||||
import com.raytheon.viz.ui.cmenu.AbstractRightClickAction;
|
||||
|
||||
/**
|
||||
 * Action to toggle the display of the extension area in WarngenLayer
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
* SOFTWARE HISTORY
|
||||
*
|
||||
* Date Ticket# Engineer Description
|
||||
* ------------ ---------- ------------ --------------------------
|
||||
* 12/21/2015 DCS 17942 D. Friedman Initial revision
|
||||
* </pre>
|
||||
*
|
||||
*/
|
||||
public class ShowExtensionAreaToggleAction extends AbstractRightClickAction {
|
||||
|
||||
WarngenLayer warngenLayer;
|
||||
|
||||
public void setSelectedRsc(ResourcePair selectedRsc) {
|
||||
super.setSelectedRsc(selectedRsc);
|
||||
AbstractVizResource<?, ?> rsc = selectedRsc != null ? selectedRsc.getResource() : null;
|
||||
if (rsc instanceof WarngenLayer) {
|
||||
warngenLayer = (WarngenLayer) rsc;
|
||||
setChecked(warngenLayer.isExtensionAreaVisible());
|
||||
} else {
|
||||
warngenLayer = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
if (warngenLayer != null) {
|
||||
boolean checked = ! warngenLayer.isExtensionAreaVisible();
|
||||
warngenLayer.setExtensionAreaVisualized(checked);
|
||||
setChecked(checked);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStyle() {
|
||||
return IAction.AS_CHECK_BOX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
return "Show Extension Area";
|
||||
}
|
||||
|
||||
}
|
|
@ -41,6 +41,8 @@ import org.eclipse.jface.dialogs.ErrorDialog;
|
|||
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
|
||||
import org.eclipse.jface.operation.IRunnableWithProgress;
|
||||
import org.eclipse.swt.SWT;
|
||||
import org.eclipse.swt.custom.CTabFolder;
|
||||
import org.eclipse.swt.custom.CTabItem;
|
||||
import org.eclipse.swt.events.SelectionAdapter;
|
||||
import org.eclipse.swt.events.SelectionEvent;
|
||||
import org.eclipse.swt.graphics.Font;
|
||||
|
@ -62,6 +64,7 @@ import org.eclipse.swt.widgets.Shell;
|
|||
import org.eclipse.swt.widgets.Text;
|
||||
import org.eclipse.ui.PlatformUI;
|
||||
|
||||
import com.raytheon.uf.common.auth.req.CheckAuthorizationRequest;
|
||||
import com.raytheon.uf.common.dataplugin.warning.AbstractWarningRecord;
|
||||
import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.BulletActionGroup;
|
||||
|
@ -81,9 +84,11 @@ import com.raytheon.uf.viz.core.VizApp;
|
|||
import com.raytheon.uf.viz.core.exception.VizException;
|
||||
import com.raytheon.uf.viz.core.localization.LocalizationManager;
|
||||
import com.raytheon.uf.viz.core.maps.MapManager;
|
||||
import com.raytheon.uf.viz.core.requests.ThriftClient;
|
||||
import com.raytheon.uf.viz.d2d.ui.map.SideView;
|
||||
import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.DisplayType;
|
||||
import com.raytheon.viz.awipstools.common.stormtrack.StormTrackState.Mode;
|
||||
import com.raytheon.viz.core.mode.CAVEMode;
|
||||
import com.raytheon.viz.texteditor.msgs.IWarngenObserver;
|
||||
import com.raytheon.viz.texteditor.util.VtecUtil;
|
||||
import com.raytheon.viz.ui.EditorUtil;
|
||||
|
@ -174,7 +179,9 @@ import com.vividsolutions.jts.geom.Polygon;
|
|||
* Nov 9, 2015 DR 14905 Qinglu Lin Updated backupSiteSelected(), disposed(), initializeComponents(), populateBackupGroup(), and
|
||||
* createProductTypeGroup, and moved existing code to newly created setBackupCboColors() and setBackupSite().
|
||||
* Nov 25, 2015 DR 17464 Qinglu Lin Updated changeTemplate().
|
||||
* Dec 9, 2015 DR 18209 D. Friedman Support cwaStretch dam break polygons.
|
||||
* Dec 10, 2015 DR 17908 Qinglu Lin Updated changeStartEndTimes(), recreateDurations(), changeSelected(), and extSelected().
|
||||
* Dec 21, 2015 DCS 17942 D. Friedman Add advanced options tab
|
||||
* </pre>
|
||||
*
|
||||
* @author chammack
|
||||
|
@ -380,7 +387,21 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
}
|
||||
});
|
||||
|
||||
Composite mainComposite = new Composite(shell, SWT.NONE);
|
||||
Composite parent = shell;
|
||||
boolean advanced = isAdvancedOptionsEnabled();
|
||||
CTabFolder tabs = null;
|
||||
CTabItem tabItem = null;
|
||||
if (advanced) {
|
||||
tabs = new CTabFolder(shell, SWT.FLAT|SWT.TOP);
|
||||
parent = tabs;
|
||||
}
|
||||
|
||||
Composite mainComposite = new Composite(parent, SWT.NONE);
|
||||
if (advanced) {
|
||||
tabItem = new CTabItem(tabs, SWT.NONE);
|
||||
tabItem.setText("Product");
|
||||
tabItem.setControl(mainComposite);
|
||||
}
|
||||
GridLayout gl = new GridLayout(1, false);
|
||||
gl.verticalSpacing = 2;
|
||||
gl.marginHeight = 1;
|
||||
|
@ -395,6 +416,12 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
createBottomButtons(mainComposite);
|
||||
setBackupSite();
|
||||
setInstructions();
|
||||
|
||||
if (advanced) {
|
||||
tabItem = new CTabItem(tabs, SWT.NONE);
|
||||
tabItem.setText("Polygon Options");
|
||||
tabItem.setControl(new PolygonOptionsComposite(tabs, warngenLayer));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1597,9 +1624,9 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
} else {
|
||||
coordinates.add(coordinates.get(0));
|
||||
PolygonUtil.truncate(coordinates, 2);
|
||||
setPolygonLocked(lockPolygon);
|
||||
warngenLayer.createDamThreatArea(coordinates
|
||||
.toArray(new Coordinate[coordinates.size()]));
|
||||
setPolygonLocked(lockPolygon);
|
||||
warngenLayer.issueRefresh();
|
||||
damBreakInstruct = null;
|
||||
}
|
||||
|
@ -1912,9 +1939,16 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
} else {
|
||||
bulletListManager.recreateBulletsFromFollowup(
|
||||
warngenLayer.getConfiguration(), action, oldWarning);
|
||||
if (bulletListManager.isDamNameSeletcted()
|
||||
&& (action != WarningAction.NEW)) {
|
||||
if (bulletListManager.isDamNameSeletcted()) {
|
||||
setPolygonLocked(true);
|
||||
/* Need to set the warning area again now that the dam bullets
|
||||
* are set up so that cwaStretch=true dam polygons will work.
|
||||
*/
|
||||
try {
|
||||
warngenLayer.resetWarningPolygonAndAreaFromRecord(oldWarning);
|
||||
} catch (VizException e) {
|
||||
statusHandler.error("Error updating the warning area for selected dam", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
refreshBulletList();
|
||||
|
@ -2739,4 +2773,25 @@ public class WarngenDialog extends CaveSWTDialog implements
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
public boolean isCwaStretchDamBulletSelected() {
|
||||
DamInfoBullet bullet = bulletListManager.getSelectedDamInfoBullet();
|
||||
return bullet != null && bullet.isCwaStretch();
|
||||
}
|
||||
|
||||
private static boolean isAdvancedOptionsEnabled() {
|
||||
boolean hasPermission = false;
|
||||
|
||||
try {
|
||||
String userId = LocalizationManager.getInstance().getCurrentUser();
|
||||
CheckAuthorizationRequest request = new CheckAuthorizationRequest(
|
||||
userId, "advancedOptions", "WarnGen");
|
||||
hasPermission = (Boolean) ThriftClient.sendRequest(request);
|
||||
} catch (Exception e) {
|
||||
statusHandler.error("error checking permissions", e);
|
||||
}
|
||||
|
||||
return ((hasPermission && CAVEMode.getMode() == CAVEMode.PRACTICE)
|
||||
|| WarngenLayer.isWarngenDeveloperMode());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,6 +32,9 @@ import java.util.Iterator;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.FutureTask;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
@ -39,6 +42,9 @@ import javax.measure.converter.UnitConverter;
|
|||
import javax.measure.unit.NonSI;
|
||||
import javax.measure.unit.SI;
|
||||
|
||||
import org.eclipse.core.databinding.observable.ChangeEvent;
|
||||
import org.eclipse.core.databinding.observable.IChangeListener;
|
||||
import org.eclipse.core.databinding.observable.value.WritableValue;
|
||||
import org.eclipse.core.runtime.IProgressMonitor;
|
||||
import org.eclipse.core.runtime.IStatus;
|
||||
import org.eclipse.core.runtime.Status;
|
||||
|
@ -65,6 +71,7 @@ import com.raytheon.uf.common.dataplugin.warning.WarningRecord.WarningAction;
|
|||
import com.raytheon.uf.common.dataplugin.warning.config.AreaSourceConfiguration;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.BulletActionGroup;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.DialogConfiguration;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.ExtensionArea;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.GridSpacing;
|
||||
import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration;
|
||||
import com.raytheon.uf.common.dataplugin.warning.gis.GenerateGeospatialDataResult;
|
||||
|
@ -141,6 +148,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometry;
|
|||
import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
|
||||
import com.vividsolutions.jts.io.ParseException;
|
||||
import com.vividsolutions.jts.io.WKTReader;
|
||||
import com.vividsolutions.jts.simplify.TopologyPreservingSimplifier;
|
||||
|
||||
/**
|
||||
* Warngen drawing layer. Need to do EVERYTHING in stereographic over centoid of
|
||||
|
@ -243,6 +251,10 @@ import com.vividsolutions.jts.io.WKTReader;
|
|||
* 11/09/2015 DR 14905 Qinglu Lin Added lastSelectedBackupSite and its accessors, and updated constructor.
|
||||
* 11/25/2015 DR 17464 Qinglu Lin Updated two updateWarnedAreas(), updateWarnedAreaState(), createSquare(),redrawBoxFromTrack(),
|
||||
* redrawBoxFromHatched(), createDamThreatArea(), createPolygonFromRecord(), addOrRemoveCounty().
|
||||
* 12/09/2015 ASM #18209 D. Friedman Support cwaStretch dam break polygons.
|
||||
* 12/21/2015 DCS 17942 D. Friedman Support "extension area": polygon can extend past normal features into WFO's marine/land areas.
|
||||
* Show preview of redrawn polygon when developer mode property is set.
|
||||
* 01/06/2016 ASM #18453 D. Friedman Cache extension areas so they are not regenerated on Restart or (limited) template changes.
|
||||
* </pre>
|
||||
*
|
||||
* @author mschenke
|
||||
|
@ -256,6 +268,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
private static final IPerformanceStatusHandler perfLog = PerformanceStatus
|
||||
.getHandler("WG:");
|
||||
|
||||
/*package*/ static final UnitConverter MILES_TO_METER = NonSI.MILE
|
||||
.getConverterTo(SI.METER);
|
||||
|
||||
static String lastSelectedBackupSite;
|
||||
|
||||
String uniqueFip = null;
|
||||
|
@ -285,6 +300,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
GeospatialData[] features;
|
||||
|
||||
GeospatialData[] cwaStretchFeatures; // contains all from 'features'
|
||||
|
||||
MathTransform latLonToLocal;
|
||||
|
||||
MathTransform localToLatLon;
|
||||
|
@ -294,6 +311,11 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
int nx, ny;
|
||||
|
||||
GeneralGridGeometry localGridGeometry;
|
||||
|
||||
GeospatialData[] getFeatures(boolean cwaStretch) {
|
||||
return cwaStretch && cwaStretchFeatures != null ?
|
||||
cwaStretchFeatures : features;
|
||||
}
|
||||
}
|
||||
|
||||
private static class GeospatialDataAccessor {
|
||||
|
@ -323,11 +345,11 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
* polygon to intersect with in lat/lon space
|
||||
* @return the warning area in screen projection
|
||||
*/
|
||||
private Geometry buildArea(Polygon polygon) {
|
||||
private Geometry buildArea(Geometry polygon, boolean cwaStretch) {
|
||||
polygon = latLonToLocal(polygon);
|
||||
Geometry area = null;
|
||||
if (polygon != null) {
|
||||
for (GeospatialData r : geoData.features) {
|
||||
for (GeospatialData r : geoData.getFeatures(cwaStretch)) {
|
||||
PreparedGeometry prepGeom = (PreparedGeometry) r.attributes
|
||||
.get(GeospatialDataList.LOCAL_PREP_GEOM);
|
||||
try {
|
||||
|
@ -403,6 +425,14 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
return fipsIds;
|
||||
}
|
||||
|
||||
private boolean isEquivalentTo(GeospatialDataAccessor other) {
|
||||
return other != null && geoData == other.geoData
|
||||
&& ((areaConfig.getFipsField() == null && other.areaConfig.getFipsField() == null)
|
||||
|| (areaConfig.getFipsField() != null
|
||||
&& areaConfig.getFipsField().equals(
|
||||
other.areaConfig.getFipsField())));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private class CustomMaps extends Job {
|
||||
|
@ -478,6 +508,12 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
private Polygon oldWarningPolygon;
|
||||
|
||||
private boolean cwaStretch;
|
||||
|
||||
private Future<Geometry> extensionAreaFuture;
|
||||
|
||||
private GeospatialDataAccessor extensionAreaGDA;
|
||||
|
||||
public AreaHatcher(PolygonUtil polygonUtil) {
|
||||
super("Hatching Warning Area");
|
||||
setSystem(true);
|
||||
|
@ -494,11 +530,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
protected IStatus run(IProgressMonitor monitor) {
|
||||
Geometry warningArea;
|
||||
Polygon warningPolygon;
|
||||
GeospatialDataAccessor extensionAreaGDA;
|
||||
Future<Geometry> extensionAreaFuture;
|
||||
|
||||
synchronized (polygonUtil) {
|
||||
warningArea = this.warningArea;
|
||||
warningPolygon = this.warningPolygon;
|
||||
this.warningArea = this.warningPolygon = null;
|
||||
extensionAreaGDA = this.extensionAreaGDA;
|
||||
extensionAreaFuture = this.extensionAreaFuture;
|
||||
}
|
||||
|
||||
if ((warningArea != null) && (warningPolygon != null)) {
|
||||
|
@ -507,14 +547,23 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Polygon outputHatchedArea = null;
|
||||
Geometry outputHatchedWarningArea = null;
|
||||
String adjustmentMessage = null;
|
||||
Geometry extensionArea = null;
|
||||
try {
|
||||
if (extensionAreaGDA != null && extensionAreaFuture != null) {
|
||||
Geometry staticExtensionArea = extensionAreaFuture.get();
|
||||
extensionArea = extensionAreaGDA.buildArea(warningPolygon, false); // never uses cwaStretch
|
||||
if (extensionArea != null && staticExtensionArea != null)
|
||||
extensionArea = GeometryUtil.intersection(extensionArea, staticExtensionArea);
|
||||
}
|
||||
|
||||
warningPolygon = PolygonUtil
|
||||
.removeDuplicateCoordinate(warningPolygon);
|
||||
Polygon hatched = polygonUtil.hatchWarningArea(
|
||||
warningPolygon,
|
||||
removeCounties(warningArea,
|
||||
state.getFipsOutsidePolygon()),
|
||||
oldWarningPolygon);
|
||||
extensionArea, oldWarningPolygon,
|
||||
cwaStretch);
|
||||
if (hatched != null) {
|
||||
// DR 15559
|
||||
Coordinate[] coords = hatched.getCoordinates();
|
||||
|
@ -588,7 +637,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
outputHatchedWarningArea = createWarnedArea(
|
||||
latLonToLocal(outputHatchedArea),
|
||||
latLonToLocal(warningArea));
|
||||
latLonToLocal(warningArea),
|
||||
cwaStretch);
|
||||
if (! outputHatchedArea.isValid()) {
|
||||
statusHandler.debug(String.format("Input %s redrawn to invalid %s",
|
||||
inputWarningPolygon, outputHatchedArea));
|
||||
|
@ -596,6 +646,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
this.hatchedArea = outputHatchedArea;
|
||||
this.hatchedWarningArea = outputHatchedWarningArea;
|
||||
setOutputPolygon(outputHatchedArea);
|
||||
} catch (Exception e) {
|
||||
this.hatchException = e;
|
||||
/*
|
||||
|
@ -607,6 +658,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
statusHandler.handle(Priority.DEBUG, String.format(
|
||||
"Error redrawing polygon: %s\n Input: %s\nAdjustments: %s\n",
|
||||
e.getLocalizedMessage(), inputWarningPolygon, adjustmentMessage), e);
|
||||
setOutputPolygon(null);
|
||||
}
|
||||
perfLog.logDuration("AreaHatcher total", System.currentTimeMillis() - t0);
|
||||
}
|
||||
|
@ -620,6 +672,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
this.warningPolygon = warningPolygon;
|
||||
this.warningArea = warningArea;
|
||||
this.oldWarningPolygon = oldWarningPolygon;
|
||||
this.cwaStretch = isCwaStretch();
|
||||
|
||||
if (extensionAreaManager.isExtensionAreaActive()) {
|
||||
this.extensionAreaFuture = extensionAreaManager.getGeometryFuture();
|
||||
this.extensionAreaGDA = extensionAreaManager.getGDA();
|
||||
} else {
|
||||
this.extensionAreaFuture = null;
|
||||
this.extensionAreaGDA = null;
|
||||
}
|
||||
|
||||
this.hatchedArea = null;
|
||||
this.hatchedWarningArea = null;
|
||||
|
@ -659,6 +720,324 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
}
|
||||
|
||||
private void setOutputPolygon(final Polygon polygon) {
|
||||
VizApp.runAsync(new Runnable() {
|
||||
public void run() {
|
||||
outputPolygon = polygon;
|
||||
issueRefresh();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public class ExtensionAreaOptions implements Cloneable {
|
||||
public static final double DEFAULT_SIMPLIFICATION_TOLERANCE = 1609.344; // 1 mile
|
||||
public static final double MINIMUM_SIMPLIFICATION_TOLERANCE = 80.4672; // 0.05 miles
|
||||
private boolean enabled;
|
||||
private double distance = 0.0;
|
||||
private double simplificationTolerance = DEFAULT_SIMPLIFICATION_TOLERANCE;
|
||||
|
||||
public ExtensionAreaOptions() {
|
||||
|
||||
}
|
||||
|
||||
public ExtensionAreaOptions(ExtensionArea ea) {
|
||||
if (ea != null) {
|
||||
// Relying on converters to return NaN for NaN input
|
||||
this.distance = MILES_TO_METER.convert(ea.getDistance());
|
||||
if (! (this.distance > 0)) {
|
||||
this.distance = 0.0;
|
||||
}
|
||||
double v = MILES_TO_METER.convert(ea.getSimplificationTolerance());
|
||||
if (Double.isNaN(v)) {
|
||||
v = DEFAULT_SIMPLIFICATION_TOLERANCE;
|
||||
} else if (! (v >= MINIMUM_SIMPLIFICATION_TOLERANCE)) {
|
||||
v = MINIMUM_SIMPLIFICATION_TOLERANCE;
|
||||
}
|
||||
this.simplificationTolerance = v;
|
||||
this.enabled = this.distance > 0.0;
|
||||
} else {
|
||||
this.distance = 0.0;
|
||||
this.simplificationTolerance = DEFAULT_SIMPLIFICATION_TOLERANCE;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
public void setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
public double getDistance() {
|
||||
return distance;
|
||||
}
|
||||
public void setDistance(double distance) {
|
||||
this.distance = distance;
|
||||
}
|
||||
public double getSimplificationTolerance() {
|
||||
return simplificationTolerance;
|
||||
}
|
||||
public void setSimplificationTolerance(double simplificationTolerance) {
|
||||
this.simplificationTolerance = simplificationTolerance;
|
||||
}
|
||||
|
||||
public ExtensionAreaOptions clone() {
|
||||
try {
|
||||
return (ExtensionAreaOptions) super.clone();
|
||||
} catch (CloneNotSupportedException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class ExtensionAreaRecord {
|
||||
private GeospatialDataAccessor primaryGDA;
|
||||
private GeospatialDataAccessor gda;
|
||||
private ExtensionAreaOptions options;
|
||||
private Geometry geometry;
|
||||
private Geometry extensionAreaVis;
|
||||
public ExtensionAreaRecord(GeospatialDataAccessor primaryGDA,
|
||||
GeospatialDataAccessor gda, ExtensionAreaOptions options,
|
||||
Geometry geometry, Geometry extensionAreaVis) {
|
||||
this.primaryGDA = primaryGDA;
|
||||
this.gda = gda;
|
||||
this.options = options;
|
||||
this.geometry = geometry;
|
||||
this.extensionAreaVis = extensionAreaVis;
|
||||
}
|
||||
}
|
||||
|
||||
private class ExtensionAreaManager extends Job implements IChangeListener {
|
||||
private ExtensionAreaOptions options = new ExtensionAreaOptions();
|
||||
private WritableValue observableOptions = new WritableValue(options, null);
|
||||
|
||||
private GeospatialDataAccessor primaryGDA;
|
||||
private GeospatialDataAccessor gda;
|
||||
|
||||
private FutureTask<Geometry> geometryFuture;
|
||||
|
||||
private Map<String, ExtensionAreaRecord> cache = new HashMap<String, ExtensionAreaRecord>(3);
|
||||
|
||||
public ExtensionAreaManager() {
|
||||
super("Generate extension area");
|
||||
observableOptions.addChangeListener(this);
|
||||
}
|
||||
|
||||
public GeospatialDataAccessor getGDA() {
|
||||
return gda;
|
||||
}
|
||||
|
||||
public Future<Geometry> getGeometryFuture() {
|
||||
return geometryFuture;
|
||||
}
|
||||
|
||||
public boolean isExtensionAreaActive() {
|
||||
return options.isEnabled() && options.getDistance() > 0.0;
|
||||
}
|
||||
|
||||
public void setExtensionAreaConfig(ExtensionArea extensionAreaConfig) {
|
||||
observableOptions.setValue(new ExtensionAreaOptions(extensionAreaConfig));
|
||||
}
|
||||
|
||||
private void realizeOptions(ExtensionAreaOptions options) {
|
||||
if (options == null) {
|
||||
throw new NullPointerException("options must not be null");
|
||||
}
|
||||
boolean recreateArea = true;
|
||||
ExtensionAreaOptions oldOptions = this.options;
|
||||
if (oldOptions != null) {
|
||||
if (primaryGDA != null && primaryGDA.isEquivalentTo(geoAccessor)
|
||||
&& oldOptions.getDistance() == options.getDistance()
|
||||
&& oldOptions.getSimplificationTolerance() ==
|
||||
options.getSimplificationTolerance()) {
|
||||
recreateArea = false;
|
||||
}
|
||||
}
|
||||
this.options = options.clone();
|
||||
if (recreateArea) {
|
||||
if (geometryFuture != null) {
|
||||
geometryFuture.cancel(true);
|
||||
geometryFuture = null;
|
||||
}
|
||||
extensionAreaVis = null;
|
||||
if (extensionAreaShadedShape != null) {
|
||||
extensionAreaShadedShape.reset();
|
||||
issueRefresh();
|
||||
}
|
||||
gda = null;
|
||||
if (isExtensionAreaDefined()) {
|
||||
Exception error = null;
|
||||
primaryGDA = geoAccessor;
|
||||
try {
|
||||
gda = getPolygonExtensionGDA();
|
||||
} catch (Exception e) {
|
||||
error = e;
|
||||
}
|
||||
if (gda != null) {
|
||||
if (! useCachedArea(primaryGDA, gda, options)) {
|
||||
geometryFuture = new FutureTask<Geometry>(
|
||||
new ExtensionAreaGeometryTask(options,
|
||||
primaryGDA, gda));
|
||||
schedule();
|
||||
}
|
||||
} else {
|
||||
statusHandler.handle(Priority.WARN,
|
||||
"Could not determine geospatial data type for polygon extension area",
|
||||
error);
|
||||
}
|
||||
}
|
||||
}
|
||||
Polygon polygon = getWarngenState().getWarningPolygon();
|
||||
if (polygon != null) {
|
||||
try {
|
||||
updateWarnedAreas(true);
|
||||
} catch (VizException e) {
|
||||
statusHandler.error("Error re-hatching", e);
|
||||
}
|
||||
issueRefresh();
|
||||
}
|
||||
}
|
||||
|
||||
private boolean useCachedArea(GeospatialDataAccessor primaryGDA,
|
||||
GeospatialDataAccessor gda, ExtensionAreaOptions options) {
|
||||
ExtensionAreaRecord ear = null;
|
||||
|
||||
synchronized (cache) {
|
||||
ear = cache.get(primaryGDA.areaConfig.getAreaSource());
|
||||
}
|
||||
if (ear != null && ear.primaryGDA.isEquivalentTo(primaryGDA) &&
|
||||
ear.gda.isEquivalentTo(gda) &&
|
||||
ear.options.getDistance() == options.getDistance() &&
|
||||
ear.options.getSimplificationTolerance() == options.getSimplificationTolerance()) {
|
||||
this.geometryFuture = new FutureTask<Geometry>(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
// do nothing
|
||||
}
|
||||
}, ear.geometry);
|
||||
this.geometryFuture.run();
|
||||
extensionAreaVis = ear.extensionAreaVis;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void cacheArea(GeospatialDataAccessor primaryGDA,
|
||||
GeospatialDataAccessor gda, ExtensionAreaOptions options,
|
||||
Geometry area, Geometry vis) {
|
||||
synchronized (cache) {
|
||||
cache.put(primaryGDA.areaConfig.getAreaSource(),
|
||||
new ExtensionAreaRecord(primaryGDA, gda, options, area,
|
||||
vis));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IStatus run(IProgressMonitor monitor) {
|
||||
FutureTask<Geometry> future = geometryFuture;
|
||||
if (future != null) {
|
||||
future.run();
|
||||
}
|
||||
return Status.OK_STATUS;
|
||||
}
|
||||
|
||||
public boolean isExtensionAreaDefined() {
|
||||
return options.getDistance() > 0;
|
||||
}
|
||||
|
||||
protected GeospatialDataAccessor getPolygonExtensionGDA() throws Exception {
|
||||
GeoFeatureType geoFeatureType = getDefaultExtensionAreaGeoType();
|
||||
return geoFeatureType != null ? getGeospatialDataAcessor(geoFeatureType)
|
||||
: null;
|
||||
}
|
||||
|
||||
protected GeoFeatureType getDefaultExtensionAreaGeoType() {
|
||||
GeoFeatureType otherType = null;
|
||||
AreaSourceConfiguration asc = getConfiguration().getHatchedAreaSource();
|
||||
if (asc != null) {
|
||||
String areaSource = asc.getAreaSource().toLowerCase();
|
||||
if (areaSource.contains("marinezones"))
|
||||
otherType = GeoFeatureType.COUNTY;
|
||||
else if (areaSource.contains("county") || areaSource.contains("zone")) {
|
||||
otherType = GeoFeatureType.MARINE;
|
||||
} else {
|
||||
otherType = GeoFeatureType.COUNTY;
|
||||
}
|
||||
}
|
||||
return otherType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleChange(ChangeEvent event) {
|
||||
ExtensionAreaOptions options = (ExtensionAreaOptions) ((WritableValue) event
|
||||
.getObservable()).getValue();
|
||||
realizeOptions(options != null ? options : new ExtensionAreaOptions());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public WritableValue getObservableExtensionAreaOptions() {
|
||||
return extensionAreaManager.observableOptions;
|
||||
}
|
||||
|
||||
private class ExtensionAreaGeometryTask implements Callable<Geometry> {
|
||||
ExtensionAreaOptions options;
|
||||
GeospatialDataAccessor primaryGDA;
|
||||
GeospatialDataAccessor extensionGDA;
|
||||
|
||||
public ExtensionAreaGeometryTask(ExtensionAreaOptions options,
|
||||
GeospatialDataAccessor primaryGDA, GeospatialDataAccessor extensionGDA) {
|
||||
if (! (options.getDistance() > 0)) {
|
||||
throw new IllegalArgumentException("Extension distance must be greater than zero.");
|
||||
}
|
||||
this.options = options;
|
||||
this.primaryGDA = primaryGDA;
|
||||
this.extensionGDA = extensionGDA;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Geometry call() throws Exception {
|
||||
return createExtensionArea();
|
||||
}
|
||||
|
||||
private Geometry createExtensionArea() throws Exception {
|
||||
GeospatialData[] features = primaryGDA.geoData.getFeatures(false); // Never uses cwaStretch feactures.
|
||||
Geometry[] g = new Geometry[features.length];
|
||||
for (int i = 0; i < g.length; ++i) {
|
||||
/*
|
||||
* Pre-simplify as an optmization. Makes it possible to
|
||||
* change the static extension distance in real time.
|
||||
*/
|
||||
g[i] = extensionSimplify(
|
||||
convertGeom(features[i].geometry, primaryGDA.geoData.latLonToLocal),
|
||||
options.getSimplificationTolerance()).
|
||||
buffer(options.getDistance());
|
||||
}
|
||||
Geometry r = GeometryUtil.union(g);
|
||||
r = createExtensionAreaFromLocal(r);
|
||||
Geometry vis = extensionGDA.buildArea(r, false);
|
||||
extensionAreaManager.cacheArea(primaryGDA, extensionGDA, options, r, vis);
|
||||
extensionAreaVis = vis;
|
||||
issueRefresh();
|
||||
return r;
|
||||
}
|
||||
|
||||
private Geometry createExtensionAreaFromLocal(Geometry geom) {
|
||||
// geom should be simlified so that the following ops are not painful.
|
||||
Geometry r = geom;
|
||||
r = r.buffer(0);
|
||||
r = extensionSimplify(r, options.getSimplificationTolerance());
|
||||
r = convertGeom(r, primaryGDA.geoData.localToLatLon);
|
||||
return r;
|
||||
}
|
||||
|
||||
private Geometry extensionSimplify(Geometry geom, double tolerance) {
|
||||
if (tolerance >= 0) {
|
||||
geom = TopologyPreservingSimplifier.simplify(geom, tolerance);
|
||||
}
|
||||
return geom;
|
||||
}
|
||||
}
|
||||
|
||||
private static class GeomMetaDataUpdateNotificationObserver implements
|
||||
|
@ -790,6 +1169,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
private GeomMetaDataUpdateNotificationObserver geomUpdateObserver;
|
||||
|
||||
private ExtensionAreaManager extensionAreaManager = new ExtensionAreaManager();
|
||||
|
||||
static {
|
||||
for (int i = 0; i < 128; i++) {
|
||||
if ((i % 32) == 0) {
|
||||
|
@ -835,6 +1216,13 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
setSpeedAndAngle();
|
||||
setDuration();
|
||||
|
||||
observableExtensionAreaVisible.addChangeListener(new IChangeListener() {
|
||||
@Override
|
||||
public void handleChange(ChangeEvent event) {
|
||||
issueRefresh();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -965,6 +1353,9 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (coveredAreaFrame != null) {
|
||||
coveredAreaFrame.dispose();
|
||||
}
|
||||
if (extensionAreaShadedShape != null) {
|
||||
extensionAreaShadedShape.dispose();
|
||||
}
|
||||
|
||||
manager.dispose();
|
||||
}
|
||||
|
@ -1000,6 +1391,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
coveredAreaFrame = target.createWireframeShape(true, this.descriptor);
|
||||
shadedCoveredArea = target.createShadedShape(true,
|
||||
this.descriptor.getGridGeometry(), true);
|
||||
extensionAreaShadedShape = target.createShadedShape(true,
|
||||
this.descriptor.getGridGeometry());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1077,6 +1470,20 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
}
|
||||
|
||||
if ((Boolean) observableExtensionAreaVisible.getValue()) {
|
||||
if (extensionAreaVis != null) {
|
||||
extensionAreaShadedShape.reset();
|
||||
JTSCompiler comp = new JTSCompiler(extensionAreaShadedShape, null, descriptor);
|
||||
Geometry g = extensionAreaVis;
|
||||
extensionAreaVis = null;
|
||||
if (g != null) {
|
||||
comp.handle(g, extensionAreaVisualizationColor);
|
||||
}
|
||||
}
|
||||
target.drawShadedShape(extensionAreaShadedShape,
|
||||
extensionAreaVisualizationAlpha);
|
||||
}
|
||||
|
||||
lastMode = displayState.mode;
|
||||
}
|
||||
|
||||
|
@ -1085,17 +1492,51 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
displayState.intialFrame = trackUtil.getCurrentFrame(info);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param target
|
||||
* @param paintProps
|
||||
* @param thePrimitivePolygon2
|
||||
*/
|
||||
private static class PolygonStyle {
|
||||
public boolean show;
|
||||
public RGB color;
|
||||
public int lineWidth;
|
||||
public boolean showVertices;
|
||||
public PolygonStyle(boolean show, RGB color, int lineWidth, boolean showVertices) {
|
||||
this.show = show;
|
||||
this.color = color;
|
||||
this.lineWidth = lineWidth;
|
||||
this.showVertices = showVertices;
|
||||
}
|
||||
}
|
||||
|
||||
private static final String OUTPUT_POLYGON = "Result";
|
||||
private static final String EDIT_POLYGON = "Edit";
|
||||
|
||||
private Polygon outputPolygon = null;
|
||||
private Map<String, PolygonStyle> polygonStyles = new HashMap<String, PolygonStyle>();
|
||||
{
|
||||
polygonStyles.put(OUTPUT_POLYGON, new PolygonStyle(isWarngenDeveloperMode(),
|
||||
new RGB(0, 128, 128), 5, true));
|
||||
polygonStyles.put(EDIT_POLYGON, new PolygonStyle(true,
|
||||
new RGB(255, 255, 255), 3, true));
|
||||
}
|
||||
|
||||
private void paintPolygon(IGraphicsTarget target,
|
||||
PaintProperties paintProps, Polygon thePrimitivePolygon)
|
||||
throws VizException {
|
||||
RGB color = getCapability(ColorableCapability.class).getColor();
|
||||
float LINE_WIDTH = getCapability(OutlineCapability.class)
|
||||
.getOutlineWidth();
|
||||
if (outputPolygon != null) {
|
||||
paintPolygon(target, paintProps, outputPolygon,
|
||||
polygonStyles.get(OUTPUT_POLYGON));
|
||||
}
|
||||
PolygonStyle editStyle = polygonStyles.get(EDIT_POLYGON);
|
||||
editStyle.color = getCapability(ColorableCapability.class).getColor();
|
||||
editStyle.lineWidth = getCapability(OutlineCapability.class).getOutlineWidth();
|
||||
paintPolygon(target, paintProps, thePrimitivePolygon, editStyle);
|
||||
}
|
||||
|
||||
private void paintPolygon(IGraphicsTarget target,
|
||||
PaintProperties paintProps, Polygon thePrimitivePolygon, PolygonStyle style)
|
||||
throws VizException {
|
||||
if (!style.show)
|
||||
return;
|
||||
RGB color = style.color;
|
||||
float LINE_WIDTH = style.lineWidth;
|
||||
float zoomLevel = paintProps.getZoomLevel();
|
||||
if (LINE_WIDTH < 1.5f) {
|
||||
LINE_WIDTH = 1.5f;
|
||||
|
@ -1125,29 +1566,31 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
line.width = LINE_WIDTH;
|
||||
lines.add(line);
|
||||
|
||||
double delta;
|
||||
if (style.showVertices) {
|
||||
double delta;
|
||||
|
||||
if (!boxEditable) {
|
||||
delta = 25 * zoomLevel;
|
||||
} else {
|
||||
delta = 80 * zoomLevel;
|
||||
if (!boxEditable) {
|
||||
delta = 25 * zoomLevel;
|
||||
} else {
|
||||
delta = 80 * zoomLevel;
|
||||
}
|
||||
// Build triangle control points
|
||||
|
||||
double[] triTop = new double[] { out1[0], out1[1] - delta };
|
||||
double[] triLeft = new double[] { out1[0] - delta,
|
||||
out1[1] + delta };
|
||||
double[] triRight = new double[] { out1[0] + delta,
|
||||
out1[1] + delta };
|
||||
|
||||
DrawableLine line2 = new DrawableLine();
|
||||
line2.setCoordinates(triLeft[0], triLeft[1]);
|
||||
line2.addPoint(triTop[0], triTop[1]);
|
||||
line2.addPoint(triRight[0], triRight[1]);
|
||||
line2.addPoint(triLeft[0], triLeft[1]);
|
||||
line2.basics.color = color;
|
||||
line2.width = LINE_WIDTH;
|
||||
lines.add(line2);
|
||||
}
|
||||
// Build triangle control points
|
||||
|
||||
double[] triTop = new double[] { out1[0], out1[1] - delta };
|
||||
double[] triLeft = new double[] { out1[0] - delta,
|
||||
out1[1] + delta };
|
||||
double[] triRight = new double[] { out1[0] + delta,
|
||||
out1[1] + delta };
|
||||
|
||||
DrawableLine line2 = new DrawableLine();
|
||||
line2.setCoordinates(triLeft[0], triLeft[1]);
|
||||
line2.addPoint(triTop[0], triTop[1]);
|
||||
line2.addPoint(triRight[0], triRight[1]);
|
||||
line2.addPoint(triLeft[0], triLeft[1]);
|
||||
line2.basics.color = color;
|
||||
line2.width = LINE_WIDTH;
|
||||
lines.add(line2);
|
||||
}
|
||||
target.drawLine(lines.toArray(new DrawableLine[0]));
|
||||
}
|
||||
|
@ -1218,6 +1661,51 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
target.drawStrings(strings);
|
||||
}
|
||||
|
||||
private Geometry extensionAreaVis;
|
||||
|
||||
private WritableValue observableExtensionAreaVisible = new WritableValue(false, null);
|
||||
|
||||
private RGB extensionAreaVisualizationColor = new RGB(240, 128, 128);
|
||||
|
||||
private float extensionAreaVisualizationAlpha = 0.4f;
|
||||
|
||||
private IShadedShape extensionAreaShadedShape = null;
|
||||
|
||||
public WritableValue getObservableExtensionAreaVisible() {
|
||||
return observableExtensionAreaVisible;
|
||||
}
|
||||
|
||||
public boolean isExtensionAreaVisible() {
|
||||
return (Boolean) observableExtensionAreaVisible.getValue();
|
||||
}
|
||||
|
||||
public void setExtensionAreaVisualized(boolean visible) {
|
||||
observableExtensionAreaVisible.setValue(visible);
|
||||
}
|
||||
|
||||
public RGB getExtensionAreaVisualizationColor() {
|
||||
return extensionAreaVisualizationColor;
|
||||
}
|
||||
|
||||
public void setExtensionAreaVisualizationColor(
|
||||
RGB extensionAreaVisualizationColor) {
|
||||
if (extensionAreaVisualizationColor == null) {
|
||||
throw new NullPointerException("extensionAreaVisualizationColor must be non-null");
|
||||
}
|
||||
this.extensionAreaVisualizationColor = extensionAreaVisualizationColor;
|
||||
issueRefresh();
|
||||
}
|
||||
|
||||
public float getExtensionAreaVisualizationAlpha() {
|
||||
return extensionAreaVisualizationAlpha;
|
||||
}
|
||||
|
||||
public void setExtensionAreaVisualizationAlpha(
|
||||
float extensionAreaVisualizationAlpha) {
|
||||
this.extensionAreaVisualizationAlpha = extensionAreaVisualizationAlpha;
|
||||
issueRefresh();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param templateName
|
||||
* the templateName to set
|
||||
|
@ -1284,6 +1772,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
createAreaAndCentroidMaps();
|
||||
|
||||
this.configuration = config;
|
||||
extensionAreaManager.setExtensionAreaConfig(config.getExtensionArea());
|
||||
}// end synchronize
|
||||
|
||||
perfLog.logDuration("Init warngen config",
|
||||
|
@ -1370,11 +1859,21 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
GeospatialDataSet dataSet, GeospatialMetadata gmd, String currKey,
|
||||
long tq0) throws FactoryException, MismatchedDimensionException,
|
||||
TransformException {
|
||||
gData.features = GeospatialFactory.getGeoSpatialList(dataSet, gmd);
|
||||
GeospatialData[][] gdSets = GeospatialFactory.getGeoSpatialList(dataSet, gmd);
|
||||
GeospatialData[] allFeatures;
|
||||
gData.features = gdSets[0];
|
||||
allFeatures = gData.features;
|
||||
|
||||
GeospatialData[] stretchFeatures = gdSets[1];
|
||||
if (stretchFeatures != null) {
|
||||
allFeatures = Arrays.copyOf(gData.features, gData.features.length + stretchFeatures.length);
|
||||
System.arraycopy(stretchFeatures, 0, allFeatures, gData.features.length, stretchFeatures.length);
|
||||
gData.cwaStretchFeatures = allFeatures;
|
||||
}
|
||||
|
||||
// set the CountyUserData
|
||||
List<Geometry> geoms = new ArrayList<Geometry>(gData.features.length);
|
||||
for (GeospatialData gd : gData.features) {
|
||||
List<Geometry> geoms = new ArrayList<Geometry>(allFeatures.length);
|
||||
for (GeospatialData gd : allFeatures) {
|
||||
geoms.add(gd.geometry);
|
||||
CountyUserData cud = new CountyUserData(gd,
|
||||
String.valueOf(gd.attributes.get(WarngenLayer.GID)));
|
||||
|
@ -1389,7 +1888,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
.constructStereographic(MapUtil.AWIPS_EARTH_RADIUS,
|
||||
MapUtil.AWIPS_EARTH_RADIUS, c.y, c.x));
|
||||
gData.localToLatLon = gData.latLonToLocal.inverse();
|
||||
for (GeospatialData gd : gData.features) {
|
||||
for (GeospatialData gd : allFeatures) {
|
||||
Geometry local = JTS.transform(gd.geometry, gData.latLonToLocal);
|
||||
if (! local.isValid()) {
|
||||
TopologyException topologyException = null;
|
||||
|
@ -1510,8 +2009,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
GeospatialDataList geoDataList = getGeodataList(areaSource,
|
||||
localizedSite);
|
||||
if (geoDataList != null) {
|
||||
return Arrays.copyOf(geoDataList.features,
|
||||
geoDataList.features.length);
|
||||
GeospatialData[] features = geoDataList.getFeatures(isCwaStretch());
|
||||
return Arrays.copyOf(features, features.length);
|
||||
}
|
||||
return new GeospatialData[0];
|
||||
}
|
||||
|
@ -1733,7 +2232,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
throws Exception {
|
||||
Set<String> ugcs = new HashSet<String>();
|
||||
GeospatialDataAccessor gda = getGeospatialDataAcessor(type);
|
||||
for (String fips : gda.getAllFipsInArea(gda.buildArea(polygon))) {
|
||||
for (String fips : gda.getAllFipsInArea(gda.buildArea(polygon, isCwaStretch()))) {
|
||||
ugcs.add(FipsUtil.getUgcFromFips(fips));
|
||||
}
|
||||
return ugcs;
|
||||
|
@ -1743,7 +2242,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
// TODO: zig
|
||||
GeospatialDataAccessor gda = getGeospatialDataAcessor(type);
|
||||
Set<String> ugcs = new HashSet<String>();
|
||||
for (GeospatialData r : gda.geoData.features) {
|
||||
for (GeospatialData r : gda.geoData.getFeatures(isCwaStretch())) {
|
||||
ugcs.add(FipsUtil.getUgcFromFips(gda.getFips(r)));
|
||||
}
|
||||
return ugcs;
|
||||
|
@ -1883,7 +2382,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
if (includeAllEntries && !idsOutsidePolygon.isEmpty()) {
|
||||
if (geoData != null) {
|
||||
fipsOutsidePolygon = new HashSet<String>();
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : getActiveFeatures()) {
|
||||
CountyUserData data = (CountyUserData) f.geometry
|
||||
.getUserData();
|
||||
String fips = String.valueOf(data.entry.attributes
|
||||
|
@ -1924,7 +2423,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
* @return
|
||||
*/
|
||||
private Geometry getArea(Polygon polygon, Map<String, String[]> countyMap) {
|
||||
return getArea(geoAccessor.buildArea(polygon), countyMap, true);
|
||||
return getArea(geoAccessor.buildArea(polygon, isCwaStretch()), countyMap, true);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1974,7 +2473,8 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
Geometry newWarningArea = createWarnedArea(
|
||||
latLonToLocal(warningPolygon), preservedSelection
|
||||
&& (warningArea != null) ? latLonToLocal(warningArea)
|
||||
: null);
|
||||
: null,
|
||||
isCwaStretch());
|
||||
updateWarnedAreaState(newWarningArea);
|
||||
|
||||
perfLog.logDuration("Determining hatchedArea",
|
||||
|
@ -1992,7 +2492,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
* @return
|
||||
*/
|
||||
private Geometry createWarnedArea(Geometry hatchedArea,
|
||||
Geometry preservedSelection) {
|
||||
Geometry preservedSelection, boolean cwaStretch) {
|
||||
Geometry oldWarningPolygon = latLonToLocal(state.getOldWarningPolygon());
|
||||
Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea());
|
||||
Geometry newHatchedArea = null;
|
||||
|
@ -2035,7 +2535,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
}
|
||||
|
||||
// Loop through each of our counties returned from the query
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : geoData.getFeatures(cwaStretch)) {
|
||||
// get the geometry of the county and make sure it intersects
|
||||
// with our hatched area
|
||||
PreparedGeometry prepGeom = (PreparedGeometry) f.attributes
|
||||
|
@ -2871,6 +3371,15 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
updateWarnedAreas(true);
|
||||
}
|
||||
|
||||
public void resetWarningPolygonAndAreaFromRecord(
|
||||
AbstractWarningRecord record) throws VizException {
|
||||
setOldWarningPolygon(record);
|
||||
state.setWarningPolygon(getPolygon());
|
||||
state.setWarningArea(getWarningAreaFromPolygon(
|
||||
state.getWarningPolygon(), record));
|
||||
updateWarnedAreas(true);
|
||||
}
|
||||
|
||||
private DataTime recordFrameTime(AbstractWarningRecord warnRecord) {
|
||||
Calendar frameTime;
|
||||
String rawMessage = warnRecord.getRawmessage();
|
||||
|
@ -3243,7 +3752,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
GeometryFactory gf = new GeometryFactory();
|
||||
Point point = gf.createPoint(coord);
|
||||
// potentially adding or removing a county, figure out county
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : getActiveFeatures()) {
|
||||
Geometry geom = f.geometry;
|
||||
if (f.prepGeom.contains(point)) {
|
||||
Geometry newWarningArea;
|
||||
|
@ -3326,7 +3835,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
boolean useFallback = getConfiguration().getHatchedAreaSource()
|
||||
.isInclusionFallback();
|
||||
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : getActiveFeatures()) {
|
||||
String gid = GeometryUtil.getPrefix(f.geometry.getUserData());
|
||||
Geometry warningAreaForFeature = getWarningAreaForGids(
|
||||
Arrays.asList(gid), warningArea);
|
||||
|
@ -3365,7 +3874,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
|
||||
private Collection<GeospatialData> getDataWithFips(String fips) {
|
||||
List<GeospatialData> data = new ArrayList<GeospatialData>();
|
||||
for (GeospatialData d : geoData.features) {
|
||||
for (GeospatialData d : getActiveFeatures()) {
|
||||
if (fips.equals(getFips(d))) {
|
||||
data.add(d);
|
||||
}
|
||||
|
@ -3405,7 +3914,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
String gid;
|
||||
geomArea.clear();
|
||||
geomCentroid.clear();
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : geoData.getFeatures(true)) {
|
||||
Geometry geom = f.getGeometry();
|
||||
gid = ((CountyUserData) geom.getUserData()).gid;
|
||||
geomArea.put(gid, geom.getArea());
|
||||
|
@ -3480,7 +3989,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
populatePt = new Coordinate(centroid.getX(), centroid.getY());
|
||||
populatePtGeom = PolygonUtil.createPolygonByPoints(gf,
|
||||
populatePt, shift);
|
||||
for (GeospatialData gd : geoData.features) {
|
||||
for (GeospatialData gd : getActiveFeatures()) {
|
||||
geomN = gd.getGeometry();
|
||||
CountyUserData cud = (CountyUserData) geomN.getUserData();
|
||||
prefixN = cud.gid;
|
||||
|
@ -3709,7 +4218,7 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
while (iter.hasNext()) {
|
||||
prefix = iter.next();
|
||||
double size = 0.0d;
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : getActiveFeatures()) {
|
||||
fips = getFips(f);
|
||||
Geometry geom = f.geometry;
|
||||
if (prefix.equals(GeometryUtil.getPrefix(geom.getUserData()))) {
|
||||
|
@ -3749,12 +4258,12 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
* @param inputArea
|
||||
* @return
|
||||
*/
|
||||
public Geometry buildIdealArea(Geometry inputArea) {
|
||||
public Geometry buildIdealArea(Geometry inputArea, boolean stretch) {
|
||||
Geometry localHatchedArea = latLonToLocal(inputArea);
|
||||
Geometry oldWarningArea = latLonToLocal(state.getOldWarningArea());
|
||||
Geometry newHatchedArea = null;
|
||||
|
||||
for (GeospatialData f : geoData.features) {
|
||||
for (GeospatialData f : geoData.getFeatures(stretch)) {
|
||||
// get the geometry of the county and make sure it intersects
|
||||
// with our hatched area
|
||||
PreparedGeometry prepGeom = (PreparedGeometry) f.attributes
|
||||
|
@ -3855,4 +4364,21 @@ public class WarngenLayer extends AbstractStormTrackResource {
|
|||
public static void setLastSelectedBackupSite(String backupSite) {
|
||||
lastSelectedBackupSite = backupSite;
|
||||
}
|
||||
|
||||
private GeospatialData[] getActiveFeatures() {
|
||||
return geoData.getFeatures(isCwaStretch());
|
||||
}
|
||||
|
||||
private boolean isCwaStretch() {
|
||||
return dialog != null && dialog.isCwaStretchDamBulletSelected() &&
|
||||
! isBoxEditable();
|
||||
}
|
||||
|
||||
private static boolean warngenDeveloperMode =
|
||||
Boolean.getBoolean("com.raytheon.viz.warngen.developerMode");
|
||||
|
||||
public static boolean isWarngenDeveloperMode() {
|
||||
return warngenDeveloperMode;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -740,10 +740,15 @@ public class TemplateRunner {
|
|||
if (atIndex > 0) {
|
||||
int hhmmStart = atIndex + 3;
|
||||
hhmmEnd = message.indexOf(", ", hhmmStart);
|
||||
if (hhmmEnd > 0) {
|
||||
context.put("corToNewMarker", "cortonewmarker");
|
||||
context.put("corEventtime",
|
||||
message.substring(hhmmStart, hhmmEnd));
|
||||
if (hhmmEnd < 0) {
|
||||
// check for ellipsis
|
||||
hhmmEnd = message.indexOf("...", hhmmStart);
|
||||
} else {
|
||||
if (hhmmEnd > 0) {
|
||||
context.put("corToNewMarker", "cortonewmarker");
|
||||
context.put("corEventtime",
|
||||
message.substring(hhmmStart, hhmmEnd));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -57,6 +57,7 @@ import com.raytheon.viz.hydrocommon.HydroConstants;
|
|||
* 12 Aug 2014 3049 bkowal Close the BufferedReader when finished.
|
||||
* 21 May 2015 4501 skorolev Changed a way of database connection. Got rid of Vector.
|
||||
* 17 Sep 2015 4886 skorolev Corrected updateRejecteddata.
|
||||
* 17 Dec 2015 18407 xwei Fixed: XDAT in Hydro Perspective allows user only to view 4 days instead of 30 days
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -171,7 +172,7 @@ public class XdatDB {
|
|||
for (Object[] obj : rs) {
|
||||
hours.add((Integer) obj[0]);
|
||||
}
|
||||
if (hours != null && hours.size() > 1) {
|
||||
if (hours != null && hours.size() >= 1) {
|
||||
numHours = hours.get(0);
|
||||
}
|
||||
if (numHours > maxNumHours) {
|
||||
|
|
|
@ -24,6 +24,7 @@ import java.util.ArrayList;
|
|||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.regex.Matcher;
|
||||
|
@ -31,12 +32,15 @@ import java.util.regex.Pattern;
|
|||
|
||||
import org.hibernate.Session;
|
||||
|
||||
import com.raytheon.uf.common.dataplugin.PluginDataObject;
|
||||
import com.raytheon.uf.common.dataplugin.PluginException;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridParmInfo;
|
||||
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
|
||||
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
|
||||
import com.raytheon.uf.common.dataplugin.level.Level;
|
||||
import com.raytheon.uf.common.dataquery.db.QueryParam.QueryOperand;
|
||||
import com.raytheon.uf.common.time.DataTime;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||
import com.raytheon.uf.edex.database.query.DatabaseQuery;
|
||||
import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
|
||||
|
@ -62,20 +66,20 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
|
|||
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
|
||||
* Aug 05, 2015 4486 rjpeter Changed Timestamp to Date.
|
||||
* Aug 14, 2015 17801 bhunderm Fixed logic to choose the parm with lesser
|
||||
* duration when have multiple grids for same fcsthr.
|
||||
* duration when have multiple grids for same fcsthr.
|
||||
* Dec 03, 2015 5168 randerso Added ability to retrieve D2D data by fcsthr or timerange
|
||||
* </pre>
|
||||
*
|
||||
* @author randerso
|
||||
* @version 1.0
|
||||
*/
|
||||
|
||||
// **********************************************************************
|
||||
// TODO: Can this be merged into GridDao/D2DGridDatabase?
|
||||
// **********************************************************************
|
||||
public class GFED2DDao extends GridDao {
|
||||
private static final String FCST_TIME = "dataTime.fcstTime";
|
||||
private static final String FCSTTIME_ID = PluginDataObject.DATATIME_ID
|
||||
+ ".fcstTime";
|
||||
|
||||
private static final String REF_TIME = "dataTime.refTime";
|
||||
private static final String REFTIME_ID = PluginDataObject.DATATIME_ID
|
||||
+ ".refTime";
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -100,10 +104,10 @@ public class GFED2DDao extends GridDao {
|
|||
public List<Integer> getForecastTimes(String d2dModelName, Date refTime)
|
||||
throws DataAccessLayerException {
|
||||
DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
|
||||
query.addDistinctParameter(FCST_TIME);
|
||||
query.addDistinctParameter(FCSTTIME_ID);
|
||||
query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
|
||||
query.addQueryParam(REF_TIME, refTime);
|
||||
query.addOrder(FCST_TIME, true);
|
||||
query.addQueryParam(REFTIME_ID, refTime);
|
||||
query.addOrder(FCSTTIME_ID, true);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Integer> vals = (List<Integer>) this.queryByCriteria(query);
|
||||
|
@ -112,7 +116,7 @@ public class GFED2DDao extends GridDao {
|
|||
|
||||
/**
|
||||
* Retrieves a GridRecord from the grib metadata database based on a ParmID,
|
||||
* TimeRange, and GridParmInfo.
|
||||
* forecastTime, and GridParmInfo.
|
||||
*
|
||||
* @param d2dModelName
|
||||
* @param refTime
|
||||
|
@ -135,17 +139,80 @@ public class GFED2DDao extends GridDao {
|
|||
try {
|
||||
s = getSession();
|
||||
// TODO: clean up so we only make one db query
|
||||
SortedMap<Integer, Integer> rawTimes = queryByParmId(d2dModelName,
|
||||
SortedMap<DataTime, Integer> rawTimes = queryByParmId(d2dModelName,
|
||||
refTime, d2dParmName, d2dLevel, s);
|
||||
|
||||
// if forecastTime is null just pick one,
|
||||
// this is for static data since all times are the same
|
||||
Integer id = null;
|
||||
if (forecastTime == null) {
|
||||
forecastTime = rawTimes.keySet().iterator().next();
|
||||
id = rawTimes.values().iterator().next();
|
||||
} else {
|
||||
for (Entry<DataTime, Integer> entry : rawTimes.entrySet()) {
|
||||
if (entry.getKey().getFcstTime() == forecastTime) {
|
||||
id = entry.getValue();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GridRecord retVal = (GridRecord) s.get(GridRecord.class,
|
||||
rawTimes.get(forecastTime));
|
||||
GridRecord retVal = (GridRecord) s.get(GridRecord.class, id);
|
||||
return retVal;
|
||||
|
||||
} finally {
|
||||
if (s != null) {
|
||||
try {
|
||||
s.close();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error occurred closing database session", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a GridRecord from the grib metadata database based on a ParmID,
|
||||
* TimeRange, and GridParmInfo.
|
||||
*
|
||||
* @param d2dModelName
|
||||
* @param refTime
|
||||
* @param d2dParmName
|
||||
* @param d2dLevel
|
||||
* @param timeRange
|
||||
* The valid period of the desired GridRecord, null for any
|
||||
* record
|
||||
* @param info
|
||||
* The GridParmInfo for the requested d2d grid.
|
||||
* @return The GridRecord from the grib metadata database
|
||||
* @throws DataAccessLayerException
|
||||
* If errors occur while querying the metadata database
|
||||
*/
|
||||
public GridRecord getGrid(String d2dModelName, Date refTime,
|
||||
String d2dParmName, Level d2dLevel, TimeRange timeRange,
|
||||
GridParmInfo info) throws DataAccessLayerException {
|
||||
Session s = null;
|
||||
|
||||
try {
|
||||
s = getSession();
|
||||
// TODO: clean up so we only make one db query
|
||||
SortedMap<DataTime, Integer> rawTimes = queryByParmId(d2dModelName,
|
||||
refTime, d2dParmName, d2dLevel, s);
|
||||
|
||||
// if forecastTime is null just pick one,
|
||||
// this is for static data since all times are the same
|
||||
Integer id = null;
|
||||
if (timeRange == null) {
|
||||
id = rawTimes.values().iterator().next();
|
||||
} else {
|
||||
for (Entry<DataTime, Integer> entry : rawTimes.entrySet()) {
|
||||
if (entry.getKey().getValidPeriod().equals(timeRange)) {
|
||||
id = entry.getValue();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GridRecord retVal = (GridRecord) s.get(GridRecord.class, id);
|
||||
return retVal;
|
||||
|
||||
} finally {
|
||||
|
@ -175,21 +242,21 @@ public class GFED2DDao extends GridDao {
|
|||
* @throws DataAccessLayerException
|
||||
* If errors occur while querying the metadata database
|
||||
*/
|
||||
public SortedMap<Integer, Integer> queryByParmId(String d2dModelName,
|
||||
public SortedMap<DataTime, Integer> queryByParmId(String d2dModelName,
|
||||
Date refTime, String d2dParmName, Level d2dLevel, Session s)
|
||||
throws DataAccessLayerException {
|
||||
|
||||
DatabaseQuery query;
|
||||
query = new DatabaseQuery(GridRecord.class.getName());
|
||||
query.addReturnedField(FCST_TIME);
|
||||
query.addReturnedField(PluginDataObject.DATATIME_ID);
|
||||
query.addReturnedField("id");
|
||||
query.addReturnedField(GridConstants.PARAMETER_ABBREVIATION);
|
||||
query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
|
||||
query.addQueryParam(REF_TIME, refTime);
|
||||
query.addQueryParam(REFTIME_ID, refTime);
|
||||
query.addQueryParam(GridConstants.PARAMETER_ABBREVIATION, d2dParmName
|
||||
+ "%hr", QueryOperand.LIKE);
|
||||
query.addQueryParam(GridConstants.LEVEL_ID, d2dLevel.getId());
|
||||
query.addOrder(FCST_TIME, true);
|
||||
query.addOrder(FCSTTIME_ID, true);
|
||||
query.addOrder(GridConstants.PARAMETER_ABBREVIATION, true);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -210,30 +277,30 @@ public class GFED2DDao extends GridDao {
|
|||
}
|
||||
}
|
||||
|
||||
SortedMap<Integer, Integer> dataTimes = new TreeMap<Integer, Integer>();
|
||||
SortedMap<DataTime, Integer> dataTimes = new TreeMap<DataTime, Integer>();
|
||||
if (firstTry.isEmpty()) {
|
||||
query = new DatabaseQuery(GridRecord.class.getName());
|
||||
query.addReturnedField(FCST_TIME);
|
||||
query.addReturnedField(PluginDataObject.DATATIME_ID);
|
||||
query.addReturnedField("id");
|
||||
query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
|
||||
query.addQueryParam(REF_TIME, refTime);
|
||||
query.addQueryParam(REFTIME_ID, refTime);
|
||||
query.addQueryParam(GridConstants.PARAMETER_ABBREVIATION,
|
||||
d2dParmName);
|
||||
query.addQueryParam(GridConstants.LEVEL_ID, d2dLevel.getId());
|
||||
query.addOrder(FCST_TIME, true);
|
||||
query.addOrder(FCSTTIME_ID, true);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Object[]> secondTry = (List<Object[]>) this
|
||||
.queryByCriteria(query);
|
||||
|
||||
for (Object[] row : secondTry) {
|
||||
dataTimes.put((Integer) row[0], (Integer) row[1]);
|
||||
dataTimes.put((DataTime) row[0], (Integer) row[1]);
|
||||
}
|
||||
} else {
|
||||
int i = 0;
|
||||
while (i < firstTry.size()) {
|
||||
Object[] row = firstTry.get(i++);
|
||||
Integer fcstHr = (Integer) row[0];
|
||||
DataTime dataTime = (DataTime) row[0];
|
||||
Integer id = (Integer) row[1];
|
||||
Matcher matcher = pattern.matcher((String) row[2]);
|
||||
int dur = Integer.MAX_VALUE;
|
||||
|
@ -243,7 +310,8 @@ public class GFED2DDao extends GridDao {
|
|||
|
||||
while (i < firstTry.size()) {
|
||||
Object[] nextRow = firstTry.get(i);
|
||||
if (fcstHr.equals(nextRow[0])) {
|
||||
DataTime nextDataTime = (DataTime) nextRow[0];
|
||||
if (dataTime.getFcstTime() == nextDataTime.getFcstTime()) {
|
||||
i++;
|
||||
String nextParam = (String) nextRow[2];
|
||||
Matcher nextMatcher = pattern.matcher(nextParam);
|
||||
|
@ -258,33 +326,33 @@ public class GFED2DDao extends GridDao {
|
|||
break;
|
||||
}
|
||||
}
|
||||
dataTimes.put(fcstHr, id);
|
||||
dataTimes.put(dataTime, id);
|
||||
}
|
||||
}
|
||||
return dataTimes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the available Forecast Hours by D2D parm id.
|
||||
* Retrieve the available Data Times by D2D parm id.
|
||||
*
|
||||
* @param d2dModelName
|
||||
* @param refTime
|
||||
* @param d2dParmName
|
||||
* @param d2dLevel
|
||||
* @return the list of forecast hours, empty if none
|
||||
* @return the list of data times, empty if none
|
||||
* @throws DataAccessLayerException
|
||||
*/
|
||||
public List<Integer> queryFcstHourByParmId(String d2dModelName,
|
||||
public List<DataTime> queryDataTimeByParmId(String d2dModelName,
|
||||
Date refTime, String d2dParmName, Level d2dLevel)
|
||||
throws DataAccessLayerException {
|
||||
List<Integer> timeList = new ArrayList<Integer>();
|
||||
List<DataTime> timeList = new ArrayList<>();
|
||||
Session s = null;
|
||||
try {
|
||||
s = getSession();
|
||||
|
||||
SortedMap<Integer, Integer> results = queryByParmId(d2dModelName,
|
||||
SortedMap<DataTime, Integer> results = queryByParmId(d2dModelName,
|
||||
refTime, d2dParmName, d2dLevel, s);
|
||||
for (Integer o : results.keySet()) {
|
||||
for (DataTime o : results.keySet()) {
|
||||
timeList.add(o);
|
||||
}
|
||||
} finally {
|
||||
|
@ -312,9 +380,9 @@ public class GFED2DDao extends GridDao {
|
|||
public List<Date> getModelRunTimes(String d2dModelName, int maxRecords)
|
||||
throws DataAccessLayerException {
|
||||
DatabaseQuery query = new DatabaseQuery(GridRecord.class.getName());
|
||||
query.addDistinctParameter(REF_TIME);
|
||||
query.addDistinctParameter(REFTIME_ID);
|
||||
query.addQueryParam(GridConstants.DATASET_ID, d2dModelName);
|
||||
query.addOrder(REF_TIME, false);
|
||||
query.addOrder(REFTIME_ID, false);
|
||||
if (maxRecords > 0) {
|
||||
query.setMaxResults(maxRecords);
|
||||
}
|
||||
|
|
|
@ -44,6 +44,7 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
|||
* Sep 12, 2012 #1117 dgilling Create field to hold list of
|
||||
* valid levels for each parameter.
|
||||
* Mar 20, 2013 #1774 randerso Added getMinVal and getMaxVal
|
||||
* Dec 03, 2015 #5168 randerso Added dataTimeRangeValid flag
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -61,6 +62,13 @@ public class ParameterInfo {
|
|||
|
||||
public static final float MAX_VALUE = 10000f;
|
||||
|
||||
/**
|
||||
* True if the valid period in the grib data is correct and should be used
|
||||
* by GFE
|
||||
*/
|
||||
@XmlElement(required = false)
|
||||
private boolean useDatabaseTimeRange = false;
|
||||
|
||||
@XmlElement
|
||||
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
|
||||
private String short_name;
|
||||
|
@ -123,6 +131,13 @@ public class ParameterInfo {
|
|||
this.short_name = parameterName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if database time range should be used
|
||||
*/
|
||||
public boolean useDatabaseTimeRange() {
|
||||
return useDatabaseTimeRange;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the long_name
|
||||
*/
|
||||
|
@ -203,7 +218,7 @@ public class ParameterInfo {
|
|||
*/
|
||||
public float getMinVal() {
|
||||
float min = MIN_VALUE;
|
||||
if (valid_range != null && valid_range.length == 2) {
|
||||
if ((valid_range != null) && (valid_range.length == 2)) {
|
||||
min = valid_range[0];
|
||||
}
|
||||
return min;
|
||||
|
@ -214,7 +229,7 @@ public class ParameterInfo {
|
|||
*/
|
||||
public float getMaxVal() {
|
||||
float min = MAX_VALUE;
|
||||
if (valid_range != null && valid_range.length == 2) {
|
||||
if ((valid_range != null) && (valid_range.length == 2)) {
|
||||
min = valid_range[1];
|
||||
}
|
||||
return min;
|
||||
|
|
|
@ -62,6 +62,7 @@ import com.raytheon.uf.common.dataplugin.gfe.server.request.CommitGridRequest;
|
|||
import com.raytheon.uf.common.dataplugin.gfe.server.request.GetGridRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.server.request.SaveGridRequest;
|
||||
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
|
||||
import com.raytheon.uf.common.dataplugin.grid.GridPathProvider;
|
||||
import com.raytheon.uf.common.dataplugin.grid.GridRecord;
|
||||
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
|
||||
import com.raytheon.uf.common.message.WsId;
|
||||
|
@ -137,6 +138,7 @@ import com.raytheon.uf.edex.database.purge.PurgeLogger;
|
|||
* ingested while deactivated gets recognized
|
||||
* 10/27/2014 #3766 randerso Fixed return type and javadoc for createNewDb
|
||||
* 03/05/2015 #4169 randerso Fix error handling in getDatabase
|
||||
* 12/03/2015 #5168 randerso Added check to skip running smartInit for static data at non-zero fcsthr
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -1285,6 +1287,13 @@ public class GridParmManager {
|
|||
List<GridUpdateNotification> guns = new LinkedList<GridUpdateNotification>();
|
||||
for (GridRecord record : gridRecords) {
|
||||
|
||||
// ignore static parameters for non-zero forecast hours
|
||||
if ((record.getDataTime().getFcstTime() > 0)
|
||||
&& GridPathProvider.STATIC_PARAMETERS.contains(record
|
||||
.getParameter().getAbbreviation())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
String d2dModelName = record.getDatasetId();
|
||||
Date refTime = record.getDataTime().getRefTime();
|
||||
DatabaseID dbId = D2DGridDatabase.getDbId(d2dModelName, refTime,
|
||||
|
@ -1308,6 +1317,25 @@ public class GridParmManager {
|
|||
SmartInitQueue queue = SmartInitQueue.getQueue();
|
||||
if (queue != null) {
|
||||
Date validTime = gun.getReplacementTimeRange().getStart();
|
||||
TimeRange validPeriod = record.getDataTime()
|
||||
.getValidPeriod();
|
||||
|
||||
statusHandler
|
||||
.info("D2D grid received for "
|
||||
+ record.getParameter().getAbbreviation()
|
||||
+ "_"
|
||||
+ record.getLevel()
|
||||
+ " at "
|
||||
+ (validPeriod.isValid() ? validPeriod
|
||||
: record.getDataTime()
|
||||
.getValidTimeAsDate())
|
||||
+ "\nFiring smartInit for "
|
||||
+ dbId
|
||||
+ " at "
|
||||
+ validTime
|
||||
+ "\nGridUpdateNotification.replacementTimeRange = "
|
||||
+ gun.getReplacementTimeRange());
|
||||
|
||||
queue.queue(siteID, config, dbId, validTime, false,
|
||||
SmartInitRecord.LIVE_SMART_INIT_PRIORITY);
|
||||
}
|
||||
|
|
|
@ -80,6 +80,7 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
|
|||
import com.raytheon.uf.common.status.PerformanceStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus;
|
||||
import com.raytheon.uf.common.status.UFStatus.Priority;
|
||||
import com.raytheon.uf.common.time.DataTime;
|
||||
import com.raytheon.uf.common.time.TimeRange;
|
||||
import com.raytheon.uf.common.time.util.TimeUtil;
|
||||
import com.raytheon.uf.common.util.mapping.MultipleMappingException;
|
||||
|
@ -120,6 +121,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
|
|||
* 03/05/2015 #4169 randerso Fix error handling in getDatabase
|
||||
* 06/29/2015 #4537 rferrel Allow for durations less then 1 hour.
|
||||
* 07/13/2015 #4537 randerso Additional changes to allow D2DParms with sub-hourly durations/intervals
|
||||
* 12/03/2015 #5168 randerso Added flag to use database time range if valid
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
|
@ -261,11 +263,15 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
|
||||
private final Level level;
|
||||
|
||||
private boolean useDatabaseTimeRange = false;
|
||||
|
||||
public D2DParm(ParmID parmId, GridParmInfo gpi,
|
||||
Map<Integer, TimeRange> fcstHrToTimeRange, String... components) {
|
||||
Map<Integer, TimeRange> fcstHrToTimeRange,
|
||||
boolean dataTimeRangeValid, String... components) {
|
||||
this.parmId = parmId;
|
||||
this.gpi = gpi;
|
||||
this.fcstHrToTimeRange = fcstHrToTimeRange;
|
||||
this.useDatabaseTimeRange = dataTimeRangeValid;
|
||||
this.components = components;
|
||||
|
||||
this.timeRangeToFcstHr = new HashMap<TimeRange, Integer>(
|
||||
|
@ -307,6 +313,13 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if time range in database should be used
|
||||
*/
|
||||
public boolean useDatabaseTimeRange() {
|
||||
return useDatabaseTimeRange;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.parmId.toString();
|
||||
|
@ -498,7 +511,7 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
String d2dParmName = getD2DParmName(gfeParmName);
|
||||
|
||||
D2DParm d2dParm = new D2DParm(pid, gpi, possibleInventorySlots,
|
||||
d2dParmName);
|
||||
atts.useDatabaseTimeRange(), d2dParmName);
|
||||
this.gfeParms.put(pid, d2dParm);
|
||||
this.d2dParms.put(compositeName(gfeParmName, level), d2dParm);
|
||||
}
|
||||
|
@ -547,6 +560,7 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
String vD2dParmName = getD2DParmName(vGfeParmName);
|
||||
|
||||
D2DParm d2dParm = new D2DParm(pid, gpi, possibleInventorySlots,
|
||||
uatts.useDatabaseTimeRange() && vatts.useDatabaseTimeRange(),
|
||||
uD2dParmName, vD2dParmName);
|
||||
this.gfeParms.put(pid, d2dParm);
|
||||
this.d2dParms.put(compositeName(uGfeParmName, level), d2dParm);
|
||||
|
@ -604,11 +618,11 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
D2DParm parm = this.gfeParms.get(id);
|
||||
if (parm != null) {
|
||||
// get database inventory
|
||||
List<Integer> dbInv = null;
|
||||
List<DataTime> dbInv = null;
|
||||
try {
|
||||
// get database inventory where all components are available
|
||||
for (String component : parm.getComponents()) {
|
||||
List<Integer> compInv = d2dDao.queryFcstHourByParmId(
|
||||
List<DataTime> compInv = d2dDao.queryDataTimeByParmId(
|
||||
d2dModelName, refTime, component, parm.getLevel());
|
||||
|
||||
if (dbInv == null) {
|
||||
|
@ -624,15 +638,22 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
}
|
||||
|
||||
SortedSet<TimeRange> invSet = new TreeSet<TimeRange>();
|
||||
for (Integer forecastTime : dbInv) {
|
||||
TimeRange tr = parm.getFcstHrToTimeRange().get(forecastTime);
|
||||
if (tr != null) {
|
||||
invSet.add(tr);
|
||||
for (DataTime dataTime : dbInv) {
|
||||
TimeRange tr = null;
|
||||
if (parm.useDatabaseTimeRange()) {
|
||||
tr = dataTime.getValidPeriod();
|
||||
|
||||
} else {
|
||||
statusHandler.warn("No time range found for "
|
||||
+ parm.getParmId() + " at forecast time "
|
||||
+ forecastTime);
|
||||
tr = parm.getFcstHrToTimeRange()
|
||||
.get(dataTime.getFcstTime());
|
||||
|
||||
if (tr == null) {
|
||||
statusHandler.warn("No time range found for "
|
||||
+ parm.getParmId() + " at forecast time "
|
||||
+ dataTime.getFcstTime());
|
||||
}
|
||||
}
|
||||
invSet.add(tr);
|
||||
}
|
||||
inventory = new ArrayList<TimeRange>(invSet);
|
||||
} else {
|
||||
|
@ -836,16 +857,23 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
if (parm == null) {
|
||||
throw new GfeException("Unknown parmId: " + parmId);
|
||||
}
|
||||
if (!GridPathProvider.STATIC_PARAMETERS.contains(parmId
|
||||
.getParmName())) {
|
||||
fcstHr = parm.getTimeRangeToFcstHr().get(timeRange);
|
||||
if (fcstHr == null) {
|
||||
throw new GfeException("Invalid time range " + timeRange
|
||||
+ " for " + parmId);
|
||||
boolean staticParm = GridPathProvider.STATIC_PARAMETERS
|
||||
.contains(parmId.getParmName());
|
||||
if (!staticParm && parm.useDatabaseTimeRange()) {
|
||||
d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
|
||||
parm.getComponents()[0], parm.getLevel(), timeRange,
|
||||
gpi);
|
||||
} else {
|
||||
if (!staticParm) {
|
||||
fcstHr = parm.getTimeRangeToFcstHr().get(timeRange);
|
||||
if (fcstHr == null) {
|
||||
throw new GfeException("Invalid time range "
|
||||
+ timeRange + " for " + parmId);
|
||||
}
|
||||
}
|
||||
d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
|
||||
parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
|
||||
}
|
||||
d2dRecord = d2dDao.getGrid(d2dModelName, refTime,
|
||||
parm.getComponents()[0], parm.getLevel(), fcstHr, gpi);
|
||||
} catch (Exception e) {
|
||||
throw new GfeException(
|
||||
"Error retrieving D2D Grid record from database for "
|
||||
|
@ -1294,7 +1322,7 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
public GridUpdateNotification update(GridRecord record) {
|
||||
String d2dParamName = record.getParameter().getAbbreviation();
|
||||
Level level = record.getLevel();
|
||||
Integer fcstHour = record.getDataTime().getFcstTime();
|
||||
DataTime dataTime = record.getDataTime();
|
||||
|
||||
D2DParm parm = getD2DParm(d2dParamName, level);
|
||||
if (parm == null) {
|
||||
|
@ -1316,14 +1344,14 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
// if wind see if other component is available
|
||||
if (otherComponent != null) {
|
||||
// get the other components times
|
||||
List<Integer> otherTimes;
|
||||
List<DataTime> otherTimes;
|
||||
try {
|
||||
// TODO: could just query for desired fcstHour instead of all
|
||||
otherTimes = d2dDao.queryFcstHourByParmId(d2dModelName,
|
||||
otherTimes = d2dDao.queryDataTimeByParmId(d2dModelName,
|
||||
refTime, otherComponent, parm.getLevel());
|
||||
|
||||
// if we don't have the other component for this time
|
||||
if (!otherTimes.contains(fcstHour)) {
|
||||
if (!otherTimes.contains(dataTime)) {
|
||||
// need to wait for other component
|
||||
return null;
|
||||
}
|
||||
|
@ -1336,11 +1364,16 @@ public class D2DGridDatabase extends VGridDatabase {
|
|||
}
|
||||
}
|
||||
|
||||
TimeRange tr = getTimeRange(parmID, fcstHour);
|
||||
if (tr == null) {
|
||||
statusHandler.warn("Unexpected fcst hour (" + fcstHour + ") for "
|
||||
+ parmID);
|
||||
return null;
|
||||
TimeRange tr = null;
|
||||
if (parm.useDatabaseTimeRange()) {
|
||||
tr = dataTime.getValidPeriod();
|
||||
} else {
|
||||
tr = getTimeRange(parmID, dataTime.getFcstTime());
|
||||
if (tr == null) {
|
||||
statusHandler.warn("Unexpected fcst hour ("
|
||||
+ dataTime.getFcstTime() + ") for " + parmID);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
List<GridDataHistory> histList = new ArrayList<GridDataHistory>();
|
||||
histList.add(new GridDataHistory(
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -299,6 +299,17 @@ rfc.name = "RFC"
|
|||
rfc.editAreaName = ['ISC','site_id']
|
||||
rfc.groupName = 'ISC'
|
||||
|
||||
# NHA ISC area
|
||||
domain = ShapeTable('nhadomain')
|
||||
#segment.name = "AllCWA"
|
||||
domain.groupName = "ISC"
|
||||
domain.editAreaName = "ISC_NHA"
|
||||
|
||||
# Storm Surge Watch/Warning Area
|
||||
stormsurgeww = ShapeTable('stormsurgeww')
|
||||
stormsurgeww.groupName = "SurgeCollab"
|
||||
stormsurgeww.editAreaName = "StormSurgeWW_EditArea"
|
||||
|
||||
# Offshore Marine Zones - unfiltered
|
||||
offshore = ShapeTable('offshore')
|
||||
offshore.name = "Offshore_Marine_Zones"
|
||||
|
@ -313,8 +324,8 @@ offshoreCWA.editAreaName = offshoreZ
|
|||
offshoreCWA.groupName = 'OffShoreMZones_' + CWA
|
||||
|
||||
# this is a complete listing of all maps
|
||||
maps = [ CWAcounties, FIPS, Counties, CWAzones, Zones, FWCWAzones, FWZones,
|
||||
cwas, isc, fwaor, CWAmzones, Mzones, States, rfc, offshore, offshoreCWA ]
|
||||
maps = [ CWAcounties, FIPS, Counties, CWAzones, Zones, FWCWAzones, FWZones, cwas, isc,
|
||||
fwaor, CWAmzones, Mzones, States, rfc, domain, stormsurgeww, offshore, offshoreCWA ]
|
||||
|
||||
# import the local maps file
|
||||
if not BASELINE:
|
||||
|
|
|
@ -69,10 +69,12 @@
|
|||
#
|
||||
# 05/29/2015 #17144 bhunder Added weather Params for URMA25 and OCONUS RTMA
|
||||
# 09/02/2015 #4819 rferrel Added HWRF.
|
||||
# 09/09/2015 16287 amoore Additional validation of user input
|
||||
# 10/07/2015 #4958 dgilling Added support for NationalBlend D2D data.
|
||||
# 10/13/2015 #4961 randerso Updated NewTerrain/BaseTerrain database definitions
|
||||
# 09/09/2015 16287 amoore Additional validation of user input
|
||||
# 10/30/2015 #17940 jendrowski Responded to Code Review. Mostly syntactical changes.
|
||||
# 11/05/2015 #18182 ryu Change D2DDBVERSIONS value for HPCERP to 24
|
||||
#
|
||||
####################################################################################################
|
||||
|
||||
#----------------------------------------------------------------------------
|
||||
|
@ -1903,8 +1905,8 @@ SITES = {
|
|||
#National Centers
|
||||
'HAK' : ( [825,553], ( 1.0, 1.0), (103.0, 69.0), 'EST5EDT', Grid214AK, "nc"),
|
||||
'HUS' : ([1073,689], (19.0, 8.0), ( 67.0, 43.0), 'EST5EDT', Grid211, "nc"),
|
||||
#'NHA' : ([1729,1601], (1.0,1.0), (1728.0, 1600.0), 'EST5EDT', GridForNHA, "nc"),
|
||||
'NHA' : ([1873,1361], (35.5,3.5), (58.5,42.5), 'EST5EDT', Grid211, "nc"), # updated
|
||||
'NHA' : ([1873,1361], (35.5, 3.5), (58.5, 42.5), 'EST5EDT', Grid211, "nc"),
|
||||
|
||||
}
|
||||
|
||||
# Get list of valid office types, for validation.
|
||||
|
@ -2105,7 +2107,7 @@ D2DDBVERSIONS = {
|
|||
"MSAS": 6,
|
||||
"LAPS": 6,
|
||||
"Satellite": 6,
|
||||
"HPCERP": 5,
|
||||
"HPCERP": 24,
|
||||
"TPCProb": 30,
|
||||
"TPCStormSurge": 1,
|
||||
"CRMTopo": 1,
|
||||
|
|
|
@ -1,3 +1,14 @@
|
|||
# ----------------------------------------------------------------------------
|
||||
#
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 10/22/2015 DR #17873 lshi remove quotes from default
|
||||
# SVCBU_USER_ID
|
||||
#
|
||||
#################################################################
|
||||
|
||||
|
||||
#Variables used by service backup:
|
||||
#
|
||||
|
@ -124,6 +135,10 @@ SVCBU_GRIDAREA=ISC_Send_Area
|
|||
SVCBU_ADDRESSEE="ANCF,BNCF"
|
||||
SVCBU_WMO_HEADER=SVCBKPIFP
|
||||
SVCBU_USER=0
|
||||
SVCBU_USER_ID=""
|
||||
|
||||
# Do not use quotes when setting SVCBU_USER_ID
|
||||
# Example syntax: SVCBU_USER_ID=backuser
|
||||
SVCBU_USER_ID=
|
||||
|
||||
EXPORT_GRID=1
|
||||
PRIMARY_SITES=
|
||||
|
|
|
@ -1,10 +1,33 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<gridParamInfo xmlns:ns2="group">
|
||||
<valtimeMINUSreftime>
|
||||
<fcst>3600</fcst>
|
||||
<fcst>7200</fcst>
|
||||
<fcst>10800</fcst>
|
||||
<fcst>14400</fcst>
|
||||
<fcst>18000</fcst>
|
||||
<fcst>21600</fcst>
|
||||
<fcst>25200</fcst>
|
||||
<fcst>28800</fcst>
|
||||
<fcst>32400</fcst>
|
||||
<fcst>36000</fcst>
|
||||
<fcst>39600</fcst>
|
||||
<fcst>43200</fcst>
|
||||
<fcst>46800</fcst>
|
||||
<fcst>50400</fcst>
|
||||
<fcst>54000</fcst>
|
||||
<fcst>57600</fcst>
|
||||
<fcst>61200</fcst>
|
||||
<fcst>64800</fcst>
|
||||
<fcst>68400</fcst>
|
||||
<fcst>72000</fcst>
|
||||
<fcst>75600</fcst>
|
||||
<fcst>79200</fcst>
|
||||
<fcst>82800</fcst>
|
||||
<fcst>86400</fcst>
|
||||
<fcst>90000</fcst>
|
||||
<fcst>93600</fcst>
|
||||
<fcst>97200</fcst>
|
||||
<fcst>108000</fcst>
|
||||
<fcst>129600</fcst>
|
||||
<fcst>151200</fcst>
|
||||
|
|
|
@ -350,8 +350,25 @@
|
|||
</levels>
|
||||
</gridParameterInfo>
|
||||
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
|
||||
<short_name>tp</short_name>
|
||||
<long_name>total precipitation</long_name>
|
||||
<useDatabaseTimeRange>true</useDatabaseTimeRange>
|
||||
<short_name>tp3hr</short_name>
|
||||
<long_name>total precipitation 3hr</long_name>
|
||||
<units>mm</units>
|
||||
<udunits>millimeter</udunits>
|
||||
<uiname>totPrecip</uiname>
|
||||
<valid_range>0.0</valid_range>
|
||||
<valid_range>1000.0</valid_range>
|
||||
<fillValue>-99999.0</fillValue>
|
||||
<n3D>0</n3D>
|
||||
<levelsDesc>SFC</levelsDesc>
|
||||
<levels>
|
||||
<level>SFC</level>
|
||||
</levels>
|
||||
</gridParameterInfo>
|
||||
<gridParameterInfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="parameterInfo">
|
||||
<useDatabaseTimeRange>true</useDatabaseTimeRange>
|
||||
<short_name>tp6hr</short_name>
|
||||
<long_name>total precipitation 6hr</long_name>
|
||||
<units>mm</units>
|
||||
<udunits>millimeter</udunits>
|
||||
<uiname>totPrecip</uiname>
|
||||
|
|
|
@ -1,24 +1,24 @@
|
|||
##
|
||||
# This software was developed and / or modified by Raytheon Company,
|
||||
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
|
||||
#
|
||||
#
|
||||
# U.S. EXPORT CONTROLLED TECHNICAL DATA
|
||||
# This software product contains export-restricted data whose
|
||||
# export/transfer/disclosure is restricted by U.S. law. Dissemination
|
||||
# to non-U.S. persons whether in the United States or abroad requires
|
||||
# an export license or other authorization.
|
||||
#
|
||||
#
|
||||
# Contractor Name: Raytheon Company
|
||||
# Contractor Address: 6825 Pine Street, Suite 340
|
||||
# Mail Stop B8
|
||||
# Omaha, NE 68106
|
||||
# 402.291.0100
|
||||
#
|
||||
#
|
||||
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
|
||||
# further licensing information.
|
||||
##
|
||||
# SOFTWARE HISTORY
|
||||
#
|
||||
#
|
||||
# Date Ticket# Engineer Description
|
||||
# ------------ ---------- ----------- --------------------------
|
||||
# 02/16/12 14439 jdynina modified haines thresholds
|
||||
|
@ -26,16 +26,18 @@
|
|||
# 07/25/12 #957 dgilling implement edit areas as args to calc methods.
|
||||
# 10/05/12 15158 ryu add Forecaster.getDb()
|
||||
# 04/04/13 #1787 randerso fix validTime check to work with accumulative parms
|
||||
# fix logging so you can actually determine why
|
||||
# fix logging so you can actually determine why
|
||||
# a smartInit is not calculating a parameter
|
||||
# 10/29/2013 #2476 njensen Improved getting wx/discrete keys when retrieving data
|
||||
# 10/27/2014 #3766 randerso Changed _getLatest to include error text returned from InitClient.createDB()
|
||||
# Apr 23, 2015 4259 njensen Updated for new JEP API
|
||||
# Apr 23, 2015 #4259 njensen Updated for new JEP API
|
||||
# 08/06/2015 4718 dgilling Prevent numpy 1.9 from wasting memory by
|
||||
# upcasting scalars too high when using where.
|
||||
# Aug 13, 2015 4704 randerso Added NumpyJavaEnforcer support for smartInits
|
||||
# additional code cleanup
|
||||
#
|
||||
# Dec 03, 2015 #5168 randerso Fixed problems running calc methods with both accumulative
|
||||
# and non-accumulative weather elements as inputs
|
||||
#
|
||||
##
|
||||
import string, sys, re, time, types, getopt, fnmatch, LogStream, DatabaseID, JUtil, AbsTime, TimeRange
|
||||
import SmartInitParams
|
||||
|
@ -46,7 +48,35 @@ pytime = time
|
|||
|
||||
import RollBackImporter
|
||||
rollbackImporter = RollBackImporter.RollBackImporter()
|
||||
|
||||
|
||||
MAX_TIME = 2**31-1
|
||||
|
||||
def printTR(tr):
|
||||
if tr is None:
|
||||
return "None"
|
||||
|
||||
if hasattr(tr, 'java_name'):
|
||||
tr = TimeRange.encodeJavaTimeRange(tr)
|
||||
|
||||
msg = '('
|
||||
stime = time.gmtime(tr[0])
|
||||
etime = time.gmtime(tr[1])
|
||||
stime = time.strftime('%Y%m%d_%H%M', stime)
|
||||
etime = time.strftime('%Y%m%d_%H%M', etime)
|
||||
msg += stime + ", " + etime
|
||||
msg += ')'
|
||||
return msg
|
||||
|
||||
def printTRs(trList):
|
||||
msg = '['
|
||||
for tr in trList:
|
||||
s = printTR(tr)
|
||||
msg += s
|
||||
msg += ', '
|
||||
|
||||
msg += ']'
|
||||
return msg
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
# Main program that calls model-specific algorithms to generate ifp grids.
|
||||
|
@ -87,7 +117,7 @@ class MDB:
|
|||
for db in self.__dbs:
|
||||
keyLow = key.lower()
|
||||
for k in JUtil.javaStringListToPylist(db.getKeys()):
|
||||
if k.lower() == keyLow:
|
||||
if k.lower() == keyLow:
|
||||
return db.getItem(key)
|
||||
|
||||
#for db in self.__dbs:
|
||||
|
@ -296,15 +326,13 @@ class GridUtilities:
|
|||
return hainesT + hainesM
|
||||
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# Weather Element calculations
|
||||
#-------------------------------------------------------------------------
|
||||
class Forecaster(GridUtilities):
|
||||
def __init__(self, srcName, dstName=None):
|
||||
self._srcName = srcName
|
||||
self._dstName = dstName
|
||||
self._dstName = dstName
|
||||
self._ndbkeys = None
|
||||
self.__dbParms = None
|
||||
#host, port = self._getServer()
|
||||
|
@ -312,7 +340,7 @@ class Forecaster(GridUtilities):
|
|||
#if Options is not None and Options.has_key('userID'):
|
||||
# self._client = ifpc.IFPC(host, port, Options['userID'])
|
||||
#else:
|
||||
# self._client = ifpc.IFPC(host, port)
|
||||
# self._client = ifpc.IFPC(host, port)
|
||||
self.whichHainesIndex = "HIGH" # or "LOW", or "MEDIUM"
|
||||
|
||||
if self._srcName is not None:
|
||||
|
@ -337,10 +365,10 @@ class Forecaster(GridUtilities):
|
|||
msg = "No databases for " + self._srcName
|
||||
LogStream.logProblem(msg)
|
||||
return
|
||||
|
||||
|
||||
if self.newdb() is None:
|
||||
return
|
||||
|
||||
|
||||
self.__topo = self.getTopo() * .3048
|
||||
srcdbkeys = self.srcdb().getKeys()
|
||||
if "staticTopo_Dflt" in srcdbkeys:
|
||||
|
@ -352,7 +380,7 @@ class Forecaster(GridUtilities):
|
|||
self.__stopo = None
|
||||
else:
|
||||
self.__stopo = None
|
||||
|
||||
|
||||
self._editAreas = self._client.getEditAreaNames()
|
||||
|
||||
self.__gridShape = self.__topo.shape
|
||||
|
@ -543,22 +571,22 @@ class Forecaster(GridUtilities):
|
|||
# Returns the topography grid.
|
||||
#--------------------------------------------------------------------------
|
||||
def getTopo(self):
|
||||
topo = self._client.getTopo()
|
||||
topo = self._client.getTopo()
|
||||
topo = topo.getNDArray()
|
||||
return topo
|
||||
return topo
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
# Returns a dictionary of magical values that will be used in other
|
||||
# functions.
|
||||
#--------------------------------------------------------------------------
|
||||
def magicArgs(self):
|
||||
rval = { "topo" : (self.__topo, (0, sys.maxint)),
|
||||
"stopo" : (self.__stopo, (0, sys.maxint)),
|
||||
"ctime" : (None, (0, sys.maxint)),
|
||||
"stime" : (None, (0, sys.maxint)),
|
||||
"mtime" : (None, (0, sys.maxint))}
|
||||
rval = { "topo" : (self.__topo, (0, MAX_TIME)),
|
||||
"stopo" : (self.__stopo, (0, MAX_TIME)),
|
||||
"ctime" : (None, (0, MAX_TIME)),
|
||||
"stime" : (None, (0, MAX_TIME)),
|
||||
"mtime" : (None, (0, MAX_TIME))}
|
||||
for i in self._editAreas:
|
||||
rval[i] = (None, (0, sys.maxint))
|
||||
rval[i] = (None, (0, MAX_TIME))
|
||||
return rval
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
|
@ -566,8 +594,8 @@ class Forecaster(GridUtilities):
|
|||
#--------------------------------------------------------------------------
|
||||
def run(self):
|
||||
dbName = SmartInitParams.params['dbName']
|
||||
validTime = SmartInitParams.params['validTime']
|
||||
|
||||
validTime = SmartInitParams.params['validTime']
|
||||
|
||||
dbInfo = dbName.split(':')
|
||||
self.__dbName = dbInfo[0]
|
||||
|
||||
|
@ -575,7 +603,7 @@ class Forecaster(GridUtilities):
|
|||
self.__init()
|
||||
if self.newdb() is None:
|
||||
return
|
||||
|
||||
|
||||
msgDest = "Destination database:" + self.newdb().getModelIdentifier()
|
||||
|
||||
if validTime is not None:
|
||||
|
@ -587,7 +615,7 @@ class Forecaster(GridUtilities):
|
|||
self._ifpio = IFPIO(self.srcdb(), self.newdb())
|
||||
self._ifpio.setLevels(self.levels())
|
||||
methods = self.__getMethods()
|
||||
times = self.__sortTimes(methods, validTime)
|
||||
times = self.__sortTimes(methods, validTime)
|
||||
tr, numGrids = self.__process(methods, times, int(dbInfo[1]))
|
||||
stop = time.time()
|
||||
msgTime = "%s: Elapsed time: %-.1f sec." % (self.newdb().getModelIdentifier(), (stop - start))
|
||||
|
@ -607,7 +635,7 @@ class Forecaster(GridUtilities):
|
|||
modelTime = AbsTime.AbsTime(db.getModelTime())
|
||||
modelTime = modelTime.unixTime()
|
||||
else:
|
||||
modelTime = 0
|
||||
modelTime = 0
|
||||
modelIdentifier = db.getShortModelIdentifier()
|
||||
|
||||
if modelTime != 0:
|
||||
|
@ -649,14 +677,14 @@ class Forecaster(GridUtilities):
|
|||
" is empty.")
|
||||
else:
|
||||
srcdbs.append(db)
|
||||
|
||||
|
||||
srcdb = MDB(srcdbs)
|
||||
|
||||
# I (njensen) removed most of what was here. It was looking at
|
||||
# the available D2D netcdf data, and then forming a GFE db id
|
||||
# from that for the target. Instead I'm just passing in
|
||||
# from that for the target. Instead I'm just passing in
|
||||
# the target from Java.
|
||||
|
||||
|
||||
newdb = self.__dbName.replace("D2D", "")
|
||||
if fcstName and fcstName != modelName:
|
||||
newdb = newdb.replace(modelName, fcstName)
|
||||
|
@ -673,7 +701,7 @@ class Forecaster(GridUtilities):
|
|||
break
|
||||
if singletonNeeded:
|
||||
newdb = newdb[:-13] + '00000000_0000'
|
||||
newdb = self.getDb(newdb)
|
||||
newdb = self.getDb(newdb)
|
||||
else:
|
||||
sr = client.createDB(newdb)
|
||||
if sr.isOkay():
|
||||
|
@ -682,7 +710,7 @@ class Forecaster(GridUtilities):
|
|||
msg = "Unable to create database for " + str(newdb) + ":\n" + \
|
||||
str(sr.message())
|
||||
LogStream.logProblem(msg)
|
||||
newdb = None
|
||||
newdb = None
|
||||
|
||||
return srcdb, newdb
|
||||
|
||||
|
@ -745,7 +773,7 @@ class Forecaster(GridUtilities):
|
|||
#--------------------------------------------------------------------------
|
||||
# Returns true if the two timeRanges overlap (share a common time period).
|
||||
#--------------------------------------------------------------------------
|
||||
def _overlaps(self, tr1, tr2):
|
||||
def _overlaps(self, tr1, tr2):
|
||||
if self._contains(tr2, tr1[0]) or self._contains(tr1, tr2[0]):
|
||||
return 1
|
||||
return 0
|
||||
|
@ -781,7 +809,7 @@ class Forecaster(GridUtilities):
|
|||
wenameLevel = wename
|
||||
else:
|
||||
wenameLevel = wename + "_SFC"
|
||||
#if wenameLevel not in self.newdb().keys():
|
||||
#if wenameLevel not in self.newdb().keys():
|
||||
if wenameLevel not in JUtil.javaStringListToPylist(self.newdb().getKeys()):
|
||||
msg = wenameLevel + " not in " + \
|
||||
self.newdb().getModelIdentifier() + " " + "SKIPPING"
|
||||
|
@ -789,7 +817,7 @@ class Forecaster(GridUtilities):
|
|||
continue
|
||||
rval = filter(lambda x,y=wenameLevel : x[0] != y, rval)
|
||||
rval.append((wenameLevel, mthd, fargs))
|
||||
return rval
|
||||
return rval
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
# Gets and returns a list of dependencies.
|
||||
|
@ -815,13 +843,13 @@ class Forecaster(GridUtilities):
|
|||
rval = []
|
||||
methods = self.__getObjMethods(self.__class__)
|
||||
while len(methods):
|
||||
rval += self.__getdeps(methods[0], methods)
|
||||
rval += self.__getdeps(methods[0], methods)
|
||||
return rval
|
||||
|
||||
def __request(self, db, pname, time):
|
||||
if pname[-2:] == "_c":
|
||||
if pname[-2:] == "_c":
|
||||
time = self.__getSrcWE(
|
||||
pname[:-2] + "_MB500", 0).getTimeRange(time[0])
|
||||
pname[:-2] + "_MB500", 0).getTimeRange(time[0])
|
||||
rval = (pname[:-2], time, 1)
|
||||
else:
|
||||
time = self.__getSrcWE(pname, 0).getTimeRange(time[0])
|
||||
|
@ -844,9 +872,11 @@ class Forecaster(GridUtilities):
|
|||
# Internal function that returns the time periods shared by tr and times.
|
||||
#--------------------------------------------------------------------------
|
||||
def __compTimes(self, tr, times):
|
||||
# TODO: surely there's a better way to do this
|
||||
|
||||
for time in times:
|
||||
if len(time) == 0:
|
||||
return []
|
||||
return []
|
||||
|
||||
rval = []
|
||||
if len(times) == 1:
|
||||
|
@ -877,7 +907,7 @@ class Forecaster(GridUtilities):
|
|||
rval = []
|
||||
calced = []
|
||||
for we, mthd, args in methods:
|
||||
# LogStream.logEvent("Evaluating times for calc"+we)
|
||||
# LogStream.logEvent("Evaluating times for", mthd.func_name)
|
||||
calced.append(we)
|
||||
args = filter(lambda x, ma=self.magicArgs().keys() + [we]:
|
||||
x not in ma, args)
|
||||
|
@ -885,7 +915,7 @@ class Forecaster(GridUtilities):
|
|||
for a in args:
|
||||
nargs = nargs + self.__unpackParm(a)
|
||||
|
||||
ttimes = []
|
||||
ttimes = []
|
||||
for p in nargs:
|
||||
# p is an arg, e.g. gh_MB900
|
||||
try:
|
||||
|
@ -902,7 +932,7 @@ class Forecaster(GridUtilities):
|
|||
for i in range(size):
|
||||
jtr = ranges.get(i)
|
||||
valid = False
|
||||
|
||||
|
||||
if validTime is None:
|
||||
valid = True
|
||||
else:
|
||||
|
@ -910,33 +940,24 @@ class Forecaster(GridUtilities):
|
|||
# need both accumulative and non-accumulative parms
|
||||
valid = validTime.getTime() >= jtr.getStart().getTime() and \
|
||||
validTime.getTime() <= jtr.getEnd().getTime()
|
||||
|
||||
|
||||
if valid:
|
||||
timelist = TimeRange.encodeJavaTimeRange(jtr)
|
||||
timelist = TimeRange.encodeJavaTimeRange(jtr)
|
||||
pylist.append(timelist)
|
||||
|
||||
ttimes.append(pylist)
|
||||
|
||||
# msg = "Times available for " + p + " " + str(validTime) + ":\n"
|
||||
# timeList = ttimes[len(ttimes)-1]
|
||||
# for xtime in timeList:
|
||||
# msg += '('
|
||||
# stime = time.gmtime(xtime[0])
|
||||
# etime = time.gmtime(xtime[1])
|
||||
# stime = time.strftime('%Y%m%d_%H%M', stime)
|
||||
# etime = time.strftime('%Y%m%d_%H%M', etime)
|
||||
# msg += stime + ", " + etime
|
||||
# msg += ')\n'
|
||||
# LogStream.logEvent(msg)
|
||||
|
||||
# msg = "Times available for " + p + " " + str(validTime) + ":\n"
|
||||
# timeList = ttimes[-1]
|
||||
# msg += printTRs(timeList)
|
||||
# LogStream.logEvent(msg)
|
||||
|
||||
# compare the times of each parm and find where they match up
|
||||
times = self.__compTimes(None, ttimes)
|
||||
# LogStream.logEvent("nargs:",nargs)
|
||||
# LogStream.logEvent("ttimes:",ttimes)
|
||||
# LogStream.logEvent("times:",times)
|
||||
# LogStream.logEvent("times:", printTRs(times))
|
||||
|
||||
hadDataButSkipped = {}
|
||||
for i in range(len(ttimes)):
|
||||
for i in range(len(ttimes)):
|
||||
timeList = ttimes[i]
|
||||
parmName = nargs[i]
|
||||
for xtime in timeList:
|
||||
|
@ -945,29 +966,32 @@ class Forecaster(GridUtilities):
|
|||
hadDataButSkipped[xtime].append(parmName)
|
||||
else:
|
||||
hadDataButSkipped[xtime] = [parmName]
|
||||
# LogStream.logEvent("hadDataButSkipped:",hadDataButSkipped)
|
||||
# msg = "hadDataButSkipped: {"
|
||||
# for tr in hadDataButSkipped:
|
||||
# msg += printTR(tr)
|
||||
# msg += ": "
|
||||
# msg += str(hadDataButSkipped[tr])
|
||||
# msg += ", "
|
||||
# msg += "}"
|
||||
# LogStream.logEvent(msg)
|
||||
|
||||
hadNoData = []
|
||||
hadNoData = []
|
||||
for i in range(len(nargs)):
|
||||
timeList = ttimes[i]
|
||||
parmName = nargs[i]
|
||||
if len(timeList) == 0:
|
||||
hadNoData.append(parmName)
|
||||
# LogStream.logEvent("hadNoData:",hadNoData)
|
||||
# LogStream.logEvent("hadNoData:",hadNoData)
|
||||
|
||||
missing = {}
|
||||
missing = {}
|
||||
for xtime in hadDataButSkipped:
|
||||
stime = time.gmtime(xtime[0])
|
||||
etime = time.gmtime(xtime[1])
|
||||
stime = time.strftime('%Y%m%d_%H%M', stime)
|
||||
etime = time.strftime('%Y%m%d_%H%M', etime)
|
||||
msg = stime + ", " + etime
|
||||
msg = printTR(xtime)
|
||||
missing[msg] = []
|
||||
|
||||
|
||||
for parmName in nargs:
|
||||
if not hadDataButSkipped[xtime].__contains__(parmName):
|
||||
missing[msg].append(parmName)
|
||||
|
||||
|
||||
if len(missing) == 0 and len(hadNoData) > 0:
|
||||
msg = ''
|
||||
if (validTime is not None):
|
||||
|
@ -975,9 +999,9 @@ class Forecaster(GridUtilities):
|
|||
vtime = time.gmtime(vtime)
|
||||
msg = time.strftime('%Y%m%d_%H%M', vtime)
|
||||
missing[msg] = hadNoData
|
||||
# LogStream.logEvent("missing:",missing)
|
||||
# LogStream.logEvent("missing:",missing)
|
||||
|
||||
if len(missing):
|
||||
if len(missing):
|
||||
LogStream.logEvent(self.newdb().getModelIdentifier() + ": Skipping calc" + we + " for some times due to the following " +
|
||||
"missing data:", missing)
|
||||
# these become the times to run the method for
|
||||
|
@ -999,102 +1023,119 @@ class Forecaster(GridUtilities):
|
|||
|
||||
def __recursiveArg(self, cache, arg, time):
|
||||
p = self.newdb().getItem(arg)
|
||||
#p = self.newdb()[arg + "_SFC"]
|
||||
tr = p.getTimeRange(time[0])
|
||||
pytr = TimeRange.encodeJavaTimeRange(tr)
|
||||
pkeys = TimeRange.javaTimeRangeListToPyList(p.getKeys())
|
||||
if pytr in pkeys:
|
||||
|
||||
# tr = p.getTimeRange(time[0])
|
||||
tr = TimeRange.TimeRange(AbsTime.AbsTime(time[0]), AbsTime.AbsTime(time[1])).toJavaObj()
|
||||
times = p.getKeys(tr)
|
||||
if times:
|
||||
tr = times[0]
|
||||
LogStream.logEvent("retrieving", arg, printTR(tr))
|
||||
|
||||
pytr = TimeRange.encodeJavaTimeRange(tr)
|
||||
jslice = p.getItem(tr)
|
||||
slice = jslice.getNDArray()
|
||||
if type(slice) is ndarray and slice.dtype == int8:
|
||||
# discrete or weather
|
||||
keys = JUtil.javaObjToPyVal(jslice.getKeyList())
|
||||
slice = [slice, keys]
|
||||
slice = [slice, keys]
|
||||
cache[arg] = (slice, pytr)
|
||||
else:
|
||||
else:
|
||||
LogStream.logEvent("no data for", arg, printTR(tr))
|
||||
cache[arg] = (None, time)
|
||||
|
||||
def __argFill(self, cache, method, time):
|
||||
we, mthd, args = method
|
||||
LogStream.logEvent("getting arguments for", mthd.func_name, printTR(time))
|
||||
|
||||
gargs = []
|
||||
if self._ndbkeys is None:
|
||||
self._ndbkeys = JUtil.javaStringListToPylist(self.newdb().getKeys())
|
||||
ndbkeys = self._ndbkeys
|
||||
ndbkeys = self._ndbkeys
|
||||
for arg in args:
|
||||
if arg in self._editAreas:
|
||||
if cache[arg][0] is None:
|
||||
p = self.newdb().getItem(we)
|
||||
ea = p.getEditArea(arg).getNDArray()
|
||||
cache[arg] = (ea, (0, sys.maxint))
|
||||
cache[arg] = (ea, (0, MAX_TIME))
|
||||
gargs.append(cache[arg][0])
|
||||
continue
|
||||
if not cache.has_key(arg):
|
||||
if not cache.has_key(arg):
|
||||
if arg in ndbkeys:
|
||||
self.__recursiveArg(cache, arg, time)
|
||||
else:
|
||||
val = self._ifpio.get(self.__request(self.srcdb(),
|
||||
arg, time))
|
||||
req = self.__request(self.srcdb(), arg, time)
|
||||
val = self._ifpio.get(req)
|
||||
if arg[-2:] == "_c":
|
||||
self.pres = val[0]
|
||||
val = val[1]
|
||||
cache[arg] = (val, time)
|
||||
cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1]))
|
||||
else:
|
||||
if cache[arg][1] is not None and \
|
||||
not self._overlaps(time, cache[arg][1]):
|
||||
not self._overlaps(time, cache[arg][1]):
|
||||
if arg in ndbkeys:
|
||||
self.__recursiveArg(cache, arg, time)
|
||||
val = cache[arg][0]
|
||||
else:
|
||||
val = self._ifpio.get(self.__request(self.srcdb(),
|
||||
arg, time))
|
||||
if arg[-2:] == "_c":
|
||||
self.pres = val[0]
|
||||
val = val[1]
|
||||
cache[arg] = (val, time)
|
||||
|
||||
else:
|
||||
req = self.__request(self.srcdb(), arg, time)
|
||||
val = self._ifpio.get(req)
|
||||
if arg[-2:] == "_c":
|
||||
self.pres = val[0]
|
||||
val = val[1]
|
||||
cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1]))
|
||||
else:
|
||||
LogStream.logEvent("using cached", arg, printTR(cache[arg][1]))
|
||||
|
||||
gargs.append(cache[arg][0])
|
||||
return gargs
|
||||
|
||||
def __runMethod(self, method, time, cache):
|
||||
we, mthd, args = method
|
||||
|
||||
|
||||
if self.mostRecentCacheClear != time:
|
||||
self.mostRecentCacheClear = time
|
||||
self.mostRecentCacheClear = time
|
||||
for key in cache.keys():
|
||||
cacheValue = cache[key]
|
||||
if len(cacheValue) == 2 and key.find('_') > -1:
|
||||
# these are WeatherElements, if they are for time ranges that
|
||||
# we've completed calculations for, immediately set them to
|
||||
# None to free up the memory
|
||||
if time[0] != cacheValue[1][0]:
|
||||
if len(cacheValue) == 2:
|
||||
# if they are for time ranges that we've completed calculations for,
|
||||
# immediately set them to None to free up the memory
|
||||
if not self._overlaps(cacheValue[1],time):
|
||||
LogStream.logEvent("Clearing", key, printTR(cacheValue[1]))
|
||||
cache[key] = (None, cacheValue[1])
|
||||
|
||||
|
||||
gargs = self.__argFill(cache, method, time)
|
||||
|
||||
doStore = False
|
||||
if mthd.im_func is Forecaster.__exists.im_func:
|
||||
msg = self.newdb().getModelIdentifier() + ": Get : " + we + " " + self._timeRangeStr(time)
|
||||
LogStream.logEvent(msg)
|
||||
else:
|
||||
LogStream.logEvent(msg)
|
||||
else:
|
||||
doStore = True
|
||||
msg = self.newdb().getModelIdentifier() + ": Calc : " + we + " " + self._timeRangeStr(time)
|
||||
LogStream.logEvent(msg)
|
||||
|
||||
LogStream.logEvent(msg)
|
||||
|
||||
try:
|
||||
rval = apply(mthd, tuple(gargs))
|
||||
|
||||
if type(rval) is not ndarray:
|
||||
if type(rval) is not tuple:
|
||||
jrval = rval
|
||||
rval = rval.getNDArray()
|
||||
if type(rval) is ndarray and rval.dtype == int8:
|
||||
# discrete or weather
|
||||
keys = JUtil.javaObjToPyVal(jrval.getKeyList())
|
||||
rval = [rval, keys]
|
||||
|
||||
if rval is not None:
|
||||
if type(rval) is not ndarray and rval is not None:
|
||||
if type(rval) is not tuple:
|
||||
jrval = rval
|
||||
rval = rval.getNDArray()
|
||||
if type(rval) is ndarray and rval.dtype == int8:
|
||||
# discrete or weather
|
||||
keys = JUtil.javaObjToPyVal(jrval.getKeyList())
|
||||
rval = [rval, keys]
|
||||
else:
|
||||
LogStream.logEvent("No value returned from calc"+str(we))
|
||||
|
||||
s = 'grid'
|
||||
if rval is None:
|
||||
s = 'None'
|
||||
LogStream.logEvent("Caching", we, s, printTR(time))
|
||||
cache[we] = (rval, time)
|
||||
|
||||
|
||||
if rval is not None and cache['mtime'][0] is not None and doStore:
|
||||
parm = self.__getNewWE(we)
|
||||
parm = self.__getNewWE(we)
|
||||
LogStream.logEvent("Storing", we, printTR(cache['mtime'][0]))
|
||||
self._ifpio.store(parm, cache['mtime'][0], cache[we][0])
|
||||
except:
|
||||
LogStream.logProblem(self.newdb().getModelIdentifier() + ": Error while running method " + str(we) +
|
||||
|
@ -1113,12 +1154,12 @@ class Forecaster(GridUtilities):
|
|||
for i in xrange(len(methods)):
|
||||
for t in times[i]:
|
||||
lst.append((methods[i], t, i))
|
||||
lst.sort(self.__tsort)
|
||||
lst.sort(self.__tsort)
|
||||
return lst
|
||||
|
||||
def __exists(self, mtime, wename):
|
||||
#parm = self.__getNewWE(wename + "_SFC")
|
||||
parm = self.__getNewWE(wename)
|
||||
def __exists(self, mtime, wename):
|
||||
#parm = self.__getNewWE(wename + "_SFC")
|
||||
parm = self.__getNewWE(wename)
|
||||
return parm.getItem(mtime)
|
||||
|
||||
def __prune(self, lst):
|
||||
|
@ -1131,7 +1172,7 @@ class Forecaster(GridUtilities):
|
|||
#parm = self.__getNewWE(m[0] + "_SFC")
|
||||
tr = TimeRange.encodeJavaTimeRange(parm.getTimeRange(t[0]))
|
||||
if tr is None:
|
||||
continue
|
||||
continue
|
||||
parmtr = TimeRange.javaTimeRangeListToPyList(parm.getKeys())
|
||||
if tr in parmtr:
|
||||
# Skip (maybe)
|
||||
|
@ -1171,35 +1212,35 @@ class Forecaster(GridUtilities):
|
|||
def sourceBaseTime(self):
|
||||
modelTime = self.srcdb().getModelTime()
|
||||
if modelTime is None:
|
||||
modelTime = 0
|
||||
modelTime = 0
|
||||
t = AbsTime.AbsTime(modelTime)
|
||||
return t.unixTime()
|
||||
|
||||
|
||||
# JULIYA MODIFY HERE
|
||||
def __process(self, methods, times, mode):
|
||||
numGrids = 0
|
||||
trSpan = None
|
||||
cache = self.magicArgs()
|
||||
cache = self.magicArgs()
|
||||
all = mode#Options['all'] manual=1 automatic=0
|
||||
list = self.__flattenTimes(methods, times)
|
||||
list = self.__flattenTimes(methods, times)
|
||||
if not all:
|
||||
list = self.__prune(list)
|
||||
|
||||
self.mostRecentCacheClear = None
|
||||
for m, t, i in list:
|
||||
cache['ctime'] = (t, (0, sys.maxint))
|
||||
cache['ctime'] = (t, (0, MAX_TIME))
|
||||
parm = self.__getNewWE(m[0])
|
||||
tr = parm.getTimeRange(t[0])
|
||||
|
||||
# A valid time range was not found so the parameter
|
||||
|
||||
# A valid time range was not found so the parameter
|
||||
# cannot be calculated, so continue
|
||||
if not tr.isValid():
|
||||
continue
|
||||
|
||||
cache['mtime'] = (tr, (0, sys.maxint))
|
||||
cache['wename'] = (m[0], (0, sys.maxint))
|
||||
cache['stime'] = (t[0] - self.sourceBaseTime(), (0, sys.maxint))
|
||||
|
||||
|
||||
cache['mtime'] = (tr, (0, MAX_TIME))
|
||||
cache['wename'] = (m[0], (0, MAX_TIME))
|
||||
cache['stime'] = (t[0] - self.sourceBaseTime(), (0, MAX_TIME))
|
||||
|
||||
try:
|
||||
self.__runMethod(m, t, cache)
|
||||
numGrids = numGrids + 1
|
||||
|
@ -1225,7 +1266,7 @@ class IFPIO:
|
|||
|
||||
def getSrcWE(self, wename, lock=1):
|
||||
rval = None
|
||||
try:
|
||||
try:
|
||||
rval = self.__srcwes[wename]
|
||||
except:
|
||||
rval = self.eta.getItem(wename)
|
||||
|
@ -1240,21 +1281,27 @@ class IFPIO:
|
|||
self.__newwes[wename] = rval
|
||||
return rval
|
||||
|
||||
def get(self, qv):
|
||||
def get(self, qv):
|
||||
if len(qv) == 2:
|
||||
name, time = qv
|
||||
docube = 0
|
||||
else:
|
||||
name, time, docube = qv
|
||||
name, time, docube = qv
|
||||
if not docube:
|
||||
slice = self.getSrcWE(name, 0).getItem(time)
|
||||
p = self.getSrcWE(name, 0)
|
||||
times = p.getKeys(time)
|
||||
if times:
|
||||
time = times[0]
|
||||
LogStream.logEvent("retrieving", name, printTR(time))
|
||||
|
||||
slice = p.getItem(time)
|
||||
out = slice.getNDArray()
|
||||
if type(out) is ndarray and out.dtype == int8:
|
||||
if type(out) is ndarray and out.dtype == int8:
|
||||
# discrete or weather
|
||||
keys = JUtil.javaObjToPyVal(slice.getKeyList())
|
||||
out = [out, keys]
|
||||
out = [out, keys]
|
||||
else:
|
||||
out = self._getcube(self.eta, name, time)
|
||||
out = self._getcube(name, time)
|
||||
return out
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
|
@ -1270,20 +1317,14 @@ class IFPIO:
|
|||
return self._levels
|
||||
|
||||
#--------------------------------------------------------------------------
|
||||
# Returns the data cube for the specified db, parm, and time.
|
||||
# Returns the data cube for the specified parm, and time.
|
||||
#--------------------------------------------------------------------------
|
||||
def _getcube(self, db, parm, time):
|
||||
def _getcube(self, parm, time):
|
||||
lvls = self.levels()
|
||||
lst = []
|
||||
pres = []
|
||||
for l in lvls:
|
||||
p = self.getSrcWE(parm + "_" + l, 0)
|
||||
jslice = p.getItem(time)
|
||||
slice = jslice.getNDArray()
|
||||
if type(slice) is ndarray and slice.dtype == int8:
|
||||
# discrete or weather
|
||||
keys = JUtil.javaObjToPyVal(jslice.getKeyList())
|
||||
slice = [slice, keys]
|
||||
slice = self.get((parm + "_" + l, time))
|
||||
lst.append(slice)
|
||||
pres.append(int(l[2:]))
|
||||
# only scalars will be ndarray, otherwise it was vector, discrete, or wx
|
||||
|
@ -1294,7 +1335,7 @@ class IFPIO:
|
|||
ml.append(i[0])
|
||||
dl.append(i[1])
|
||||
rval = (array(ml), array(dl))
|
||||
else:
|
||||
else:
|
||||
rval = array(lst)
|
||||
return (pres, rval)
|
||||
|
||||
|
@ -1303,15 +1344,15 @@ class IFPIO:
|
|||
# specified time
|
||||
#--------------------------------------------------------------------------
|
||||
def store(self, newwe, time, grid):
|
||||
gridType = newwe.getGridType()
|
||||
gridType = newwe.getGridType()
|
||||
if gridType == "SCALAR":
|
||||
grid = clip(grid, newwe.getMinAllowedValue(), newwe.getMaxAllowedValue())
|
||||
elif gridType == "VECTOR":
|
||||
mag = clip(grid[0], newwe.getMinAllowedValue(), newwe.getMaxAllowedValue())
|
||||
dir = clip(grid[1], 0, 359.5)
|
||||
grid = (mag, dir)
|
||||
tr = TimeRange.encodeJavaTimeRange(time)
|
||||
# safety checks
|
||||
tr = TimeRange.encodeJavaTimeRange(time)
|
||||
# safety checks
|
||||
wrongType = None
|
||||
saved = False
|
||||
if type(grid) is ndarray:
|
||||
|
@ -1325,16 +1366,16 @@ class IFPIO:
|
|||
dirGrid = NumpyJavaEnforcer.checkdTypes(grid[1], float32)
|
||||
# vector save
|
||||
newwe.setItemVector(newwe.getTimeRange(tr[0]), magGrid, dirGrid)
|
||||
saved = True
|
||||
saved = True
|
||||
elif type(grid[0]) is ndarray and type(grid[1]) is list:
|
||||
bgrid = NumpyJavaEnforcer.checkdTypes(grid[0], int8)
|
||||
|
||||
|
||||
if gridType == "DISCRETE":
|
||||
newwe.setItemDiscrete(newwe.getTimeRange(tr[0]), bgrid, str(grid[1]))
|
||||
elif gridType == "WEATHER":
|
||||
newwe.setItemWeather(newwe.getTimeRange(tr[0]), bgrid, str(grid[1]))
|
||||
|
||||
saved = True
|
||||
|
||||
saved = True
|
||||
if not saved:
|
||||
if wrongType is None:
|
||||
wrongType = type(grid)
|
||||
|
@ -1344,10 +1385,10 @@ class IFPIO:
|
|||
#--------------------------------------------------------------------------
|
||||
# Main program
|
||||
#--------------------------------------------------------------------------
|
||||
def runFromJava(dbName, model, validTime):
|
||||
def runFromJava(dbName, model, validTime):
|
||||
SmartInitParams.params['dbName'] = dbName
|
||||
SmartInitParams.params['validTime'] = validTime
|
||||
|
||||
|
||||
mod = __import__(model)
|
||||
mod.main()
|
||||
rollbackImporter.rollback()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Version 2015.7.22-0
|
||||
# Version 2015.8.27-0
|
||||
|
||||
import GenericHazards
|
||||
import string, time, os, re, types, copy, LogStream, collections
|
||||
|
@ -549,7 +549,21 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
productDict['stormInformation'] = stormInfoDict
|
||||
|
||||
def _situationOverview(self, productDict, productSegmentGroup, productSegment):
|
||||
productDict['situationOverview'] = "Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc."
|
||||
overviewSectionTitle = ".Situation Overview...\n"
|
||||
|
||||
# Get the WRKHLS product that has the situation overview we want
|
||||
wrkhlsProduct = self.getPreviousProduct("WRKHLS")
|
||||
|
||||
# Try finding the situation overview
|
||||
overviewSearch = re.search("(?ism).*^%s(.+?)^\." % (overviewSectionTitle), wrkhlsProduct)
|
||||
|
||||
# If we found the overview
|
||||
if overviewSearch is not None:
|
||||
# Clean it up
|
||||
productDict['situationOverview'] = self._cleanText(overviewSearch.group(1).strip())
|
||||
else:
|
||||
# Use generic text for the situation overview
|
||||
productDict['situationOverview'] = self._frame("Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc.")
|
||||
|
||||
def _windSection(self, productDict, productSegmentGroup, productSegment):
|
||||
sectionDict = dict()
|
||||
|
@ -1607,9 +1621,10 @@ class TextProduct(HLSTCV_Common.TextProduct):
|
|||
# Updated version to handle WFO GUM advisories. This pattern will
|
||||
# handle multiple word names (including certain special characters)
|
||||
# This is for the NHC format.
|
||||
mndSearch = re.search("(?im)^.*?(HURRICANE|(SUB|POST.?)?TROPICAL " +
|
||||
"(STORM|DEPRESSION|CYCLONE)|(SUPER )?TYPHOON|" +
|
||||
"REMNANTS OF) ([A-Z0-9\-\(\) ]+?)" +
|
||||
mndSearch = re.search("(?im)^.*?(HURRICANE|(POTENTIAL|SUB|POST.?)" +
|
||||
"?TROPICAL (STORM|DEPRESSION|CYCLONE)|" +
|
||||
"(SUPER )?TYPHOON|REMNANTS OF) " +
|
||||
"([A-Z0-9\-\(\) ]+?)" +
|
||||
"(SPECIAL |INTERMEDIATE )?ADVISORY", tcp)
|
||||
|
||||
# Display some debug info - if flag is set
|
||||
|
@ -2663,8 +2678,7 @@ class LegacyFormatter():
|
|||
title = "Situation Overview"
|
||||
text = title + "\n" + "-"*len(title) + "\n\n"
|
||||
|
||||
text += self._textProduct.indentText(self._textProduct._frame(overviewText),
|
||||
maxWidth=self._textProduct._lineLength)
|
||||
text += self._textProduct.indentText(overviewText, maxWidth=self._textProduct._lineLength)
|
||||
text += "\n"
|
||||
|
||||
return text
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -233,10 +233,33 @@ LOUV25.722degrees LOUV
|
|||
LAUV17.491degrees LAUV
|
||||
|
||||
// HPCqpfNDFD, this prevents the decoder from appending the duration
|
||||
PPFFG_3600-0 PPFFG
|
||||
PPFFG_7200-0 PPFFG
|
||||
PPFFG_10800-0 PPFFG
|
||||
PPFFG_14400-0 PPFFG
|
||||
PPFFG_18000-0 PPFFG
|
||||
PPFFG_21600-0 PPFFG
|
||||
PPFFG_25200-0 PPFFG
|
||||
PPFFG_28800-0 PPFFG
|
||||
PPFFG_32400-0 PPFFG
|
||||
PPFFG_36000-0 PPFFG
|
||||
PPFFG_39600-0 PPFFG
|
||||
PPFFG_43200-0 PPFFG
|
||||
PPFFG_46800-0 PPFFG
|
||||
PPFFG_50400-0 PPFFG
|
||||
PPFFG_54000-0 PPFFG
|
||||
PPFFG_57600-0 PPFFG
|
||||
PPFFG_61200-0 PPFFG
|
||||
PPFFG_64800-0 PPFFG
|
||||
PPFFG_68400-0 PPFFG
|
||||
PPFFG_72000-0 PPFFG
|
||||
PPFFG_75600-0 PPFFG
|
||||
PPFFG_108000-0 PPFFG
|
||||
PPFFG_172800-0 PPFFG
|
||||
PPFFG_259200-0 PPFFG
|
||||
PPFFG_79200-0 PPFFG
|
||||
PPFFG_82800-0 PPFFG
|
||||
PPFFG_86400-0 PPFFG
|
||||
PPFFG_90000-0 PPFFG
|
||||
PPFFG_93600-0 PPFFG
|
||||
PPFFG_97200-0 PPFFG
|
||||
|
||||
// HPCqpfNDFD PPQPF grids
|
||||
TP0.254mm_HPCQPF-NCEP-HPC_1073x689_21600-0 ProbTP0p01in6hr
|
||||
|
|
|
@ -3194,7 +3194,10 @@
|
|||
<name>MOSGuide</name>
|
||||
<center>7</center>
|
||||
<subcenter>14</subcenter>
|
||||
<grid>184</grid>
|
||||
<grids>
|
||||
<id>184</id>
|
||||
<id>NBM</id>
|
||||
</grids>
|
||||
<process>
|
||||
<id>96</id>
|
||||
</process>
|
||||
|
@ -3305,7 +3308,10 @@
|
|||
<name>EKDMOS</name>
|
||||
<center>7</center>
|
||||
<subcenter>14</subcenter>
|
||||
<grid>184</grid>
|
||||
<grids>
|
||||
<id>184</id>
|
||||
<id>NBM</id>
|
||||
</grids>
|
||||
<process>
|
||||
<id>114</id>
|
||||
</process>
|
||||
|
|
|
@ -756,21 +756,18 @@ public class Level3BaseRadar {
|
|||
byte[] msg = new byte[120];
|
||||
InputStream byt;
|
||||
if (uncompressedSize + msg.length != theRawRadarByteArray.length) {
|
||||
InputStream ins = null;
|
||||
try {
|
||||
theRadarData.reset();
|
||||
theRadarData.readFully(msg);
|
||||
ins = new BZip2InputStream(theRadarData, false);
|
||||
uncompressed = new byte[uncompressedSize];
|
||||
ins.read(uncompressed);
|
||||
try (DataInputStream di = new DataInputStream(
|
||||
new BZip2InputStream(theRadarData, false))) {
|
||||
uncompressed = new byte[uncompressedSize];
|
||||
di.readFully(uncompressed);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
theHandler.handle(Priority.ERROR,
|
||||
"Error decompressing product: ", e);
|
||||
return;
|
||||
} finally {
|
||||
if (ins != null) {
|
||||
ins.close();
|
||||
}
|
||||
}
|
||||
theRawRadarByteArray = new byte[120 + uncompressed.length];
|
||||
System.arraycopy(msg, 0, theRawRadarByteArray, 0, 120);
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue