12.11.1-3 baseline

Former-commit-id: a65c057df5 [formerly 196afa0e5f] [formerly 2d0c424713] [formerly a65c057df5 [formerly 196afa0e5f] [formerly 2d0c424713] [formerly 5d796f775c [formerly 2d0c424713 [formerly 33ddeff32d02d6302c7e9e61a3b5d8cbf0440b86]]]]
Former-commit-id: 5d796f775c
Former-commit-id: 7fcbb49a3e [formerly 725b2f8ba5] [formerly 0e268084394b033a5de6dba694d14423a1da3040 [formerly 5723fb0031]]
Former-commit-id: eaae8c3b883f287ffd1c17522c62444fa45a6410 [formerly bf1f88482b]
Former-commit-id: c25ebcdc41
This commit is contained in:
Steve Harris 2012-10-15 16:30:31 -05:00
parent 75d99f4e93
commit e42c12350b
36 changed files with 842 additions and 477 deletions

View file

@ -71,6 +71,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* Feb 04, 2011 7953 bkowal Fill values will now be placed
* in the data array for anything
* below 300MB for RUC80.
* Oct 2, 2012 DR 15259 M.Porricelli Allow plotting when 3 levels
* available (DGEX)
*
* </pre>
*
@ -145,7 +147,7 @@ public class GribCSAdapter extends AbstractCrossSectionAdapter<GribRecord> {
}
}
if (xMap.size() < 4) {
if (xMap.size() < 3) {
return null;
}
@ -159,7 +161,7 @@ public class GribCSAdapter extends AbstractCrossSectionAdapter<GribRecord> {
xMap.keySet().retainAll(yMap.keySet());
yMap.keySet().retainAll(xMap.keySet());
if (xMap.size() < 4) {
if (xMap.size() < 3) {
return null;
}

View file

@ -48,7 +48,7 @@ import com.raytheon.viz.core.gl.SharedCoordMap.SharedCoordinateKey;
*
* SOFTWARE HISTORY Date Ticket# Engineer Description ------------ ----------
* ----------- -------------------------- Jun 10, 2010 mschenke Initial creation
*
* OCT 09, 2012 15018 kshresth
* </pre>
*
* @author mschenke
@ -408,6 +408,12 @@ public class RadarRadialMesh extends AbstractGLMesh {
if (jStart == null) {
jStart = 0;
}
//check if numBins and numRadials equals to zero, then angleData does not exist
if (numBins == 0 && numRadials == 0 ) {
return null;
}
float[] angleData = radarData.getAngleData();
CacheKey key = new CacheKey(latitude, longitude, numBins, numRadials,
gateResolution, trueElevationAngle, jStart, angleData,

View file

@ -10,6 +10,18 @@ import java.util.List;
import com.raytheon.uf.viz.xy.graph.IGraph;
import com.raytheon.viz.core.graphing.xy.XYData;
/**
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* ?? ?? Initial creation
* Oct 2, 2012 DR 15259 M.Porricelli Interpolate below 850MB
*
* </pre>
*
* @author mschenke
* @version 1.0
*/
public class InterpUtils {
/**
@ -55,6 +67,10 @@ public class InterpUtils {
double maxYAxisVal = ((Number) dataList.get(0).getY()).doubleValue();
double minYAxisVal = ((Number) dataList.get(dataList.size() - 1).getY())
.doubleValue();
// Allow interpolation below 850 when this is lowest level
if (maxYAxisVal == 850.0){
maxYAxisVal = 1000.0;
}
if (maxYAxisVal < minYAxisVal) {
double tmp = maxYAxisVal;

View file

@ -96,7 +96,12 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
* The actionListeners for certain controls
* have been updated so that they will set it
* to true when an update is actually required.
*
*
* 03 OCT 2012 #15395 Added code to handle TimeStep when default is set
* to be "30 minutes Instantaneous" in the database.
* 09 OCT 2012 #15396 Fixed Instantaneous precip index so legend and map display
* will change each time duration is incremented or decremented
* for the "30 minutes Instantaneous" rainfall map .
* </pre>
*
* @author lvenable
@ -599,6 +604,20 @@ public class PointDataControlDlg extends CaveSWTDialog {
timeTF.setText(dateTimeFmt.format(cal.getTime()));
populatePresetData(null);
/* this is when in the database, the timeStep is set to be the
default one */
if (timeStepRdo.getSelection() == true) {
handleQueryModeSelection(PDCConstants.QueryMode.TIME_STEP_MODE);
previousQueryMode = PDCConstants.QueryMode.TIME_STEP_MODE;
shell.setCursor(waitCursor);
updateData = true;
drawMap();
shell.setCursor(arrowCursor);
}
}
/**
@ -853,11 +872,18 @@ public class PointDataControlDlg extends CaveSWTDialog {
upPrecipBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
PDCOptionData pcOptions = PDCOptionData.getInstance();
if (precipIndex >= HydroConstants.InstPrecipSelection.values().length - 1) {
precipIndex = 0;
} else {
precipIndex++;
if (precipIndex == HydroConstants.InstPrecipSelection.
values().length - 1) {
precipIndex=0;
}
}
pcOptions.setInstPrecipAccumTimeSelection(precipIndex);
setInstPrecipAccumText();
shell.setCursor(waitCursor);
updateData = true;
@ -871,11 +897,21 @@ public class PointDataControlDlg extends CaveSWTDialog {
downPrecipBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent event) {
PDCOptionData pcOptions = PDCOptionData.getInstance();
if (precipIndex == 0) {
precipIndex = HydroConstants.InstPrecipSelection.values().length - 1;
precipIndex = HydroConstants.InstPrecipSelection.
values().length - 1;
if (precipIndex == HydroConstants.InstPrecipSelection.
values().length - 1) {
precipIndex=HydroConstants.InstPrecipSelection.
values().length - 2;
}
} else {
precipIndex--;
}
pcOptions.setInstPrecipAccumTimeSelection(precipIndex);
setInstPrecipAccumText();
shell.setCursor(waitCursor);
updateData = true;

View file

@ -43,7 +43,10 @@ import com.raytheon.viz.hydrocommon.data.LocationData;
* Sep 09, 2009 2769 mpduff Added copyTableData method and the calls
* to it for copying data from one table to another.
* Oct 20, 2011 11266 lbousaidi added getHSAsForFilter() method to query from
* location table instead of hsa table.
* location table instead of hsa table.
* Oct 05, 2011 15333 lbousaidi changed the queries that retrieves the HSAs from
* the database.
*
* </pre>
*
* @author askripsky
@ -170,15 +173,14 @@ public class AddModifyLocationDataManager extends HydroDataManager {
public ArrayList<String> getHSAs() throws VizException {
ArrayList<String> rval = new ArrayList<String>();
String query = "Select hsa from hsa order by hsa";
String query= "SELECT DISTINCT upper(hsa) from hsa order by upper (hsa)";
QueryResult data = HydroDBDataManager.getInstance().runMappedQuery(
query);
if (data != null) {
for (QueryResultRow currNet : data.getRows()) {
rval.add((String) currNet.getColumn(data.getColumnNames().get(
"hsa")));
"upper")));
}
}
@ -217,16 +219,16 @@ public class AddModifyLocationDataManager extends HydroDataManager {
*/
public ArrayList<String> getWFOs() throws VizException {
ArrayList<String> rval = new ArrayList<String>();
String query = "Select wfo from wfo order by wfo";
String query= "SELECT DISTINCT upper(wfo) from wfo order by upper (wfo)";
QueryResult data = HydroDBDataManager.getInstance().runMappedQuery(
query);
if (data != null) {
for (QueryResultRow currNet : data.getRows()) {
rval.add((String) currNet.getColumn(data.getColumnNames().get(
"wfo")));
"upper")));
}
}

View file

@ -30,6 +30,7 @@ import com.raytheon.uf.common.time.DataTime;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Sep 15, 2009 mschenke Initial creation
* OCT 04, 2012 15132 kshresth Restored "MSAS/LDAD QC plots" display data
*
* </pre>
*
@ -38,7 +39,8 @@ import com.raytheon.uf.common.time.DataTime;
*/
public class PlotInfo {
public Integer id;
public String stationId;
public Double latitude;
@ -76,4 +78,16 @@ public class PlotInfo {
this.plotQueued = false;
this.sampleQueued = false;
}
public PlotInfo(String stationId, Double latitude,
Double longitude, DataTime dataTime, String dataURI, Integer id) {
this.stationId = stationId;
this.latitude = latitude;
this.longitude = longitude;
this.dataTime = dataTime;
this.dataURI = dataURI;
this.id = id;
this.plotQueued = false;
this.sampleQueued = false;
}
}

View file

@ -57,6 +57,7 @@ import com.raytheon.viz.pointdata.thread.PlotSampleGeneratorJob;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 22, 2011 njensen Initial creation
* OCT 04, 2012 15132 kshresth Restored "MSAS/LDAD QC plots" display data
*
* </pre>
*
@ -179,7 +180,7 @@ public class PlotModelDataRequestJob extends Job {
private void requestData(List<PlotInfo[]> stationQuery,
List<PlotModelElement> pme) {
Map<String, PlotInfo> plotMap = new HashMap<String, PlotInfo>();
Map<Integer, PlotInfo> plotMap = new HashMap<Integer, PlotInfo>();
List<String> params = new ArrayList<String>();
for (PlotModelElement p : pme) {
@ -204,8 +205,8 @@ public class PlotModelDataRequestJob extends Job {
List<String> str = new ArrayList<String>(stationQuery.size());
for (PlotInfo[] infos : stationQuery) {
for (PlotInfo info : infos) {
str.add(info.dataURI);
plotMap.put(info.dataURI, info);
str.add(Integer.toString(info.id));
plotMap.put(info.id, info);
}
}
@ -219,7 +220,7 @@ public class PlotModelDataRequestJob extends Job {
index++;
j++;
}
map.put("dataURI", rc);
map.put("id", rc);
try {
// Try and get data from datacube
long t0 = System.currentTimeMillis();
@ -243,8 +244,8 @@ public class PlotModelDataRequestJob extends Job {
for (int uriCounter = 0; uriCounter < pdc.getAllocatedSz(); uriCounter++) {
PointDataView pdv = pdc.readRandom(uriCounter);
if (pdv != null) {
String dataURI = pdv.getString("dataURI");
PlotInfo info = plotMap.get(dataURI);
int id = pdv.getInt("id");
PlotInfo info = plotMap.get(id);
// If the id doesn't match, try to match by
// location
if (info == null) {
@ -257,6 +258,8 @@ public class PlotModelDataRequestJob extends Job {
- pdv.getFloat("longitude"));
if (diffLat < 0.01 && diffLon < 0.01) {
info = pi;
pdv.setInt("id", pi.id);
id = pi.id;
}
}
}

View file

@ -48,7 +48,7 @@ import com.raytheon.viz.pointdata.PlotInfo;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 9, 2009 bsteffen Initial creation
*
* OCT 4, 2012 15132 kshresth Restored "MSAS/LDAD QC plots" display data
* </pre>
*
* @author bsteffen
@ -76,6 +76,7 @@ public class PointDataPlotInfoRetriever extends AbstractDbPlotInfoRetriever {
} else {
dq.addColumn("dataTime");
}
dq.addColumn("id");
}
@Override
@ -103,7 +104,7 @@ public class PointDataPlotInfoRetriever extends AbstractDbPlotInfoRetriever {
statusHandler.handle(Priority.CRITICAL, message, new Exception(
message));
}
stationInfo.id = (Integer) data[5];
return stationInfo;
}

View file

@ -288,6 +288,8 @@ import com.raytheon.viz.ui.dialogs.SWTMessageBox;
* 10Sep2012 15103 M.Gamazaychikov DR15103 -do not clear AFOS command from the text box
* when obs are updated and refactored executeCommand
* 10SEP2012 15401 D.Friedman Fix QC problem caused by DR 15340.
* 27SEP2012 15424 S.Naples Set focus on AFOS command text field after executing retrieval of product.
* 09Oct2012 14889 M.Gamazaychikov Add call to checkAndWrapPreviousLine
* </pre>
*
* @author lvenable
@ -337,6 +339,12 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
* When auto wrapping the last line that needs to be wrapped.
*/
private int endWrapLine = -1;
/**
* Last line was wrapped backwards
*/
private boolean isPreviousLineWrapped = false;
private static final String PARAGRAPH_DELIMITERS = "*$.-/^#";
@ -926,7 +934,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
/**
* Redraw flag indicating if the window should redraw on a resize.
*/
private boolean canRedraw = true;
private final boolean canRedraw = true;
/**
* Flag indicating if the editor is in edit mode.
@ -956,7 +964,7 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
/**
* flag to indicate it a product request is from the GUI or an updated ob.
*/
private AtomicInteger updateCount = new AtomicInteger(0);
private final AtomicInteger updateCount = new AtomicInteger(0);
private NotifyExpiration notify;
@ -2858,11 +2866,13 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
});
afosCmdTF.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent event) {
@Override
public void widgetSelected(SelectionEvent event) {
}
public void widgetDefaultSelected(SelectionEvent event) {
@Override
public void widgetDefaultSelected(SelectionEvent event) {
String tmp = afosCmdTF.getText();
tmp = tmp.trim();
afosCmdTF.setText(tmp);
@ -2883,8 +2893,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
executeCommand(CommandFactory.getAfosCommand(afosCmdTF
.getText()));
// Highlight the text contained in the Afos Command Field.
afosCmdTF.selectAll();
// Place cursor back in the Afos Command Field.
afosCmdTF.setFocus();
}
});
@ -2936,7 +2946,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
});
wmoTtaaiiTF.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent event) {
@Override
public void modifyText(ModifyEvent event) {
if (wmoTtaaiiTF.getCaretPosition() == wmoTtaaiiTF
.getTextLimit()) {
ccccTF.setFocus();
@ -2945,11 +2956,13 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
});
wmoTtaaiiTF.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent event) {
@Override
public void widgetSelected(SelectionEvent event) {
}
public void widgetDefaultSelected(SelectionEvent event) {
@Override
public void widgetDefaultSelected(SelectionEvent event) {
wmoTtaaiiTF.setText(wmoTtaaiiTF.getText().toUpperCase());
ccccTF.setText(ccccTF.getText().toUpperCase());
wmoSearch();
@ -3003,11 +3016,13 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
});
ccccTF.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent event) {
@Override
public void widgetSelected(SelectionEvent event) {
}
public void widgetDefaultSelected(SelectionEvent event) {
@Override
public void widgetDefaultSelected(SelectionEvent event) {
wmoTtaaiiTF.setText(wmoTtaaiiTF.getText().toUpperCase());
ccccTF.setText(ccccTF.getText().toUpperCase());
wmoSearch();
@ -3064,11 +3079,13 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
});
awipsIdTF.addSelectionListener(new SelectionListener() {
public void widgetSelected(SelectionEvent event) {
@Override
public void widgetSelected(SelectionEvent event) {
}
public void widgetDefaultSelected(SelectionEvent event) {
@Override
public void widgetDefaultSelected(SelectionEvent event) {
awipsIdTF.setText(awipsIdTF.getText().trim().toUpperCase());
int charCount = awipsIdTF.getCharCount();
if (charCount < 4 || charCount > 6) {
@ -3462,7 +3479,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
// });
textEditor.addVerifyKeyListener(new VerifyKeyListener() {
public void verifyKey(VerifyEvent event) {
@Override
public void verifyKey(VerifyEvent event) {
if (event.keyCode == SWT.DEL || event.character == SWT.BS
|| event.keyCode == SWT.SHIFT) {
// Do nothing...
@ -5233,7 +5251,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
return TextDisplayModel.getInstance().getStdTextProduct(token);
}
public void setAfosCmdField(String cmd) {
@Override
public void setAfosCmdField(String cmd) {
afosCmdTF.setText(cmd);
TextDisplayModel.getInstance().setAfosCommand(token, cmd);
}
@ -5246,7 +5265,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
return addressee;
}
public void executeCommand(ICommand command) {
@Override
public void executeCommand(ICommand command) {
executeCommand(command, false);
}
@ -6334,7 +6354,8 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
@Override
public void run() {
getDisplay().syncExec(new Runnable() {
public void run() {
@Override
public void run() {
if (!shell.isDisposed()) {
if (autoSave == AutoSaveTask.this) {
saveEditedProduct(true, false);
@ -6740,6 +6761,10 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
// performingWrap = true;
int lineNumber = textEditor.getLineAtOffset(start);
endWrapLine = textEditor.getLineAtOffset(end);
/*
* DR154889 - resetting isPreviousLineWrapped
*/
isPreviousLineWrapped = false;
rewrapInternal(lineNumber);
// The rest of this method is adjusting the view of the display.
@ -6973,6 +6998,11 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
wrapAtPositionOrLock(lineStartOffset + charWrapCol, padding);
}
}
/*
* DR14889 - add call to checkAndWrapPreviousLine
*/
checkAndWrapPreviousLine(lineNumber);
checkAndWrapNextLine(lineNumber);
}
@ -7067,11 +7097,54 @@ public class TextEditorDialog extends CaveSWTDialog implements VerifyListener,
// split at the column, no whitespace
wrapAtPositionOrLock(lineStartOffset + charWrapCol, padding);
}
/*
* DR14889 - add call to checkAndWrapPreviousLine
*/
checkAndWrapPreviousLine(lineNumber);
checkAndWrapNextLine(lineNumber);
}
/**
/** checks if the previous line is part of the same paragraph and continues
* wrapping if it is
* @param line
*/
private void checkAndWrapPreviousLine(int line) {
// if there is a previous line
if ( isPreviousLineWrapped ){
return;
}
if (line - 1 > 0) {
// if the previous line does not start a new paragraph
if (!isParagraphStart(line - 1)) {
// if the previous line is not empty ( marks the end of a paragraph
// )
if (!textEditor.getLine(line - 1).trim().isEmpty()) {
// rewrap the previous line
isPreviousLineWrapped = true;
rewrapInternal(line - 1);
} else if (line - 1 < endWrapLine) {
// See if another paragraph needs to be wrapped.
int nextLine = line - 1;
while (nextLine <= endWrapLine
&& textEditor.getLine(nextLine).trim().isEmpty()) {
--nextLine;
}
if (nextLine <= endWrapLine) {
isPreviousLineWrapped = true;
rewrapInternal(nextLine);
}
}
} else if (line - 1 <= endWrapLine) {
isPreviousLineWrapped = true;
rewrapInternal(line - 1);
}
}
}
/**
* checks if the paragraph starting at the line passed in uses two space
* padding for subsequent lines
*

View file

@ -24,6 +24,16 @@ import com.raytheon.uf.viz.core.maps.rsc.DbMapQueryFactory;
import com.raytheon.viz.warngen.gis.ClosestPoint;
import com.vividsolutions.jts.geom.Geometry;
/**
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* pre-history
* Sep 25, 2012 #15425 Qinglu Lin Added getGid().
*
*/
abstract public class AbstractDbSourceDataAdaptor {
protected Set<String> sortFields = new HashSet<String>(
@ -160,4 +170,20 @@ abstract public class AbstractDbSourceDataAdaptor {
return warngenlev;
}
protected int getGid(Set<String> ptFields,
Map<String, Object> attributes) {
int gid = 0;
if (ptFields.contains("gid")) {
try {
gid = Integer.valueOf(String.valueOf(attributes
.get("gid")));
} catch (Exception e) {
// Ignore
}
}
return gid;
}
}

View file

@ -21,7 +21,12 @@ import com.vividsolutions.jts.geom.Geometry;
/**
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* @author jsanchez
* Sep 25, 2012 #15425 Qinglu Lin Updated createClosestPoint().
*
*/
public class DbAreaSourceDataAdaptor extends AbstractDbSourceDataAdaptor {
@ -73,8 +78,9 @@ public class DbAreaSourceDataAdaptor extends AbstractDbSourceDataAdaptor {
int warngenlev = getWangenlev(ptFields, attributes);
List<String> partOfArea = getPartOfArea(ptFields, attributes,
ptRslt.geometry);
int gid = getGid(ptFields, attributes);
return new ClosestPoint(name, point, population, warngenlev, partOfArea);
return new ClosestPoint(name, point, population, warngenlev, partOfArea, gid);
}
/**

View file

@ -47,6 +47,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 26, 2011 bgonzale Initial creation
* Sep 25, 2012 #15425 Qinglu Lin Updated createClosestPoint().
*
* </pre>
*
@ -85,8 +86,9 @@ public class DbPointSourceDataAdaptor extends AbstractDbSourceDataAdaptor {
Coordinate point = ptRslt.geometry.getCoordinate();
int population = getPopulation(ptFields, attributes);
int warngenlev = getWangenlev(ptFields, attributes);
int gid = getGid(ptFields, attributes);
return new ClosestPoint(name, point, population, warngenlev, null);
return new ClosestPoint(name, point, population, warngenlev, null, gid);
}
@Override

View file

@ -37,6 +37,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Dec 11, 2007 #601 chammack Initial Creation.
* APr 18, 2012 #14733 Qinglu Lin David's fix is used, which adds
* a copy constructor.
* Sep 25, 2012 #15425 Qinglu Lin Updated two ClosestPoint() and added getGid().
*
* </pre>
*
@ -72,6 +73,8 @@ public class ClosestPoint implements Comparable<ClosestPoint> {
protected List<String> partOfArea;
protected int gid;
public ClosestPoint() {
}
@ -91,19 +94,21 @@ public class ClosestPoint implements Comparable<ClosestPoint> {
this.warngenlev = o.warngenlev;
this.time = o.time;
this.partOfArea = o.partOfArea;
this.gid = o.gid;
}
public ClosestPoint(String name, Coordinate point) {
this(name, point, 0, 0, null);
this(name, point, 0, 0, null, 0);
}
public ClosestPoint(String name, Coordinate point, int population,
int warngenlev, List<String> partOfArea) {
int warngenlev, List<String> partOfArea, int gid) {
this.name = name;
this.point = point;
this.population = population;
this.warngenlev = warngenlev;
this.partOfArea = partOfArea;
this.gid = gid;
}
/**
@ -173,6 +178,10 @@ public class ClosestPoint implements Comparable<ClosestPoint> {
return partOfArea;
}
public int getGid() {
return gid;
}
/**
* Adjusts the angle from -360/360 to be between -180/180
*

View file

@ -37,6 +37,7 @@ import org.apache.commons.lang.ArrayUtils;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Mar 3, 2011 jsanchez Initial creation
* Sep 25, 2012 15425 Qinglu Lin Implemented sorting on 'gid' in ascending order.
*
* </pre>
*
@ -47,7 +48,7 @@ import org.apache.commons.lang.ArrayUtils;
public class ClosestPointComparator implements Comparator<ClosestPoint> {
private enum Sort {
NAME, POPULATION, DISTANCE, LEVEL, LAT, LON, AREA, PARENT_AREA
NAME, POPULATION, DISTANCE, LEVEL, LAT, LON, AREA, PARENT_AREA, GID
}
private ArrayList<Sort> list;
@ -79,7 +80,9 @@ public class ClosestPointComparator implements Comparator<ClosestPoint> {
} else if (field.equalsIgnoreCase("area")) {
list.add(Sort.AREA);
} else if (field.equalsIgnoreCase("parentArea")) {
list.add(Sort.PARENT_AREA);
list.add(Sort.PARENT_AREA);
} else if (field.equalsIgnoreCase("gid")) {
list.add(Sort.GID);
}
}
}
@ -147,6 +150,10 @@ public class ClosestPointComparator implements Comparator<ClosestPoint> {
value = new Integer(cp1.roundedDistance)
.compareTo(cp2.roundedDistance);
break;
case GID:
value = new Integer(cp1.gid)
.compareTo(cp2.gid);
break;
}
if (value == 0 && counter + 1 < list.size()) {

View file

@ -97,6 +97,8 @@ import com.vividsolutions.jts.geom.Point;
* that loops over availablePoints.
* May 21, 2012 DR14480 Qinglu Lin Added code to prevent duplicate cities
* in pathcast.
* Oct 05, 2012 DR15429 Qinglu Lin Updated code to keep duplicate names of cities
* which are at different locations in pathcast.
*
* </pre>
*
@ -530,7 +532,7 @@ public class Wx {
// with first pathcast and goes through each point within maxCount,
// check for same point in other pathcast objects. If same point
// exists, remove from which ever pathcast is furthest away
Set<String> closestPtNames = new HashSet<String>(30);
Set<Coordinate> closestPtCoords = new HashSet<Coordinate>(30);
List<ClosestPoint> tmpPoints = new ArrayList<ClosestPoint>(maxCount);
Queue<PathCast> tmp = new ArrayDeque<PathCast>(pathcasts);
while (tmp.isEmpty() == false) {
@ -562,12 +564,12 @@ public class Wx {
tmpPoints.clear();
for (int i = 0; i < points.size() && i < maxCount; ++i) {
ClosestPoint point = points.get(i);
String name = point.getName();
if (!closestPtNames.contains(name)) {
Coordinate coord = point.getPoint();
if (!closestPtCoords.contains(coord)) {
// To prevent duplicate cities in pathcast,
// only unused point is added to tmpPoints
tmpPoints.add(point);
closestPtNames.add(name);
closestPtCoords.add(coord);
}
}
if (tmpPoints.size() > 0) {

View file

@ -121,6 +121,8 @@ import com.vividsolutions.jts.geom.Polygon;
* Jul 26, 2012 #15227 Qinglu Lin Added removeDuplicateVertices(), removeOverlaidSegments(),
* adjustLatLon(), etc.
* Sep 05, 2012 DR 15261 D. Friedman Prevent additional counties from being selected for EXPs
* Oct 03, 2012 DR 15426 Qinglu Lin Unlock WarnGen GUI for COR, implemented in corSelected();
* but lock immediate cause, implemented in individual template.
*
* </pre>
*
@ -1852,7 +1854,6 @@ public class WarngenDialog extends CaveSWTDialog implements
allowsNewProduct = true;
}
}
bulletList.setEnabled(false);
// Special case - allows for Correction of Followups
if (!allowsNewProduct) {
newWarn = conSelected(data);

View file

@ -13,6 +13,7 @@ alarmwhfs.cron=0+7,17,27,37,47,57+*+*+*+?
arealqpegen.cron=0+10,25,40,55+*+*+*+?
subscription.cron=0+*+*+*+*+?
dqcpreprocessor.cron=0+20+0,6,12,18+*+*+?
freezingLevel.cron=0+5+2,8,14,20+*+*+?
rpggenvdata.envdata.cron=0+0+*+*+*+?
rpggenvdata.biastable.cron=0+26,46+*+*+*+?
metartohmdb.cron=0+14+*+*+*+?

View file

@ -5,6 +5,7 @@
Evan Bookbinder 2-24-2012
Phil Kurimski 2-28-2012
Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Qinglu Lin 10-03-2012 DR 15426. Changed ic to pc in <lockedGroupsOnFollowup> tag.
-->
<warngenConfig>
@ -58,7 +59,7 @@
<duration>720</duration>
</durations>
<lockedGroupsOnFollowup>ic</lockedGroupsOnFollowup>
<lockedGroupsOnFollowup>pc</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup action="NEW" phen="FA" sig="W">
<bullets>

View file

@ -5,7 +5,9 @@
Modified by Phil Kurimski 09-23-2011 for burn scars and mud slides
Modified by Mike Dangelo 01-25-2012 at CRH TIM
Modified by Mike Dangelo 02-23-2012
Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Qinglu Lin 10-03-2012 DR 15426. Added <lockedGroupsOnFollowup> tag,
inserted bulletGroup="ic" after bulletText="Also snow melt".
-->
<warngenConfig>
@ -63,6 +65,7 @@
<duration>480</duration>
</durations>
<lockedGroupsOnFollowup>ic</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup action="NEW" phen="FF" sig="W">
<bullets>
@ -115,7 +118,7 @@
<bulletActionGroup action="EXT" phen="FF" sig="W">
<bullets>
<bullet bulletName="ffwEmergency" bulletText="**SELECT FOR FLASH FLOOD EMERGENCY**" parseString="FLASH FLOOD EMERGENCY"/>
<bullet bulletName="icrs" bulletText="Also snow melt" parseString=".RS." showString=".RS."/>
<bullet bulletName="icrs" bulletText="Also snow melt" bulletGroup="ic" parseString=".RS." showString=".RS."/>
<bullet bulletText="*********** SOURCE (CHOOSE 1) **********" bulletType="title"/>
<bullet bulletName="doppler" bulletText="Doppler radar indicated" bulletGroup="source" bulletDefault="true" parseString="DOPPLER RADAR INDICATED"/>
<bullet bulletName="dopplerGauge" bulletText="Doppler radar and automated gauges" bulletGroup="source" parseString="DOPPLER RADAR AND AUTOMATED RAIN GAUGES"/>
@ -163,7 +166,7 @@
<bulletActionGroup action="COR" phen="FF" sig="W">
<bullets>
<bullet bulletName="ffwEmergency" bulletText="**SELECT FOR FLASH FLOOD EMERGENCY**" parseString="FLASH FLOOD EMERGENCY"/>
<bullet bulletName="icrs" bulletText="Also snow melt" parseString=".RS." showString=".RS."/>
<bullet bulletName="icrs" bulletText="Also snow melt" bulletGroup="ic" parseString=".RS." showString=".RS."/>
<bullet bulletText="*********** SOURCE (CHOOSE 1) **********" bulletType="title"/>
<bullet bulletName="doppler" bulletText="Doppler radar indicated" bulletGroup="source" bulletDefault="true" parseString="DOPPLER RADAR INDICATED"/>
<bullet bulletName="dopplerGauge" bulletText="Doppler radar and automated gauges" bulletGroup="source" parseString="DOPPLER RADAR AND AUTOMATED RAIN GAUGES"/>

View file

@ -5,7 +5,9 @@
Modified Phil Kurimski 09-23-2011 OB 11.0.8-8
Modified Phil Kurimski 01-26-2012 OB 12.1.1-1
Modified Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Modified Phil Kurimski 04-27-2012 -->
Modified Phil Kurimski 04-27-2012
Modified Qinglu Lin 10-03-2012 DR 15426. Added damic to <lockedGroupsOnFollowup> tag.
-->
<!-- Config distance/speed units -->
<unitDistance>mi</unitDistance>
@ -65,7 +67,7 @@
<!-- Customized several sections in bullet section including:
Added Flash Flood Emergency Headline
Changed the CTA Bullet names for easier parsing in the vm file -->
<lockedGroupsOnFollowup>dam,ic</lockedGroupsOnFollowup>
<lockedGroupsOnFollowup>dam,ic,damic</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup action="NEW" phen="FF" sig="W">
<bullets>

View file

@ -4,7 +4,9 @@
Modified Phil Kurimski 09-23-2011 OB 11.0.8-8
Modified Phil Kurimski 01-26-2012 OB 12.1.1-1
Modified Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Modified Phil Kurimski 04-27-2012 -->
Modified Phil Kurimski 04-27-2012
Modified Qinglu Lin 10-04-2012 DR 15426. Added damic to <lockedGroupsOnFollowup> tag.
-->
<warngenConfig>
@ -57,7 +59,7 @@
Added Flash Flood Emergency Headline
Changed the CTA Bullet names for easier parsing in the vm file
Added the Primary Cause to CAN and EXP sections for correct headlines -->
<lockedGroupsOnFollowup>dam,ic</lockedGroupsOnFollowup>
<lockedGroupsOnFollowup>dam,ic,damic</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup>
<bullets>

View file

@ -6,6 +6,7 @@
Modified Phil Kurimski 02-29-2012 OB 12.2.1-3
Modified Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Modified Phil Kurimski 04-27-2012
Modified Qinglu Lin 10-04-2012 DR 15426. Added damic to <lockedGroupsOnFollowup> tag.
-->
<warngenConfig>
@ -59,7 +60,7 @@
Added Flash Flood Emergency Headline
Changed the CTA Bullet names for easier parsing in the vm file
Added the Primary Cause to CAN and EXP sections for correct headlines -->
<lockedGroupsOnFollowup>dam,ic</lockedGroupsOnFollowup>
<lockedGroupsOnFollowup>dam,ic,damic</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup>
<bullets>

View file

@ -6,7 +6,9 @@
Modified Phil Kurimski 01-26-2012 OB 12.1.1-1
Modified Phil Kurimski 02-29-2012 OB 12.2.1-3
Modified Qinglu Lin 04-04-2012 DR 14691. Added <feAreaField> tag.
Modified Phil Kurimski 04-27-2012 -->
Modified Phil Kurimski 04-27-2012
Modified Qinglu Lin 10-04-2012 DR 15426. Added damic to <lockedGroupsOnFollowup> tag.
-->
<!-- Config distance/speed units -->
<unitDistance>mi</unitDistance>
@ -66,7 +68,7 @@
<!-- Customized several sections in bullet section including:
Added Flash Flood Emergency Headline
Changed the CTA Bullet names for easier parsing in the vm file -->
<lockedGroupsOnFollowup>dam,ic</lockedGroupsOnFollowup>
<lockedGroupsOnFollowup>dam,ic,damic</lockedGroupsOnFollowup>
<bulletActionGroups>
<bulletActionGroup action="NEW" phen="FF" sig="W">
<bullets>

View file

@ -24,7 +24,7 @@
# 02/16/12 14439 jdynina modified haines thresholds
# 02/16/12 13917 jdynina merged in changes from TRAC ticket 11391
# 07/25/12 #957 dgilling implement edit areas as args to calc methods.
#
# 10/5/12 15158 ryu add Forecaster.getDb()
#
##
import string, sys, re, time, types, getopt, fnmatch, LogStream, DatabaseID, JUtil, AbsTime, TimeRange
@ -581,12 +581,17 @@ class Forecaster(GridUtilities):
+ ' #Grids=' + `numGrids`
self._client.sendUserMessage(msg, "SMARTINIT")
#--------------------------------------------------------------------------
# Returns the IFPDB object for the given db
#--------------------------------------------------------------------------
def getDb(self, dbString):
from com.raytheon.edex.plugin.gfe.smartinit import IFPDB
return IFPDB(dbString)
#--------------------------------------------------------------------------
# Returns the source and destination databases, given the srcName.
#--------------------------------------------------------------------------
def _getLatest(self, client, srcNames, fcstName=None):
from com.raytheon.edex.plugin.gfe.smartinit import IFPDB
# ryu: Added/modified code to allow multiple sources. The srcdb is
# now an MDB. This is needed for (AK)NAM40 init, which sources
# from both NAM40 and NAM20.
@ -600,7 +605,7 @@ class Forecaster(GridUtilities):
for src in srcNames:
# source model at same model time
fullDBName = self.__dbName.replace(modelName, src)
db = IFPDB(fullDBName)
db = self.getDb(fullDBName)
if db.getKeys().size() == 0:
LogStream.logEvent("Source database " + fullDBName + \
" is empty.")
@ -633,7 +638,7 @@ class Forecaster(GridUtilities):
else:
client.createDB(newdb)
newdb = IFPDB(newdb)
newdb = self.getDb(newdb)
return srcdb, newdb

View file

@ -76,7 +76,7 @@ class TextProduct(GenericHazards.TextProduct):
Definition["language"] = "english"
Definition["lineLength"] = 66 #Maximum line length
Definition["purgeTime"] = 24 # Default Expiration in hours if
Definition["purgeTime"] = 8 # Default Expiration in hours if
Definition["includeCities"] = 0 # Cities not included in area header
Definition["cityDescriptor"] = "INCLUDING THE CITIES OF"
Definition["includeZoneNames"] = 1 # Zone names will be included in the area header

View file

@ -86,6 +86,8 @@ import com.raytheon.uf.edex.wmo.message.AFOSProductId;
* 28Jul2010 2187 cjeanbap Fixed class exception in cccnnnxxxReadVersion.
* 05Oct2010 cjeanbap Fixed a bug introduced on #2187; return distinct rows.
* 23May2012 14952 rferrel Added cccnnnxxxByRefTime.
* 03Oct2012 15244 mgamazaychikov Added the fix to query the appropriate table
* (operational or practice)
* </pre>
*
* @author garmendariz
@ -133,7 +135,7 @@ public class StdTextProductDao extends CoreDao {
private static final String TM_QUERY_FMT = "select refTime from table_name where cccid='%s' and nnnid='%s' and xxxid='%s';";
private static final String AFOS_QUERY_STMT = "from StdTextProduct prod where "
private static final String AFOS_QUERY_STMT = "from StdTextProduct where "
+ ProdCCC_ID
+ " = :"
+ CCC_ID
@ -309,7 +311,15 @@ public class StdTextProductDao extends CoreDao {
}
tx = session.beginTransaction();
Query query = session.createQuery(AFOS_QUERY_STMT);
/*
* DR15244 - Make sure that the query is performed on the appropriate
* table based on what StdTextProduct is requested (ultimately on CAVE mode)
*/
Matcher m = Pattern.compile("StdTextProduct").matcher(AFOS_QUERY_STMT);
String tableName = stdTextProduct.getClass().getSimpleName();
String tableQuery = m.replaceAll(tableName);
Query query = session.createQuery(tableQuery);
if (version >= 0) {
query.setMaxResults(version + 1);

View file

@ -19,7 +19,9 @@ package com.raytheon.uf.edex.dat.utils;
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
@ -68,6 +70,9 @@ public class FreezingLevel {
private int eighteenCount = 0;
// reference time
Calendar refTime = null;
public FreezingLevel(String modelName) {
this.modelName = modelName;
@ -76,14 +81,18 @@ public class FreezingLevel {
sixCount = 0;
twelveCount = 0;
eighteenCount = 0;
refTime = Calendar.getInstance();
// only get data for hour 00z, 06z, 12z, or 18z
int adjustedHour = (refTime.get(Calendar.HOUR_OF_DAY) / 6) * 6;
refTime.set(Calendar.HOUR_OF_DAY, adjustedHour);
// populates what ever is missing, sets prevalent forecast hour
for (Entry<String, Integer> entry : getGHLevelMap().entrySet()) {
populateRecord(modelName, entry.getKey());
populateRecord(modelName, entry.getKey(), refTime.getTime());
}
for (Entry<String, Integer> entry : getTLevelMap().entrySet()) {
populateRecord(modelName, entry.getKey());
populateRecord(modelName, entry.getKey(), refTime.getTime());
}
}
@ -122,40 +131,41 @@ public class FreezingLevel {
Integer jtopLevel = null;
Integer ktopLevel = null;
System.out
.println("********** Starting Freezing Level Calculations *****************");
for (Integer level : ghValues.keySet()) {
System.out
.println("********** Starting Freezing Level Calculations *****************");
for (Integer level : ghValues.keySet()) {
Double tValue = tValues.get(level);
Double ghValue = ghValues.get(level);
System.out.println("GH Value: "+ghValue+" TValue: "+tValue);
Double tValue = tValues.get(level);
Double ghValue = ghValues.get(level);
System.out.println("GH Value: " + ghValue + " TValue: "
+ tValue);
if (ghValue != null && ghValue > -9000) {
if (tValue != null && tValue > 273.16) {
if (ghValue != null && ghValue > -9000) {
if (tValue != null && tValue > 273.16) {
fLevel = (ghValues.get(ktopLevel) - ((ghValues
.get(ktopLevel) - ghValue) * ((273.16 - tValues
.get(jtopLevel)
/ (tValue - tValues.get(jtopLevel)))))) * .00328;
System.out.println("Formula:");
System.out.println("(" + ghValues.get(ktopLevel)
+ " - ((" + ghValues.get(ktopLevel) + " - "
+ ghValue + ") * ((273.16 - "
+ tValues.get(jtopLevel) + " / (" + tValue
+ " - " + tValues.get(jtopLevel)
+ "))))) * .00328)");
System.out.println("*** FreezingLevel = " + fLevel);
freezingMap.put(coor, fLevel.floatValue());
break;
} else {
jtopLevel = level;
ktopLevel = level;
}
}
}
System.out
.println("********** Finished Freezing Level Calculations *****************");
}
fLevel = (ghValues.get(ktopLevel) - ((ghValues
.get(ktopLevel) - ghValue) * ((273.16 - tValues
.get(jtopLevel)) / (tValue - tValues
.get(jtopLevel))))) * .00328;
System.out.println("Formula:");
System.out.println("(" + ghValues.get(ktopLevel)
+ " - ((" + ghValues.get(ktopLevel) + " - "
+ ghValue + ") * ((273.16 - "
+ tValues.get(jtopLevel) + ") / (" + tValue
+ " - " + tValues.get(jtopLevel)
+ ")))) * .00328");
System.out.println("*** FreezingLevel = " + fLevel);
freezingMap.put(coor, fLevel.floatValue());
break;
} else {
jtopLevel = level;
ktopLevel = level;
}
}
}
System.out
.println("********** Finished Freezing Level Calculations *****************");
}
return freezingMap;
}
@ -235,12 +245,13 @@ public class FreezingLevel {
* @param param
* @return
*/
private GribRecord populateRecord(String model, String param) {
private GribRecord populateRecord(String model, String param, Date refTime) {
int interval = 1440;
SCANModelParameterXML paramXML = new SCANModelParameterXML();
paramXML.setModelName(model);
paramXML.setParameterName(param);
String sql = getSQL(interval, model, param);
String sql = getSQL(interval, model, param, refTime);
GribRecord modelRec = DATUtils.getMostRecentGribRecord(interval, sql,
paramXML);
@ -327,9 +338,12 @@ public class FreezingLevel {
*
* @return
*/
private String getSQL(int interval, String model, String param) {
private String getSQL(int interval, String model, String param, Date refTime) {
String paramName = null;
String level = null;
SimpleDateFormat sdt = new SimpleDateFormat("yyyy-MM-dd HH:00:00");
String refTimeStr = sdt.format(refTime);
if (param.startsWith("GH")) {
paramName = "GH";
level = param.substring(2, param.length());
@ -346,7 +360,9 @@ public class FreezingLevel {
+ "\' and level_id = (select id from level where masterlevel_name = 'MB' and levelonevalue = '"
+ level
+ "\'"
+ " limit 1)) order by forecasttime desc limit 1";
+ " limit 1)) and reftime='"
+ refTimeStr
+ "' order by reftime desc, forecasttime desc limit 1";
return sql;
}
}

View file

@ -6,7 +6,6 @@
<bean id="dqcPreprocService" class="com.raytheon.uf.edex.ohd.pproc.DqcPreProcSrv" />
<bean id="mpeRucFreezingLevel" class="com.raytheon.uf.edex.ohd.pproc.MpeRUCFreezingLevel" />
<camelContext id="dqcPreproc-context"
xmlns="http://camel.apache.org/schema/spring"
@ -29,14 +28,6 @@
<to uri="log:dqcPreprocessor?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
<doTry>
<bean ref="mpeRucFreezingLevel" method="processMpeRuc" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:dqcPreprocessor?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>

View file

@ -0,0 +1,35 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:amq="http://activemq.apache.org/schema/core" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
http://activemq.apache.org/schema/core http://activemq.apache.org/schema/core/activemq-core.xsd
http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="mpeRucFreezingLevel" class="com.raytheon.uf.edex.ohd.pproc.MpeRUCFreezingLevel" />
<camelContext id="frzLvlProc-context"
xmlns="http://camel.apache.org/schema/spring"
errorHandlerRef="errorHandler">
<endpoint id="frzLvlProcCron" uri="clusteredquartz://pproc/frzLvlProcScheduled/?cron=${freezingLevel.cron}"/>
<!-- Run frzLvlProc on Scheduled timer -->
<route id="frzLvlProcScheduled">
<from uri="frzLvlProcCron" />
<to uri="jms-generic:queue:frzLvlProcScheduledWork" />
</route>
<route id="frzLvlProcScheduledWork">
<from uri="jms-generic:queue:frzLvlProcScheduledWork" />
<doTry>
<bean ref="mpeRucFreezingLevel" method="processMpeRuc" />
<doCatch>
<exception>java.lang.Throwable</exception>
<to uri="log:frzLvlProcessor?level=ERROR&amp;showBody=false&amp;showCaughtException=true&amp;showStackTrace=true"/>
</doCatch>
</doTry>
</route>
</camelContext>
</beans>

View file

@ -33,6 +33,7 @@ import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
@ -61,9 +62,10 @@ public class MpeRUCFreezingLevel {
*
* <pre>
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Nov 19, 2011 dhladky Initial Creation.
* Date Ticket# Engineer Description
* ------------ -------- --------- --------------------------
* Nov 19, 2011 dhladky Initial Creation.
* Oct 09, 2012 15168 wkwock Fix incorrect values.
*
* </pre>
*
@ -80,10 +82,15 @@ public class MpeRUCFreezingLevel {
public static String modelOutputFilePath = AppsDefaults.getInstance()
.getToken("mpe_point_freezing_dir");
public static String dqcPreprocessorBasetime = AppsDefaults.getInstance()
.getToken("DQC_PREPROCESSOR_BASETIME");
public File stationFile = null;
final String RUC2 = "RUC236";
// models used by MPE
public static String[] models = new String[] { "RUC130", "RUC80" };
public static String[] models = new String[] { "RUC236" };
public MpeRUCFreezingLevel() {
File directory = new File(stationFilePath);
@ -97,274 +104,342 @@ public class MpeRUCFreezingLevel {
}
}
}
// correct env variable dqcPreprocessorBasetime if needed
if (dqcPreprocessorBasetime == null) {
dqcPreprocessorBasetime = "00z";
}
dqcPreprocessorBasetime = dqcPreprocessorBasetime.toLowerCase();
if (!dqcPreprocessorBasetime.equals("00z")
&& !dqcPreprocessorBasetime.equals("06z")
&& !dqcPreprocessorBasetime.equals("12z")
&& !dqcPreprocessorBasetime.equals("18z")) {
dqcPreprocessorBasetime = "00z";
}
}
/**
* Read freezing station list from file
* $mpe_station_list_dir/$SITENAME_freezing_station_list
*/
private LinkedHashMap<String, Coordinate> readFrzStnLst() {
if (stationFile == null) {
statusHandler.handle(Priority.ERROR,
"File freezing_station_list not found....");
return null;
}
LinkedHashMap<String, Coordinate> freezingStations = new LinkedHashMap<String, Coordinate>();
FileInputStream ifstream = null;
DataInputStream in = null;
BufferedReader br = null;
try {
ifstream = new FileInputStream(stationFile);
in = new DataInputStream(ifstream);
br = new BufferedReader(new InputStreamReader(in));
String line;
Double lat = null;
Double lon = null;
while (br.ready()) {
line = br.readLine();
if (line != null) {
statusHandler.handle(Priority.INFO, line);
String[] aline = line.split("\\s+");
if (aline != null && aline.length > 0) {
try {
String stationId = aline[0].trim();
lat = Double.valueOf(aline[2].trim());
// take times negative 1 to make it true West
lon = Double.valueOf(aline[3].trim()) * (-1);
if (stationId != null && lat != null && lon != null) {
Coordinate coor = new Coordinate(lon, lat, 0.0);
freezingStations.put(stationId, coor);
}
} catch (Exception e) {
statusHandler.handle(
Priority.INFO,
"finished parsing "
+ stationFile.getAbsolutePath()
+ " \n");
}
}
}
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR, "Unable to read file "
+ stationFile.getAbsolutePath() + " \n");
e.printStackTrace();
return null;
} finally {
if (br != null) {
try {
br.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (ifstream != null) {
try {
ifstream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return freezingStations;
}
private void writeResult(
LinkedHashMap<String, Coordinate> freezingStations,
HashMap<String, HashMap<Integer, FreezingLevelXML>> freezingTimeMap) {
// Now write to the result file freezing_1_SITENAME_point_yyyymmdd
// compare first forecast hour list against all others
FileOutputStream ofstream = null;
DataOutputStream out = null;
BufferedWriter bw = null;
Calendar dates[] = getSortedDates(dqcPreprocessorBasetime);// get the
// dates and
// hours in
// order
try {
String site = PropertiesFactory.getInstance().getEnvProperties()
.getEnvValue("SITENAME");
ofstream = new FileOutputStream(getAbsoluteOutFileName(
dates[3].getTime(), site));
out = new DataOutputStream(ofstream);
bw = new BufferedWriter(new OutputStreamWriter(out));
String dhStr = "DH" + dqcPreprocessorBasetime.substring(0, 2);
for (Entry<String, Coordinate> entry : freezingStations.entrySet()) {
Coordinate coor = entry.getValue();
StringBuffer buf = new StringBuffer();
// ".E Z$stn $otdate1 DH18/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"
buf.append(".E " + entry.getKey() + " "
+ getFormattedDate(dates[3].getTime()) + " " + dhStr
+ "/HZIRZ/DIH+6/");
int i = 0;
for (int j = 0; j < dates.length; j++) {
FreezingLevelEntry fle = null;
// Does a preference for the first model defined. RUC130
// has higher resolution so it gets preference
if (fle == null) {
if (freezingTimeMap.containsKey(RUC2)) {
HashMap<Integer, FreezingLevelXML> modelFl = freezingTimeMap
.get(RUC2);
if (modelFl.containsKey(dates[j]
.get(Calendar.HOUR_OF_DAY))) {
FreezingLevelXML flx = modelFl.get(dates[j]
.get(Calendar.HOUR_OF_DAY));
if (flx != null && flx.getDate() != null) {
// same expected year,month,day,and hour
if (Math.floor(flx.getDate().getTime() / 1000 / 60 / 60) == Math
.floor(dates[j].getTimeInMillis() / 1000 / 60 / 60)) {
fle = flx.getEntry(coor);
}
}
}
}
}
String fzlev = "M";
if (fle != null) {
fzlev = String.valueOf(fle.getFreezingLevel())
.substring(0, 4) + "S";
}
buf.append(" " + fzlev);
if (i < 3) {
buf.append("/");
}
i++;
}
statusHandler.handle(Priority.INFO, buf.toString());
bw.write(buf.toString() + "\n");
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Processing Level SHEF output failed...");
e.printStackTrace();
} finally {
if (bw != null) {
try {
bw.flush();
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (out != null) {
try {
out.flush();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (ofstream != null) {
try {
ofstream.flush();
ofstream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
/**
* Process the MPE freezing Levels
*/
public void processMpeRuc() {
Integer forecastHour = null;
HashMap<String, HashMap<Integer, FreezingLevelXML>> freezingTimeMap = new HashMap<String, HashMap<Integer, FreezingLevelXML>>();
LinkedHashMap<String, Coordinate> freezingStations = new LinkedHashMap<String, Coordinate>();
if (stationFile != null) {
HashMap<String, FreezingLevel> FreezeLevelMap = new HashMap<String, FreezingLevel>();
for (String model : models) {
FreezingLevel fl = new FreezingLevel(model);
if (fl != null) {
FreezeLevelMap.put(model, fl);
}
}
Integer forecastHour = null;
Date outputDate = null;
HashMap<String, HashMap<Integer, FreezingLevelXML>> freezingTimeMap = new HashMap<String, HashMap<Integer, FreezingLevelXML>>();
LinkedHashMap<String, Coordinate> freezingStations = new LinkedHashMap<String, Coordinate>();
FileInputStream ifstream = null;
DataInputStream in = null;
BufferedReader br = null;
try {
ifstream = new FileInputStream(stationFile);
in = new DataInputStream(ifstream);
br = new BufferedReader(new InputStreamReader(in));
String line;
Double lat = null;
Double lon = null;
while (br.ready()) {
line = br.readLine();
if (line != null) {
statusHandler.handle(Priority.INFO, line);
String[] aline = line.split("\\s+");
if (aline != null && aline.length > 0) {
try {
String stationId = aline[0].trim();
lat = Double.valueOf(aline[2].trim());
// take times negative 1 to make it true West
lon = Double.valueOf(aline[3].trim()) * (-1);
if (stationId != null && lat != null
&& lon != null) {
Coordinate coor = new Coordinate(lon, lat,
0.0);
freezingStations.put(stationId, coor);
}
} catch (Exception e) {
statusHandler.handle(
Priority.INFO,
"finished parsing "
+ stationFile.getAbsolutePath()
+ " \n");
}
}
}
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR, "Unable to read file "
+ stationFile.getAbsolutePath() + " \n");
e.printStackTrace();
} finally {
if (br != null) {
try {
br.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (ifstream != null) {
try {
ifstream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
HashMap<Coordinate, Float> freezingLevels = null;
for (String model : models) {
if (FreezeLevelMap.containsKey(model)) {
FreezingLevel fl = FreezeLevelMap.get(model);
ArrayList<FreezingLevelEntry> freezes = null;
if (freezingLevels == null) {
freezingLevels = fl
.getFreezingLevel(getCoordinates(freezingStations));
freezes = new ArrayList<FreezingLevelEntry>();
for (Entry<Coordinate, Float> entry : freezingLevels
.entrySet()) {
FreezingLevelEntry fle = new FreezingLevelEntry(
entry.getKey(), entry.getValue());
freezes.add(fle);
}
}
FreezingLevelXML flx = new FreezingLevelXML(freezes);
if (forecastHour == null && outputDate == null) {
forecastHour = getConvertedForecastHour(fl);
outputDate = fl.getReferenceTime(fl
.getForecastHour());
}
flx.setForecastHour(forecastHour);
HashMap<Integer, FreezingLevelXML> modelFl = new HashMap<Integer, FreezingLevelXML>();
modelFl.put(forecastHour, flx);
freezingTimeMap.put(model, modelFl);
writeFreezingLevelTemp(flx, model);
}
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Processing Freezing Level failed...");
e.printStackTrace();
}
FileOutputStream ofstream = null;
DataOutputStream out = null;
BufferedWriter bw = null;
try {
// Get other hour files read in
for (String model : models) {
HashMap<Integer, FreezingLevelXML> modelFl = freezingTimeMap
.get(model);
if (modelFl != null) {
for (RUC_TIME time : RUC_TIME.values()) {
int fileTime = Integer.valueOf(time.getTime());
if (!modelFl.containsKey(fileTime)) {
FreezingLevelXML flx1 = readFreezingLevel(
fileTime, model);
if (flx1 != null) {
modelFl.put(flx1.getForecastHour(), flx1);
}
}
}
freezingTimeMap.put(model, modelFl);
}
}
// compare first forecast hour list against all others
String site = PropertiesFactory.getInstance()
.getEnvProperties().getEnvValue("SITENAME");
ArrayList<Integer> orderedHours = getOrderedHours(forecastHour);
ofstream = new FileOutputStream(getAbsoluteOutFileName(
outputDate, site));
out = new DataOutputStream(ofstream);
bw = new BufferedWriter(new OutputStreamWriter(out));
for (Entry<String, Coordinate> entry : freezingStations
.entrySet()) {
Coordinate coor = entry.getValue();
StringBuffer buf = new StringBuffer();
// ".E Z$stn $otdate1 DH18/HZIRZ/DIH+6/ $v0/ $v1/ $v2/ $v3\n"
buf.append(".E " + entry.getKey() + " "
+ getFormattedDate(outputDate)
+ " DH18/HZIRZ/DIH+6/");
int i = 0;
for (Integer hour : orderedHours) {
FreezingLevelEntry fle = null;
// Does a preference for the first model defined. RUC130
// has higher resolution so it gets preference
for (String model : models) {
if (fle == null) {
if (freezingTimeMap.containsKey(model)) {
HashMap<Integer, FreezingLevelXML> modelFl = freezingTimeMap
.get(model);
if (modelFl.containsKey(hour)) {
FreezingLevelXML flx = modelFl
.get(hour);
fle = flx.getEntry(coor);
}
}
}
}
String fzlev = "M";
if (fle != null) {
fzlev = "" + fle.getFreezingLevel();
}
buf.append(" " + fzlev);
if (i < 3) {
buf.append("/");
} else {
buf.append("\n");
}
}
statusHandler.handle(Priority.INFO, buf.toString());
bw.write(buf.toString() + "\n");
}
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Processing Level SHEF output failed...");
e.printStackTrace();
} finally {
if (bw != null) {
try {
bw.flush();
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (out != null) {
try {
out.flush();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (ofstream != null) {
try {
ofstream.flush();
ofstream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
} else {
statusHandler.handle(Priority.ERROR,
"File freezing_station_list not found....");
// get the freezing station list from file
freezingStations = readFrzStnLst();
if (freezingStations == null) {
return;
}
// get data from hdf5 for this run
FreezingLevel fl = new FreezingLevel(RUC2);
// Save data for future use
try {
HashMap<Coordinate, Float> freezingLevels = null;
ArrayList<FreezingLevelEntry> freezes = null;
if (freezingLevels == null) {
freezingLevels = fl
.getFreezingLevel(getCoordinates(freezingStations));
freezes = new ArrayList<FreezingLevelEntry>();
for (Entry<Coordinate, Float> entry : freezingLevels.entrySet()) {
FreezingLevelEntry fle = new FreezingLevelEntry(
entry.getKey(), entry.getValue());
freezes.add(fle);
}
}
FreezingLevelXML flx = new FreezingLevelXML(freezes);
Calendar refTime = Calendar.getInstance();
// only get data for hour 00z, 06z, 12z, or 18z
int adjustedHour = (refTime.get(Calendar.HOUR_OF_DAY) / 6) * 6;
refTime.set(Calendar.HOUR_OF_DAY, adjustedHour);
flx.setDate(refTime.getTime());
flx.setForecastHour(adjustedHour);
HashMap<Integer, FreezingLevelXML> modelFl = new HashMap<Integer, FreezingLevelXML>();
modelFl.put(forecastHour, flx);
freezingTimeMap.put(RUC2, modelFl);
writeFreezingLevelTemp(flx, RUC2);
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Processing Freezing Level failed...");
e.printStackTrace();
}
// get all data from previously saved data from
// mpe/dailyQC/freezing_level/point/RUC236[0,6,12,18]zFreezingLevel.bin
// Get other hour files read in
for (String model : models) {
HashMap<Integer, FreezingLevelXML> modelFl = freezingTimeMap
.get(model);
if (modelFl != null) {
for (RUC_TIME time : RUC_TIME.values()) {
int fileTime = Integer.valueOf(time.getTime());
if (!modelFl.containsKey(fileTime)) {
FreezingLevelXML flx1 = readFreezingLevel(fileTime,
model);
if (flx1 != null) {
modelFl.put(flx1.getForecastHour(), flx1);
}
}
}
freezingTimeMap.put(model, modelFl);
}
}
// now write to result file freezing_1_SITENAME_point_yyyymmdd
writeResult(freezingStations, freezingTimeMap);
}
/**
* setup dates
*/
private Calendar[] getSortedDates(String dqcPreprocessorBasetime) {
Calendar dates[] = new Calendar[4];
Calendar cdate = Calendar.getInstance();// start date
Calendar tdate = Calendar.getInstance();// end date
int currentHour = cdate.get(Calendar.HOUR_OF_DAY);
int dqcHour = Integer.parseInt(dqcPreprocessorBasetime.substring(0, 2));
if (currentHour < dqcHour && dqcHour != 0) {
cdate.add(Calendar.DATE, -1);
} else if (dqcHour != 0) {
tdate.add(Calendar.DATE, 1);
}
Calendar startDate = tdate;
if (dqcPreprocessorBasetime.equalsIgnoreCase("00z")) {
startDate = (Calendar) tdate.clone();
} else {
startDate = (Calendar) cdate.clone();
}
startDate.set(Calendar.HOUR_OF_DAY, dqcHour);
for (int i = 0; i < dates.length; i++) {
dates[i] = (Calendar) startDate.clone();
startDate.add(Calendar.HOUR_OF_DAY, 6);
}
return dates;
}
/**
@ -468,41 +543,6 @@ public class MpeRUCFreezingLevel {
}
};
/**
* Gets the ordering for the SHEF output
*
* @param forecastHour
* @return
*/
private ArrayList<Integer> getOrderedHours(int forecastHour) {
ArrayList<Integer> orderedHours = new ArrayList<Integer>();
if (forecastHour == 0) {
orderedHours.add(0);
orderedHours.add(6);
orderedHours.add(12);
orderedHours.add(18);
} else if (forecastHour == 6) {
orderedHours.add(6);
orderedHours.add(12);
orderedHours.add(18);
orderedHours.add(0);
} else if (forecastHour == 12) {
orderedHours.add(12);
orderedHours.add(18);
orderedHours.add(0);
orderedHours.add(6);
} else if (forecastHour == 18) {
orderedHours.add(18);
orderedHours.add(0);
orderedHours.add(6);
orderedHours.add(12);
}
return orderedHours;
}
/**
* Formats the output date
*
@ -534,30 +574,4 @@ public class MpeRUCFreezingLevel {
return coors;
}
/**
* Used only by the MPE RUC130 for SHEF output
*
* @param ft
*/
private int getConvertedForecastHour(FreezingLevel fl) {
int flTime = fl.getForecastHour();
int retVal = 0;
if (flTime >= 0 && flTime < 3) {
retVal = 0;
} else if (flTime >= 3 && flTime < 9) {
retVal = 6;
} else if (flTime >= 9 && flTime < 15) {
retVal = 12;
} else if (flTime >= 15 && flTime < 21) {
retVal = 18;
} else if (flTime >= 21) {
retVal = 0;
}
return retVal;
}
}

View file

@ -24,6 +24,7 @@ XML_TEMPLATE = ""
import sys
import os.path
from os import path, access, R_OK
import shutil
class MainData:
@ -92,6 +93,12 @@ fileName = os.path.split(file)[1]
if fileName == "spotters.dat":
workFile = "/tmp/spotters.dat"
if path.exists(workFile) and path.isfile(workFile) and access(workFile, R_OK):
print "Attempting to cleanup work directory"
os.system("rm /tmp/spotters.dat")
else:
print "No preliminary cleanup needed - continuing"
shutil.copy(file, workFile)
os.system("sed -i -e 's/spotterName/spottersName/g' /tmp/spotters.dat")
os.system("sed -i -e 's/spotterAddr/spottersAddr/g' /tmp/spotters.dat")

View file

@ -8,6 +8,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 04/06/2012 10388 mhuang Initial creation
# 10/09/12 DR 13901 D. Friedman Add logging
##############################################################################
# this allows you to run this script from outside of ./bin
@ -27,5 +28,10 @@ export PYTHONPATH=${RUN_FROM_DIR}/src:$PYTHONPATH
_PYTHON="${PYTHON_INSTALL}/bin/python"
_MODULE="${RUN_FROM_DIR}/src/qpidNotify/mhsAckNotify.py"
log_file=${LOG_DIR:-/data/logs/fxa}/$(date -u +%Y%m%d)/mhsAckNotify
if touch "$log_file"; then
exec >& "$log_file"
fi
# quoting of '$@' is used to prevent command line interpretation
$_PYTHON $_MODULE "$@"

View file

@ -30,6 +30,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/19/11 8804 mhuang Initial creation
# 10/09/12 DR 13901 D. Friedman Add logging
##############################################################################
# this allows you to run this script from outside of ./bin
@ -49,5 +50,10 @@ export PYTHONPATH=${RUN_FROM_DIR}/src:$PYTHONPATH
_PYTHON="${PYTHON_INSTALL}/bin/python"
_MODULE="${RUN_FROM_DIR}/src/qpidNotify/qpidNotify.py"
log_file=${LOG_DIR:-/data/logs/fxa}/$(date -u +%Y%m%d)/qpidNotify
if touch "$log_file"; then
exec >& "$log_file"
fi
# quoting of '$@' is used to prevent command line interpretation
$_PYTHON $_MODULE "$@"

View file

@ -25,8 +25,12 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 10/28/08 1585 MW Fegan Initial Creation.
# 10/09/12 DR 13901 D. Friedman Add doWithinTime
##############################################################################
import sys
import Queue
import threading
import traceback
import types
import InputOutput as IO
@ -104,3 +108,31 @@ def convListToDict(list):
temp.append(val)
retVal.append(temp)
return dict(retVal)
def doWithinTime(target_function, description='complete the operation',
max_tries = 3, max_try_time = 10.0, args=(), kwargs={}):
q = Queue.Queue()
def threadFunc(q, target_function, args, kwargs):
try:
r = (True, target_function(*args, **kwargs))
except:
traceback.print_exc()
r = (False, sys.exc_info()[1])
q.put(r)
exc = None
for i in range(0, max_tries):
t = threading.Thread(target=threadFunc, args=(q, target_function, args, kwargs))
t.daemon = True
t.start()
try:
r, val = q.get(True, max_try_time)
if r:
return val
else:
exc = val
break
except Queue.Empty, e:
continue
reason = exc is None and " within the expected time" or (": " + str(exc))
raise StandardError("Failed to %s%s" % (description, reason))

View file

@ -6,6 +6,7 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 04/06/2012 10388 D. Friedman Initial version
# 10/09/12 DR 13901 D. Friedman Limit execution time
##############################################################################
import getopt
@ -13,6 +14,8 @@ import os
import os.path
import sys
from lib.Util import doWithinTime
import qpid
from qpid.util import connect
from qpid.connection import Connection
@ -32,6 +35,26 @@ def remove_file(*parts):
if os.path.exists(path):
os.remove(path)
def get_qpid_connection(broker_addr):
try:
socket = connect(broker_addr, 5672)
connection = Connection (sock=socket)
connection.start()
return connection
except:
sys.stderr.write("mhsAckNotify: connect to %s: %s\n" % (broker_addr, sys.exc_info()[1],))
return None
def send_message(connection, notif):
session = connection.session(str(uuid4()))
props = session.delivery_properties(routing_key=TOPIC_NAME)
head = session.message_properties(application_headers={'sender':notif.sender,
'response':notif.response})
session.message_transfer(destination=DESTINATION, message=Message(props, head, notif.messageId))
session.close(timeout=10)
connection.close()
def run():
mhs_data_dir = os.getenv('MHS_DATA', '/data/fxa/mhs')
notif = MhsAckNotification()
@ -60,32 +83,19 @@ def run():
except:
sys.stderr.write("mhsAckNotify: error removing MHS file: %s\n" % (sys.exc_info()[1],))
try:
# TODO: Should have BROKER_ADDR in CLI setup.env.
broker_addr = os.getenv('BROKER_ADDR')
if broker_addr is None:
broker_addr = os.getenv('DEFAULT_HOST')
if broker_addr == 'ec':
broker_addr = 'cp1f'
if broker_addr is None:
broker_addr = 'localhost'
# TODO: Should have BROKER_ADDR in CLI setup.env.
broker_addr = os.getenv('BROKER_ADDR')
if broker_addr is None:
broker_addr = os.getenv('DEFAULT_HOST')
if broker_addr == 'ec':
broker_addr = 'cp1f'
if broker_addr is None:
broker_addr = 'localhost'
socket = connect(broker_addr, 5672)
except:
sys.stderr.write("mhsAckNotify: connect to %s: %s\n" % (broker_addr, sys.exc_info()[1],))
return 1
try:
connection = Connection (sock=socket)
connection.start()
session = connection.session(str(uuid4()))
props = session.delivery_properties(routing_key=TOPIC_NAME)
head = session.message_properties(application_headers={'sender':notif.sender,
'response':notif.response})
session.message_transfer(destination=DESTINATION, message=Message(props, head, notif.messageId))
session.close(timeout=10)
connection.close()
connection = doWithinTime(get_qpid_connection, args=(broker_addr,))
if connection:
doWithinTime(send_message, max_tries=1, args=(connection, notif))
except:
sys.stderr.write("mhsAckNotify: error sending message: %s\n" % (sys.exc_info()[1],))
return 1

View file

@ -30,21 +30,21 @@
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/19/11 8804 MHuang Initial creation
# 10/09/12 DR 13901 D. Friedman Limit execution time
##############################################################################
from ufpy import qpidingest
from lib.Util import doWithinTime
import os
import os.path
import sys
import traceback
class mhsFileIngest:
def __init__(self):
print "Initializing mhsFileIngest object"
def qpidNotify(*args):
def startConnection(self):
#Find current QPID running hostname
server=os.getenv('DEFAULT_HOST')
# print "Current EDEX server:", server
#Make connection to QPID
try:
@ -52,6 +52,11 @@ class mhsFileIngest:
except:
print "Cannot connect qpid server:", server
sys.exit(1)
self.conn = cnn
def qpidNotify(self):
cnn = self.conn
#Get uplink files
size=len(sys.argv) - 1
@ -85,14 +90,24 @@ class mhsFileIngest:
cnn.close()
if fileCount == size:
print "Successfully sent", fileCount, "file(s) to EDEX via qpidingest"
sys.exit(0)
return 0
elif errCount == size:
print "Failed to send", fileCount, "file(s) to EDEX via qpidingest"
sys.exit(1)
return 1
elif errCount > 0 and fileCount < size:
print errcount, "out of", size, "failed to be sent to EDEX via qpidingest"
sys.exit(2)
return 2
def run():
try:
m = mhsFileIngest()
doWithinTime(m.startConnection, description='connect to qpid')
exit_code = doWithinTime(m.qpidNotify, description='send messages', max_tries=1)
except:
traceback.print_exc()
sys.exit(1)
else:
sys.exit(exit_code)
if __name__ == '__main__':
q = mhsFileIngest()
q.qpidNotify()
run()