Merge branch 'development' of ssh://lightning.omaha.us.ray.com:29418/AWIPS2_baseline into development
Former-commit-id: 615042859f [formerly e38fc1e5c9 [formerly 16bfc9f7e68cc45b44867fc41cf62cd12f323390]]
Former-commit-id: e38fc1e5c9
Former-commit-id: a2d9207ebc
commit cf9207bf91
9 changed files with 143 additions and 62 deletions
@@ -35,6 +35,7 @@ import com.raytheon.viz.ui.tools.AbstractTool;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 8, 2009             bgonzale    Initial creation
 * Oct 15, 2012 1229       rferrel     Changes for non-blocking DisplayPropertiesDialog.
 *
 * </pre>
 *

@@ -57,13 +58,12 @@ public class DisplayPropertiesAction extends AbstractTool {
    public Object execute(ExecutionEvent arg0) throws ExecutionException {
        super.execute(arg0);

        if (dialog == null) {
        if (dialog == null || dialog.getShell() == null || dialog.isDisposed()) {
            dialog = new DisplayPropertiesDialog(VizWorkbenchManager
                    .getInstance().getCurrentWindow().getShell());
            dialog.open();
            dialog = null;
        } else {
            dialog.open();
            dialog.bringToTop();
        }

        return null;
@@ -54,11 +54,9 @@ import com.raytheon.uf.viz.core.rsc.IResourceGroup;
import com.raytheon.uf.viz.core.rsc.ResourceList;
import com.raytheon.uf.viz.core.rsc.capabilities.AbstractCapability;
import com.raytheon.uf.viz.core.rsc.capabilities.OutlineCapability;
import com.raytheon.uf.viz.core.status.StatusConstants;
import com.raytheon.uf.viz.d2d.core.map.MapScales;
import com.raytheon.uf.viz.d2d.core.map.MapScales.MapScale;
import com.raytheon.uf.viz.d2d.core.time.LoadMode;
import com.raytheon.uf.viz.d2d.ui.Activator;
import com.raytheon.uf.viz.d2d.ui.DensityPopulator;
import com.raytheon.uf.viz.d2d.ui.MagnificationPopulator;
import com.raytheon.uf.viz.d2d.ui.actions.DensityHandler;

@@ -78,6 +76,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 8, 2009             bgonzale    Initial creation
 * Oct 16, 2012 1229       rferrel     Made dialog non-blocking.
 *
 * </pre>
 *

@@ -86,7 +85,8 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
 */

public class DisplayPropertiesDialog extends CaveSWTDialog {
    private static final transient IUFStatusHandler statusHandler = UFStatus.getHandler(DisplayPropertiesDialog.class);
    private final transient IUFStatusHandler statusHandler = UFStatus
            .getHandler(DisplayPropertiesDialog.class);

    private Combo scale;

@@ -171,7 +171,8 @@ public class DisplayPropertiesDialog extends CaveSWTDialog {
     * @param editor
     */
    public DisplayPropertiesDialog(Shell parentShell) {
        super(parentShell, SWT.DIALOG_TRIM | SWT.MIN, CAVE.INDEPENDENT_SHELL);
        super(parentShell, SWT.DIALOG_TRIM | SWT.MIN, CAVE.INDEPENDENT_SHELL
                | CAVE.DO_NOT_BLOCK);
        setText("Display Properties");

        this.sHandler = new ScaleHandler();
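The two files above follow the usual non-blocking dialog reuse pattern: the dialog is opened with CAVE.DO_NOT_BLOCK, the handler keeps the reference, and a later invocation either rebuilds the dialog (if its shell was disposed) or simply raises it. A minimal sketch of the same open-or-raise idea in plain SWT rather than the CAVE dialog classes; the class and field names here are illustrative only:

import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;

public class NonBlockingDialogSketch {

    // kept between invocations, like the handler's "dialog" field above
    private Shell dialogShell;

    /** Called each time the action runs. */
    public void execute(Shell parent) {
        if (dialogShell == null || dialogShell.isDisposed()) {
            // first use, or the user closed it: build a fresh shell
            dialogShell = new Shell(parent, SWT.DIALOG_TRIM | SWT.MIN);
            dialogShell.setText("Display Properties");
            dialogShell.setSize(300, 200);
            dialogShell.open(); // returns immediately; nothing blocks here
        } else {
            // already open: raise it instead of creating a second copy
            dialogShell.setActive();
        }
    }

    public static void main(String[] args) {
        Display display = new Display();
        Shell parent = new Shell(display);
        parent.open();

        NonBlockingDialogSketch sketch = new NonBlockingDialogSketch();
        sketch.execute(parent); // opens the dialog
        sketch.execute(parent); // second call only brings it to the front

        while (!parent.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
        display.dispose();
    }
}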
@@ -28,12 +28,32 @@ import org.eclipse.ui.PlatformUI;
import com.raytheon.uf.viz.d2d.ui.dialogs.CreateProjectionDialog;
import com.raytheon.viz.ui.tools.AbstractTool;

/**
 * Handler class for controlling the Create Projection dialog.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 *                                     Initial creation
 * Oct 16, 2012 1229       rferrel     Made dialog non-blocking.
 *
 * </pre>
 *
 * @author rferrel
 * @version 1.0
 */
public class CreateProjectionHandler extends AbstractTool implements IHandler {
    private CreateProjectionDialog dlg;

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.viz.ui.tools.AbstractTool#execute(org.eclipse.core.commands.ExecutionEvent)
     * @see
     * com.raytheon.viz.ui.tools.AbstractTool#execute(org.eclipse.core.commands
     * .ExecutionEvent)
     */
    @Override
    public Object execute(ExecutionEvent arg0) throws ExecutionException {

@@ -42,9 +62,13 @@ public class CreateProjectionHandler extends AbstractTool implements IHandler {
        Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
                .getShell();

        CreateProjectionDialog dlg = new CreateProjectionDialog(shell);

        dlg.open();
        if (dlg == null || dlg.getShell() == null || dlg.isDisposed()) {
            dlg = new CreateProjectionDialog(shell);
            dlg.setBlockOnOpen(false);
            dlg.open();
        } else {
            dlg.bringToTop();
        }

        return null;
    }
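CreateProjectionDialog is a JFace-style dialog, so the same reuse check is made against its Shell and open() is made non-blocking with setBlockOnOpen(false). A comparable sketch against stock JFace, where a MessageDialog stands in for CreateProjectionDialog and the shell is raised directly (bringToTop() is a CAVE convenience, not a JFace method):

import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.widgets.Shell;

public class ReusableJFaceDialogSketch {

    // held between executions, mirroring the handler's "dlg" field
    private MessageDialog dlg;

    public void execute(Shell shell) {
        if (dlg == null || dlg.getShell() == null || dlg.getShell().isDisposed()) {
            // (re)create the dialog when it has never been opened or was closed
            dlg = new MessageDialog(shell, "Create Projection", null,
                    "Placeholder body", MessageDialog.INFORMATION,
                    new String[] { "OK" }, 0);
            dlg.setBlockOnOpen(false); // open() now returns immediately
            dlg.open();
        } else {
            // already showing: raise it rather than opening a duplicate
            dlg.getShell().setActive();
        }
    }
}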
@@ -173,14 +173,15 @@ public class D2DGridDatabase extends VGridDatabase {
                            - subdomain.height;

            if (subdomain.isEmpty()) {
                valid = false;
                throw new GfeException("Unable to create " + this.dbId
                        + ". GFE domain does not overlap dataset domain.");
                statusHandler.warn(this.dbId
                        + ": GFE domain does not overlap dataset domain.");
                this.remap = null;
            } else {
                this.remap = new RemapGrid(NetCDFUtils.subGridGL(
                        dbId.toString(), this.inputLoc, subdomain),
                        this.outputLoc);
            }

            this.remap = new RemapGrid(NetCDFUtils.subGridGL(dbId.toString(),
                    this.inputLoc, subdomain), this.outputLoc);

        }
    }

@@ -475,7 +476,14 @@ public class D2DGridDatabase extends VGridDatabase {

        switch (gpi.getGridType()) {
        case SCALAR:
            Grid2DFloat data = getGrid(parmId, time, gpi, convertUnit);
            Grid2DFloat data = null;
            if (this.remap == null) {
                // GFE domain does not overlap D2D grid, return default grid
                data = new Grid2DFloat(gpi.getGridLoc().getNx(), gpi
                        .getGridLoc().getNy(), gpi.getMinValue());
            } else {
                data = getGrid(parmId, time, gpi, convertUnit);
            }
            gs = new ScalarGridSlice(time, gpi, gdh, data);
            break;
        case VECTOR:

@@ -483,7 +491,14 @@ public class D2DGridDatabase extends VGridDatabase {
                    .getGridLoc().getNy());
            Grid2DFloat dir = new Grid2DFloat(gpi.getGridLoc().getNx(), gpi
                    .getGridLoc().getNy());
            getWindGrid(parmId, time, gpi, mag, dir);

            if (this.remap == null) {
                // GFE domain does not overlap D2D grid, return default grid
                mag.setAllValues(gpi.getMinValue());
                dir.setAllValues(0.0f);
            } else {
                getWindGrid(parmId, time, gpi, mag, dir);
            }
            gs = new VectorGridSlice(time, gpi, gdh, mag, dir);
            break;
        default:

@@ -511,6 +526,7 @@ public class D2DGridDatabase extends VGridDatabase {
     */
    private Grid2DFloat getGrid(ParmID parmId, TimeRange time,
            GridParmInfo gpi, boolean convertUnit) throws GfeException {

        Grid2DFloat bdata = null;
        GribRecord d2dRecord = null;

@@ -628,6 +644,7 @@ public class D2DGridDatabase extends VGridDatabase {
     */
    private void getWindGrid(ParmID parmId, TimeRange time, GridParmInfo gpi,
            Grid2DFloat mag, Grid2DFloat dir) throws GfeException {

        GFEDao dao = null;
        try {
            dao = (GFEDao) PluginFactory.getInstance().getPluginDao("gfe");
@@ -201,14 +201,14 @@ public class NetCDFGridDatabase extends VGridDatabase {
                    this.outputGloc);

            if (this.subdomain.isEmpty()) {
                valid = false;
                throw new GfeException("Unable to create " + this.dbId
                        + ". GFE domain does not overlap dataset domain.");
                statusHandler.warn(this.dbId
                        + ": GFE domain does not overlap dataset domain.");
                this.remap = null;
            } else {
                this.remap = new RemapGrid(NetCDFUtils.subGridGL(
                        this.dbId.toString(), this.inputGloc, this.subdomain),
                        this.outputGloc);
            }

            this.remap = new RemapGrid(NetCDFUtils.subGridGL(
                    this.dbId.toString(), this.inputGloc, this.subdomain),
                    this.outputGloc);
            loadParms();
        }
    }

@@ -584,35 +584,51 @@ public class NetCDFGridDatabase extends VGridDatabase {
        GridDataHistory gdh = new GridDataHistory(OriginType.INITIALIZED,
                p.getPid(), p.getInv().get(index));

        switch (p.getGpi().getGridType()) {
        GridParmInfo gpi = p.getGpi();
        GridLocation gloc = gpi.getGridLoc();

        switch (gpi.getGridType()) {
        case SCALAR: {
            Grid2DFloat data = new Grid2DFloat(getGrid(p.getVarName(),
                    p.getIndices()[index], p.getLevel(), p.getGpi()
                            .getMinValue(), p.getGpi().getMaxValue()));
            Grid2DFloat data = null;
            if (this.remap == null) {
                // GFE domain does not overlap D2D grid, return default grid
                data = new Grid2DFloat(gloc.getNx(), gloc.getNy(),
                        gpi.getMinValue());

            } else {
                data = new Grid2DFloat(getGrid(p.getVarName(),
                        p.getIndices()[index], p.getLevel(), gpi.getMinValue(),
                        gpi.getMaxValue()));
            }
            if (!data.isValid()) {
                return null;
            }
            gs = new ScalarGridSlice(p.getInv().get(index), p.getGpi(),
            gs = new ScalarGridSlice(p.getInv().get(index), gpi,
                    Arrays.asList(gdh), data);
            break;
        }
        case VECTOR: {
            Grid2DFloat mag = new Grid2DFloat(p.getGpi().getGridLoc().getNx(),
                    p.getGpi().getGridLoc().getNy());
            Grid2DFloat dir = new Grid2DFloat(p.getGpi().getGridLoc().getNx(),
                    p.getGpi().getGridLoc().getNy());
            getWindGrid(p.getIndices()[index], p.getLevel(), p.getGpi()
                    .getMinValue(), p.getGpi().getMaxValue(), mag, dir);
            Grid2DFloat mag = new Grid2DFloat(gloc.getNx(), gloc.getNy());
            Grid2DFloat dir = new Grid2DFloat(gloc.getNx(), gloc.getNy());

            if (this.remap == null) {
                // GFE domain does not overlap D2D grid, return default grid
                mag.setAllValues(gpi.getMinValue());
                dir.setAllValues(0.0f);
            } else {
                getWindGrid(p.getIndices()[index], p.getLevel(),
                        gpi.getMinValue(), gpi.getMaxValue(), mag, dir);
            }
            if (!mag.isValid() || !dir.isValid()) {
                return null;
            }
            gs = new VectorGridSlice(p.getInv().get(index), p.getGpi(),
            gs = new VectorGridSlice(p.getInv().get(index), gpi,
                    Arrays.asList(gdh), mag, dir);
            break;
        }
        default:
            statusHandler.handle(Priority.PROBLEM,
                    "unsupported parm type for: " + p.getGpi());
                    "unsupported parm type for: " + gpi);
        }

        return gs;
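Both grid databases now degrade gracefully when the GFE domain does not intersect the source dataset: construction no longer throws, remap is left null, and slice creation later substitutes a grid filled with the parameter's minimum value (and zero wind direction). A small self-contained sketch of that fallback shape; the class name, the Object stand-in for RemapGrid, and the float[][] return type are illustrative, not the real Grid2DFloat API:

public class NoOverlapFallbackSketch {

    /** Stand-in for the remapper; null means the domains do not overlap. */
    private final Object remap;

    private final int nx;
    private final int ny;
    private final float minValue;

    public NoOverlapFallbackSketch(Object remap, int nx, int ny, float minValue) {
        this.remap = remap;
        this.nx = nx;
        this.ny = ny;
        this.minValue = minValue;
    }

    /** Returns remapped data when possible, otherwise a default-filled grid. */
    public float[][] getScalarGrid() {
        float[][] data = new float[ny][nx];
        if (remap == null) {
            // no overlap: fill with the minimum allowed value, as the hunks above do
            for (float[] row : data) {
                java.util.Arrays.fill(row, minValue);
            }
        } else {
            // overlap exists: this is where the real code remaps the source data
            // into the GFE grid via getGrid(...)/getWindGrid(...).
        }
        return data;
    }

    public static void main(String[] args) {
        NoOverlapFallbackSketch db = new NoOverlapFallbackSketch(null, 4, 3, -30.0f);
        System.out.println(db.getScalarGrid()[0][0]); // prints -30.0
    }
}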
@@ -4,5 +4,9 @@
        <key>modelInfo.modelName</key>
        <order>0</order>
    </pathKey>
    <pathKey>
        <key>modelInfo.level</key>
        <order>1</order>
    </pathKey>
</pathKeySet>
@@ -109,20 +109,22 @@ public class HttpClient {

    private ThreadSafeClientConnManager connManager = null;

    private NetworkStatistics stats = new NetworkStatistics();
    private final NetworkStatistics stats = new NetworkStatistics();

    private boolean gzipRequests = false;

    private boolean handlingGzipResponses = false;

    /** number of requests currently in process by the application per host */
    private Map<String, AtomicInteger> currentRequestsCount = new ConcurrentHashMap<String, AtomicInteger>();
    private final Map<String, AtomicInteger> currentRequestsCount = new ConcurrentHashMap<String, AtomicInteger>();

    private HttpClient() {
        connManager = new ThreadSafeClientConnManager();
        DefaultHttpClient client = new DefaultHttpClient(connManager);

        client.addRequestInterceptor(new HttpRequestInterceptor() {

            @Override
            public void process(final HttpRequest request,
                    final HttpContext context) throws HttpException,
                    IOException {

@@ -138,6 +140,7 @@ public class HttpClient {
        });

        client.addResponseInterceptor(new HttpResponseInterceptor() {
            @Override
            public void process(final HttpResponse response,
                    final HttpContext context) throws HttpException,
                    IOException {

@@ -148,6 +151,8 @@ public class HttpClient {
                }
            }
        });
        HttpConnectionParams.setTcpNoDelay(client.getParams(), true);

        this.client = client;
        previousConnectionFailed = false;
    }

@@ -304,7 +309,7 @@ public class HttpClient {
            exc = e;
        }

        if (errorMsg != null && exc != null) {
        if ((errorMsg != null) && (exc != null)) {
            if (tries > retryCount) {
                previousConnectionFailed = true;
                // close/abort connection

@@ -350,7 +355,7 @@ public class HttpClient {
    private void processResponse(HttpResponse resp,
            IStreamHandler handlerCallback) throws CommunicationException {
        InputStream is = null;
        if (resp != null && resp.getEntity() != null) {
        if ((resp != null) && (resp.getEntity() != null)) {
            try {
                is = resp.getEntity().getContent();
                handlerCallback.handleStream(is);

@@ -668,8 +673,8 @@ public class HttpClient {
                Header ceheader = entity.getContentEncoding();
                if (ceheader != null) {
                    HeaderElement[] codecs = ceheader.getElements();
                    for (int i = 0; i < codecs.length; i++) {
                        if (codecs[i].getName().equalsIgnoreCase("gzip")) {
                    for (HeaderElement codec : codecs) {
                        if (codec.getName().equalsIgnoreCase("gzip")) {
                            response.setEntity(new GzipDecompressingEntity(response
                                    .getEntity()));
                            return;
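The final HttpClient hunk is part of transparent gzip handling: a response interceptor checks the Content-Encoding header and, when gzip is listed, wraps the entity in GzipDecompressingEntity so callers always read plain bytes. A compact sketch of that interceptor as a named class against Apache HttpClient 4.1-era APIs (the diff registers the equivalent logic as an anonymous class in the constructor):

import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.protocol.HttpContext;

/** Unwraps gzip-compressed responses so downstream code sees plain content. */
public class GzipResponseInterceptor implements HttpResponseInterceptor {

    @Override
    public void process(HttpResponse response, HttpContext context) {
        HttpEntity entity = response.getEntity();
        if (entity == null) {
            return;
        }
        Header ceheader = entity.getContentEncoding();
        if (ceheader == null) {
            return;
        }
        // Content-Encoding may list several codings; look for gzip among them
        for (HeaderElement codec : ceheader.getElements()) {
            if (codec.getName().equalsIgnoreCase("gzip")) {
                response.setEntity(new GzipDecompressingEntity(response.getEntity()));
                return;
            }
        }
    }
}

It would be registered the same way the diff does, e.g. client.addResponseInterceptor(new GzipResponseInterceptor()).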
@@ -22,6 +22,7 @@ package gov.noaa.nws.ncep.edex.uengine.tasks.profile;

import java.awt.Point;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -206,13 +207,15 @@ public class PointIn {// extends ScriptTask {
        // arbitrary list of IPersistable could be in any number of data stores
        Map<IDataStore, List<IPersistable>> dataStoreMap = dao
                .getDataStoreMap(objList);
        int rvalIndex = 0;

        int totalRec = 0;

        try {
            // list for data records retrieved
            List<IDataRecord> dataRecords = new ArrayList<IDataRecord>(
                    objects.length);
            // map of IPersistable to its IDataRecord. Since objects not
            // guaranteed to be in file order have to recreate order after
            // retrievals done
            Map<IPersistable, IDataRecord> dataRecords = new HashMap<IPersistable, IDataRecord>(
                    (int) (objects.length * 1.25) + 1);

            for (Map.Entry<IDataStore, List<IPersistable>> entry : dataStoreMap
                    .entrySet()) {

@@ -229,8 +232,13 @@ public class PointIn {// extends ScriptTask {
                // retrieve data from this data store
                IDataRecord[] records = dataStore.retrieveGroups(groups,
                        pointRequest);
                for (IDataRecord rec : records) {
                    dataRecords.add(rec);
                int index = 0;
                for (IPersistable persist : persistList) {
                    if (index < records.length) {
                        dataRecords.put(persist, records[index++]);
                    } else {
                        break;
                    }
                }
            }

@@ -240,16 +248,19 @@ public class PointIn {// extends ScriptTask {
            }

            int recordIndex = 0;
            for (IDataRecord record : dataRecords) {
                float[] data = (float[]) record.getDataObject();
                // note; data.length should be the same as points.size()
                // if(k==0)
                // System.out.println("data[] szie="+data.length+
                // " parameter group size="+dr.length);
                totalRec += data.length;
                for (int pointIndex = 0; pointIndex < data.length; pointIndex++) {
                    float[] pData = rval.get(pointIndex);
                    pData[recordIndex] = data[pointIndex];
            for (IPersistable persist : objList) {
                IDataRecord record = dataRecords.get(persist);
                if (record != null) {
                    float[] data = (float[]) record.getDataObject();
                    // note; data.length should be the same as points.size()
                    // if(k==0)
                    // System.out.println("data[] szie="+data.length+
                    // " parameter group size="+dr.length);
                    totalRec += data.length;
                    for (int pointIndex = 0; pointIndex < data.length; pointIndex++) {
                        float[] pData = rval.get(pointIndex);
                        pData[recordIndex++] = data[pointIndex];
                    }
                }
            }
            System.out.println("total points = " + points.size()
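The PointIn change replaces an append-only List<IDataRecord> with a Map keyed by the requested IPersistable, because records retrieved from several data stores are not guaranteed to come back in request order. A self-contained sketch of that reorder-by-key idea, with plain strings standing in for IPersistable and IDataRecord:

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ReorderByKeySketch {

    public static void main(String[] args) {
        // the caller's request order, which the final output must follow
        List<String> requested = Arrays.asList("obj-A", "obj-B", "obj-C");

        // results arrive grouped by data store, in arbitrary order
        Map<String, String> results = new HashMap<String, String>();
        results.put("obj-C", "record-for-C");
        results.put("obj-A", "record-for-A");
        // obj-B produced no record, mirroring the index < records.length guard

        // rebuild request order by looking each requested object up in the map
        Map<String, String> ordered = new LinkedHashMap<String, String>();
        for (String key : requested) {
            String record = results.get(key);
            if (record != null) {
                ordered.put(key, record);
            }
        }
        System.out.println(ordered); // {obj-A=record-for-A, obj-C=record-for-C}
    }
}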
@@ -662,7 +662,10 @@ class H5pyDataStore(IDataStore.IDataStore):
            grp = f['/']
        else:
            try:
                grp = f[name]
                group=name
                if not group.startswith('/'):
                    group = '/' + group
                grp = f[group]
            except:
                raise StorageException("No group " + name + " found")