Issue #1731 Optimize construction of scan resource.

Change-Id: Id99a154e2f26df44b631995eb448ecfdf2badf9b

Former-commit-id: b0870c55f6 [formerly 86b9a4d8c2] [formerly b0870c55f6 [formerly 86b9a4d8c2] [formerly 9263ff1846 [formerly f97d09f3ee362f0f0b6155e0bb704ffd3611ca1a]]]
Former-commit-id: 9263ff1846
Former-commit-id: 8f4c2f2a39 [formerly 5a149c5dfa]
Former-commit-id: 77aef5f955
This commit is contained in:
Ben Steffensmeier 2013-02-28 14:19:47 -06:00
parent 9acad5a1a4
commit c28a151884
5 changed files with 128 additions and 71 deletions

View file

@ -5,10 +5,11 @@ Bundle-SymbolicName: com.raytheon.uf.viz.cwat;singleton:=true
Bundle-Version: 1.12.1174.qualifier
Bundle-Activator: com.raytheon.uf.viz.cwat.Activator
Bundle-Vendor: RAYTHEON
Eclipse-RegisterBuddy: com.raytheon.edex.common, com.raytheon.uf.common.serialization
Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization
Require-Bundle: org.eclipse.ui,
org.eclipse.core.runtime,
com.raytheon.uf.viz.core,
com.raytheon.uf.common.serialization,
com.raytheon.uf.common.time;bundle-version="1.11.1",
com.raytheon.edex.common;bundle-version="1.11.1",
org.geotools;bundle-version="2.5.2",

View file

@ -2,12 +2,13 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Scan Plug-in
Bundle-SymbolicName: com.raytheon.uf.viz.monitor.scan;singleton:=true
Eclipse-RegisterBuddy: com.raytheon.viz.core, com.raytheon.uf.viz.core, com.raytheon.viz.ui, com.raytheon.edex.common, com.raytheon.uf.common.serialization
Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization
Bundle-Version: 1.12.1174.qualifier
Bundle-Activator: com.raytheon.uf.viz.monitor.scan.Activator
Bundle-Vendor: RAYTHEON
Require-Bundle: org.eclipse.ui,
org.eclipse.core.runtime,
com.raytheon.uf.common.serialization,
com.raytheon.uf.common.monitor;bundle-version="1.0.0",
com.raytheon.viz.core;bundle-version="1.10.13",
com.raytheon.viz.alerts;bundle-version="1.10.13",

View file

@ -22,10 +22,15 @@ package com.raytheon.uf.viz.monitor.scan.resource;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@ -34,13 +39,20 @@ import javax.xml.bind.annotation.XmlType;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.scan.ScanRecord;
import com.raytheon.uf.common.dataquery.requests.DbQueryRequest;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint.ConstraintType;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.monitor.scan.config.SCANConfigEnums.ScanTables;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.viz.core.HDF5Util;
import com.raytheon.uf.viz.core.comm.Loader;
import com.raytheon.uf.viz.core.exception.VizException;
import com.raytheon.uf.viz.core.requests.ThriftClient;
import com.raytheon.uf.viz.core.rsc.AbstractRequestableResourceData;
import com.raytheon.uf.viz.core.rsc.AbstractVizResource;
import com.raytheon.uf.viz.core.rsc.LoadProperties;
@ -55,8 +67,7 @@ import com.raytheon.uf.viz.monitor.scan.ScanMonitor;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 13, 2009 dhladky Initial creation
* Feb 28, 2013 1731 bsteffen Remove unnecessary query in
* getDataStore.
* Feb 28, 2013 1731 bsteffen Optimize construction of scan resource.
*
* </pre>
*
@ -96,41 +107,41 @@ public class ScanResourceData extends AbstractRequestableResourceData {
try {
long t0 = System.currentTimeMillis();
// Forces ScanMonitor to grab data back for one extra hour 1/2 past
// the
// first time.
long first = ((ScanRecord) objects[0]).getDataTime().getRefTime()
.getTime()
- (3600 * 1500);
Date firstDate = new Date(first);
// the first time.
Calendar firstCal = ((ScanRecord) objects[0]).getDataTime()
.getRefTimeAsCalendar();
firstCal.add(Calendar.MINUTE, -90);
Date firstDate = firstCal.getTime();
int count = 0;
List<String> urisToLoad = new ArrayList<String>(uris.size());
for (String uri : uris) {
ScanRecord record = getScanRecord(uri);
if (record != null) {
if (record.getDataTime().getRefTime().after(firstDate)) {
record = populateRecord(record);
if ((record.getTableData() != null)
&& (record.getDataTime() != null)
&& (record.getTableData().getVolScanTime() != null)) {
ScanRecord record = new ScanRecord(uri);
if (record.getDataTime().getRefTime().after(firstDate)) {
urisToLoad.add(uri);
}
}
ScanRecord[] records = getScanRecords(urisToLoad);
populateRecords(records);
for (ScanRecord record : records) {
if ((record.getTableData() != null)
&& (record.getDataTime() != null)
&& (record.getTableData().getVolScanTime() != null)) {
getScan().setTableData(icao, record.getTableData(),
/*
* TODO: This should be the volume scan time, but
* {Radar,Scan}Record.getVolScanTime is actually the
* radar product generation time.
*/
record.getDataTime().getRefTime(),
record.getTilt(),
record.getDataTime().getRefTime(),
tableType);
count++;
getScan().setTableData(icao, record.getTableData(),
/*
* TODO: This should be the volume scan time, but
* {Radar,Scan}Record.getVolScanTime is actually the radar
* product generation time.
*/
record.getDataTime().getRefTime(), record.getTilt(),
record.getDataTime().getRefTime(), tableType);
count++;
if (record.getType().equals(ScanTables.DMD.name())) {
if (dataObjectMap == null) {
dataObjectMap = new HashMap<DataTime, ScanRecord>();
}
dataObjectMap.put(record.getDataTime(), record);
}
if (record.getType().equals(ScanTables.DMD.name())) {
if (dataObjectMap == null) {
dataObjectMap = new HashMap<DataTime, ScanRecord>();
}
dataObjectMap.put(record.getDataTime(), record);
}
}
}
@ -155,7 +166,7 @@ public class ScanResourceData extends AbstractRequestableResourceData {
}
System.out
.println("Loaded " + count + " out of " + objects.length
.println("Loaded " + count + " out of " + uris.size()
+ " objects in "
+ (System.currentTimeMillis() - t0) + "ms");
// need to update the dialog here after the
@ -189,20 +200,47 @@ public class ScanResourceData extends AbstractRequestableResourceData {
* @param record
*/
public ScanRecord populateRecord(ScanRecord record) throws VizException {
IDataStore dataStore = getDataStore(record);
record.retrieveMapFromDataStore(dataStore);
populateRecords(new ScanRecord[] { record });
return record;
}
/**
* Get the data store
*
* @param record
* @return
*/
private IDataStore getDataStore(ScanRecord record) {
File loc = HDF5Util.findHDF5Location(record);
return DataStoreFactory.getDataStore(loc);
/**
 * Populate the table data of multiple ScanRecords in bulk. Records are
 * grouped by their HDF5 file location so each file's datasets can be
 * retrieved in a single datastore request instead of one request per
 * record.
 *
 * @param records
 *            the records to populate; each record's table data is set in
 *            place
 * @throws VizException
 *             if dataset retrieval or deserialization fails
 */
public void populateRecords(ScanRecord[] records) throws VizException {
// Group records by the HDF5 file that backs them.
Map<File, Set<ScanRecord>> fileMap = new HashMap<File, Set<ScanRecord>>();
for (ScanRecord record : records) {
record.setPluginName("scan");
File loc = HDF5Util.findHDF5Location(record);
Set<ScanRecord> recordSet = fileMap.get(loc);
if (recordSet == null) {
recordSet = new HashSet<ScanRecord>();
fileMap.put(loc, recordSet);
}
recordSet.add(record);
}
// For each file, request every record's dataset in one bulk call.
for (Entry<File, Set<ScanRecord>> fileEntry : fileMap.entrySet()) {
IDataStore dataStore = DataStoreFactory.getDataStore(fileEntry
.getKey());
String[] datasetGroupPath = new String[fileEntry.getValue().size()];
// Parallel array: scanRecords[i] owns datasetGroupPath[i].
ScanRecord[] scanRecords = new ScanRecord[datasetGroupPath.length];
int i = 0;
for (ScanRecord record : fileEntry.getValue()) {
datasetGroupPath[i] = record.getDataURI()
+ DataStoreFactory.DEF_SEPARATOR + record.getType();
scanRecords[i] = record;
i += 1;
}
try {
// retrieveDatasets returns results in request order, so index i
// matches the record in scanRecords[i].
IDataRecord[] dataRecords = dataStore.retrieveDatasets(
datasetGroupPath,
Request.ALL);
for (i = 0; i < dataRecords.length; i += 1) {
ByteDataRecord byteData = (ByteDataRecord) dataRecords[i];
scanRecords[i].setTableData(byteData);
}
} catch (Exception e) {
// Wrap storage/deserialization failures in the viz-layer exception.
throw new VizException(e);
}
}
}
// create the monitor instance
@ -249,22 +287,16 @@ public class ScanResourceData extends AbstractRequestableResourceData {
}
}
/**
* Gets the available record
*
* @param uri
* @return
*/
private ScanRecord getScanRecord(String uri) {
Map<String, Object> vals = new HashMap<String, Object>();
vals.put("pluginName", "scan");
vals.put("dataURI", uri);
try {
return (ScanRecord) Loader.loadData(vals);
} catch (VizException e) {
e.printStackTrace();
}
return null;
/**
 * Fetch fully constructed ScanRecords for the given data URIs using a
 * single database query (an IN constraint on dataURI) rather than one
 * query per URI.
 *
 * @param uris
 *            the dataURIs of the records to load
 * @return the matching ScanRecords
 * @throws VizException
 *             if the server request fails
 */
private ScanRecord[] getScanRecords(Collection<String> uris)
throws VizException {
DbQueryRequest request = new DbQueryRequest();
request.setEntityClass(ScanRecord.class);
// IN constraint matches all requested URIs in one round trip.
RequestConstraint rc = new RequestConstraint(null, ConstraintType.IN);
rc.setConstraintValueList(uris);
request.addConstraint("dataURI", rc);
DbQueryResponse response = (DbQueryResponse) ThriftClient
.sendRequest(request);
return response.getEntityObjects(ScanRecord.class);
}
}

View file

@ -47,6 +47,7 @@ import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.monitor.scan.config.SCANConfigEnums.ScanTables;
import com.raytheon.uf.common.serialization.DynamicSerializationManager;
import com.raytheon.uf.common.serialization.DynamicSerializationManager.SerializationType;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
import com.raytheon.uf.common.status.IUFStatusHandler;
@ -64,6 +65,7 @@ import com.raytheon.uf.common.status.UFStatus.Priority;
* ------------ ---------- ----------- --------------------------
* 03/17/10 2521 D. Hladky Initial release
* 02/01/13 1649 D. Hladky better logging,
* Feb 28, 2013 1731 bsteffen Optimize construction of scan resource.
*
* </pre>
*
@ -129,6 +131,14 @@ public class ScanRecord extends ServerSpecificPersistablePluginDataObject {
/* cell data only */
public ModelData md = null;
/**
 * Default constructor.
 */
public ScanRecord() {
super();
}

/**
 * Construct a record from a dataURI; the superclass parses the URI into
 * the record's fields.
 *
 * @param uri
 *            the dataURI identifying this record
 */
public ScanRecord(String uri) {
super(uri);
}
@Override
public IDecoderGettable getDecoderGettable() {
// TODO Auto-generated method stub
@ -258,6 +268,21 @@ public class ScanRecord extends ServerSpecificPersistablePluginDataObject {
this.stationGeometry = stationGeometry;
}
/**
 * Set the TableData from the serialized form that is stored in hdf5.
 *
 * The byte payload is deserialized with Thrift dynamic serialization and
 * cast to ScanTableData before being stored on this record.
 *
 * @param byteData
 *            the raw serialized table data as read from the datastore
 * @throws SerializationException
 *             if the bytes cannot be deserialized
 */
public void setTableData(ByteDataRecord byteData)
throws SerializationException {
ByteArrayInputStream bais = new ByteArrayInputStream(
byteData.getByteData());
// NOTE(review): assumes the payload was written with Thrift dynamic
// serialization and is a ScanTableData — a mismatched payload will
// throw ClassCastException rather than SerializationException.
Object o = DynamicSerializationManager.getManager(
SerializationType.Thrift).deserialize(bais);
setTableData((ScanTableData<?>) o);
}
/**
* Gets the Hash out of the datastore by HUC
*
@ -268,11 +293,7 @@ public class ScanRecord extends ServerSpecificPersistablePluginDataObject {
try {
ByteDataRecord byteData = (ByteDataRecord) dataStore.retrieve(
getDataURI(), getType(), Request.ALL);
ByteArrayInputStream bais = new ByteArrayInputStream(
byteData.getByteData());
Object o = DynamicSerializationManager.getManager(
SerializationType.Thrift).deserialize(bais);
setTableData((ScanTableData<?>) o);
setTableData(byteData);
} catch (Throwable e) {
statusHandler.handle(Priority.ERROR, "Couldn't load Table data!" + getDataURI());
}

View file

@ -176,10 +176,11 @@ public interface IDataStore extends ISerializableObject {
Request request) throws StorageException, FileNotFoundException;
/**
* Retrieve a set of datasets given the Request parameters
* Retrieve multiple datasets from a single file
*
*
* @param datasetGroupPath
* the full path to a dataset.
* @param request
* the request type to perform
* @return a set of datarecords
@ -190,7 +191,8 @@ public interface IDataStore extends ISerializableObject {
Request request) throws StorageException, FileNotFoundException;
/**
* Retrieve multiple groups from a single file
* Retrieve multiple groups from a single file, retrieves all datasets from
* each group.
*
* NOTE: The request is applied to every group
*