Issue #1257: Fix GFE HDF5 retrieval to time-correlate results to the requested times.
Change-Id: I4fb15e8e712155f6099bd27880fbcbdf1948fc5f
Former-commit-id: 0d10680b28 [formerly 6077558dbd06f8fdb568f03c163352b080a9eb5e]
Former-commit-id: 2e025e09e2
Parent: 04053afeb7
Commit: f94ba53663
7 changed files with 216 additions and 126 deletions
@@ -39,7 +39,7 @@ import org.eclipse.swt.widgets.Control;
 import org.eclipse.swt.widgets.Label;
 import org.eclipse.swt.widgets.Shell;

-import com.raytheon.viz.gfe.types.Pair;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector;
 import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
 import com.raytheon.viz.ui.widgets.ToggleSelectList;
@@ -70,6 +70,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
 import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
 import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.dataplugin.grib.GribModel;
 import com.raytheon.uf.common.dataplugin.grib.GribRecord;
@@ -425,14 +426,16 @@ public class GFEDao extends DefaultPluginDao {
         });

         // we gain nothing by removing from hdf5
-        Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
-                GridDatabase.gfeBaseDataDir, parmId, times);
-        for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+        Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
+                .getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
+                        times);
+        for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
+                .entrySet()) {
             File hdf5File = entry.getKey();
             IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);

             try {
-                String[] groupsToDelete = entry.getValue();
+                String[] groupsToDelete = entry.getValue().getSecond();
                 for (String grp : groupsToDelete) {
                     dataStore.delete(grp);
                 }
@@ -931,7 +934,7 @@ public class GFEDao extends DefaultPluginDao {
                             (Date) result.getRowColumnValue(i, 0));
                     try {
                         GridDatabase db = GridParmManager.getDb(dbId);
-                        if (db != null && !dbInventory.contains(dbId)) {
+                        if ((db != null) && !dbInventory.contains(dbId)) {
                             dbInventory.add(dbId);
                         }
                     } catch (GfeException e) {
@@ -36,6 +36,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
 import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
 import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.datastorage.DataStoreFactory;
 import com.raytheon.uf.common.datastorage.IDataStore;
@@ -173,27 +174,40 @@ public abstract class GridDatabase {

     public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
             List<TimeRange> times) throws GfeException {
-        FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
-        Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-                times);
+        FloatDataRecord[] scalarData = null;
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+                parmId, times);

         try {
-            int index = 0;
-            for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-                IDataRecord[] rawData = entry.getKey().retrieveGroups(
-                        entry.getValue(), Request.ALL);
+            Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
+                    (int) (1.25 * times.size()) + 1);

-                for (IDataRecord record : rawData) {
-                    if (index < scalarData.length) {
-                        scalarData[index++] = (FloatDataRecord) record;
-                    }
+            // loop over the dataStores and their respective groups to pull all
+            // data, stored into records to reorder requests by times
+            for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+                    .entrySet()) {
+                Pair<List<TimeRange>, String[]> pair = entry.getValue();
+                String[] groups = pair.getSecond();
+
+                IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+                        Request.ALL);
+
+                if (rawData.length != groups.length) {
+                    throw new IllegalArgumentException(
+                            "Invalid number of dataSets returned expected 1 per group, received: "
+                                    + ((double) rawData.length / groups.length));
+                }
+
+                int count = 0;
+                for (TimeRange timeRange : pair.getFirst()) {
+                    records.put(timeRange, (FloatDataRecord) rawData[count++]);
                 }
             }

-            if (index != scalarData.length) {
-                throw new IllegalArgumentException(
-                        "Invalid number of dataSets returned expected 1 per group, received: "
-                                + (index / scalarData.length));
+            scalarData = new FloatDataRecord[times.size()];
+            int count = 0;
+            for (TimeRange timeRange : times) {
+                scalarData[count++] = records.get(timeRange);
             }
         } catch (Exception e) {
             throw new GfeException("Unable to get data from HDF5 for ParmID: "
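For illustration only, the hunk above follows a reorder-by-time pattern: records pulled group by group are first keyed by their time range, and the result array is then rebuilt in the order of the requested times. The sketch below shows that pattern in isolation, with plain strings standing in for TimeRange and a trivial record class standing in for FloatDataRecord; none of these names come from the AWIPS code.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TimeCorrelateSketch {

    // Stand-in for a retrieved HDF5 dataset; only the group name matters here.
    static class Record {
        final String group;
        Record(String group) { this.group = group; }
    }

    // Rebuilds the result array in the order of the requested times: whatever
    // order the datastore returned the groups in, the caller gets one record
    // per requested time, in request order (null if a time has no data).
    static Record[] correlate(List<String> requestedTimes, Map<String, Record> byTime) {
        Record[] result = new Record[requestedTimes.size()];
        int count = 0;
        for (String time : requestedTimes) {
            result[count++] = byTime.get(time);
        }
        return result;
    }

    public static void main(String[] args) {
        List<String> times = new ArrayList<String>();
        times.add("2012-05-01_00z");
        times.add("2012-05-01_06z");

        // Simulate groups coming back in a different order than requested.
        Map<String, Record> byTime = new HashMap<String, Record>(
                (int) (1.25 * times.size()) + 1);
        byTime.put("2012-05-01_06z", new Record("grp06"));
        byTime.put("2012-05-01_00z", new Record("grp00"));

        for (Record r : correlate(times, byTime)) {
            System.out.println(r.group); // grp00 then grp06, matching request order
        }
    }
}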
@@ -211,35 +225,55 @@ public abstract class GridDatabase {

     public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
             List<TimeRange> times) throws GfeException {
-        FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
-        Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-                times);
+        FloatDataRecord[][] vectorData = null;
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+                parmId, times);

         try {
-            int index = 0;
-            for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-                IDataRecord[] rawData = entry.getKey().retrieveGroups(
-                        entry.getValue(), Request.ALL);
+            Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
+                    (int) (1.25 * times.size()) + 1);

-                for (IDataRecord rec : rawData) {
-                    if (index < vectorData.length * 2) {
+            // loop over the dataStores and their respective groups to pull all
+            // data, stored into records to reorder requests by times
+            for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+                    .entrySet()) {
+                Pair<List<TimeRange>, String[]> pair = entry.getValue();
+                String[] groups = pair.getSecond();
+
+                IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+                        Request.ALL);
+
+                if (rawData.length != groups.length * 2) {
+                    throw new IllegalArgumentException(
+                            "Invalid number of dataSets returned expected per group, received: "
+                                    + ((double) rawData.length / groups.length));
+                }
+
+                // iterate over the data from this dataStore adding it records
+                int count = 0;
+                for (TimeRange timeRange : pair.getFirst()) {
+                    FloatDataRecord[] recs = new FloatDataRecord[2];
+                    for (int i = 0; i < 2; i++) {
+                        IDataRecord rec = rawData[count * 2 + i];
                         if ("Mag".equals(rec.getName())) {
-                            vectorData[index++ / 2][0] = (FloatDataRecord) rec;
+                            recs[0] = (FloatDataRecord) rec;
                         } else if ("Dir".equals(rec.getName())) {
-                            vectorData[index++ / 2][1] = (FloatDataRecord) rec;
+                            recs[1] = (FloatDataRecord) rec;
                         } else {
                             throw new IllegalArgumentException(
                                     "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
                                             + rec.getName());
                         }
                     }
+                    records.put(timeRange, recs);
+                    count++;
                 }
             }

-            if (index != vectorData.length * 2) {
-                throw new IllegalArgumentException(
-                        "Invalid number of dataSets returned expected per group, received: "
-                                + (index / vectorData.length));
+            vectorData = new FloatDataRecord[times.size()][2];
+            int count = 0;
+            for (TimeRange timeRange : times) {
+                vectorData[count++] = records.get(timeRange);
             }
         } catch (Exception e) {
             throw new GfeException("Unable to get data from HDF5 for ParmID: "
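The vector path above relies on a two-datasets-per-group convention: each HDF5 group yields a Mag and a Dir dataset, so rawData[count * 2 + i] addresses the pair belonging to the count-th time range. A hedged, self-contained sketch of that pairing step, using a placeholder record type rather than the real IDataRecord:

import java.util.Arrays;

public class VectorPairingSketch {

    // Placeholder for an HDF5 dataset with a name ("Mag" or "Dir") and data.
    static class NamedRecord {
        final String name;
        final float[] data;
        NamedRecord(String name, float[] data) { this.name = name; this.data = data; }
    }

    // Splits a flat Mag/Dir stream into one [mag, dir] pair per group.
    static NamedRecord[][] pairUp(NamedRecord[] rawData, int groupCount) {
        if (rawData.length != groupCount * 2) {
            throw new IllegalArgumentException("Expected 2 datasets per group, got "
                    + ((double) rawData.length / groupCount));
        }
        NamedRecord[][] pairs = new NamedRecord[groupCount][2];
        for (int count = 0; count < groupCount; count++) {
            for (int i = 0; i < 2; i++) {
                NamedRecord rec = rawData[count * 2 + i];
                if ("Mag".equals(rec.name)) {
                    pairs[count][0] = rec;
                } else if ("Dir".equals(rec.name)) {
                    pairs[count][1] = rec;
                } else {
                    throw new IllegalArgumentException("Unknown dataset: " + rec.name);
                }
            }
        }
        return pairs;
    }

    public static void main(String[] args) {
        NamedRecord[] raw = {
                new NamedRecord("Dir", new float[] { 270f }),
                new NamedRecord("Mag", new float[] { 12.5f }) };
        NamedRecord[][] pairs = pairUp(raw, 1);
        // prints "Mag / Dir" regardless of the order the datasets arrived in
        System.out.println(pairs[0][0].name + " / " + pairs[0][1].name);
        System.out.println(Arrays.toString(pairs[0][0].data));
    }
}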
@@ -257,39 +291,56 @@ public abstract class GridDatabase {

     public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
             List<TimeRange> times) throws GfeException {
-        ByteDataRecord[][] byteRecords = new ByteDataRecord[times.size()][2];
-        Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-                times);
+        ByteDataRecord[][] byteRecords = null;
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+                parmId, times);

         try {
-            int index = 0;
             // loop over the dataStores and their respective groups to pull all
             // data
-            for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-                IDataRecord[] rawData = entry.getKey().retrieveGroups(
-                        entry.getValue(), Request.ALL);
+            Map<TimeRange, ByteDataRecord[]> records = new HashMap<TimeRange, ByteDataRecord[]>(
+                    (int) (1.25 * times.size()) + 1);

+            for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+                    .entrySet()) {
+                Pair<List<TimeRange>, String[]> pair = entry.getValue();
+                String[] groups = pair.getSecond();
+
+                IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+                        Request.ALL);
+
+                if (rawData.length != groups.length * 2) {
+                    throw new IllegalArgumentException(
+                            "Invalid number of dataSets returned expected 2 per group, received: "
+                                    + ((double) rawData.length / groups.length));
+                }
+
+                // iterate over the data from this dataStore adding it records
+                int count = 0;
+                for (TimeRange timeRange : pair.getFirst()) {
+                    ByteDataRecord[] recs = new ByteDataRecord[2];
+                    for (int i = 0; i < 2; i++) {
+                        IDataRecord rec = rawData[count * 2 + i];
+
-                // iterate over the data from this dataStore adding it
-                // byteRecords
-                for (IDataRecord rec : rawData) {
-                    if (index < byteRecords.length * 2) {
                         if ("Data".equals(rec.getName())) {
-                            byteRecords[index++ / 2][0] = (ByteDataRecord) rec;
+                            recs[0] = (ByteDataRecord) rec;
                         } else if ("Keys".equals(rec.getName())) {
-                            byteRecords[index++ / 2][1] = (ByteDataRecord) rec;
+                            recs[1] = (ByteDataRecord) rec;
                         } else {
                             throw new IllegalArgumentException(
-                                    "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+                                    "Unknown dataset retrieved for vector data. Valid values: Data, Keys Received: "
                                             + rec.getName());
                         }
                     }
+                    records.put(timeRange, recs);
+                    count++;
                 }
             }

-            if (index != byteRecords.length * 2) {
-                throw new IllegalArgumentException(
-                        "Invalid number of dataSets returned expected per group, received: "
-                                + (index / byteRecords.length));
+            byteRecords = new ByteDataRecord[times.size()][2];
+            int count = 0;
+            for (TimeRange timeRange : times) {
+                byteRecords[count++] = records.get(timeRange);
             }
         } catch (Exception e) {
             throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -298,14 +349,16 @@ public abstract class GridDatabase {
         return byteRecords;
     }

-    protected Map<IDataStore, String[]> getDataStoreAndGroups(ParmID parmId,
-            List<TimeRange> times) {
-        Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
-                GridDatabase.gfeBaseDataDir, parmId, times);
+    protected Map<IDataStore, Pair<List<TimeRange>, String[]>> getDataStoreAndGroups(
+            ParmID parmId, List<TimeRange> times) {
+        Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
+                .getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
+                        times);
         // size hashMap accounting for load factor
-        Map<IDataStore, String[]> rval = new HashMap<IDataStore, String[]>(
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> rval = new HashMap<IDataStore, Pair<List<TimeRange>, String[]>>(
                 (int) (fileMap.size() * 1.25) + 1);
-        for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+        for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
+                .entrySet()) {
             rval.put(DataStoreFactory.getDataStore(entry.getKey()),
                     entry.getValue());
         }
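getDataStoreAndGroups above does nothing more than re-key the file map: each File becomes the object that will actually do the I/O while the Pair value rides along unchanged, and the target map is presized so the expected number of entries stays under the default HashMap load factor. A simplified sketch of that step, with strings standing in for File, IDataStore, and the Pair value:

import java.util.HashMap;
import java.util.Map;

public class RekeySketch {

    public static void main(String[] args) {
        // values stand in for Pair<List<TimeRange>, String[]>
        Map<String, String[]> byFile = new HashMap<String, String[]>();
        byFile.put("/data/Fcst_A.h5", new String[] { "grpA", "grpB" });
        byFile.put("/data/Fcst_B.h5", new String[] { "grpC" });

        // presize so byFile.size() entries fit under the default 0.75 load factor
        Map<String, String[]> byStore = new HashMap<String, String[]>(
                (int) (byFile.size() * 1.25) + 1);
        for (Map.Entry<String, String[]> entry : byFile.entrySet()) {
            // "opening" a data store is just wrapping the path in this sketch
            byStore.put("datastore:" + entry.getKey(), entry.getValue());
        }

        System.out.println(byStore.size()); // 2, one data store per file
    }
}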
@@ -67,6 +67,7 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.dataplugin.gfe.weather.WeatherKey;
 import com.raytheon.uf.common.datastorage.DataStoreFactory;
@@ -2115,45 +2116,56 @@ public class IFPGridDatabase extends GridDatabase {
     @Override
     public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
             List<TimeRange> times) throws GfeException {
-        FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
-        Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-                times);
+        FloatDataRecord[] scalarData = null;
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+                parmId, times);

         try {
-            // overall index into scalar data
-            int scalarDataIndex = 0;
-            for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-                IDataRecord[] rawData = entry.getKey().retrieveGroups(
-                        entry.getValue(), Request.ALL);
+            Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
+                    (int) (1.25 * times.size()) + 1);

-                for (IDataRecord rec : rawData) {
-                    if (scalarDataIndex < scalarData.length) {
-                        if (rec instanceof FloatDataRecord) {
-                            scalarData[scalarDataIndex++] = (FloatDataRecord) rec;
-                        } else if (gridConfig == null) {
-                            throw new IllegalArgumentException(
-                                    "Data array for "
-                                            + parmId.getParmName()
-                                            + " "
-                                            + parmId.getParmLevel()
-                                            + " is not a float array, but database "
-                                            + toString()
-                                            + " does not contain a grid configuration.");
-                        } else {
-                            // Convert to a FloatDataRecord for internal use
-                            ParmStorageInfo psi = parmStorageInfo.get(parmId
-                                    .getCompositeName());
-                            scalarData[scalarDataIndex++] = storageToFloat(rec,
-                                    psi);
-                        }
+            // loop over the dataStores and their respective groups to pull all
+            // data, stored into records to reorder requests by times
+            for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+                    .entrySet()) {
+                Pair<List<TimeRange>, String[]> pair = entry.getValue();
+                String[] groups = pair.getSecond();
+
+                IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+                        Request.ALL);
+
+                if (rawData.length != groups.length) {
+                    throw new IllegalArgumentException(
+                            "Invalid number of dataSets returned expected 1 per group, received: "
+                                    + ((double) rawData.length / groups.length));
+                }
+
+                int count = 0;
+                for (TimeRange timeRange : pair.getFirst()) {
+                    IDataRecord rec = rawData[count++];
+
+                    if (rec instanceof FloatDataRecord) {
+                        records.put(timeRange, (FloatDataRecord) rec);
+                    } else if (gridConfig == null) {
+                        throw new IllegalArgumentException("Data array for "
+                                + parmId.getParmName() + " "
+                                + parmId.getParmLevel()
+                                + " is not a float array, but database "
+                                + toString()
+                                + " does not contain a grid configuration.");
+                    } else {
+                        // Convert to a FloatDataRecord for internal use
+                        ParmStorageInfo psi = parmStorageInfo.get(parmId
+                                .getCompositeName());
+                        records.put(timeRange, storageToFloat(rec, psi));
+                    }
                     }
                 }
             }

-            if (scalarDataIndex != scalarData.length) {
-                throw new IllegalArgumentException(
-                        "Invalid number of dataSets returned expected 1 per group, received: "
-                                + (scalarDataIndex / scalarData.length));
+            scalarData = new FloatDataRecord[times.size()];
+            int count = 0;
+            for (TimeRange timeRange : times) {
+                scalarData[count++] = records.get(timeRange);
             }
         } catch (Exception e) {
             throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -2166,29 +2178,40 @@ public class IFPGridDatabase extends GridDatabase {
     @Override
     public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
             List<TimeRange> times) throws GfeException {
-        FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
-        Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-                times);
+        FloatDataRecord[][] vectorData = null;
+        Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+                parmId, times);

         try {
-            // overall index into vector data
-            int vectorDataIndex = 0;
-            // iterate over dataStore and their respective groups for the
-            // requested parm/time ranges
-            for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-                IDataRecord[] rawData = entry.getKey().retrieveGroups(
-                        entry.getValue(), Request.ALL);
+            Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
+                    (int) (1.25 * times.size()) + 1);

-                // iterate over the data retrieved from this dataStore for the
-                // groups
-                for (int i = 0; i < rawData.length; i += 2, vectorDataIndex++) {
+            // loop over the dataStores and their respective groups to pull all
+            // data, stored into records to reorder requests by times
+            for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+                    .entrySet()) {
+                Pair<List<TimeRange>, String[]> pair = entry.getValue();
+                String[] groups = pair.getSecond();
+
+                IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+                        Request.ALL);
+
+                if (rawData.length != groups.length * 2) {
+                    throw new IllegalArgumentException(
+                            "Invalid number of dataSets returned expected per group, received: "
+                                    + ((double) rawData.length / groups.length));
+                }
+
+                // iterate over the data from this dataStore adding it records
+                int count = 0;
+                for (TimeRange timeRange : pair.getFirst()) {
                     IDataRecord magRec = null;
                     IDataRecord dirRec = null;

                     // Should be vector data and each group should have had a
                     // Dir and Mag dataset
-                    for (int j = 0; j < 2; j++) {
-                        IDataRecord rec = rawData[i + j];
+                    for (int i = 0; i < 2; i++) {
+                        IDataRecord rec = rawData[count * 2 + i];
                         if ("Mag".equals(rec.getName())) {
                             magRec = rec;
                         } else if ("Dir".equals(rec.getName())) {
@@ -2200,10 +2223,12 @@ public class IFPGridDatabase extends GridDatabase {
                         }
                     }

+                    FloatDataRecord[] recs = new FloatDataRecord[2];
+
                     if (magRec.getClass() == dirRec.getClass()) {
                         if (magRec instanceof FloatDataRecord) {
-                            vectorData[vectorDataIndex][0] = (FloatDataRecord) magRec;
-                            vectorData[vectorDataIndex][1] = (FloatDataRecord) dirRec;
+                            recs[0] = (FloatDataRecord) magRec;
+                            recs[1] = (FloatDataRecord) dirRec;
                         } else if (gridConfig == null) {
                             throw new IllegalArgumentException(
                                     "Data array for "
@@ -2224,11 +2249,12 @@ public class IFPGridDatabase extends GridDatabase {
                                     VECTOR_DIR_DATA_OFFSET,
                                     VECTOR_DIR_DATA_MULTIPLIER,
                                     magStorageInfo.storageType());
-                            vectorData[vectorDataIndex][0] = storageToFloat(
-                                    magRec, magStorageInfo);
-                            vectorData[vectorDataIndex][1] = storageToFloat(
-                                    dirRec, dirStorageInfo);
+                            recs[0] = storageToFloat(magRec, magStorageInfo);
+                            recs[1] = storageToFloat(dirRec, dirStorageInfo);
                         }
+
+                        records.put(timeRange, recs);
+                        count++;
                     } else {
                         throw new IllegalArgumentException(
                                 "Magnitude and direction grids are not of the same type.");
@@ -2236,10 +2262,10 @@ public class IFPGridDatabase extends GridDatabase {
                 }
             }

-            if (vectorDataIndex != vectorData.length) {
-                throw new IllegalArgumentException(
-                        "Invalid number of dataSets returned expected 2 per group, received: "
-                                + (vectorDataIndex / vectorData.length) * 2);
+            vectorData = new FloatDataRecord[times.size()][2];
+            int count = 0;
+            for (TimeRange timeRange : times) {
+                vectorData[count++] = records.get(timeRange);
             }
         } catch (Exception e) {
             throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -36,6 +36,7 @@ Export-Package: com.raytheon.uf.common.dataplugin.gfe,
  com.raytheon.uf.common.dataplugin.gfe.slice,
  com.raytheon.uf.common.dataplugin.gfe.textproduct,
  com.raytheon.uf.common.dataplugin.gfe.time,
+ com.raytheon.uf.common.dataplugin.gfe.type,
  com.raytheon.uf.common.dataplugin.gfe.util,
  com.raytheon.uf.common.dataplugin.gfe.weather,
  com.raytheon.uf.common.dataplugin.gfe.weatherelement
@@ -17,7 +17,7 @@
  * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
  * further licensing information.
  **/
-package com.raytheon.viz.gfe.types;
+package com.raytheon.uf.common.dataplugin.gfe.type;

 /**
  * TODO Add Description
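The Pair type whose package changes above is used throughout this change as a simple two-value holder (a list of time ranges plus the HDF5 group names that go with them). As a point of reference only, a minimal generic pair is sketched below; the constructor arguments and the getFirst()/getSecond() accessors match the calls seen elsewhere in this diff, while everything else is an assumption rather than the actual AWIPS class.

public class Pair<F, S> {

    private final F first;

    private final S second;

    public Pair(F first, S second) {
        this.first = first;
        this.second = second;
    }

    public F getFirst() {
        return first;
    }

    public S getSecond() {
        return second;
    }
}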
@@ -41,6 +41,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.time.TimeRange;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.Geometry;
@@ -170,7 +171,8 @@ public class GfeUtil {
     public static File getHdf5File(String baseDir, ParmID parmId, TimeRange time) {
         List<TimeRange> list = new ArrayList<TimeRange>(1);
         list.add(time);
-        Map<File, String[]> map = getHdf5FilesAndGroups(baseDir, parmId, list);
+        Map<File, Pair<List<TimeRange>, String[]>> map = getHdf5FilesAndGroups(
+                baseDir, parmId, list);
         File rval = null;

         if (!map.isEmpty()) {
@@ -191,14 +193,14 @@ public class GfeUtil {
      * @param times
      * @return
      */
-    public static Map<File, String[]> getHdf5FilesAndGroups(String baseDir,
-            ParmID parmId, List<TimeRange> times) {
+    public static Map<File, Pair<List<TimeRange>, String[]>> getHdf5FilesAndGroups(
+            String baseDir, ParmID parmId, List<TimeRange> times) {
         DatabaseID dbId = parmId.getDbId();
         File directory = getHdf5Dir(baseDir, dbId);
         boolean isSingleton = DatabaseID.NO_MODEL_TIME.equals(dbId
                 .getModelTime());

-        Map<File, String[]> rval = null;
+        Map<File, Pair<List<TimeRange>, String[]>> rval = null;
         if (isSingleton) {
             // file per parm per day
             StringBuffer tmp = new StringBuffer(40);
@@ -234,13 +236,16 @@ public class GfeUtil {
             }

             // initialize map size, accounting for load factor
-            rval = new HashMap<File, String[]>(
+            rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(
                     (int) (dateMap.size() * 1.25) + 1);
             for (Map.Entry<String, List<TimeRange>> entry : dateMap.entrySet()) {
                 tmp.setLength(0);
                 tmp.append(preString).append(entry.getKey()).append(postString);
                 File h5File = new File(directory, tmp.toString());
-                rval.put(h5File, getHDF5Groups(parmId, entry.getValue()));
+                Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
+                        entry.getValue(), getHDF5Groups(parmId,
+                                entry.getValue()));
+                rval.put(h5File, p);
             }
         } else {
             // file per parm
@@ -250,8 +255,10 @@ public class GfeUtil {
             fileName.append(parmId.getParmLevel()).append(
                     DATASTORE_FILE_EXTENSION);
             File h5File = new File(directory, fileName.toString());
-            rval = new HashMap<File, String[]>(2);
-            rval.put(h5File, getHDF5Groups(parmId, times));
+            rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(2);
+            Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
+                    times, getHDF5Groups(parmId, times));
+            rval.put(h5File, p);
         }

         return rval;
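In the singleton-database branch of getHdf5FilesAndGroups, the essential step is bucketing the requested time ranges by day so that each bucket maps to one per-day HDF5 file and its Pair of time ranges and group names. A simplified, self-contained sketch of that grouping, with dates as plain strings and an invented file-name pattern rather than the real preString/postString logic:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupByDaySketch {

    // Buckets each "yyyyMMdd_HHmm" time stamp under its "yyyyMMdd" day key.
    static Map<String, List<String>> groupByDay(List<String> times) {
        Map<String, List<String>> dateMap = new HashMap<String, List<String>>();
        for (String time : times) {
            String day = time.substring(0, 8);
            List<String> bucket = dateMap.get(day);
            if (bucket == null) {
                bucket = new ArrayList<String>();
                dateMap.put(day, bucket);
            }
            bucket.add(time);
        }
        return dateMap;
    }

    public static void main(String[] args) {
        List<String> times = new ArrayList<String>();
        times.add("20120501_0000");
        times.add("20120501_0600");
        times.add("20120502_0000");

        // one entry per day -> one hypothetical per-day HDF5 file
        for (Map.Entry<String, List<String>> entry : groupByDay(times).entrySet()) {
            System.out.println("Parm_" + entry.getKey() + ".h5 -> " + entry.getValue());
        }
    }
}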