Merge branch 'ss_builds' (12.11.1-10) into Unified_Grid

Former-commit-id: 8bcb077174ef7bcb923dd974e53e82f12936b580
Commit 6f2c1ede9e
9 changed files with 251 additions and 160 deletions
@@ -39,7 +39,7 @@ import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;

import com.raytheon.viz.gfe.types.Pair;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector;
import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
import com.raytheon.viz.ui.widgets.ToggleSelectList;
@@ -1237,11 +1237,11 @@ elif SID == "HFO":
# San Juan OCONUS
elif SID == "SJU":
SATDATA = [("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
SATDATA = [("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]

# Guam OCONUS
elif SID == "GUM":
@@ -1249,16 +1249,16 @@ elif SID == "GUM":
#CONUS sites
else:
SATDATA = [("NESDIS/GOES-15(P)/West CONUS/Imager Visible", "visibleWest"),
("NESDIS/GOES-15(P)/West CONUS/Imager 11 micron IR", "ir11West"),
("NESDIS/GOES-15(P)/West CONUS/Imager 12 micron IR", "ir13West"),
("NESDIS/GOES-15(P)/West CONUS/Imager 3.9 micron IR", "ir39West"),
("NESDIS/GOES-15(P)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
SATDATA = [("NESDIS/GOES-11(L)/West CONUS/Imager Visible", "visibleWest"),
("NESDIS/GOES-11(L)/West CONUS/Imager 11 micron IR", "ir11West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 12 micron IR", "ir13West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 3.9 micron IR", "ir39West"),
("NESDIS/GOES-11(L)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]

#---------------------------------------------------------------------------
#
@@ -70,6 +70,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants;
@@ -422,14 +423,16 @@ public class GFEDao extends DefaultPluginDao {
});

// we gain nothing by removing from hdf5
Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
GridDatabase.gfeBaseDataDir, parmId, times);
for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
.getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
times);
for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
.entrySet()) {
File hdf5File = entry.getKey();
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);

try {
String[] groupsToDelete = entry.getValue();
String[] groupsToDelete = entry.getValue().getSecond();
for (String grp : groupsToDelete) {
dataStore.delete(grp);
}
@@ -919,7 +922,7 @@ public class GFEDao extends DefaultPluginDao {
(Date) obj);
try {
GridDatabase db = GridParmManager.getDb(dbId);
if (db != null && !dbInventory.contains(dbId)) {
if ((db != null) && !dbInventory.contains(dbId)) {
dbInventory.add(dbId);
}
} catch (GfeException e) {
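For orientation, the hunk above reshapes the HDF5 deletion loop around the new Pair-valued map. Below is a rough stand-alone sketch of that pattern; the DataStore interface, the local Pair record (with first()/second() standing in for getFirst()/getSecond()), and the factory parameter are illustrative stand-ins, not the real IDataStore/DataStoreFactory API.

    import java.io.File;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Function;

    public class DeleteGroupsSketch {
        /** Stand-in pair: (time ranges, HDF5 group names) stored under one file. */
        record Pair<F, S>(F first, S second) {}

        /** Stand-in for a deletable data store bound to one HDF5 file. */
        interface DataStore {
            void delete(String group);
        }

        /**
         * Delete every group listed for each HDF5 file. Only the group-name half
         * of the pair is needed here; the time ranges are carried for the
         * retrieval code paths.
         */
        static void deleteGroups(Map<File, Pair<List<String>, String[]>> fileMap,
                Function<File, DataStore> dataStoreFactory) {
            for (Map.Entry<File, Pair<List<String>, String[]>> entry : fileMap.entrySet()) {
                DataStore dataStore = dataStoreFactory.apply(entry.getKey());
                for (String grp : entry.getValue().second()) {
                    dataStore.delete(grp);
                }
            }
        }
    }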
@@ -36,6 +36,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
@@ -173,27 +174,40 @@ public abstract class GridDatabase {
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
FloatDataRecord[] scalarData = null;
Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
parmId, times);

try {
int index = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
(int) (1.25 * times.size()) + 1);

for (IDataRecord record : rawData) {
if (index < scalarData.length) {
scalarData[index++] = (FloatDataRecord) record;
}
// loop over the dataStores and their respective groups to pull all
// data, stored into records to reorder requests by times
for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
.entrySet()) {
Pair<List<TimeRange>, String[]> pair = entry.getValue();
String[] groups = pair.getSecond();

IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);

if (rawData.length != groups.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ ((double) rawData.length / groups.length));
}

int count = 0;
for (TimeRange timeRange : pair.getFirst()) {
records.put(timeRange, (FloatDataRecord) rawData[count++]);
}
}

if (index != scalarData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (index / scalarData.length));
scalarData = new FloatDataRecord[times.size()];
int count = 0;
for (TimeRange timeRange : times) {
scalarData[count++] = records.get(timeRange);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
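The recurring pattern in these retrieval methods is: fetch all groups per data store, key the resulting records by their time range, then rebuild the output array in the caller's requested order. A minimal self-contained sketch of that reordering step follows; the TimeRange/FloatRecord stand-ins are hypothetical (in the real code the time ranges come from pair.getFirst(), not from the record itself).

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ReorderByTimeRangeSketch {
        record TimeRange(long start, long end) {}
        record FloatRecord(TimeRange range, float[] data) {}

        /**
         * Rebuild results in the order of the requested time ranges, regardless
         * of the order the records came back from the individual data stores.
         */
        static FloatRecord[] reorder(List<TimeRange> requested, List<FloatRecord> retrieved) {
            // pre-size the map for the default 0.75 load factor, as the diff does
            Map<TimeRange, FloatRecord> records =
                    new HashMap<>((int) (1.25 * requested.size()) + 1);
            for (FloatRecord rec : retrieved) {
                records.put(rec.range(), rec);
            }

            FloatRecord[] ordered = new FloatRecord[requested.size()];
            int count = 0;
            for (TimeRange tr : requested) {
                ordered[count++] = records.get(tr); // null if a range was not retrieved
            }
            return ordered;
        }
    }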
@@ -211,35 +225,55 @@ public abstract class GridDatabase {
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
FloatDataRecord[][] vectorData = null;
Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
parmId, times);

try {
int index = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
(int) (1.25 * times.size()) + 1);

for (IDataRecord rec : rawData) {
if (index < vectorData.length * 2) {
// loop over the dataStores and their respective groups to pull all
// data, stored into records to reorder requests by times
for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
.entrySet()) {
Pair<List<TimeRange>, String[]> pair = entry.getValue();
String[] groups = pair.getSecond();

IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);

if (rawData.length != groups.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ ((double) rawData.length / groups.length));
}

// iterate over the data from this dataStore adding it records
int count = 0;
for (TimeRange timeRange : pair.getFirst()) {
FloatDataRecord[] recs = new FloatDataRecord[2];
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];
if ("Mag".equals(rec.getName())) {
vectorData[index++ / 2][0] = (FloatDataRecord) rec;
recs[0] = (FloatDataRecord) rec;
} else if ("Dir".equals(rec.getName())) {
vectorData[index++ / 2][1] = (FloatDataRecord) rec;
recs[1] = (FloatDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
}
}
records.put(timeRange, recs);
count++;
}
}

if (index != vectorData.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ (index / vectorData.length));
vectorData = new FloatDataRecord[times.size()][2];
int count = 0;
for (TimeRange timeRange : times) {
vectorData[count++] = records.get(timeRange);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
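Each vector time range comes back as two datasets, and the new loop above pairs them by dataset name ("Mag"/"Dir") rather than by arrival index. A minimal sketch of that pairing step, with hypothetical stand-in types for TimeRange and IDataRecord:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MagDirPairingSketch {
        record TimeRange(long start, long end) {}
        record DataRecord(String name, float[] data) {}

        /**
         * Pair Mag/Dir datasets per time range: rawData is assumed to hold two
         * records per requested range, in the same order the ranges were requested.
         */
        static Map<TimeRange, DataRecord[]> pairMagDir(List<TimeRange> ranges,
                DataRecord[] rawData) {
            if (rawData.length != ranges.size() * 2) {
                throw new IllegalArgumentException("Expected 2 datasets per time range");
            }
            Map<TimeRange, DataRecord[]> records =
                    new HashMap<>((int) (1.25 * ranges.size()) + 1);
            int count = 0;
            for (TimeRange tr : ranges) {
                DataRecord[] recs = new DataRecord[2];
                for (int i = 0; i < 2; i++) {
                    DataRecord rec = rawData[count * 2 + i];
                    if ("Mag".equals(rec.name())) {
                        recs[0] = rec;
                    } else if ("Dir".equals(rec.name())) {
                        recs[1] = rec;
                    } else {
                        throw new IllegalArgumentException("Unknown dataset: " + rec.name());
                    }
                }
                records.put(tr, recs);
                count++;
            }
            return records;
        }
    }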
@@ -257,39 +291,56 @@ public abstract class GridDatabase {
public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
ByteDataRecord[][] byteRecords = new ByteDataRecord[times.size()][2];
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
ByteDataRecord[][] byteRecords = null;
Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
parmId, times);

try {
int index = 0;
// loop over the dataStores and their respective groups to pull all
// data
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
Map<TimeRange, ByteDataRecord[]> records = new HashMap<TimeRange, ByteDataRecord[]>(
(int) (1.25 * times.size()) + 1);

for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
.entrySet()) {
Pair<List<TimeRange>, String[]> pair = entry.getValue();
String[] groups = pair.getSecond();

IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);

if (rawData.length != groups.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ ((double) rawData.length / groups.length));
}

// iterate over the data from this dataStore adding it records
int count = 0;
for (TimeRange timeRange : pair.getFirst()) {
ByteDataRecord[] recs = new ByteDataRecord[2];
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];

// iterate over the data from this dataStore adding it
// byteRecords
for (IDataRecord rec : rawData) {
if (index < byteRecords.length * 2) {
if ("Data".equals(rec.getName())) {
byteRecords[index++ / 2][0] = (ByteDataRecord) rec;
recs[0] = (ByteDataRecord) rec;
} else if ("Keys".equals(rec.getName())) {
byteRecords[index++ / 2][1] = (ByteDataRecord) rec;
recs[1] = (ByteDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
"Unknown dataset retrieved for vector data. Valid values: Data, Keys Received: "
+ rec.getName());
}
}
records.put(timeRange, recs);
count++;
}
}

if (index != byteRecords.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ (index / byteRecords.length));
byteRecords = new ByteDataRecord[times.size()][2];
int count = 0;
for (TimeRange timeRange : times) {
byteRecords[count++] = records.get(timeRange);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -298,14 +349,16 @@ public abstract class GridDatabase {
return byteRecords;
}

protected Map<IDataStore, String[]> getDataStoreAndGroups(ParmID parmId,
List<TimeRange> times) {
Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
GridDatabase.gfeBaseDataDir, parmId, times);
protected Map<IDataStore, Pair<List<TimeRange>, String[]>> getDataStoreAndGroups(
ParmID parmId, List<TimeRange> times) {
Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
.getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
times);
// size hashMap accounting for load factor
Map<IDataStore, String[]> rval = new HashMap<IDataStore, String[]>(
Map<IDataStore, Pair<List<TimeRange>, String[]>> rval = new HashMap<IDataStore, Pair<List<TimeRange>, String[]>>(
(int) (fileMap.size() * 1.25) + 1);
for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
.entrySet()) {
rval.put(DataStoreFactory.getDataStore(entry.getKey()),
entry.getValue());
}
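The `(int) (size * 1.25) + 1` expression that appears throughout the new code pre-sizes each HashMap to roughly account for the default 0.75 load factor, as the diff's comment says, which keeps rehashing to a minimum for typical sizes. A tiny illustration with hypothetical names (note that HashMap still rounds the capacity up to a power of two, so this is a heuristic, not a hard guarantee):

    import java.util.HashMap;
    import java.util.Map;

    public class PresizedMapSketch {
        /** Rough initial capacity for ~expectedEntries under a 0.75 load factor. */
        static int capacityFor(int expectedEntries) {
            return (int) (expectedEntries * 1.25) + 1;
        }

        public static void main(String[] args) {
            int expected = 40;
            Map<String, Integer> map = new HashMap<>(capacityFor(expected));
            for (int i = 0; i < expected; i++) {
                map.put("key" + i, i);
            }
            System.out.println(map.size()); // 40
        }
    }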
@@ -67,6 +67,7 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
import com.raytheon.uf.common.dataplugin.gfe.weather.WeatherKey;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
@@ -2115,45 +2116,56 @@ public class IFPGridDatabase extends GridDatabase {
@Override
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
FloatDataRecord[] scalarData = null;
Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
parmId, times);

try {
// overall index into scalar data
int scalarDataIndex = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
(int) (1.25 * times.size()) + 1);

for (IDataRecord rec : rawData) {
if (scalarDataIndex < scalarData.length) {
if (rec instanceof FloatDataRecord) {
scalarData[scalarDataIndex++] = (FloatDataRecord) rec;
} else if (gridConfig == null) {
throw new IllegalArgumentException(
"Data array for "
+ parmId.getParmName()
+ " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
// Convert to a FloatDataRecord for internal use
ParmStorageInfo psi = parmStorageInfo.get(parmId
.getCompositeName());
scalarData[scalarDataIndex++] = storageToFloat(rec,
psi);
}
// loop over the dataStores and their respective groups to pull all
// data, stored into records to reorder requests by times
for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
.entrySet()) {
Pair<List<TimeRange>, String[]> pair = entry.getValue();
String[] groups = pair.getSecond();

IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);

if (rawData.length != groups.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ ((double) rawData.length / groups.length));
}

int count = 0;
for (TimeRange timeRange : pair.getFirst()) {
IDataRecord rec = rawData[count++];

if (rec instanceof FloatDataRecord) {
records.put(timeRange, (FloatDataRecord) rec);
} else if (gridConfig == null) {
throw new IllegalArgumentException("Data array for "
+ parmId.getParmName() + " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
// Convert to a FloatDataRecord for internal use
ParmStorageInfo psi = parmStorageInfo.get(parmId
.getCompositeName());
records.put(timeRange, storageToFloat(rec, psi));
}
}
}

if (scalarDataIndex != scalarData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (scalarDataIndex / scalarData.length));
scalarData = new FloatDataRecord[times.size()];
int count = 0;
for (TimeRange timeRange : times) {
scalarData[count++] = records.get(timeRange);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -2166,29 +2178,40 @@ public class IFPGridDatabase extends GridDatabase {
@Override
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
FloatDataRecord[][] vectorData = null;
Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
parmId, times);

try {
// overall index into vector data
int vectorDataIndex = 0;
// iterate over dataStore and their respective groups for the
// requested parm/time ranges
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
(int) (1.25 * times.size()) + 1);

// iterate over the data retrieved from this dataStore for the
// groups
for (int i = 0; i < rawData.length; i += 2, vectorDataIndex++) {
// loop over the dataStores and their respective groups to pull all
// data, stored into records to reorder requests by times
for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
.entrySet()) {
Pair<List<TimeRange>, String[]> pair = entry.getValue();
String[] groups = pair.getSecond();

IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
Request.ALL);

if (rawData.length != groups.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ ((double) rawData.length / groups.length));
}

// iterate over the data from this dataStore adding it records
int count = 0;
for (TimeRange timeRange : pair.getFirst()) {
IDataRecord magRec = null;
IDataRecord dirRec = null;

// Should be vector data and each group should have had a
// Dir and Mag dataset
for (int j = 0; j < 2; j++) {
IDataRecord rec = rawData[i + j];
for (int i = 0; i < 2; i++) {
IDataRecord rec = rawData[count * 2 + i];
if ("Mag".equals(rec.getName())) {
magRec = rec;
} else if ("Dir".equals(rec.getName())) {
@@ -2200,10 +2223,12 @@ public class IFPGridDatabase extends GridDatabase {
}
}

FloatDataRecord[] recs = new FloatDataRecord[2];

if (magRec.getClass() == dirRec.getClass()) {
if (magRec instanceof FloatDataRecord) {
vectorData[vectorDataIndex][0] = (FloatDataRecord) magRec;
vectorData[vectorDataIndex][1] = (FloatDataRecord) dirRec;
recs[0] = (FloatDataRecord) magRec;
recs[1] = (FloatDataRecord) dirRec;
} else if (gridConfig == null) {
throw new IllegalArgumentException(
"Data array for "
@@ -2224,11 +2249,12 @@ public class IFPGridDatabase extends GridDatabase {
VECTOR_DIR_DATA_OFFSET,
VECTOR_DIR_DATA_MULTIPLIER,
magStorageInfo.storageType());
vectorData[vectorDataIndex][0] = storageToFloat(
magRec, magStorageInfo);
vectorData[vectorDataIndex][1] = storageToFloat(
dirRec, dirStorageInfo);
recs[0] = storageToFloat(magRec, magStorageInfo);
recs[1] = storageToFloat(dirRec, dirStorageInfo);
}

records.put(timeRange, recs);
count++;
} else {
throw new IllegalArgumentException(
"Magnitude and direction grids are not of the same type.");
@@ -2236,10 +2262,10 @@ public class IFPGridDatabase extends GridDatabase {
}
}

if (vectorDataIndex != vectorData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ (vectorDataIndex / vectorData.length) * 2);
vectorData = new FloatDataRecord[times.size()][2];
int count = 0;
for (TimeRange timeRange : times) {
vectorData[count++] = records.get(timeRange);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -34,6 +34,7 @@ from zones2cities import *
# Date         Ticket#    Engineer    Description
# ------------ ---------- ----------- --------------------------
# 01/08/10     #1209      randerso    Initial Creation.
# 10/19/12     #1091      dgilling    Support localMaps.py.
#
#
#
@@ -133,25 +134,25 @@ def createAreaDictionary(outputDir, mapDict):
if len(ean):
try:
d = {}
if att.containsKey('ZONE') and att.containsKey('STATE'):
d['ugcCode'] = str(att.get('STATE')) + "Z" + str(att.get('ZONE'))
elif att.containsKey('ID'):
d['ugcCode'] = str(att.get('ID'))
elif att.containsKey('FIPS') and att.containsKey('STATE') and \
att.containsKey('COUNTYNAME'):
d['ugcCode'] = str(att.get('STATE')) + "C" + str(att.get('FIPS'))[-3:]
d['ugcName'] = string.strip(str(att.get('COUNTYNAME')))
if att.containsKey('zone') and att.containsKey('state'):
d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))
elif att.containsKey('id'):
d['ugcCode'] = str(att.get('id'))
elif att.containsKey('fips') and att.containsKey('state') and \
att.containsKey('countyname'):
d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]
d['ugcName'] = string.strip(str(att.get('countyname')))
else:
continue

if att.containsKey('STATE'):
d["stateAbbr"] = str(att.get('STATE'))
if att.containsKey('state'):
d["stateAbbr"] = str(att.get('state'))

if att.containsKey('NAME'):
d["ugcName"] = string.strip(str(att.get('NAME')))
if att.containsKey('name'):
d["ugcName"] = string.strip(str(att.get('name')))

if att.containsKey('TIME_ZONE'):
tzvalue = getRealTimeZone(str(att.get('TIME_ZONE')))
if att.containsKey('time_zone'):
tzvalue = getRealTimeZone(str(att.get('time_zone')))
if tzvalue is not None:
d["ugcTimeZone"] = tzvalue
@@ -315,17 +316,17 @@ def createCityLocation(outputDir, mapDict):
attList = mapDict[mapname]
for att in attList:
#LogStream.logProblem("att:", att)
ean = att['NAME']
state = att['ST']
county_FIP = att['COUNTY_FIP']
ean = att['name']
state = att['st']
county_FIP = att['county_fip']

if len(ean) and len(state) and len(county_FIP):
fip = state + 'C' + county_FIP
if not citydict.has_key(fip):
citydict[fip] = {}
try:
latitude = float(string.strip(att['LAT']))
longitude = float(string.strip(att['LON']))
latitude = float(string.strip(att['lat']))
longitude = float(string.strip(att['lon']))
citydict[fip][ean.upper()] = (latitude, longitude)
except:
LogStream.logProblem("Problem creating city location ",
@@ -36,6 +36,7 @@ Export-Package: com.raytheon.uf.common.dataplugin.gfe,
 com.raytheon.uf.common.dataplugin.gfe.slice,
 com.raytheon.uf.common.dataplugin.gfe.textproduct,
 com.raytheon.uf.common.dataplugin.gfe.time,
 com.raytheon.uf.common.dataplugin.gfe.type,
 com.raytheon.uf.common.dataplugin.gfe.util,
 com.raytheon.uf.common.dataplugin.gfe.weather,
 com.raytheon.uf.common.dataplugin.gfe.weatherelement
@@ -17,7 +17,7 @@
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.viz.gfe.types;
package com.raytheon.uf.common.dataplugin.gfe.type;

/**
* TODO Add Description
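The hunk above is the Pair utility being relocated from com.raytheon.viz.gfe.types to com.raytheon.uf.common.dataplugin.gfe.type so the server-side code can share it. Its body is not shown in this diff; judging from the two-argument constructor and the getFirst()/getSecond() calls elsewhere in the change, it presumably looks roughly like the sketch below (an inference, not the actual source).

    /** Minimal two-element tuple, sketched from its usage in this commit. */
    public class Pair<F, S> {
        private final F first;
        private final S second;

        public Pair(F first, S second) {
            this.first = first;
            this.second = second;
        }

        public F getFirst() {
            return first;
        }

        public S getSecond() {
            return second;
        }
    }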
@@ -41,6 +41,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
import com.raytheon.uf.common.time.TimeRange;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
@@ -170,7 +171,8 @@ public class GfeUtil {
public static File getHdf5File(String baseDir, ParmID parmId, TimeRange time) {
List<TimeRange> list = new ArrayList<TimeRange>(1);
list.add(time);
Map<File, String[]> map = getHdf5FilesAndGroups(baseDir, parmId, list);
Map<File, Pair<List<TimeRange>, String[]>> map = getHdf5FilesAndGroups(
baseDir, parmId, list);
File rval = null;

if (!map.isEmpty()) {
@@ -191,14 +193,14 @@ public class GfeUtil {
* @param times
* @return
*/
public static Map<File, String[]> getHdf5FilesAndGroups(String baseDir,
ParmID parmId, List<TimeRange> times) {
public static Map<File, Pair<List<TimeRange>, String[]>> getHdf5FilesAndGroups(
String baseDir, ParmID parmId, List<TimeRange> times) {
DatabaseID dbId = parmId.getDbId();
File directory = getHdf5Dir(baseDir, dbId);
boolean isSingleton = DatabaseID.NO_MODEL_TIME.equals(dbId
.getModelTime());

Map<File, String[]> rval = null;
Map<File, Pair<List<TimeRange>, String[]>> rval = null;
if (isSingleton) {
// file per parm per day
StringBuffer tmp = new StringBuffer(40);
@@ -234,13 +236,16 @@ public class GfeUtil {
}

// initialize map size, accounting for load factor
rval = new HashMap<File, String[]>(
rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(
(int) (dateMap.size() * 1.25) + 1);
for (Map.Entry<String, List<TimeRange>> entry : dateMap.entrySet()) {
tmp.setLength(0);
tmp.append(preString).append(entry.getKey()).append(postString);
File h5File = new File(directory, tmp.toString());
rval.put(h5File, getHDF5Groups(parmId, entry.getValue()));
Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
entry.getValue(), getHDF5Groups(parmId,
entry.getValue()));
rval.put(h5File, p);
}
} else {
// file per parm
@@ -250,8 +255,10 @@ public class GfeUtil {
fileName.append(parmId.getParmLevel()).append(
DATASTORE_FILE_EXTENSION);
File h5File = new File(directory, fileName.toString());
rval = new HashMap<File, String[]>(2);
rval.put(h5File, getHDF5Groups(parmId, times));
rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(2);
Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
times, getHDF5Groups(parmId, times));
rval.put(h5File, p);
}

return rval;