12.11.1-10 baseline

Former-commit-id: 0ba69044a2 [formerly e845c605e6 [formerly 9dd41e6322bad3e7dced1a32867d9a7f886d70cf]]
Former-commit-id: e845c605e6
Former-commit-id: 63dddc5f5f

parent 59e5b55b96
commit 7779b584c0

9 changed files with 251 additions and 160 deletions
@@ -39,7 +39,7 @@ import org.eclipse.swt.widgets.Control;
 import org.eclipse.swt.widgets.Label;
 import org.eclipse.swt.widgets.Shell;
 
-import com.raytheon.viz.gfe.types.Pair;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.viz.gfe.ui.zoneselector.ZoneSelector;
 import com.raytheon.viz.ui.dialogs.CaveJFACEDialog;
 import com.raytheon.viz.ui.widgets.ToggleSelectList;
@@ -1237,11 +1237,11 @@ elif SID == "HFO":
 
 # San Juan OCONUS
 elif SID == "SJU":
-SATDATA = [("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
+SATDATA = [("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
 
 # Guam OCONUS
 elif SID == "GUM":
@@ -1249,16 +1249,16 @@ elif SID == "GUM":
 
 #CONUS sites
 else:
-SATDATA = [("NESDIS/GOES-15(P)/West CONUS/Imager Visible", "visibleWest"),
-("NESDIS/GOES-15(P)/West CONUS/Imager 11 micron IR", "ir11West"),
-("NESDIS/GOES-15(P)/West CONUS/Imager 12 micron IR", "ir13West"),
-("NESDIS/GOES-15(P)/West CONUS/Imager 3.9 micron IR", "ir39West"),
-("NESDIS/GOES-15(P)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
-("NESDIS/GOES-14(O)/East CONUS/Imager Visible", "visibleEast"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 11 micron IR", "ir11East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 12 micron IR", "ir13East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 3.9 micron IR", "ir39East"),
-("NESDIS/GOES-14(O)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
+SATDATA = [("NESDIS/GOES-11(L)/West CONUS/Imager Visible", "visibleWest"),
+("NESDIS/GOES-11(L)/West CONUS/Imager 11 micron IR", "ir11West"),
+("NESDIS/GOES-11(L)/West CONUS/Imager 12 micron IR", "ir13West"),
+("NESDIS/GOES-11(L)/West CONUS/Imager 3.9 micron IR", "ir39West"),
+("NESDIS/GOES-11(L)/West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
+("NESDIS/GOES-13(N)/East CONUS/Imager Visible", "visibleEast"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 11 micron IR", "ir11East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 12 micron IR", "ir13East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 3.9 micron IR", "ir39East"),
+("NESDIS/GOES-13(N)/East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
 
 #---------------------------------------------------------------------------
 #
@@ -70,6 +70,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
 import com.raytheon.uf.common.dataplugin.gfe.server.notify.GridUpdateNotification;
 import com.raytheon.uf.common.dataplugin.gfe.server.notify.LockNotification;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.dataplugin.grib.GribModel;
 import com.raytheon.uf.common.dataplugin.grib.GribRecord;
@@ -425,14 +426,16 @@ public class GFEDao extends DefaultPluginDao {
 });
 
 // we gain nothing by removing from hdf5
-Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
-GridDatabase.gfeBaseDataDir, parmId, times);
-for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
+.getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
+times);
+for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
+.entrySet()) {
 File hdf5File = entry.getKey();
 IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
 
 try {
-String[] groupsToDelete = entry.getValue();
+String[] groupsToDelete = entry.getValue().getSecond();
 for (String grp : groupsToDelete) {
 dataStore.delete(grp);
 }
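The hunk above changes the purge path so that each HDF5 file maps to a Pair of (time ranges, group names); deletion only consumes the second element, while the time ranges are carried for callers that need to match results back to the request. A minimal, self-contained sketch of that shape, using stand-in types rather than the real GfeUtil/IDataStore classes:

import java.util.List;
import java.util.Map;

public class GroupPurgeSketch {
    // stand-in for the (times, groups) pair the new map carries per HDF5 file
    record TimesAndGroups(List<String> times, String[] groups) {}

    // stand-in for IDataStore.delete(group)
    interface DataStore {
        void delete(String group);
    }

    public static void main(String[] args) {
        Map<String, TimesAndGroups> fileMap = Map.of(
                "example.h5",
                new TimesAndGroups(List.of("tr0", "tr1"),
                        new String[] { "/GroupA", "/GroupB" }));

        DataStore dataStore = group -> System.out.println("delete " + group);

        // mirrors the new loop: deletion only needs the group names;
        // the time ranges ride along for callers that must keep request order
        for (Map.Entry<String, TimesAndGroups> entry : fileMap.entrySet()) {
            for (String group : entry.getValue().groups()) {
                dataStore.delete(group);
            }
        }
    }
}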
@@ -931,7 +934,7 @@ public class GFEDao extends DefaultPluginDao {
 (Date) result.getRowColumnValue(i, 0));
 try {
 GridDatabase db = GridParmManager.getDb(dbId);
-if (db != null && !dbInventory.contains(dbId)) {
+if ((db != null) && !dbInventory.contains(dbId)) {
 dbInventory.add(dbId);
 }
 } catch (GfeException e) {
@@ -36,6 +36,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.exception.GfeException;
 import com.raytheon.uf.common.dataplugin.gfe.server.message.ServerResponse;
 import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.datastorage.DataStoreFactory;
 import com.raytheon.uf.common.datastorage.IDataStore;
@@ -173,27 +174,40 @@ public abstract class GridDatabase {
 
 public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
 List<TimeRange> times) throws GfeException {
-FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
-Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-times);
+FloatDataRecord[] scalarData = null;
+Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+parmId, times);
 
 try {
-int index = 0;
-for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-IDataRecord[] rawData = entry.getKey().retrieveGroups(
-entry.getValue(), Request.ALL);
-
-for (IDataRecord record : rawData) {
-if (index < scalarData.length) {
-scalarData[index++] = (FloatDataRecord) record;
-}
-}
-}
-
-if (index != scalarData.length) {
-throw new IllegalArgumentException(
-"Invalid number of dataSets returned expected 1 per group, received: "
-+ (index / scalarData.length));
-}
+Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
+(int) (1.25 * times.size()) + 1);
+
+// loop over the dataStores and their respective groups to pull all
+// data, stored into records to reorder requests by times
+for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+.entrySet()) {
+Pair<List<TimeRange>, String[]> pair = entry.getValue();
+String[] groups = pair.getSecond();
+
+IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+Request.ALL);
+
+if (rawData.length != groups.length) {
+throw new IllegalArgumentException(
+"Invalid number of dataSets returned expected 1 per group, received: "
++ ((double) rawData.length / groups.length));
+}
+
+int count = 0;
+for (TimeRange timeRange : pair.getFirst()) {
+records.put(timeRange, (FloatDataRecord) rawData[count++]);
+}
+}
+
+scalarData = new FloatDataRecord[times.size()];
+int count = 0;
+for (TimeRange timeRange : times) {
+scalarData[count++] = records.get(timeRange);
+}
 } catch (Exception e) {
 throw new GfeException("Unable to get data from HDF5 for ParmID: "
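The rewritten retrieval above no longer assumes the datastore returns records in the order the time ranges were requested: each record is keyed by its TimeRange in a temporary map, and the result array is filled in the caller's order afterwards. A small, self-contained sketch of that reordering idea, with plain strings standing in for TimeRange and FloatDataRecord:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ReorderByTimeSketch {
    public static void main(String[] args) {
        // the caller's requested order (stand-ins for TimeRange objects)
        List<String> times = List.of("t0", "t1", "t2");

        // results arrive grouped by file and possibly out of order;
        // the 1.25 sizing mirrors the hunk's allowance for HashMap load factor
        Map<String, String> records = new HashMap<>((int) (1.25 * times.size()) + 1);
        records.put("t2", "record-for-t2");
        records.put("t0", "record-for-t0");
        records.put("t1", "record-for-t1");

        // rebuild the output in request order, as the new code does with
        // scalarData[count++] = records.get(timeRange)
        String[] ordered = new String[times.size()];
        int count = 0;
        for (String time : times) {
            ordered[count++] = records.get(time);
        }

        for (String record : ordered) {
            System.out.println(record);
        }
    }
}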
@@ -211,35 +225,55 @@ public abstract class GridDatabase {
 
 public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
 List<TimeRange> times) throws GfeException {
-FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
-Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-times);
+FloatDataRecord[][] vectorData = null;
+Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+parmId, times);
 
 try {
-int index = 0;
-for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-IDataRecord[] rawData = entry.getKey().retrieveGroups(
-entry.getValue(), Request.ALL);
-
-for (IDataRecord rec : rawData) {
-if (index < vectorData.length * 2) {
+Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
+(int) (1.25 * times.size()) + 1);
+
+// loop over the dataStores and their respective groups to pull all
+// data, stored into records to reorder requests by times
+for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+.entrySet()) {
+Pair<List<TimeRange>, String[]> pair = entry.getValue();
+String[] groups = pair.getSecond();
+
+IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+Request.ALL);
+
+if (rawData.length != groups.length * 2) {
+throw new IllegalArgumentException(
+"Invalid number of dataSets returned expected per group, received: "
++ ((double) rawData.length / groups.length));
+}
+
+// iterate over the data from this dataStore adding it records
+int count = 0;
+for (TimeRange timeRange : pair.getFirst()) {
+FloatDataRecord[] recs = new FloatDataRecord[2];
+for (int i = 0; i < 2; i++) {
+IDataRecord rec = rawData[count * 2 + i];
 if ("Mag".equals(rec.getName())) {
-vectorData[index++ / 2][0] = (FloatDataRecord) rec;
+recs[0] = (FloatDataRecord) rec;
 } else if ("Dir".equals(rec.getName())) {
-vectorData[index++ / 2][1] = (FloatDataRecord) rec;
+recs[1] = (FloatDataRecord) rec;
 } else {
 throw new IllegalArgumentException(
 "Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
 + rec.getName());
 }
 }
+records.put(timeRange, recs);
+count++;
 }
 }
 
-if (index != vectorData.length * 2) {
-throw new IllegalArgumentException(
-"Invalid number of dataSets returned expected per group, received: "
-+ (index / vectorData.length));
+vectorData = new FloatDataRecord[times.size()][2];
+int count = 0;
+for (TimeRange timeRange : times) {
+vectorData[count++] = records.get(timeRange);
 }
 } catch (Exception e) {
 throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -257,39 +291,56 @@ public abstract class GridDatabase {
 
 public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
 List<TimeRange> times) throws GfeException {
-ByteDataRecord[][] byteRecords = new ByteDataRecord[times.size()][2];
-Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-times);
+ByteDataRecord[][] byteRecords = null;
+Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+parmId, times);
 
 try {
-int index = 0;
 // loop over the dataStores and their respective groups to pull all
 // data
-for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-IDataRecord[] rawData = entry.getKey().retrieveGroups(
-entry.getValue(), Request.ALL);
-
-// iterate over the data from this dataStore adding it
-// byteRecords
-for (IDataRecord rec : rawData) {
-if (index < byteRecords.length * 2) {
+Map<TimeRange, ByteDataRecord[]> records = new HashMap<TimeRange, ByteDataRecord[]>(
+(int) (1.25 * times.size()) + 1);
+
+for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+.entrySet()) {
+Pair<List<TimeRange>, String[]> pair = entry.getValue();
+String[] groups = pair.getSecond();
+
+IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+Request.ALL);
+
+if (rawData.length != groups.length * 2) {
+throw new IllegalArgumentException(
+"Invalid number of dataSets returned expected 2 per group, received: "
++ ((double) rawData.length / groups.length));
+}
+
+// iterate over the data from this dataStore adding it records
+int count = 0;
+for (TimeRange timeRange : pair.getFirst()) {
+ByteDataRecord[] recs = new ByteDataRecord[2];
+for (int i = 0; i < 2; i++) {
+IDataRecord rec = rawData[count * 2 + i];
+
 if ("Data".equals(rec.getName())) {
-byteRecords[index++ / 2][0] = (ByteDataRecord) rec;
+recs[0] = (ByteDataRecord) rec;
 } else if ("Keys".equals(rec.getName())) {
-byteRecords[index++ / 2][1] = (ByteDataRecord) rec;
+recs[1] = (ByteDataRecord) rec;
 } else {
 throw new IllegalArgumentException(
-"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+"Unknown dataset retrieved for vector data. Valid values: Data, Keys Received: "
 + rec.getName());
 }
 }
+records.put(timeRange, recs);
+count++;
 }
 }
 
-if (index != byteRecords.length * 2) {
-throw new IllegalArgumentException(
-"Invalid number of dataSets returned expected per group, received: "
-+ (index / byteRecords.length));
+byteRecords = new ByteDataRecord[times.size()][2];
+int count = 0;
+for (TimeRange timeRange : times) {
+byteRecords[count++] = records.get(timeRange);
 }
 } catch (Exception e) {
 throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -298,14 +349,16 @@ public abstract class GridDatabase {
 return byteRecords;
 }
 
-protected Map<IDataStore, String[]> getDataStoreAndGroups(ParmID parmId,
-List<TimeRange> times) {
-Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
-GridDatabase.gfeBaseDataDir, parmId, times);
+protected Map<IDataStore, Pair<List<TimeRange>, String[]>> getDataStoreAndGroups(
+ParmID parmId, List<TimeRange> times) {
+Map<File, Pair<List<TimeRange>, String[]>> fileMap = GfeUtil
+.getHdf5FilesAndGroups(GridDatabase.gfeBaseDataDir, parmId,
+times);
 // size hashMap accounting for load factor
-Map<IDataStore, String[]> rval = new HashMap<IDataStore, String[]>(
+Map<IDataStore, Pair<List<TimeRange>, String[]>> rval = new HashMap<IDataStore, Pair<List<TimeRange>, String[]>>(
 (int) (fileMap.size() * 1.25) + 1);
-for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
+for (Map.Entry<File, Pair<List<TimeRange>, String[]>> entry : fileMap
+.entrySet()) {
 rval.put(DataStoreFactory.getDataStore(entry.getKey()),
 entry.getValue());
 }
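Both getDataStoreAndGroups above and the GfeUtil changes later size their HashMaps as (int) (expected * 1.25) + 1, per the "accounting for load factor" comments. With java.util.HashMap's default load factor of 0.75 and its power-of-two table sizing, that padding normally lets the expected number of entries go in without a resize, though the exact bound would be expected / 0.75 + 1. A self-contained sketch of the arithmetic, assuming those standard HashMap defaults:

public class MapSizingSketch {
    public static void main(String[] args) {
        int expected = 16; // entries we plan to insert

        // capacity padded for the default 0.75 load factor, as in the diff
        int requestedCapacity = (int) (expected * 1.25) + 1; // 21

        // java.util.HashMap rounds the requested capacity up to a power of two
        int tableSize = Integer.highestOneBit(requestedCapacity - 1) * 2; // 32
        int resizeThreshold = (int) (tableSize * 0.75f); // 24

        System.out.println("requested capacity: " + requestedCapacity);
        System.out.println("actual table size:  " + tableSize);
        System.out.println("resize threshold:   " + resizeThreshold);
        System.out.println("holds " + expected + " entries without rehash: "
                + (expected <= resizeThreshold));
    }
}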
@@ -67,6 +67,7 @@ import com.raytheon.uf.common.dataplugin.gfe.slice.IGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.ScalarGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.VectorGridSlice;
 import com.raytheon.uf.common.dataplugin.gfe.slice.WeatherGridSlice;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.dataplugin.gfe.util.GfeUtil;
 import com.raytheon.uf.common.dataplugin.gfe.weather.WeatherKey;
 import com.raytheon.uf.common.datastorage.DataStoreFactory;
@@ -2115,45 +2116,56 @@ public class IFPGridDatabase extends GridDatabase {
 @Override
 public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
 List<TimeRange> times) throws GfeException {
-FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
-Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-times);
+FloatDataRecord[] scalarData = null;
+Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+parmId, times);
 
 try {
-// overall index into scalar data
-int scalarDataIndex = 0;
-for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-IDataRecord[] rawData = entry.getKey().retrieveGroups(
-entry.getValue(), Request.ALL);
-
-for (IDataRecord rec : rawData) {
-if (scalarDataIndex < scalarData.length) {
-if (rec instanceof FloatDataRecord) {
-scalarData[scalarDataIndex++] = (FloatDataRecord) rec;
-} else if (gridConfig == null) {
-throw new IllegalArgumentException(
-"Data array for "
-+ parmId.getParmName()
-+ " "
-+ parmId.getParmLevel()
-+ " is not a float array, but database "
-+ toString()
-+ " does not contain a grid configuration.");
-} else {
-// Convert to a FloatDataRecord for internal use
-ParmStorageInfo psi = parmStorageInfo.get(parmId
-.getCompositeName());
-scalarData[scalarDataIndex++] = storageToFloat(rec,
-psi);
-}
-}
+Map<TimeRange, FloatDataRecord> records = new HashMap<TimeRange, FloatDataRecord>(
+(int) (1.25 * times.size()) + 1);
+
+// loop over the dataStores and their respective groups to pull all
+// data, stored into records to reorder requests by times
+for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+.entrySet()) {
+Pair<List<TimeRange>, String[]> pair = entry.getValue();
+String[] groups = pair.getSecond();
+
+IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+Request.ALL);
+
+if (rawData.length != groups.length) {
+throw new IllegalArgumentException(
+"Invalid number of dataSets returned expected 1 per group, received: "
++ ((double) rawData.length / groups.length));
+}
+
+int count = 0;
+for (TimeRange timeRange : pair.getFirst()) {
+IDataRecord rec = rawData[count++];
+
+if (rec instanceof FloatDataRecord) {
+records.put(timeRange, (FloatDataRecord) rec);
+} else if (gridConfig == null) {
+throw new IllegalArgumentException("Data array for "
++ parmId.getParmName() + " "
++ parmId.getParmLevel()
++ " is not a float array, but database "
++ toString()
++ " does not contain a grid configuration.");
+} else {
+// Convert to a FloatDataRecord for internal use
+ParmStorageInfo psi = parmStorageInfo.get(parmId
+.getCompositeName());
+records.put(timeRange, storageToFloat(rec, psi));
+}
 }
 }
 
-if (scalarDataIndex != scalarData.length) {
-throw new IllegalArgumentException(
-"Invalid number of dataSets returned expected 1 per group, received: "
-+ (scalarDataIndex / scalarData.length));
+scalarData = new FloatDataRecord[times.size()];
+int count = 0;
+for (TimeRange timeRange : times) {
+scalarData[count++] = records.get(timeRange);
 }
 } catch (Exception e) {
 throw new GfeException("Unable to get data from HDF5 for ParmID: "
@@ -2166,29 +2178,40 @@ public class IFPGridDatabase extends GridDatabase {
 @Override
 public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
 List<TimeRange> times) throws GfeException {
-FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
-Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
-times);
+FloatDataRecord[][] vectorData = null;
+Map<IDataStore, Pair<List<TimeRange>, String[]>> dsAndGroups = getDataStoreAndGroups(
+parmId, times);
 
 try {
-// overall index into vector data
-int vectorDataIndex = 0;
-// iterate over dataStore and their respective groups for the
-// requested parm/time ranges
-for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
-IDataRecord[] rawData = entry.getKey().retrieveGroups(
-entry.getValue(), Request.ALL);
-
-// iterate over the data retrieved from this dataStore for the
-// groups
-for (int i = 0; i < rawData.length; i += 2, vectorDataIndex++) {
+Map<TimeRange, FloatDataRecord[]> records = new HashMap<TimeRange, FloatDataRecord[]>(
+(int) (1.25 * times.size()) + 1);
+
+// loop over the dataStores and their respective groups to pull all
+// data, stored into records to reorder requests by times
+for (Map.Entry<IDataStore, Pair<List<TimeRange>, String[]>> entry : dsAndGroups
+.entrySet()) {
+Pair<List<TimeRange>, String[]> pair = entry.getValue();
+String[] groups = pair.getSecond();
+
+IDataRecord[] rawData = entry.getKey().retrieveGroups(groups,
+Request.ALL);
+
+if (rawData.length != groups.length * 2) {
+throw new IllegalArgumentException(
+"Invalid number of dataSets returned expected per group, received: "
++ ((double) rawData.length / groups.length));
+}
+
+// iterate over the data from this dataStore adding it records
+int count = 0;
+for (TimeRange timeRange : pair.getFirst()) {
 IDataRecord magRec = null;
 IDataRecord dirRec = null;
 
 // Should be vector data and each group should have had a
 // Dir and Mag dataset
-for (int j = 0; j < 2; j++) {
-IDataRecord rec = rawData[i + j];
+for (int i = 0; i < 2; i++) {
+IDataRecord rec = rawData[count * 2 + i];
 if ("Mag".equals(rec.getName())) {
 magRec = rec;
 } else if ("Dir".equals(rec.getName())) {
@@ -2200,10 +2223,12 @@ public class IFPGridDatabase extends GridDatabase {
 }
 }
 
+FloatDataRecord[] recs = new FloatDataRecord[2];
+
 if (magRec.getClass() == dirRec.getClass()) {
 if (magRec instanceof FloatDataRecord) {
-vectorData[vectorDataIndex][0] = (FloatDataRecord) magRec;
-vectorData[vectorDataIndex][1] = (FloatDataRecord) dirRec;
+recs[0] = (FloatDataRecord) magRec;
+recs[1] = (FloatDataRecord) dirRec;
 } else if (gridConfig == null) {
 throw new IllegalArgumentException(
 "Data array for "
@@ -2224,11 +2249,12 @@ public class IFPGridDatabase extends GridDatabase {
 VECTOR_DIR_DATA_OFFSET,
 VECTOR_DIR_DATA_MULTIPLIER,
 magStorageInfo.storageType());
-vectorData[vectorDataIndex][0] = storageToFloat(
-magRec, magStorageInfo);
-vectorData[vectorDataIndex][1] = storageToFloat(
-dirRec, dirStorageInfo);
+recs[0] = storageToFloat(magRec, magStorageInfo);
+recs[1] = storageToFloat(dirRec, dirStorageInfo);
 }
+
+records.put(timeRange, recs);
+count++;
 } else {
 throw new IllegalArgumentException(
 "Magnitude and direction grids are not of the same type.");
@@ -2236,10 +2262,10 @@ public class IFPGridDatabase extends GridDatabase {
 }
 }
 
-if (vectorDataIndex != vectorData.length) {
-throw new IllegalArgumentException(
-"Invalid number of dataSets returned expected 2 per group, received: "
-+ (vectorDataIndex / vectorData.length) * 2);
+vectorData = new FloatDataRecord[times.size()][2];
+int count = 0;
+for (TimeRange timeRange : times) {
+vectorData[count++] = records.get(timeRange);
 }
 } catch (Exception e) {
 throw new GfeException("Unable to get data from HDF5 for ParmID: "
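In the vector (and, earlier, the discrete) retrieval above, each HDF5 group contributes two datasets per requested time range, so the raw array is walked in pairs: positions count * 2 and count * 2 + 1, distinguished by dataset name ("Mag"/"Dir" here, "Data"/"Keys" for discrete) rather than by position. A self-contained sketch of that pairing with stand-in records:

public class MagDirPairingSketch {
    // stand-in for IDataRecord: just a named payload
    record NamedRecord(String name, float value) {}

    public static void main(String[] args) {
        // two datasets per requested time range; Mag/Dir order is not guaranteed
        NamedRecord[] rawData = {
                new NamedRecord("Mag", 12.5f), new NamedRecord("Dir", 270f),
                new NamedRecord("Dir", 180f), new NamedRecord("Mag", 7.0f) };

        int timeRanges = rawData.length / 2;
        for (int count = 0; count < timeRanges; count++) {
            float[] magDir = new float[2];
            for (int i = 0; i < 2; i++) {
                NamedRecord rec = rawData[count * 2 + i];
                if ("Mag".equals(rec.name())) {
                    magDir[0] = rec.value();
                } else if ("Dir".equals(rec.name())) {
                    magDir[1] = rec.value();
                } else {
                    throw new IllegalArgumentException("Unexpected dataset: " + rec.name());
                }
            }
            System.out.println("time range " + count + ": mag=" + magDir[0]
                    + " dir=" + magDir[1]);
        }
    }
}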
@@ -34,6 +34,7 @@ from zones2cities import *
 # Date         Ticket#    Engineer    Description
 # ------------ ---------- ----------- --------------------------
 # 01/08/10     #1209      randerso    Initial Creation.
+# 10/19/12     #1091      dgilling    Support localMaps.py.
 #
 #
 #
@@ -133,25 +134,25 @@ def createAreaDictionary(outputDir, mapDict):
 if len(ean):
 try:
 d = {}
-if att.containsKey('ZONE') and att.containsKey('STATE'):
-d['ugcCode'] = str(att.get('STATE')) + "Z" + str(att.get('ZONE'))
-elif att.containsKey('ID'):
-d['ugcCode'] = str(att.get('ID'))
-elif att.containsKey('FIPS') and att.containsKey('STATE') and \
-att.containsKey('COUNTYNAME'):
-d['ugcCode'] = str(att.get('STATE')) + "C" + str(att.get('FIPS'))[-3:]
-d['ugcName'] = string.strip(str(att.get('COUNTYNAME')))
+if att.containsKey('zone') and att.containsKey('state'):
+d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))
+elif att.containsKey('id'):
+d['ugcCode'] = str(att.get('id'))
+elif att.containsKey('fips') and att.containsKey('state') and \
+att.containsKey('countyname'):
+d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]
+d['ugcName'] = string.strip(str(att.get('countyname')))
 else:
 continue
 
-if att.containsKey('STATE'):
-d["stateAbbr"] = str(att.get('STATE'))
+if att.containsKey('state'):
+d["stateAbbr"] = str(att.get('state'))
 
-if att.containsKey('NAME'):
-d["ugcName"] = string.strip(str(att.get('NAME')))
+if att.containsKey('name'):
+d["ugcName"] = string.strip(str(att.get('name')))
 
-if att.containsKey('TIME_ZONE'):
-tzvalue = getRealTimeZone(str(att.get('TIME_ZONE')))
+if att.containsKey('time_zone'):
+tzvalue = getRealTimeZone(str(att.get('time_zone')))
 if tzvalue is not None:
 d["ugcTimeZone"] = tzvalue
 
@@ -315,17 +316,17 @@ def createCityLocation(outputDir, mapDict):
 attList = mapDict[mapname]
 for att in attList:
 #LogStream.logProblem("att:", att)
-ean = att['NAME']
-state = att['ST']
-county_FIP = att['COUNTY_FIP']
+ean = att['name']
+state = att['st']
+county_FIP = att['county_fip']
 
 if len(ean) and len(state) and len(county_FIP):
 fip = state + 'C' + county_FIP
 if not citydict.has_key(fip):
 citydict[fip] = {}
 try:
-latitude = float(string.strip(att['LAT']))
-longitude = float(string.strip(att['LON']))
+latitude = float(string.strip(att['lat']))
+longitude = float(string.strip(att['lon']))
 citydict[fip][ean.upper()] = (latitude, longitude)
 except:
 LogStream.logProblem("Problem creating city location ",
@@ -36,6 +36,7 @@ Export-Package: com.raytheon.uf.common.dataplugin.gfe,
  com.raytheon.uf.common.dataplugin.gfe.slice,
  com.raytheon.uf.common.dataplugin.gfe.textproduct,
  com.raytheon.uf.common.dataplugin.gfe.time,
+ com.raytheon.uf.common.dataplugin.gfe.type,
  com.raytheon.uf.common.dataplugin.gfe.util,
  com.raytheon.uf.common.dataplugin.gfe.weather,
  com.raytheon.uf.common.dataplugin.gfe.weatherelement
@@ -17,7 +17,7 @@
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
-package com.raytheon.viz.gfe.types;
+package com.raytheon.uf.common.dataplugin.gfe.type;
 
 /**
 * TODO Add Description
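The class relocated above is used throughout this commit as a plain two-element holder with a (first, second) constructor and getFirst()/getSecond() accessors. Its body is not shown in the diff; judging from that usage it is roughly the sketch below, though the real class may add setters, equals/hashCode, or serialization annotations:

public class Pair<F, S> {
    private final F first;
    private final S second;

    public Pair(F first, S second) {
        this.first = first;
        this.second = second;
    }

    public F getFirst() {
        return first;
    }

    public S getSecond() {
        return second;
    }
}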
@@ -41,6 +41,7 @@ import com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.GridLocation;
 import com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID;
 import com.raytheon.uf.common.dataplugin.gfe.grid.Grid2DBit;
+import com.raytheon.uf.common.dataplugin.gfe.type.Pair;
 import com.raytheon.uf.common.time.TimeRange;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.Geometry;
@@ -170,7 +171,8 @@ public class GfeUtil {
 public static File getHdf5File(String baseDir, ParmID parmId, TimeRange time) {
 List<TimeRange> list = new ArrayList<TimeRange>(1);
 list.add(time);
-Map<File, String[]> map = getHdf5FilesAndGroups(baseDir, parmId, list);
+Map<File, Pair<List<TimeRange>, String[]>> map = getHdf5FilesAndGroups(
+baseDir, parmId, list);
 File rval = null;
 
 if (!map.isEmpty()) {
@@ -191,14 +193,14 @@ public class GfeUtil {
 * @param times
 * @return
 */
-public static Map<File, String[]> getHdf5FilesAndGroups(String baseDir,
-ParmID parmId, List<TimeRange> times) {
+public static Map<File, Pair<List<TimeRange>, String[]>> getHdf5FilesAndGroups(
+String baseDir, ParmID parmId, List<TimeRange> times) {
 DatabaseID dbId = parmId.getDbId();
 File directory = getHdf5Dir(baseDir, dbId);
 boolean isSingleton = DatabaseID.NO_MODEL_TIME.equals(dbId
 .getModelTime());
 
-Map<File, String[]> rval = null;
+Map<File, Pair<List<TimeRange>, String[]>> rval = null;
 if (isSingleton) {
 // file per parm per day
 StringBuffer tmp = new StringBuffer(40);
@@ -234,13 +236,16 @@ public class GfeUtil {
 }
 
 // initialize map size, accounting for load factor
-rval = new HashMap<File, String[]>(
+rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(
 (int) (dateMap.size() * 1.25) + 1);
 for (Map.Entry<String, List<TimeRange>> entry : dateMap.entrySet()) {
 tmp.setLength(0);
 tmp.append(preString).append(entry.getKey()).append(postString);
 File h5File = new File(directory, tmp.toString());
-rval.put(h5File, getHDF5Groups(parmId, entry.getValue()));
+Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
+entry.getValue(), getHDF5Groups(parmId,
+entry.getValue()));
+rval.put(h5File, p);
 }
 } else {
 // file per parm
@@ -250,8 +255,10 @@ public class GfeUtil {
 fileName.append(parmId.getParmLevel()).append(
 DATASTORE_FILE_EXTENSION);
 File h5File = new File(directory, fileName.toString());
-rval = new HashMap<File, String[]>(2);
-rval.put(h5File, getHDF5Groups(parmId, times));
+rval = new HashMap<File, Pair<List<TimeRange>, String[]>>(2);
+Pair<List<TimeRange>, String[]> p = new Pair<List<TimeRange>, String[]>(
+times, getHDF5Groups(parmId, times));
+rval.put(h5File, p);
 }
 
 return rval;
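For singleton databases, getHdf5FilesAndGroups buckets the requested time ranges by day file (the dateMap above) and now stores each bucket next to its HDF5 group names, so retrieval code can map records back to request order. A self-contained sketch of that bucketing-and-pairing step; the file and group naming here are illustrative only, not the real GFE conventions:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FileBucketingSketch {
    record TimesAndGroups(List<String> times, String[] groups) {}

    public static void main(String[] args) {
        // requested "time ranges" (stand-ins), each tagged with the day it falls in
        Map<String, String> timeToDay = Map.of(
                "tr0", "20121019", "tr1", "20121019", "tr2", "20121020");

        // bucket times by day, mimicking the dateMap built before this hunk
        Map<String, List<String>> dateMap = new HashMap<>();
        for (Map.Entry<String, String> e : timeToDay.entrySet()) {
            dateMap.computeIfAbsent(e.getValue(), d -> new ArrayList<>()).add(e.getKey());
        }

        // one file per day; keep the bucketed times next to their group names
        // so retrieval code can match records back to the request order
        Map<String, TimesAndGroups> fileMap = new HashMap<>((int) (dateMap.size() * 1.25) + 1);
        for (Map.Entry<String, List<String>> entry : dateMap.entrySet()) {
            String fileName = "Parm_" + entry.getKey() + ".h5";
            String[] groups = entry.getValue().stream()
                    .map(t -> "/" + t).toArray(String[]::new);
            fileMap.put(fileName, new TimesAndGroups(entry.getValue(), groups));
        }

        fileMap.forEach((file, tg) -> System.out.println(
                file + " -> " + tg.times() + " / " + tg.groups().length + " groups"));
    }
}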