Issue #1257: Change Radar and GFE HDF5 file layouts. Update PluginDao to use ThreadLocal date formatters instead of synchronized blocks. Fix NSharp point queries for the changed grid path.

Change-Id: I426718d765dfad3aba24e504b2d01ab9b194625f

Author: Richard Peter
Date: 2012-10-12 16:08:42 -05:00 (committed by Gerrit Code Review)
Parent: 59c096bba0
Commit: f3f852029a
18 changed files with 732 additions and 498 deletions
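The central pattern applied throughout this change, shown here as a minimal standalone sketch (class and method names below are illustrative, not part of the commit): each thread lazily gets its own SimpleDateFormat through a ThreadLocal, so the synchronized blocks that previously guarded the shared fileNameFormat and DateFormatter instances are no longer needed.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ThreadLocalFormatSketch {

    // one formatter per thread, created lazily; mirrors the new
    // DefaultPathProvider.fileNameFormat and GfeUtil formatter fields
    private static final ThreadLocal<SimpleDateFormat> fileNameFormat =
            new ThreadLocal<SimpleDateFormat>() {
                @Override
                protected SimpleDateFormat initialValue() {
                    SimpleDateFormat sdf = new SimpleDateFormat("-yyyy-MM-dd-HH");
                    sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
                    return sdf;
                }
            };

    public static String format(Date refTime) {
        // was: synchronized (fileNameFormat) { ... fileNameFormat.format(refTime); }
        return fileNameFormat.get().format(refTime);
    }

    public static void main(String[] args) {
        System.out.println(format(new Date()));
    }
}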

View file

@ -242,20 +242,20 @@ public class GFEDao extends DefaultPluginDao {
try {
q.setString("dataURI", rec.getDataURI());
List<?> list = q.list();
if (list == null || list.size() == 0) {
if ((list == null) || (list.size() == 0)) {
sess.save(rec);
} else {
rec.setId(((Number) list.get(0)).intValue());
sess.update(rec);
}
if (index % batchSize == 0 || persistIndividually
if ((index % batchSize == 0) || persistIndividually
|| !notDone) {
sess.flush();
sess.clear();
tx.commit();
tx = null;
commitPoint = index;
if (persistIndividually && index % batchSize == 0) {
if (persistIndividually && (index % batchSize == 0)) {
// batch persisted individually switch back to batch
persistIndividually = false;
}
@ -424,26 +424,28 @@ public class GFEDao extends DefaultPluginDao {
}
});
File hdf5File = GfeUtil.getHDF5File(GridDatabase.gfeBaseDataDir,
parmId.getDbId());
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
String[] groupsToDelete = new String[times.size()];
for (int i = 0; i < times.size(); i++) {
groupsToDelete[i] = GfeUtil.getHDF5Group(parmId, times.get(i));
}
try {
for (String grp : groupsToDelete) {
dataStore.delete(grp);
// we gain nothing by removing from hdf5
Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
GridDatabase.gfeBaseDataDir, parmId, times);
for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
File hdf5File = entry.getKey();
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
try {
String[] groupsToDelete = entry.getValue();
for (String grp : groupsToDelete) {
dataStore.delete(grp);
}
statusHandler.handle(Priority.DEBUG,
"Deleted: " + Arrays.toString(groupsToDelete)
+ " from " + hdf5File.getName());
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error deleting hdf5 records", e);
}
statusHandler.handle(Priority.DEBUG,
"Deleted: " + Arrays.toString(groupsToDelete) + " from "
+ hdf5File.getName());
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error deleting hdf5 records", e);
}
}
@SuppressWarnings("unchecked")
@ -759,13 +761,13 @@ public class GFEDao extends DefaultPluginDao {
int lowestHr = -1;
for (GribModel m : (List<GribModel>) results) {
String param = m.getParameterAbbreviation().toLowerCase();
if (param.equals(abbreviation) && lowestHr < 0) {
if (param.equals(abbreviation) && (lowestHr < 0)) {
model = m;
} else {
Matcher matcher = p.matcher(param);
if (matcher.matches()) {
int hr = Integer.parseInt(matcher.group(1));
if (lowestHr < 0 || hr < lowestHr) {
if ((lowestHr < 0) || (hr < lowestHr)) {
model = m;
lowestHr = hr;
}
@ -1004,17 +1006,17 @@ public class GFEDao extends DefaultPluginDao {
* The parm and level to delete
* @param dbId
* The database to delete from
* @param ds
* The data store file
* @throws DataAccessLayerException
* If errors occur
*/
public void removeOldParm(String parmAndLevel, DatabaseID dbId,
IDataStore ds) throws DataAccessLayerException {
public void removeOldParm(String parmAndLevel, DatabaseID dbId)
throws DataAccessLayerException {
ParmID pid = new ParmID(parmAndLevel + ":" + dbId.toString());
try {
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
.getGridParmHdf5File(GridDatabase.gfeBaseDataDir, dbId));
ds.delete("/GridParmInfo/" + parmAndLevel);
} catch (Exception e1) {
throw new DataAccessLayerException("Error deleting data from HDF5",

View file

@ -22,6 +22,7 @@ package com.raytheon.edex.plugin.gfe.server.database;
import java.io.File;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -143,12 +144,14 @@ public abstract class GridDatabase {
* The record to remove
*/
public void removeFromHDF5(GFERecord record) {
File hdf5File = GfeUtil.getHDF5File(gfeBaseDataDir, dbId);
File hdf5File = GfeUtil.getHdf5File(gfeBaseDataDir, record.getParmId(),
record.getDataTime().getValidPeriod());
/*
* Remove the grid from HDF5
*/
String groupName = GfeUtil.getHDF5Group(record.getParmId(), record
.getDataTime().getValidPeriod());
String groupName = GfeUtil.getHDF5Group(record.getParmId(),
record.getTimeRange());
IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
@ -171,21 +174,26 @@ public abstract class GridDatabase {
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
IDataStore dataStore = getDataStore(parmId);
String groups[] = GfeUtil.getHDF5Groups(parmId, times);
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
try {
IDataRecord[] rawData = dataStore.retrieveGroups(groups,
Request.ALL);
if (rawData.length != times.size()) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (rawData.length / times.size()));
int index = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
for (IDataRecord record : rawData) {
if (index < scalarData.length) {
scalarData[index++] = (FloatDataRecord) record;
}
}
}
for (int i = 0; i < rawData.length; i++) {
IDataRecord rec = rawData[i];
scalarData[i] = (FloatDataRecord) rec;
if (index != scalarData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (index / scalarData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@ -204,33 +212,40 @@ public abstract class GridDatabase {
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
IDataStore dataStore = getDataStore(parmId);
String groups[] = GfeUtil.getHDF5Groups(parmId, times);
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
try {
IDataRecord[] rawData = dataStore.retrieveGroups(groups,
Request.ALL);
if (rawData.length / 2 != times.size()) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ (rawData.length / times.size()));
int index = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
for (IDataRecord rec : rawData) {
if (index < vectorData.length * 2) {
if ("Mag".equals(rec.getName())) {
vectorData[index++ / 2][0] = (FloatDataRecord) rec;
} else if ("Dir".equals(rec.getName())) {
vectorData[index++ / 2][1] = (FloatDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
}
}
}
}
for (int i = 0; i < rawData.length; i++) {
IDataRecord rec = rawData[i];
if ("Mag".equals(rec.getName())) {
vectorData[i / 2][0] = (FloatDataRecord) rec;
} else if ("Dir".equals(rec.getName())) {
vectorData[i / 2][1] = (FloatDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
}
if (index != vectorData.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ (index / vectorData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
+ parmId + " TimeRange: " + times, e);
}
return vectorData;
}
@ -243,28 +258,38 @@ public abstract class GridDatabase {
public ByteDataRecord[][] retrieveDiscreteFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
ByteDataRecord[][] byteRecords = new ByteDataRecord[times.size()][2];
IDataStore dataStore = getDataStore(parmId);
String groups[] = GfeUtil.getHDF5Groups(parmId, times);
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
try {
IDataRecord[] rawData = dataStore.retrieveGroups(groups,
Request.ALL);
if (rawData.length / 2 != times.size()) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ (rawData.length / times.size()));
int index = 0;
// loop over the dataStores and their respective groups to pull all
// data
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
// iterate over the data from this dataStore, adding it to
// byteRecords
for (IDataRecord rec : rawData) {
if (index < byteRecords.length * 2) {
if ("Data".equals(rec.getName())) {
byteRecords[index++ / 2][0] = (ByteDataRecord) rec;
} else if ("Keys".equals(rec.getName())) {
byteRecords[index++ / 2][1] = (ByteDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
}
}
}
}
for (int i = 0; i < rawData.length; i++) {
IDataRecord rec = rawData[i];
if ("Data".equals(rec.getName())) {
byteRecords[i / 2][0] = (ByteDataRecord) rec;
} else if ("Keys".equals(rec.getName())) {
byteRecords[i / 2][1] = (ByteDataRecord) rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for discrete data. Valid values: Data, Keys Received: "
+ rec.getName());
}
if (index != byteRecords.length * 2) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected per group, received: "
+ (index / byteRecords.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@ -273,9 +298,18 @@ public abstract class GridDatabase {
return byteRecords;
}
protected IDataStore getDataStore(ParmID parmId) {
File hdf5File = GfeUtil.getHDF5File(gfeBaseDataDir, parmId.getDbId());
return DataStoreFactory.getDataStore(hdf5File);
protected Map<IDataStore, String[]> getDataStoreAndGroups(ParmID parmId,
List<TimeRange> times) {
Map<File, String[]> fileMap = GfeUtil.getHdf5FilesAndGroups(
GridDatabase.gfeBaseDataDir, parmId, times);
// size hashMap accounting for load factor
Map<IDataStore, String[]> rval = new HashMap<IDataStore, String[]>(
(int) (fileMap.size() * 1.25) + 1);
for (Map.Entry<File, String[]> entry : fileMap.entrySet()) {
rval.put(DataStoreFactory.getDataStore(entry.getKey()),
entry.getValue());
}
return rval;
}
/**
@ -371,7 +405,7 @@ public abstract class GridDatabase {
}
public void deleteModelHDF5() {
File hdf5File = GfeUtil.getHDF5Dir(GridDatabase.gfeBaseDataDir, dbId);
File hdf5File = GfeUtil.getHdf5Dir(GridDatabase.gfeBaseDataDir, dbId);
IDataStore ds = DataStoreFactory.getDataStore(hdf5File);
try {
ds.deleteFiles(null);

View file

@ -407,7 +407,6 @@ public class IFPGridDatabase extends GridDatabase {
return;
}
List<GFERecord> updatedRecords = new ArrayList<GFERecord>();
Set<String> locationsToDelete = new HashSet<String>();
for (GFERecord rec : records) {
switch (gridType) {
case SCALAR:
@ -435,7 +434,6 @@ public class IFPGridDatabase extends GridDatabase {
newGPI.getMinValue(), newGPI.getMaxValue());
rec.setMessageData(scalarRecord);
updatedRecords.add(rec);
locationsToDelete.add(scalarRecord.getGroup());
break;
case VECTOR:
List<TimeRange> vectorTimes = new ArrayList<TimeRange>();
@ -474,16 +472,12 @@ public class IFPGridDatabase extends GridDatabase {
vSlice.setDirGrid(rawData2);
rec.setMessageData(vSlice);
updatedRecords.add(rec);
locationsToDelete.add(vectorRecord[0].getGroup());
break;
}
}
if (!updatedRecords.isEmpty()) {
File file = GfeUtil.getHDF5File(gfeBaseDataDir, parmId.getDbId());
try {
DataStoreFactory.getDataStore(file).delete(
locationsToDelete.toArray(new String[locationsToDelete
.size()]));
this.saveGridsToHdf5(updatedRecords, newPSI);
} catch (Exception e) {
statusHandler
@ -674,9 +668,7 @@ public class IFPGridDatabase extends GridDatabase {
statusHandler.handle(Priority.INFO, "Removing: " + item
+ " from the " + this.dbId + " database.");
try {
dao.removeOldParm(item, this.dbId, DataStoreFactory
.getDataStore(GfeUtil.getHDF5File(gfeBaseDataDir,
this.dbId)));
dao.removeOldParm(item, this.dbId);
this.parmInfo.remove(item);
this.parmStorageInfo.remove(item);
} catch (DataAccessLayerException e) {
@ -1333,8 +1325,8 @@ public class IFPGridDatabase extends GridDatabase {
initGridParmInfo();
}
try {
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
gfeBaseDataDir, this.dbId));
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
.getGridParmHdf5File(gfeBaseDataDir, this.dbId));
IDataRecord[] parmInfoRecords = ds.retrieve(GRID_PARM_INFO_GRP);
for (IDataRecord gpiRecord : parmInfoRecords) {
@ -1520,12 +1512,11 @@ public class IFPGridDatabase extends GridDatabase {
* @return The HDF5 file
*/
protected void initGridParmInfo() {
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
gfeBaseDataDir, this.dbId));
try {
if ((gridConfig != null)
&& (gridConfig.parmAndLevelList().size() > 0)) {
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
.getGridParmHdf5File(gfeBaseDataDir, this.dbId));
ds.getDatasets(GRID_PARM_INFO_GRP);
parmInfoInitialized = true;
}
@ -1765,8 +1756,8 @@ public class IFPGridDatabase extends GridDatabase {
Map<File, List<GFERecord>> recordMap = new HashMap<File, List<GFERecord>>();
for (GFERecord rec : dataObjects) {
File file = GfeUtil.getHDF5File(gfeBaseDataDir, rec.getParmId()
.getDbId());
File file = GfeUtil.getHdf5File(gfeBaseDataDir,
rec.getParmId(), rec.getTimeRange());
List<GFERecord> recList = recordMap.get(file);
if (recList == null) {
recList = new ArrayList<GFERecord>();
@ -1784,7 +1775,7 @@ public class IFPGridDatabase extends GridDatabase {
for (GFERecord rec : entry.getValue()) {
Object data = rec.getMessageData();
String groupName = GfeUtil.getHDF5Group(rec.getParmId(),
rec.getDataTime().getValidPeriod());
rec.getTimeRange());
if (parmStorageInfo == null) {
parmStorageInfo = findStorageInfo(rec.getParmId());
@ -2125,35 +2116,44 @@ public class IFPGridDatabase extends GridDatabase {
public FloatDataRecord[] retrieveFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[] scalarData = new FloatDataRecord[times.size()];
IDataStore dataStore = getDataStore(parmId);
String groups[] = GfeUtil.getHDF5Groups(parmId, times);
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
try {
IDataRecord[] rawData = dataStore.retrieveGroups(groups,
Request.ALL);
if (rawData.length != times.size()) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (rawData.length / times.size()));
// overall index into scalar data
int scalarDataIndex = 0;
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
for (IDataRecord rec : rawData) {
if (scalarDataIndex < scalarData.length) {
if (rec instanceof FloatDataRecord) {
scalarData[scalarDataIndex++] = (FloatDataRecord) rec;
} else if (gridConfig == null) {
throw new IllegalArgumentException(
"Data array for "
+ parmId.getParmName()
+ " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
// Convert to a FloatDataRecord for internal use
ParmStorageInfo psi = parmStorageInfo.get(parmId
.getCompositeName());
scalarData[scalarDataIndex++] = storageToFloat(rec,
psi);
}
}
}
}
for (int i = 0; i < rawData.length; i++) {
IDataRecord rec = rawData[i];
if (rec instanceof FloatDataRecord) {
scalarData[i] = (FloatDataRecord) rec;
} else if (gridConfig == null) {
throw new IllegalArgumentException("Data array for "
+ parmId.getParmName() + " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
// Convert to a FloatDataRecord for internal use
ParmStorageInfo psi = parmStorageInfo.get(parmId
.getCompositeName());
scalarData[i] = storageToFloat(rec, psi);
}
if (scalarDataIndex != scalarData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 1 per group, received: "
+ (scalarDataIndex / scalarData.length));
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@ -2167,63 +2167,79 @@ public class IFPGridDatabase extends GridDatabase {
public FloatDataRecord[][] retrieveVectorFromHDF5(ParmID parmId,
List<TimeRange> times) throws GfeException {
FloatDataRecord[][] vectorData = new FloatDataRecord[times.size()][2];
IDataStore dataStore = getDataStore(parmId);
String groups[] = GfeUtil.getHDF5Groups(parmId, times);
try {
IDataRecord[] rawData = dataStore.retrieveGroups(groups,
Request.ALL);
if (rawData.length / 2 != times.size()) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ (rawData.length / times.size()));
}
Map<IDataStore, String[]> dsAndGroups = getDataStoreAndGroups(parmId,
times);
for (int i = 0; i < rawData.length; i += 2) {
IDataRecord magRec = null;
IDataRecord dirRec = null;
for (int j = 0; j < 2; j++) {
IDataRecord rec = rawData[i + j];
if ("Mag".equals(rec.getName())) {
magRec = rec;
} else if ("Dir".equals(rec.getName())) {
dirRec = rec;
try {
// overall index into vector data
int vectorDataIndex = 0;
// iterate over dataStore and their respective groups for the
// requested parm/time ranges
for (Map.Entry<IDataStore, String[]> entry : dsAndGroups.entrySet()) {
IDataRecord[] rawData = entry.getKey().retrieveGroups(
entry.getValue(), Request.ALL);
// iterate over the data retrieved from this dataStore for the
// groups
for (int i = 0; i < rawData.length; i += 2, vectorDataIndex++) {
IDataRecord magRec = null;
IDataRecord dirRec = null;
// Should be vector data and each group should have had a
// Dir and Mag dataset
for (int j = 0; j < 2; j++) {
IDataRecord rec = rawData[i + j];
if ("Mag".equals(rec.getName())) {
magRec = rec;
} else if ("Dir".equals(rec.getName())) {
dirRec = rec;
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
}
}
if (magRec.getClass() == dirRec.getClass()) {
if (magRec instanceof FloatDataRecord) {
vectorData[vectorDataIndex][0] = (FloatDataRecord) magRec;
vectorData[vectorDataIndex][1] = (FloatDataRecord) dirRec;
} else if (gridConfig == null) {
throw new IllegalArgumentException(
"Data array for "
+ parmId.getParmName()
+ " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
ParmStorageInfo magStorageInfo = parmStorageInfo
.get(parmId.getCompositeName());
ParmStorageInfo dirStorageInfo = new ParmStorageInfo(
magStorageInfo.dataType(),
magStorageInfo.gridSize(),
magStorageInfo.parmName(),
magStorageInfo.level(),
VECTOR_DIR_DATA_OFFSET,
VECTOR_DIR_DATA_MULTIPLIER,
magStorageInfo.storageType());
vectorData[vectorDataIndex][0] = storageToFloat(
magRec, magStorageInfo);
vectorData[vectorDataIndex][1] = storageToFloat(
dirRec, dirStorageInfo);
}
} else {
throw new IllegalArgumentException(
"Unknown dataset retrieved for vector data. Valid values: Mag, Dir Received: "
+ rec.getName());
"Magnitude and direction grids are not of the same type.");
}
}
}
if (magRec.getClass() == dirRec.getClass()) {
if (magRec instanceof FloatDataRecord) {
vectorData[i / 2][0] = (FloatDataRecord) magRec;
vectorData[i / 2][1] = (FloatDataRecord) dirRec;
} else if (gridConfig == null) {
throw new IllegalArgumentException("Data array for "
+ parmId.getParmName() + " "
+ parmId.getParmLevel()
+ " is not a float array, but database "
+ toString()
+ " does not contain a grid configuration.");
} else {
ParmStorageInfo magStorageInfo = parmStorageInfo
.get(parmId.getCompositeName());
ParmStorageInfo dirStorageInfo = new ParmStorageInfo(
magStorageInfo.dataType(),
magStorageInfo.gridSize(),
magStorageInfo.parmName(),
magStorageInfo.level(), VECTOR_DIR_DATA_OFFSET,
VECTOR_DIR_DATA_MULTIPLIER,
magStorageInfo.storageType());
vectorData[i / 2][0] = storageToFloat(magRec,
magStorageInfo);
vectorData[i / 2][1] = storageToFloat(dirRec,
dirStorageInfo);
}
} else {
throw new IllegalArgumentException(
"Magnitude and direction grids are not of the same type.");
}
if (vectorDataIndex != vectorData.length) {
throw new IllegalArgumentException(
"Invalid number of dataSets returned expected 2 per group, received: "
+ (vectorDataIndex / vectorData.length) * 2);
}
} catch (Exception e) {
throw new GfeException("Unable to get data from HDF5 for ParmID: "
@ -2304,8 +2320,8 @@ public class IFPGridDatabase extends GridDatabase {
private void storeGridParmInfo(List<GridParmInfo> gridParmInfo,
List<ParmStorageInfo> parmStorageInfoList, StoreOp storeOp)
throws Exception {
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil.getHDF5File(
gfeBaseDataDir, this.dbId));
IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
.getGridParmHdf5File(gfeBaseDataDir, this.dbId));
String parmNameAndLevel = null;
for (GridParmInfo gpi : gridParmInfo) {
parmNameAndLevel = gpi.getParmID().getParmName() + "_"

View file

@ -86,9 +86,9 @@ public class TopoDatabaseManager {
private static Map<String, TopoDatabase> topoDbMap = new HashMap<String, TopoDatabase>();
private IFPServerConfig config;
private final IFPServerConfig config;
private IDataStore dataStore;
private final IDataStore dataStore;
public static void initializeTopoDatabase(String siteID)
throws GfeException {
@ -116,7 +116,7 @@ public class TopoDatabaseManager {
// get GridParmInfo configuration
GridLocation gloc = config.dbDomain();
File hdf5File = GfeUtil.getHDF5File(GridDatabase.gfeBaseDataDir,
File hdf5File = GfeUtil.getHdf5TopoFile(GridDatabase.gfeBaseDataDir,
getTopoDbId(siteID));
dataStore = DataStoreFactory.getDataStore(hdf5File);
@ -239,7 +239,7 @@ public class TopoDatabaseManager {
for (int i = 0; i < heights.length; i++) {
if (!Float.isNaN(heights[i])) {
heights[i] = (float) cvt.convert(heights[i]);
if (!allowValuesBelowZero && heights[i] < 0) {
if (!allowValuesBelowZero && (heights[i] < 0)) {
heights[i] = 0.0f;
}
}
@ -391,7 +391,7 @@ public class TopoDatabaseManager {
dataStore.addDataRecord(output);
StorageStatus status = dataStore.store(StoreOp.REPLACE);
StorageException[] exceptions = status.getExceptions();
if (exceptions != null && exceptions.length > 0) {
if ((exceptions != null) && (exceptions.length > 0)) {
statusHandler
.handle(Priority.PROBLEM,
"Storage exceptions occurred during hdf5 save. "

View file

@ -4,4 +4,5 @@
<key>modelInfo.modelName</key>
<order>0</order>
</pathKey>
</pathKeySet>
</pathKeySet>

View file

@ -4,4 +4,9 @@
<key>icao</key>
<order>0</order>
</pathKey>
</pathKeySet>
<pathKey>
<key>primaryElevationAngle</key>
<order>1</order>
</pathKey>
</pathKeySet>

View file

@ -80,19 +80,16 @@ public class TextArchiveFileNameFormatter implements
endTime);
Set<String> newFileEntries = new HashSet<String>();
if (pdos != null && !pdos.isEmpty()) {
if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof StdTextProduct) {
for (PersistableDataObject pdo : pdos) {
StdTextProduct casted = (StdTextProduct) pdo;
// no refTime to use, so we use creation time
Date time = new Date(casted.getRefTime());
String timeString = null;
synchronized (DefaultPathProvider.fileNameFormat) {
timeString = DefaultPathProvider.fileNameFormat
.format(time);
}
String path = pluginName + timeString;
String path = pluginName
+ DefaultPathProvider.fileNameFormat.get().format(
time);
newFileEntries.add(path);
List<PersistableDataObject> list = pdoMap.get(path);

View file

@ -74,7 +74,7 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
public static final String MODEL_TIME_FORMAT = "yyyyMMdd_HHmm";
private static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
public static final ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
@ -321,7 +321,7 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
modelName = strings[3];
// date-time group
if (strings[4].length() != 8 || strings[5].length() != 4) {
if ((strings[4].length() != 8) || (strings[5].length() != 4)) {
return false;
}
@ -336,8 +336,8 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
}
private boolean decodeDtg(String dtgString) {
if (dtgString == null
|| dtgString.length() != MODEL_TIME_FORMAT.length()) {
if ((dtgString == null)
|| (dtgString.length() != MODEL_TIME_FORMAT.length())) {
return false;
}
try {
@ -361,7 +361,7 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
}
shortModelId = modelName;
if (dbType != null && !dbType.isEmpty()) {
if ((dbType != null) && !dbType.isEmpty()) {
shortModelId += "_" + dbType;
}
@ -477,7 +477,7 @@ public class DatabaseID implements Serializable, Comparable<DatabaseID>,
public Date getModelDate() {
Date date = null;
if (modelTime != null && !NO_MODEL_TIME.equalsIgnoreCase(modelTime)) {
if ((modelTime != null) && !NO_MODEL_TIME.equalsIgnoreCase(modelTime)) {
try {
date = dateFormat.get().parse(this.modelTime);
} catch (ParseException e) {

View file

@ -27,8 +27,10 @@ import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.regex.Matcher;
@ -70,9 +72,31 @@ import com.vividsolutions.jts.operation.polygonize.Polygonizer;
*/
public class GfeUtil {
/** Date formatter for generating correct path names */
public static final SimpleDateFormat DateFormatter = new SimpleDateFormat(
"yyyy_MM_dd_HH");
private static final String FIELD_SEPARATOR = "_";
private static final String DATASTORE_FILE_EXTENSION = ".h5";
private static final String GROUP_SEPARATOR = "/";
/** Date formatter for generating correct group names */
private static final ThreadLocal<SimpleDateFormat> groupDateFormatter = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_HH");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
return sdf;
}
};
/** Date formatter for generating correct path names for singleton database */
private static final ThreadLocal<SimpleDateFormat> singletonDateFormatter = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
return sdf;
}
};
public static final String KEY_SEPARATOR = "|";
@ -99,28 +123,12 @@ public class GfeUtil {
* @return The group name for the data
*/
public static String getHDF5Group(ParmID parmId, TimeRange time) {
synchronized (DateFormatter) {
String groupName = parmId.getParmName() + "/"
+ parmId.getParmLevel() + "/"
+ DateFormatter.format(correctDate(time.getStart())) + "--"
+ DateFormatter.format(correctDate(time.getEnd()));
return groupName;
}
}
/**
* Creates the group for storing data to the HDF5 data store
*
* @param parmId
* The parmId of the data to be stored
* @return The group name for the data
*/
public static String getHDF5Group(ParmID parmId) {
synchronized (DateFormatter) {
String groupName = parmId.getParmName() + "/"
+ parmId.getParmLevel();
return groupName;
}
SimpleDateFormat sdf = groupDateFormatter.get();
String groupName = parmId.getParmName() + GROUP_SEPARATOR
+ parmId.getParmLevel() + GROUP_SEPARATOR
+ sdf.format(time.getStart()) + "--"
+ sdf.format(time.getEnd());
return groupName;
}
/**
@ -135,23 +143,137 @@ public class GfeUtil {
public static String[] getHDF5Groups(ParmID parmId, List<TimeRange> times) {
String[] rval = new String[times.size()];
StringBuilder tmp = new StringBuilder(100);
tmp.append(parmId.getParmName()).append('/')
.append(parmId.getParmLevel()).append('/');
tmp.append(parmId.getParmName()).append(GROUP_SEPARATOR)
.append(parmId.getParmLevel()).append(GROUP_SEPARATOR);
String mainString = tmp.toString();
int i = 0;
synchronized (DateFormatter) {
for (TimeRange tr : times) {
tmp.setLength(0);
tmp.append(mainString);
tmp.append(DateFormatter.format(correctDate(tr.getStart())));
tmp.append("--");
tmp.append(DateFormatter.format(correctDate(tr.getEnd())));
rval[i++] = tmp.toString();
}
SimpleDateFormat sdf = groupDateFormatter.get();
for (TimeRange tr : times) {
tmp.setLength(0);
tmp.append(mainString);
tmp.append(sdf.format(tr.getStart()));
tmp.append("--");
tmp.append(sdf.format(tr.getEnd()));
rval[i++] = tmp.toString();
}
return rval;
}
/**
* Returns the hdf5 file for a given parm at a time.
*
* @param baseDir
* @param parmId
* @param time
* @return
*/
public static File getHdf5File(String baseDir, ParmID parmId, TimeRange time) {
List<TimeRange> list = new ArrayList<TimeRange>(1);
list.add(time);
Map<File, String[]> map = getHdf5FilesAndGroups(baseDir, parmId, list);
File rval = null;
if (!map.isEmpty()) {
// can only be at most 1 entry since we only passed in 1 time.
rval = map.keySet().iterator().next();
}
return rval;
}
/**
* Returns a map of File to groups for the specified parm/time range.
* Singleton databases are a file per parm per day. Non singleton databases
* are a file per database per parm.
*
* @param baseDir
* @param parmId
* @param times
* @return
*/
public static Map<File, String[]> getHdf5FilesAndGroups(String baseDir,
ParmID parmId, List<TimeRange> times) {
DatabaseID dbId = parmId.getDbId();
File directory = getHdf5Dir(baseDir, dbId);
boolean isSingleton = DatabaseID.NO_MODEL_TIME.equals(dbId
.getModelTime());
Map<File, String[]> rval = null;
if (isSingleton) {
// file per parm per day
StringBuffer tmp = new StringBuffer(40);
// generate filename for before date string
tmp.append(dbId.getSiteId()).append(FIELD_SEPARATOR)
.append(dbId.getFormat()).append(FIELD_SEPARATOR);
if (dbId.getDbType() != null) {
tmp.append(dbId.getDbType());
}
tmp.append(FIELD_SEPARATOR).append(dbId.getModelName())
.append(FIELD_SEPARATOR);
String preString = tmp.toString();
// generate filename for after date string
tmp.setLength(0);
tmp.append(FIELD_SEPARATOR).append(parmId.getParmName())
.append(FIELD_SEPARATOR);
tmp.append(parmId.getParmLevel()).append(DATASTORE_FILE_EXTENSION);
String postString = tmp.toString();
// sort time ranges into files per day based on end of time range
Map<String, List<TimeRange>> dateMap = new HashMap<String, List<TimeRange>>();
SimpleDateFormat sdf = singletonDateFormatter.get();
for (TimeRange tr : times) {
String day = sdf.format(tr.getEnd());
List<TimeRange> rangeList = dateMap.get(day);
if (rangeList == null) {
rangeList = new ArrayList<TimeRange>(24);
dateMap.put(day, rangeList);
}
rangeList.add(tr);
}
// initialize map size, accounting for load factor
rval = new HashMap<File, String[]>(
(int) (dateMap.size() * 1.25) + 1);
for (Map.Entry<String, List<TimeRange>> entry : dateMap.entrySet()) {
tmp.setLength(0);
tmp.append(preString).append(entry.getKey()).append(postString);
File h5File = new File(directory, tmp.toString());
rval.put(h5File, getHDF5Groups(parmId, entry.getValue()));
}
} else {
// file per parm
StringBuffer fileName = new StringBuffer(40);
fileName.append(dbId.toString()).append(FIELD_SEPARATOR);
fileName.append(parmId.getParmName()).append(FIELD_SEPARATOR);
fileName.append(parmId.getParmLevel()).append(
DATASTORE_FILE_EXTENSION);
File h5File = new File(directory, fileName.toString());
rval = new HashMap<File, String[]>(2);
rval.put(h5File, getHDF5Groups(parmId, times));
}
return rval;
}
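As a rough illustration of the javadoc above (the site, model, and parm values here are made up for the example; only the naming pattern follows the code), the file names the two branches produce look like this:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class GfeHdf5NamingSketch {
    public static void main(String[] args) {
        SimpleDateFormat day = new SimpleDateFormat("yyyyMMdd");
        day.setTimeZone(TimeZone.getTimeZone("GMT"));

        // Singleton database (no model time): one file per parm per day under
        // <base>/<site>/<modelName>/, e.g. OAX_GRID__Fcst_20121012_T_SFC.h5
        String singletonFile = "OAX" + "_" + "GRID" + "_" + "" + "_" + "Fcst"
                + "_" + day.format(new Date()) + "_" + "T" + "_" + "SFC" + ".h5";

        // Model database: one file per parm under
        // <base>/<site>/<modelName>/yyyy_MM_dd_HHmm/, named after the full
        // DatabaseID, e.g. OAX_GRID_D2D_RUC13_20121012_1200_T_SFC.h5
        String perParmFile = "OAX_GRID_D2D_RUC13_20121012_1200" + "_" + "T"
                + "_" + "SFC" + ".h5";

        System.out.println(singletonFile);
        System.out.println(perParmFile);
    }
}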
/**
* Gets the HDF5 file name for the topography database.
*
* @param baseDir
* the base directory
* @param topoDbid
* The topography database ID
* @return The HDF5 file name
*/
public static File getHdf5TopoFile(String baseDir, DatabaseID topoDbid) {
String hdf5FilePath = getHdf5Dir(baseDir, topoDbid).toString()
+ GROUP_SEPARATOR + topoDbid.toString()
+ DATASTORE_FILE_EXTENSION;
return new File(hdf5FilePath);
}
/**
* Gets the HDF5 file that holds the GridParmInfo and ParmStorageInfo
* datasets for a database
@ -162,31 +284,41 @@ public class GfeUtil {
* The database ID
* @return The HDF5 file name
*/
public static File getHDF5File(String baseDir, DatabaseID id) {
public static File getGridParmHdf5File(String baseDir, DatabaseID id) {
String hdf5FilePath = getHDF5Dir(baseDir, id).toString() + "/"
+ id.toString() + ".h5";
return new File(hdf5FilePath);
StringBuffer path = new StringBuffer(120);
path.append(getHdf5Dir(baseDir, id).toString()).append(GROUP_SEPARATOR)
.append(id.toString()).append(FIELD_SEPARATOR)
.append("GridParm").append(DATASTORE_FILE_EXTENSION);
return new File(path.toString());
}
public static File getTempHDF5File(String baseDir, ParmID id) {
String hdf5FilePath = getTempHDF5Dir(baseDir, id).toString() + "/"
+ id.toString() + ".h5";
String hdf5FilePath = getTempHDF5Dir(baseDir, id).toString()
+ GROUP_SEPARATOR + id.toString() + DATASTORE_FILE_EXTENSION;
return new File(hdf5FilePath);
}
public static File getTempHDF5Dir(String baseDir, ParmID id) {
return new File(baseDir + id.getDbId().getSiteId() + "/" + TEMP_D2D_DIR
+ "/");
return new File(baseDir + id.getDbId().getSiteId() + GROUP_SEPARATOR
+ TEMP_D2D_DIR + GROUP_SEPARATOR);
}
public static File getHDF5Dir(String baseDir, DatabaseID id) {
/**
* Returns directory for a model.
*
* @param baseDir
* @param id
* @return
*/
public static File getHdf5Dir(String baseDir, DatabaseID id) {
String hdf5DirPath = "";
String dbModelTime = id.getModelTime();
String gfeDataDir = baseDir;
gfeDataDir = baseDir + id.getSiteId() + "/" + id.getModelName() + "/";
gfeDataDir = baseDir + id.getSiteId() + GROUP_SEPARATOR
+ id.getModelName() + GROUP_SEPARATOR;
/*
* Creates the appropriate file structure for the data. HDF5 files are
* created based on the end time of the data
@ -195,16 +327,17 @@ public class GfeUtil {
/*
* Create the file structure for a singleton database.
*/
hdf5DirPath = gfeDataDir + "/";
hdf5DirPath = gfeDataDir + GROUP_SEPARATOR;
} else {
/*
* Create the file structure for a model database.
*/
hdf5DirPath = gfeDataDir + dbModelTime.substring(0, 4) + "_"
+ dbModelTime.substring(4, 6) + "_"
+ dbModelTime.substring(6, 8) + "_"
+ dbModelTime.substring(9) + "/";
hdf5DirPath = gfeDataDir + dbModelTime.substring(0, 4)
+ FIELD_SEPARATOR + dbModelTime.substring(4, 6)
+ FIELD_SEPARATOR + dbModelTime.substring(6, 8)
+ FIELD_SEPARATOR + dbModelTime.substring(9)
+ GROUP_SEPARATOR;
}
return new File(hdf5DirPath);

View file

@ -54,10 +54,10 @@ public class GribPathProvider extends DefaultPathProvider {
public static final String FORECAST_HR_TOKEN = "-FH-";
private static GribPathProvider instance = new GribPathProvider();
public static final List<String> STATIC_PARAMETERS;
static{
static {
STATIC_PARAMETERS = new ArrayList<String>();
STATIC_PARAMETERS.add("staticTopo");
STATIC_PARAMETERS.add("staticXspacing");
@ -97,13 +97,10 @@ public class GribPathProvider extends DefaultPathProvider {
StringBuffer sb = new StringBuffer(64);
sb.append(pdo.getModelInfo().getModelName());
Date refTime = pdo.getDataTime().getRefTime();
String refTimeString = null;
synchronized (fileNameFormat) {
refTimeString = fileNameFormat.format(refTime);
}
sb.append(refTimeString);
sb.append(fileNameFormat.get().format(refTime));
sb.append(FORECAST_HR_TOKEN);
if (STATIC_PARAMETERS.contains(pdo.getModelInfo().getParameterAbbreviation())) {
if (STATIC_PARAMETERS.contains(pdo.getModelInfo()
.getParameterAbbreviation())) {
sb.append("000");
} else {
long number = pdo.getDataTime().getFcstTime() / SECONDS_PER_HOUR;
@ -119,10 +116,6 @@ public class GribPathProvider extends DefaultPathProvider {
}
public String formatTime(Date date) {
String retVal = null;
synchronized (fileNameFormat) {
retVal = fileNameFormat.format(date);
}
return retVal;
return fileNameFormat.get().format(date);
}
}

View file

@ -20,8 +20,6 @@
package com.raytheon.uf.common.dataplugin.radar;
import java.util.Date;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
@ -78,12 +76,8 @@ public class RadarPathProvider extends DefaultPathProvider {
sb.append(pluginName);
sb.append("-");
sb.append(pdo.getIcao());
Date refTime = pdo.getDataTime().getRefTime();
String refTimeString = null;
synchronized (fileNameFormat) {
refTimeString = fileNameFormat.format(refTime);
}
sb.append(refTimeString);
sb.append("-");
sb.append(pdo.getDataTime().toString().replaceAll(" ", "_"));
sb.append(".h5");
return sb.toString();

View file

@ -64,9 +64,14 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
private static final transient IUFStatusHandler statusHandler = UFStatus
.getHandler(DefaultPathProvider.class);
/** Note: Do not use this without synchronization */
public static final SimpleDateFormat fileNameFormat = new SimpleDateFormat(
"-yyyy-MM-dd-HH");
public static final ThreadLocal<SimpleDateFormat> fileNameFormat = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
SimpleDateFormat sdf = new SimpleDateFormat("-yyyy-MM-dd-HH");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
return sdf;
}
};
/**
* The list of keys used to construct the HDF5 directory path. These keys
@ -74,10 +79,6 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
*/
protected static ConcurrentHashMap<String, List<String>> keyMap = new ConcurrentHashMap<String, List<String>>();
static {
fileNameFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
}
private static DefaultPathProvider instance = new DefaultPathProvider();
public static DefaultPathProvider getInstance() {
@ -115,9 +116,9 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
// through to get the appropriate field
if (key.contains(".")) {
String[] subClasses = key.split("\\.");
for (int i = 0; i < subClasses.length; i++) {
for (String subClass : subClasses) {
property = PropertyUtils.getProperty(property,
subClasses[i]);
subClass);
}
}
@ -152,6 +153,7 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
return pathBuilder.toString();
}
@Override
public List<String> getKeyNames(String pluginName) {
if (pluginName == null) {
@ -257,23 +259,19 @@ public class DefaultPathProvider implements IHDFFilePathProvider {
Date refTime = ((PluginDataObject) persistable).getDataTime()
.getRefTime();
String refTimeString = null;
synchronized (fileNameFormat) {
refTimeString = fileNameFormat.format(refTime);
}
sb.append(refTimeString);
sb.append(fileNameFormat.get().format(refTime));
if (partition != null) {
sb.append("-");
sb.append(partition);
sb.append("-");
sb.append(partition);
}
sb.append(".h5");
return sb.toString();
}
if (partition == null) {
return pluginName + ".h5";
return pluginName + ".h5";
}
return pluginName + "-" + partition + ".h5";

View file

@ -89,7 +89,8 @@ public interface IDataStore extends ISerializableObject {
/**
* Delete a (set of) location(s), where a location is either a group or a
* dataset
* dataset. If all datasets have been deleted from a file, the file will be
* deleted also.
*
* @param location
* the full path to the group or dataset
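A minimal usage sketch of the behavior documented above (the file path and group name are hypothetical): deleting stored groups through an IDataStore; per the updated javadoc, removing a file's last dataset now removes the .h5 file itself.

import java.io.File;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;

public class DeleteGroupsSketch {
    public static void main(String[] args) {
        // hypothetical per-parm GFE file produced by the new layout
        File hdf5File = new File(
                "/awips2/edex/data/hdf5/gfe/OAX/Fcst/OAX_GRID__Fcst_20121012_T_SFC.h5");
        IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
        try {
            // group name follows the GfeUtil.getHDF5Group(parmId, timeRange) format
            dataStore.delete("T/SFC/2012_10_12_00--2012_10_12_01");
            // if that was the last dataset in the file, the file is deleted too
        } catch (Exception e) {
            // storage errors are handled the same way the DAOs do: log and continue
            e.printStackTrace();
        }
    }
}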

View file

@ -344,8 +344,8 @@ public abstract class PluginDao extends CoreDao {
* @param objects
* The objects to retrieve the HDF5 component for
* @param tileSet
* The tile set to retrieve. Any value less than or equal
* to zero returns the "base" data only.
* The tile set to retrieve. Any value less than or equal to zero
* returns the "base" data only.
* @return The HDF5 data records
* @throws StorageException
* If problems occur while interacting with HDF5 data stores
@ -361,7 +361,7 @@ public abstract class PluginDao extends CoreDao {
/* connect to the data store and retrieve the data */
IDataStore dataStore = getDataStore((IPersistable) obj);
boolean interpolated = DataStoreFactory.isInterpolated(tileSet);
if(!interpolated) {
if (!interpolated) {
tileSet = 0;
}
IDataRecord[] record = new IDataRecord[tileSet + 1];
@ -374,8 +374,8 @@ public abstract class PluginDao extends CoreDao {
DataStoreFactory.DEF_DATASET_NAME, Request.ALL);
// Now get the interpolated data, if any!
for (int tile = 1; tile < record.length; tile++) {
record[tile] = dataStore.retrieve(group,
String.valueOf(tile), Request.ALL);
record[tile] = dataStore.retrieve(group,
String.valueOf(tile), Request.ALL);
}
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
@ -883,6 +883,48 @@ public abstract class PluginDao extends CoreDao {
return DataStoreFactory.getDataStore(persistFile);
}
/**
* Takes a list of IPersistable objects and returns a map of IDataStore
* objects to the list of IPersistable objects stored in that data store.
*
* @param objs
* A list of IPersistable objects whose respective data stores
* should be resolved.
* @return A map of each data store to the IPersistable objects it contains.
*/
public Map<IDataStore, List<IPersistable>> getDataStoreMap(
List<IPersistable> objs) {
StringBuilder tmp = new StringBuilder(120);
Map<String, List<IPersistable>> fileMap = new HashMap<String, List<IPersistable>>();
// group objects by file
for (IPersistable obj : objs) {
tmp.setLength(0);
tmp.append(pathProvider.getHDFPath(this.pluginName, obj));
tmp.append(File.separatorChar);
tmp.append(pathProvider.getHDFFileName(this.pluginName, obj));
String path = tmp.toString();
List<IPersistable> objsInFile = fileMap.get(path);
if (objsInFile == null) {
objsInFile = new ArrayList<IPersistable>();
fileMap.put(path, objsInFile);
}
objsInFile.add(obj);
}
Map<IDataStore, List<IPersistable>> dataStoreMap = new HashMap<IDataStore, List<IPersistable>>(
(int) (fileMap.size() * 1.25) + 1);
for (Map.Entry<String, List<IPersistable>> entry : fileMap.entrySet()) {
dataStoreMap.put(
DataStoreFactory.getDataStore(new File(PLUGIN_HDF5_DIR
+ entry.getKey())), entry.getValue());
}
return dataStoreMap;
}
/**
* Gets a list of the distinct product keys for this plugin
*
@ -1005,7 +1047,7 @@ public abstract class PluginDao extends CoreDao {
results += pdos.size();
}
} while (idList != null && !idList.isEmpty());
} while ((idList != null) && !idList.isEmpty());
return results;
}
@ -1115,7 +1157,7 @@ public abstract class PluginDao extends CoreDao {
query.addOrder("insertTime", true);
query.setMaxResults(1);
List<Calendar> result = (List<Calendar>) this.queryByCriteria(query);
if (result == null || result.isEmpty()) {
if ((result == null) || result.isEmpty()) {
return null;
} else {
return result.get(0).getTime();
@ -1165,8 +1207,8 @@ public abstract class PluginDao extends CoreDao {
}
String[] keyTokens = productKey.trim().split(";");
for (int i = 0; i < keyTokens.length; i++) {
String[] constraintTokens = keyTokens[i].split("=");
for (String keyToken : keyTokens) {
String[] constraintTokens = keyToken.split("=");
constraintTokens[0] = constraintTokens[0].trim();
constraintTokens[1] = constraintTokens[1].trim();
params.add(constraintTokens);
@ -1288,7 +1330,7 @@ public abstract class PluginDao extends CoreDao {
SerializationException, IOException {
List<PersistableDataObject> pdos = getRecordsToArchive(insertStartTime,
insertEndTime);
if (pdos != null && pdos.size() > 0) {
if ((pdos != null) && (pdos.size() > 0)) {
// map of file to list of pdo
Map<String, List<PersistableDataObject>> pdoMap = new HashMap<String, List<PersistableDataObject>>();
if (pdos.get(0) instanceof IPersistable) {
@ -1316,19 +1358,13 @@ public abstract class PluginDao extends CoreDao {
PluginDataObject pluginDataObj = (PluginDataObject) pdo;
Date time = pluginDataObj.getDataTime()
.getRefTimeAsCalendar().getTime();
synchronized (DefaultPathProvider.fileNameFormat) {
timeString = DefaultPathProvider.fileNameFormat
.format(time);
}
timeString = DefaultPathProvider.fileNameFormat.get()
.format(time);
} else {
// no refTime to use bounded insert query bounds
Date time = insertStartTime.getTime();
synchronized (DefaultPathProvider.fileNameFormat) {
timeString = DefaultPathProvider.fileNameFormat
.format(time);
}
timeString = DefaultPathProvider.fileNameFormat.get()
.format(time);
}
String path = pluginName + timeString;
@ -1349,7 +1385,7 @@ public abstract class PluginDao extends CoreDao {
// remove .h5
int index = path.lastIndexOf('.');
if (index > 0 && path.length() - index < 5) {
if ((index > 0) && (path.length() - index < 5)) {
// ensure its end of string in case extension is
// dropped/changed
path = path.substring(0, index);

View file

@ -77,7 +77,7 @@ public class DefaultPluginArchiveFileNameFormatter implements
endTime);
Set<String> newFileEntries = new HashSet<String>();
if (pdos != null && !pdos.isEmpty()) {
if ((pdos != null) && !pdos.isEmpty()) {
if (pdos.get(0) instanceof IPersistable) {
IHDFFilePathProvider pathProvider = dao.pathProvider;
@ -104,19 +104,13 @@ public class DefaultPluginArchiveFileNameFormatter implements
PluginDataObject pluginDataObj = (PluginDataObject) pdo;
Date time = pluginDataObj.getDataTime()
.getRefTimeAsCalendar().getTime();
synchronized (DefaultPathProvider.fileNameFormat) {
timeString = DefaultPathProvider.fileNameFormat
.format(time);
}
timeString = DefaultPathProvider.fileNameFormat.get()
.format(time);
} else {
// no refTime to use bounded insert query bounds
Date time = startTime.getTime();
synchronized (DefaultPathProvider.fileNameFormat) {
timeString = DefaultPathProvider.fileNameFormat
.format(time);
}
timeString = DefaultPathProvider.fileNameFormat.get()
.format(time);
}
String path = pluginName + timeString;

View file

@ -135,10 +135,11 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
private double[] values;
public void setLevels(String parameter, double[] values) {
if (this != SPECIFIC)
if (this != SPECIFIC) {
throw new IllegalArgumentException(
"Can't specify specific levels for level + "
+ this.name());
}
this.parameter = parameter;
this.values = values;
@ -160,7 +161,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
};
private LinkedBlockingQueue<BeanMap> beanMapCache;
private final LinkedBlockingQueue<BeanMap> beanMapCache;
protected PointDataDbDescription dbDataDescription;
@ -227,7 +228,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
q.setString("dataURI",
(String) pdo.getIdentifier());
List<?> list = q.list();
if (list == null || list.size() == 0) {
if ((list == null) || (list.size() == 0)) {
ss.insert(pdo);
index++;
} else {
@ -277,7 +278,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
dupOccurred = false;
// only persist individually through one commit interval
while (itr.hasNext() && index / COMMIT_INTERVAL == 0) {
while (itr.hasNext() && (index / COMMIT_INTERVAL == 0)) {
try {
tx = ss.beginTransaction();
PersistableDataObject pdo = (PersistableDataObject) itr
@ -288,7 +289,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
q = ss.createSQLQuery(sql);
q.setString("dataURI", (String) pdo.getIdentifier());
List<?> list = q.list();
if (list == null || list.size() == 0) {
if ((list == null) || (list.size() == 0)) {
ss.insert(pdo);
tx.commit();
index++;
@ -436,10 +437,11 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
List<PersistableDataObject> persist = new ArrayList<PersistableDataObject>(
Arrays.asList(records));
persistAll(persist);
if (persist.size() != records.length)
if (persist.size() != records.length) {
return persist.toArray(new PluginDataObject[persist.size()]);
else
} else {
return records;
}
}
public File getFullFilePath(PluginDataObject p) {
@ -538,8 +540,8 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
pts[i] = new Point(indexes[i], 0);
}
dsRequest = Request.buildPointRequest(pts);
} else if (request == LevelRequest.ALL
|| request == LevelRequest.SPECIFIC) {
} else if ((request == LevelRequest.ALL)
|| (request == LevelRequest.SPECIFIC)) {
int[] copy = new int[indexes.length];
System.arraycopy(indexes, 0, copy, 0, indexes.length);
dsRequest = Request.buildYLineRequest(copy);
@ -566,7 +568,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
}
double[] vals = request.getValues();
if (vals == null || vals.length == 0) {
if ((vals == null) || (vals.length == 0)) {
throw new IllegalArgumentException(
"Specific level requested without values specified");
}
@ -670,7 +672,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
// went off the end of search. double check the other half of
// the array
boolean found = false;
search2: for (k = 0; k < originalPointer && k < iip.length; k++) {
search2: for (k = 0; (k < originalPointer) && (k < iip.length); k++) {
if (iip[k].index == retrievedIndexes[i]) {
correlatedIds[i] = iip[k].id;
break search2;
@ -706,19 +708,17 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
}
bm.putAll(obj);
T bean = (T) bm.getBean();
synchronized (DefaultPathProvider.fileNameFormat) {
return HDF5_DIR
+ File.separator
+ this.pluginName
+ File.separator
+ this.pathProvider.getHDFPath(this.pluginName,
(IPersistable) bean)
+ File.separator
+ getPointDataFileName(bean).replace(".h5", "")
+ DefaultPathProvider.fileNameFormat
.format(((PluginDataObject) bean).getDataTime()
.getRefTime()) + ".h5";
}
return HDF5_DIR
+ File.separator
+ this.pluginName
+ File.separator
+ this.pathProvider.getHDFPath(this.pluginName,
(IPersistable) bean)
+ File.separator
+ getPointDataFileName(bean).replace(".h5", "")
+ DefaultPathProvider.fileNameFormat.get().format(
((PluginDataObject) bean).getDataTime()
.getRefTime()) + ".h5";
} finally {
this.beanMapCache.offer(bm);
}
@ -737,11 +737,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
(T) persistable).replace(".h5", ""));
Date refTime = ((PluginDataObject) persistable).getDataTime()
.getRefTime();
String refTimeString = null;
synchronized (fileNameFormat) {
refTimeString = fileNameFormat.format(refTime);
}
tmp.append(refTimeString);
tmp.append(fileNameFormat.get().format(refTime));
tmp.append(".h5");
return tmp.toString();
}

View file

@ -20,32 +20,20 @@
package gov.noaa.nws.ncep.edex.uengine.tasks.profile;
import gov.noaa.nws.ncep.common.dataplugin.ncgrib.NcgribRecord;
import gov.noaa.nws.ncep.edex.plugin.ncgrib.dao.NcgribDao;
import java.awt.Point;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.raytheon.edex.uengine.tasks.ScriptTask;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.persist.IHDFFilePathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
/**
* PointIn task derived from original uEngine PointIn task. Reads a file in from
@ -58,18 +46,19 @@ import com.raytheon.uf.common.localization.IPathManager;
* Mar 29, 2007 njensen Initial Creation
* 03/28/2012 Chin Chen Add new APIs to support querying multiple points in one shot using
* dataStore.retrieveGroups()
*
* </PRE>
*
*/
public class PointIn {//extends ScriptTask {
public class PointIn {// extends ScriptTask {
private PluginDataObject dataRecord;
private final PluginDataObject dataRecord;
private PluginDao dao;
private int indX;
private int indY;
private final int indX;
private final int indY;
/**
* Constructor
@ -79,28 +68,30 @@ public class PointIn {//extends ScriptTask {
* @param aDataRecord
* the data record to read in
*/
public PointIn(String aPlugin, PluginDataObject aDataRecord, int xInd, int yInd) {
public PointIn(String aPlugin, PluginDataObject aDataRecord, int xInd,
int yInd) {
dataRecord = aDataRecord;
indX = xInd;
indY = yInd;
try {
dao = PluginFactory.getInstance().getPluginDao(aPlugin);
// dataRecord.getPluginName());
// dataRecord.getPluginName());
} catch (PluginException e) {
System.out.println("Unable to get " + dataRecord.getPluginName()
+ " dao");
+ " dao");
}
}
public PointIn(String aPlugin, PluginDataObject aDataRecord) {
dataRecord = aDataRecord;
indX = 0;
indY = 0;
try {
dao = PluginFactory.getInstance().getPluginDao(aPlugin);
// dataRecord.getPluginName());
// dataRecord.getPluginName());
} catch (PluginException e) {
System.out.println("Unable to get " + dataRecord.getPluginName()
+ " dao");
+ " dao");
}
}
@ -108,61 +99,54 @@ public class PointIn {//extends ScriptTask {
* (non-Javadoc)
*
* @see com.raytheon.edex.uengine.js.tasks.ScriptTask#execute()
*
@Override
public Object execute() throws PluginException {
IDataRecord record = getHDF5DataPointNew(dataRecord, indX, indY );
FloatDataRecord fdr = (FloatDataRecord)record;
return fdr.getFloatData()[0];
}*/
*
* @Override public Object execute() throws PluginException { IDataRecord
* record = getHDF5DataPointNew(dataRecord, indX, indY ); FloatDataRecord
* fdr = (FloatDataRecord)record; return fdr.getFloatData()[0]; }
*/
public float getPointData() throws PluginException {
return ((FloatDataRecord)getHDF5DataPoint(dataRecord, indX, indY )).getFloatData()[0];
return ((FloatDataRecord) getHDF5DataPoint(dataRecord, indX, indY))
.getFloatData()[0];
}
//public Object[] retrieveGroup() throws PluginException {
// return dao.getHDF5Data(dataRecord, -1);
//}
/*public IDataRecord getHDF5DataPoint(PluginDataObject object,
int xInd, int yInd) throws PluginException {
// public Object[] retrieveGroup() throws PluginException {
// return dao.getHDF5Data(dataRecord, -1);
// }
Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
IDataRecord[] record = null;
record = new IDataRecord[1];
/*
* public IDataRecord getHDF5DataPoint(PluginDataObject object, int xInd,
* int yInd) throws PluginException {
*
* Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
* IDataRecord[] record = null; record = new IDataRecord[1];
*
* if (object instanceof IPersistable) { // connect to the data store and
* retrieve the data //chin remove this line NcgribDao dao = new
* NcgribDao(); IDataStore dataStore = dao.getDataStore((IPersistable)
* object); try { record[0] = dataStore.retrieve(object.getDataURI(),
* "Data", pointRequest);
*
* } catch (Exception e) { throw new
* PluginException("Error getting HDF5 data", e); } } return record[0]; }
*/
public IDataRecord getHDF5DataPoint(PluginDataObject object, int xInd,
int yInd) throws PluginException {
if (object instanceof IPersistable) {
// connect to the data store and retrieve the data
//chin remove this line NcgribDao dao = new NcgribDao();
IDataStore dataStore = dao.getDataStore((IPersistable) object);
try {
record[0] = dataStore.retrieve(object.getDataURI(),
"Data", pointRequest);
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
}
}
return record[0];
}*/
public IDataRecord getHDF5DataPoint(PluginDataObject object,
int xInd, int yInd) throws PluginException {
Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd) );
Request pointRequest = Request.buildPointRequest(new Point(xInd, yInd));
IDataRecord[] dr = null;
//record = new IDataRecord[1];
// record = new IDataRecord[1];
if (object instanceof IPersistable) {
//chin remove this line NcgribDao dao = new NcgribDao();
IDataStore dataStore = dao.getDataStore((IPersistable) object);
// chin remove this line NcgribDao dao = new NcgribDao();
IDataStore dataStore = dao.getDataStore((IPersistable) object);
try {
String[] groups = new String[1];
groups[0] = object.getDataURI();
dr= dataStore.retrieveGroups(groups, pointRequest);
for (int k = 0; k < dr.length; k++) {
float[] data = (float[]) dr[k].getDataObject();
String[] groups = new String[1];
groups[0] = object.getDataURI();
dr = dataStore.retrieveGroups(groups, pointRequest);
for (IDataRecord element : dr) {
float[] data = (float[]) element.getDataObject();
}
} catch (Exception e) {
@@ -171,84 +155,109 @@ public class PointIn {//extends ScriptTask {
}
return dr[0];
}
/*
//from efficientRetirevePoint()
public float[] getHDF5GroupDataPoint(Object[] objects) throws PluginException {
float[] rval = new float[objects.length];
Request pointRequest = Request.buildPointRequest(new Point(indX, indY) );
IDataRecord[] dr = null;
//record = new IDataRecord[1];
if (objects[0] instanceof IPersistable) {
IDataStore dataStore = dao.getDataStore((IPersistable) objects[0]);
try {
String[] groups = new String[objects.length];
for(int i=0; i<objects.length; i++){
groups[i] = ((PluginDataObject)objects[i]).getDataURI();
}
dr= dataStore.retrieveGroups(groups, pointRequest);
for (int k = 0, index=0; k < dr.length; k++, index++) {
float[] data = (float[]) dr[k].getDataObject();
rval[index] = data[0];
}
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
}
}
return rval;
}
*/
* //from efficientRetirevePoint() public float[]
* getHDF5GroupDataPoint(Object[] objects) throws PluginException { float[]
* rval = new float[objects.length]; Request pointRequest =
* Request.buildPointRequest(new Point(indX, indY) ); IDataRecord[] dr =
* null; //record = new IDataRecord[1];
*
* if (objects[0] instanceof IPersistable) { IDataStore dataStore =
* dao.getDataStore((IPersistable) objects[0]); try { String[] groups = new
* String[objects.length]; for(int i=0; i<objects.length; i++){ groups[i] =
* ((PluginDataObject)objects[i]).getDataURI(); } dr=
* dataStore.retrieveGroups(groups, pointRequest); for (int k = 0, index=0;
* k < dr.length; k++, index++) { float[] data = (float[])
* dr[k].getDataObject(); rval[index] = data[0]; }
*
* } catch (Exception e) { throw new
* PluginException("Error getting HDF5 data", e); } } return rval; }
*/
/**
*
* This API is to query grid data for multiple Points and multiple parameters.
* Parameters can be same parameter but at different pressure level. They will be treated
* as different parameters.
* @param objects :parameters to be query
* @param points : query locations, they are index in a 2 dimensional grid (can not use
* lat/lon directly). Use PointUtil.determineIndex to convert from lat/lon
* to Point.
 * This API queries grid data for multiple points and multiple
 * parameters. The same parameter at different pressure levels is
 * treated as a distinct parameter. A usage sketch follows this method.
 *
 * @param objects
 *            parameters to be queried
 * @param points
 *            query locations, given as indices into the 2-dimensional
 *            grid (lat/lon cannot be used directly); use
 *            PointUtil.determineIndex to convert from lat/lon to Point.
*
*/
public List<float[]> getHDF5GroupDataPoints(Object[] objects, List<Point> points) throws PluginException {
List<float[]> rval = new ArrayList<float[]>();
Request pointRequest = (Request.buildPointRequest(points.toArray(new Point[points.size()])));
IDataRecord[] dr = null;
//record = new IDataRecord[1];
public List<float[]> getHDF5GroupDataPoints(Object[] objects,
List<Point> points) throws PluginException {
int pointsRequested = points.size();
List<float[]> rval = new ArrayList<float[]>(pointsRequested);
Request pointRequest = (Request.buildPointRequest(points
.toArray(new Point[pointsRequested])));
if (objects[0] instanceof IPersistable) {
/* connect to the data store and retrieve the data */
IDataStore dataStore = dao.getDataStore((IPersistable) objects[0]);
try {
String[] groups = new String[objects.length];
for(int i=0; i<objects.length; i++){
groups[i] = ((PluginDataObject)objects[i]).getDataURI();
}
dr= dataStore.retrieveGroups(groups, pointRequest);
int totalRec=0;
if( dr.length >0){
for(Point pt: points){
float[] ptData = new float[dr.length];
rval.add(ptData);
}
}
for (int k = 0, index=0; k < dr.length; k++, index++) {
float[] data = (float[]) dr[k].getDataObject();
//note; data.length should be the same as points.size()
//if(k==0)
// System.out.println("data[] szie="+data.length+ " parameter group size="+dr.length);
totalRec = totalRec + data.length;
for(int i=0; i< data.length; i++){
float[] pData = rval.get(i);
pData[k]= data[i];
}
}
System.out.println("total points = "+ points.size()+ " totalRec = "+totalRec);
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
List<IPersistable> objList = new ArrayList<IPersistable>(objects.length);
for (Object obj : objects) {
// shouldn't need to check every object, better to be safe
if (obj instanceof IPersistable) {
objList.add((IPersistable) obj);
}
}
// arbitrary list of IPersistable could be in any number of data stores
Map<IDataStore, List<IPersistable>> dataStoreMap = dao
.getDataStoreMap(objList);
int rvalIndex = 0;
int totalRec = 0;
try {
// list for data records retrieved
List<IDataRecord> dataRecords = new ArrayList<IDataRecord>(
objects.length);
for (Map.Entry<IDataStore, List<IPersistable>> entry : dataStoreMap
.entrySet()) {
/* connect to the data store and retrieve the data */
IDataStore dataStore = entry.getKey();
List<IPersistable> persistList = entry.getValue();
String[] groups = new String[persistList.size()];
int i = 0;
for (IPersistable persist : persistList) {
groups[i++] = ((PluginDataObject) persist).getDataURI();
}
// retrieve data from this data store
IDataRecord[] records = dataStore.retrieveGroups(groups,
pointRequest);
for (IDataRecord rec : records) {
dataRecords.add(rec);
}
}
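            // At this point dataRecords holds one IDataRecord per parameter,
            // each covering the requested points (assumed to come back in the
            // same order they were passed to buildPointRequest); the block
            // below transposes that into one float[] per point, indexed by
            // parameter.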
if (dataRecords.size() > 0) {
for (int i = 0; i < pointsRequested; i++) {
rval.add(new float[dataRecords.size()]);
}
int recordIndex = 0;
for (IDataRecord record : dataRecords) {
float[] data = (float[]) record.getDataObject();
                    // note: data.length should be the same as points.size()
                    // if (recordIndex == 0)
                    //     System.out.println("data[] size=" + data.length
                    //             + " parameter group size=" + dataRecords.size());
totalRec += data.length;
for (int pointIndex = 0; pointIndex < data.length; pointIndex++) {
float[] pData = rval.get(pointIndex);
pData[recordIndex] = data[pointIndex];
}
}
System.out.println("total points = " + points.size()
+ " totalRec = " + totalRec);
}
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
}
return rval;
}
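    /**
     * Usage sketch only, not part of this change: samples each supplied
     * record at two grid cells. The "grib" plugin name and the example grid
     * indices are assumptions made for illustration.
     */
    public static List<float[]> samplePoints(PluginDataObject[] records)
            throws PluginException {
        // one PointIn per plugin is enough; each record carries its own dataURI
        PointIn pointIn = new PointIn("grib", records[0]);
        List<Point> cells = new ArrayList<Point>();
        cells.add(new Point(10, 20));
        cells.add(new Point(11, 21));
        // returns one float[] per cell; entry i is the value of records[i]
        return pointIn.getHDF5GroupDataPoints(records, cells);
    }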
}
View file
@@ -317,22 +317,47 @@ class H5pyDataStore(IDataStore.IDataStore):
fn = request.getFilename()
f, lock = self.__openFile(fn, 'w')
resp = DeleteResponse()
resp.setSuccess(True)
resp.setSuccess(True)
deleteFile = False
try:
locs = request.getLocations()
for dataset in locs:
ds = self.__getGroup(f, dataset)
grp = ds.parent
grp.id.unlink(ds.name)
# check if file has any remaining data sets
# if no data sets, flag file for deletion
f.flush()
deleteFile = not self.__hasDataSet(f)
finally:
t0=time.time()
f.close()
t1=time.time()
timeMap['closeFile']=t1-t0
if deleteFile:
try:
os.remove(fn)
except Exception, e:
logger.error('Error occurred deleting file [' + str(fn) + ']: ' + IDataStore._exc())
LockManager.releaseLock(lock)
return resp
# recursively looks for data sets
def __hasDataSet(self, group):
for key in group.keys():
child=group[key]
if type(child) == h5py.highlevel.Dataset:
return True
elif type(child) == h5py.highlevel.Group:
if self.__hasDataSet(child):
return True
return False
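    # Usage sketch, not part of the class above: the same recursive walk run
    # against a plain h5py file handle; the 'sample.h5' path is an assumption
    # made for illustration.
    #
    #   import h5py
    #
    #   def hasDataSet(group):
    #       for key in group.keys():
    #           child = group[key]
    #           if type(child) == h5py.highlevel.Dataset:
    #               return True
    #           elif type(child) == h5py.highlevel.Group and hasDataSet(child):
    #               return True
    #       return False
    #
    #   f = h5py.File('sample.h5', 'r')
    #   print hasDataSet(f)   # False once every dataset has been unlinked
    #   f.close()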
def retrieve(self, request):
fn = request.getFilename()
@@ -638,7 +663,7 @@ class H5pyDataStore(IDataStore.IDataStore):
else:
try:
grp = f[name]
except KeyError:
except:
raise StorageException("No group " + name + " found")
t1=time.time()