Merge "Omaha #5208 Decode and store scale and offset for pointset data." into omaha_16.2.2

Former-commit-id: ec013899b059f9ce267d64455735668936249c18
Ben Steffensmeier 2016-01-25 16:28:12 -06:00 committed by Gerrit Code Review
commit e51377a1a6
7 changed files with 292 additions and 35 deletions

View file

@@ -29,13 +29,13 @@ package com.raytheon.uf.common.dataplugin.pointset;
  * SOFTWARE HISTORY
  *
  * Date          Ticket#  Engineer  Description
- * ------------- -------- --------- --------------------------
+ * ------------- -------- --------- --------------------
  * Aug 28, 2015  4709     bsteffen  Initial creation
+ * Jan 21, 2016  5208     bsteffen  Add scale and offset
  *
  * </pre>
  *
  * @author bsteffen
- * @version 1.0
  */
 public class PointSetConstants {
@@ -60,4 +60,8 @@ public class PointSetConstants {
     public static final String LEVEL_TWO = LEVEL + ".leveltwovalue";
 
+    public static final String ADD_OFFSET = "add_offset";
+
+    public static final String SCALE_FACTOR = "scale_factor";
+
 }
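
The two new keys follow the netCDF CF packing convention: a stored raw value maps back to a physical value as `raw * scale_factor + add_offset`. A minimal sketch of that relationship, using a hypothetical `unpack` helper that is not part of this commit:

```java
import java.nio.ShortBuffer;

public class PackedValueExample {

    /**
     * Recover a physical value from a packed one using the convention the new
     * ADD_OFFSET / SCALE_FACTOR keys describe. The helper and the idea of
     * unpacking on the read side are illustrative only; this commit just
     * records the attributes alongside the stored data.
     */
    static float unpack(short raw, Float scaleFactor, Float addOffset) {
        float value = raw;
        if (scaleFactor != null) {
            value *= scaleFactor; // scale_factor
        }
        if (addOffset != null) {
            value += addOffset;   // add_offset
        }
        return value;
    }

    public static void main(String[] args) {
        // e.g. temperatures packed into shorts with scale 0.01 and offset 273.15
        ShortBuffer packed = ShortBuffer.wrap(new short[] { 0, 150, -500 });
        for (int i = 0; i < packed.capacity(); i++) {
            System.out.println(unpack(packed.get(i), 0.01f, 273.15f));
        }
    }
}
```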

View file

@@ -0,0 +1,158 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.dataplugin.pointset;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;

import javax.measure.converter.AddConverter;
import javax.measure.converter.MultiplyConverter;
import javax.measure.converter.RationalConverter;
import javax.measure.converter.UnitConverter;

/**
 *
 * Store the raw data for pointset. The primary structure is a {@link Buffer}
 * containing numeric data for each point. This also has support for products
 * that use scale and offset values in addition to the raw numeric data (this
 * is for packing the data into bytes or shorts).
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date          Ticket#  Engineer  Description
 * ------------- -------- --------- --------------------------
 * Jan 20, 2016  5208     bsteffen  Initial creation
 *
 * </pre>
 *
 * @author bsteffen
 */
public class PointSetData {

    private Buffer data;

    private Float scale;

    private Float offset;

    public PointSetData() {
    }

    public PointSetData(Buffer data) {
        this.data = data;
    }

    public Buffer getData() {
        return data;
    }

    public void setData(Buffer data) {
        this.data = data;
    }

    public Float getScale() {
        return scale;
    }

    public void setScale(Float scale) {
        this.scale = scale;
    }

    public Float getOffset() {
        return offset;
    }

    public void setOffset(Float offset) {
        this.offset = offset;
    }

    /**
     * Converts the data using the specified converter. If there is a scale or
     * offset present and the converter is a simple add or multiply then it is
     * incorporated into the scale/offset and the data is left untouched, which
     * keeps packed data small. For all other cases the data is converted to
     * floats and the converter is applied to the data.
     *
     * @param converter
     */
    public void convert(UnitConverter converter) {
        if (data == null || converter == null
                || converter == UnitConverter.IDENTITY) {
            return;
        }
        if (scale != null
                && (converter instanceof MultiplyConverter || converter instanceof RationalConverter)) {
            this.scale = (float) converter.convert(this.scale);
            if (this.offset != null) {
                this.offset = (float) converter.convert(this.offset);
            }
        } else if (offset != null && converter instanceof AddConverter) {
            this.offset = (float) converter.convert(this.offset);
        } else {
            FloatBuffer buffer = null;
            if (this.data instanceof FloatBuffer) {
                buffer = (FloatBuffer) this.data;
            } else if (this.data instanceof ShortBuffer) {
                ShortBuffer sbuffer = (ShortBuffer) this.data;
                buffer = FloatBuffer.allocate(this.data.capacity());
                for (int i = 0; i < buffer.capacity(); i += 1) {
                    buffer.put(i, sbuffer.get(i));
                }
            } else if (this.data instanceof ByteBuffer) {
                ByteBuffer bbuffer = (ByteBuffer) this.data;
                buffer = FloatBuffer.allocate(this.data.capacity());
                for (int i = 0; i < buffer.capacity(); i += 1) {
                    buffer.put(i, bbuffer.get(i));
                }
            } else if (this.data instanceof IntBuffer) {
                IntBuffer ibuffer = (IntBuffer) this.data;
                buffer = FloatBuffer.allocate(this.data.capacity());
                for (int i = 0; i < buffer.capacity(); i += 1) {
                    buffer.put(i, ibuffer.get(i));
                }
            } else {
                throw new IllegalStateException("Unsupported buffer of type: "
                        + this.data.getClass().getSimpleName());
            }
            if (scale != null) {
                for (int i = 0; i < buffer.capacity(); i += 1) {
                    buffer.put(i, buffer.get(i) * scale);
                }
            }
            if (offset != null) {
                for (int i = 0; i < buffer.capacity(); i += 1) {
                    buffer.put(i, buffer.get(i) + offset);
                }
            }
            for (int i = 0; i < buffer.capacity(); i += 1) {
                buffer.put(i, (float) converter.convert(buffer.get(i)));
            }
            this.scale = null;
            this.offset = null;
            this.data = buffer;
        }
    }
}
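
The interesting behavior here is `convert()`: a pure multiply or add is folded into the scale/offset so packed bytes or shorts never have to be expanded to floats. A minimal usage sketch, assuming the public JSR-275 `MultiplyConverter(double)` constructor is available in the `javax.measure` version this plugin imports:

```java
import java.nio.ShortBuffer;

import javax.measure.converter.MultiplyConverter;

import com.raytheon.uf.common.dataplugin.pointset.PointSetData;

public class PointSetDataConvertExample {

    public static void main(String[] args) {
        // Packed shorts with a decode scale of 0.01 (hundredths of a unit).
        PointSetData data = new PointSetData(
                ShortBuffer.wrap(new short[] { 100, 250 }));
        data.setScale(0.01f);

        // A pure multiply (e.g. a 1000x unit change) is folded into the
        // scale, so the packed ShortBuffer is left untouched.
        data.convert(new MultiplyConverter(1000.0));

        System.out.println(data.getScale());           // 10.0
        System.out.println(data.getData().getClass()); // still a ShortBuffer

        // Any converter that is not a simple multiply or add would instead
        // expand the data to a FloatBuffer and convert it value by value.
    }
}
```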

View file

@@ -19,7 +19,6 @@
  **/
 package com.raytheon.uf.common.dataplugin.pointset;
 
-import java.nio.Buffer;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -53,6 +52,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  * Date          Ticket#  Engineer  Description
  * ------------- -------- --------- --------------------------
  * Aug 11, 2015  4709     bsteffen  Initial creation
+ * Jan 21, 2016  5208     bsteffen  Store data in new object.
  *
  * </pre>
  *
@@ -89,7 +89,8 @@ public class PointSetRecord extends PersistablePluginDataObject {
     @DynamicSerializeElement
     private String locationId;
 
-    private transient Buffer data;
+    private transient PointSetData data;
 
     public String getDatasetId() {
         return datasetId;
@@ -123,11 +124,11 @@ public class PointSetRecord extends PersistablePluginDataObject {
         this.locationId = locationId;
     }
 
-    public Buffer getData() {
+    public PointSetData getData() {
         return data;
     }
 
-    public void setData(Buffer data) {
+    public void setData(PointSetData data) {
         this.data = data;
     }
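
With this change the record no longer hands back a bare `Buffer`; callers go through the `PointSetData` wrapper, which may or may not carry packing metadata. A small sketch of populating a record under the new API (values are placeholders):

```java
import java.nio.FloatBuffer;

import com.raytheon.uf.common.dataplugin.pointset.PointSetData;
import com.raytheon.uf.common.dataplugin.pointset.PointSetRecord;

public class PointSetRecordDataExample {

    public static void main(String[] args) {
        PointSetRecord record = new PointSetRecord();

        // Unpacked float data: no scale/offset is set on the wrapper.
        record.setData(new PointSetData(
                FloatBuffer.wrap(new float[] { 1.5f, 2.5f })));

        // Packed byte/short data would instead set the wrapper's scale and
        // offset, which the dao later writes out as HDF5 data attributes.
        System.out.println(record.getData().getData().capacity()); // 2
    }
}
```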

View file

@@ -48,6 +48,7 @@ import ucar.nc2.NetcdfFile;
 import ucar.nc2.Variable;
 
 import com.raytheon.uf.common.dataplugin.level.LevelFactory;
+import com.raytheon.uf.common.dataplugin.pointset.PointSetData;
 import com.raytheon.uf.common.dataplugin.pointset.PointSetLocation;
 import com.raytheon.uf.common.dataplugin.pointset.PointSetRecord;
 import com.raytheon.uf.common.dataplugin.pointset.triangulate.DelauneyTriangulator;
@@ -56,7 +57,7 @@ import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.exception.LocalizationException;
-import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
+import com.raytheon.uf.edex.netcdf.decoder.util.NetcdfDecoderUtils;
 import com.raytheon.uf.edex.netcdf.description.VariableDescription;
 import com.raytheon.uf.edex.netcdf.description.exception.InvalidDescriptionException;
 import com.raytheon.uf.edex.plugin.pointset.netcdf.description.PointSetProductDescriptions;
@@ -79,8 +80,10 @@ import com.raytheon.uf.edex.plugin.pointset.netcdf.description.TriangulationDesc
  * SOFTWARE HISTORY
  *
  * Date          Ticket#  Engineer  Description
- * ------------- -------- --------- --------------------------
+ * ------------- -------- --------- --------------------------------------------
  * Aug 11, 2015  4709     bsteffen  Initial creation
+ * Jan 21, 2016  5208     bsteffen  Decode scale, offset, units, long_name when
+ *                                  they are present
  *
  * </pre>
  *
@@ -103,15 +106,10 @@ public class PointSetNetcdfDecoder {
     private LevelFactory levelFactory;
 
-    private ParameterLookup parameterLookup;
-
     public PointSetRecord[] decode(File file) {
         if (levelFactory == null) {
             levelFactory = LevelFactory.getInstance();
         }
-        if (parameterLookup == null) {
-            parameterLookup = ParameterLookup.getInstance();
-        }
         try {
             NetcdfFile netcdfFile = NetcdfFile.open(file.getAbsolutePath());
             Map<String, String> locationCache = new HashMap<String, String>();
@@ -244,7 +242,7 @@
                 }
             }
 
-            PointSetRecord record = description.getRecord(file, parameterLookup,
+            PointSetRecord record = description.getRecord(file,
                     levelFactory);
             if (record == null) {
                 if (debug) {
@@ -309,7 +307,27 @@
                     dataVarName, dataType);
             return null;
         }
-        record.setData(numericData);
+
+        Attribute longNameAttribute = dataVariable.findAttribute("long_name");
+        if (longNameAttribute != null) {
+            record.getParameter().setName(longNameAttribute.getStringValue());
+        }
+        Attribute unitsAttribute = dataVariable.findAttribute("units");
+        if (unitsAttribute != null) {
+            record.getParameter()
+                    .setUnitString(unitsAttribute.getStringValue());
+        }
+
+        PointSetData data = new PointSetData(numericData);
+        Number scale = NetcdfDecoderUtils.getScaleFactor(dataVariable);
+        if (scale != NetcdfDecoderUtils.DEFAULT_SCALE_FACTOR) {
+            data.setScale(scale.floatValue());
+        }
+        Number offset = NetcdfDecoderUtils.getAddOffset(dataVariable);
+        if (offset != NetcdfDecoderUtils.DEFAULT_ADD_OFFSET) {
+            data.setOffset(offset.floatValue());
+        }
+        record.setData(data);
 
         StringBuilder locationKeyBuilder = new StringBuilder();
         lonVariable.getNameAndDimensions(locationKeyBuilder);
@@ -377,19 +395,19 @@
         PointSetProductDescriptions descriptions = new PointSetProductDescriptions();
         for (LocalizationFile file : files) {
             logger.info("Loading pointset data description from "
-                    + file.getName());
+                    + file.getPath());
             try (InputStream inputStream = file.openInputStream()) {
                 PointSetProductDescriptions unmarshalled = JAXB.unmarshal(
                         inputStream, PointSetProductDescriptions.class);
                 for (ProductDescription description : unmarshalled
                         .getDescriptions()) {
-                    if (validate(file.getName(), description)) {
+                    if (validate(file.getPath(), description)) {
                         descriptions.addDescription(description);
                     }
                 }
             } catch (LocalizationException | IOException e) {
                 logger.error("Unable to load product descriptions from {}",
-                        file.getName(), e);
+                        file.getPath(), e);
             }
         }
         this.descriptions = descriptions;
@@ -403,10 +421,6 @@
         this.levelFactory = levelFactory;
     }
 
-    public void setParameterLookup(ParameterLookup parameterLookup) {
-        this.parameterLookup = parameterLookup;
-    }
-
     protected boolean validate(String fileName, ProductDescription description) {
         VariableDescription data = description.getData();
         if (data == null) {
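
The decode change above is where the CF-style metadata on the data variable gets picked up: `long_name` and `units` feed the record's Parameter, while `scale_factor` and `add_offset` go onto the new `PointSetData` (through the `NetcdfDecoderUtils` helpers and their defaults). For orientation, a rough sketch of the same attribute reads using the plain ucar.nc2 API, against a hypothetical file and variable name:

```java
import ucar.nc2.Attribute;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;

public class CfAttributeReadExample {

    public static void main(String[] args) throws Exception {
        // "some_pointset.nc" and "temperature" are placeholders.
        NetcdfFile nc = NetcdfFile.open("some_pointset.nc");
        try {
            Variable var = nc.findVariable("temperature");

            Attribute longName = var.findAttribute("long_name"); // display name
            Attribute units = var.findAttribute("units");        // unit string
            Attribute scale = var.findAttribute("scale_factor"); // packing scale
            Attribute offset = var.findAttribute("add_offset");  // packing offset

            System.out.println(longName == null ? "<none>" : longName.getStringValue());
            System.out.println(units == null ? "<none>" : units.getStringValue());
            System.out.println(scale == null ? 1.0 : scale.getNumericValue().doubleValue());
            System.out.println(offset == null ? 0.0 : offset.getNumericValue().doubleValue());
        } finally {
            nc.close();
        }
    }
}
```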

View file

@@ -30,7 +30,7 @@ import ucar.nc2.NetcdfFile;
 import com.raytheon.uf.common.dataplugin.level.Level;
 import com.raytheon.uf.common.dataplugin.level.LevelFactory;
 import com.raytheon.uf.common.dataplugin.pointset.PointSetRecord;
-import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
+import com.raytheon.uf.common.parameter.Parameter;
 import com.raytheon.uf.common.time.DataTime;
 import com.raytheon.uf.edex.netcdf.description.AbstractFieldDescription;
 import com.raytheon.uf.edex.netcdf.description.AttributeDescription;
@@ -52,8 +52,9 @@ import com.raytheon.uf.edex.plugin.pointset.netcdf.PointSetNetcdfDecoder;
  * SOFTWARE HISTORY
  *
  * Date          Ticket#  Engineer  Description
- * ------------- -------- --------- --------------------------
+ * ------------- -------- --------- ----------------------------------
  * Aug 11, 2015  4709     bsteffen  Initial creation
+ * Jan 21, 2016  5208     bsteffen  Move parameter persistence to dao
  *
  * </pre>
  *
@@ -183,15 +184,14 @@ public class ProductDescription {
      * Extract the datasetId, parameter, level and datatime from the file
      * using the attributes contained in this description.
      */
-    public PointSetRecord getRecord(NetcdfFile file,
-            ParameterLookup parameterLookup, LevelFactory levelFactory)
+    public PointSetRecord getRecord(NetcdfFile file, LevelFactory levelFactory)
             throws InvalidDescriptionException {
         String datasetId = this.datasetId.getString(file);
         if (datasetId == null) {
             return null;
         }
-        String parameter = this.parameter.getString(file);
-        if (parameter == null) {
+        String parameterAbbrev = this.parameter.getString(file);
+        if (parameterAbbrev == null) {
             return null;
         }
         Level level = this.level.getLevel(file, levelFactory);
@@ -204,7 +204,7 @@
         }
         PointSetRecord record = new PointSetRecord();
         record.setDatasetId(datasetId);
-        record.setParameter(parameterLookup.getParameter(parameter));
+        record.setParameter(new Parameter(parameterAbbrev));
         record.setLevel(level);
         record.setDataTime(dataTime);
         return record;

View file

@@ -8,4 +8,6 @@ Bundle-RequiredExecutionEnvironment: JavaSE-1.7
 Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.14.0",
  com.raytheon.uf.common.dataplugin.pointset;bundle-version="1.15.0",
  com.raytheon.uf.common.datastorage;bundle-version="1.15.0",
- com.raytheon.uf.edex.database;bundle-version="1.15.0"
+ com.raytheon.uf.edex.database;bundle-version="1.15.0",
+ javax.measure;bundle-version="1.0.0",
+ com.raytheon.uf.common.status;bundle-version="1.15.0"

View file

@@ -20,12 +20,25 @@
 package com.raytheon.uf.edex.plugin.pointset;
 
 import java.nio.Buffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
+import javax.measure.converter.UnitConverter;
+
+import com.raytheon.uf.common.dataplugin.PluginDataObject;
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.persist.IPersistable;
+import com.raytheon.uf.common.dataplugin.pointset.PointSetConstants;
+import com.raytheon.uf.common.dataplugin.pointset.PointSetData;
 import com.raytheon.uf.common.dataplugin.pointset.PointSetRecord;
 import com.raytheon.uf.common.datastorage.DataStoreFactory;
 import com.raytheon.uf.common.datastorage.IDataStore;
+import com.raytheon.uf.common.datastorage.StorageStatus;
+import com.raytheon.uf.common.datastorage.records.IDataRecord;
+import com.raytheon.uf.common.parameter.Parameter;
+import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
 import com.raytheon.uf.edex.database.plugin.PluginDao;
 
 /**
@@ -37,8 +50,9 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
  * SOFTWARE HISTORY
  *
  * Date          Ticket#  Engineer  Description
- * ------------- -------- --------- --------------------------
+ * ------------- -------- --------- -----------------------
  * Aug 11, 2015  4709     bsteffen  Initial creation
+ * Jan 21, 2016  5208     bsteffen  Store scale and offset
  *
  * </pre>
  *
@@ -54,14 +68,78 @@ public class PointSetDao extends PluginDao {
         super("pointset");
     }
 
+    @Override
+    public PluginDataObject[] persistToDatabase(PluginDataObject... records) {
+        return super.persistToDatabase(verifyRecords(records));
+    }
+
+    @Override
+    public StorageStatus persistToHDF5(PluginDataObject... records)
+            throws PluginException {
+        return super.persistToHDF5(verifyRecords(records));
+    }
+
+    private PluginDataObject[] verifyRecords(PluginDataObject... records) {
+        List<PluginDataObject> toPersist = new ArrayList<PluginDataObject>(
+                records.length);
+        for (PluginDataObject record : records) {
+            PointSetRecord rec = (PointSetRecord) record;
+            if (validateParameter(rec)) {
+                toPersist.add(rec);
+            }
+        }
+        return toPersist.toArray(new PointSetRecord[toPersist.size()]);
+    }
+
+    protected boolean validateParameter(PointSetRecord record) {
+        Parameter parameter = record.getParameter();
+        boolean result = true;
+        if (parameter == null) {
+            result = false;
+        } else if (parameter.getName() == null) {
+            result = false;
+        } else if (parameter.getName().equals("Missing")) {
+            result = false;
+        } else {
+            Parameter dbParameter = ParameterLookup.getInstance().getParameter(
+                    parameter, true);
+            if (!parameter.equals(dbParameter)) {
+                UnitConverter converter = Parameter.compareUnits(parameter,
+                        dbParameter);
+                record.getData().convert(converter);
+            }
+            record.setParameter(dbParameter);
+        }
+        if (!result) {
+            logger.info("Discarding record due to missing or unknown parameter mapping: "
+                    + record);
+        }
+        return result;
+    }
+
     @Override
     protected IDataStore populateDataStore(IDataStore dataStore,
             IPersistable obj) throws Exception {
         if (obj instanceof PointSetRecord) {
             PointSetRecord points = (PointSetRecord) obj;
-            Buffer data = points.getData();
-            dataStore.addDataRecord(DataStoreFactory.createStorageRecord(
-                    "Data", points.getDataURI(), data.array()));
+            PointSetData data = points.getData();
+            Buffer buffer = data.getData();
+            IDataRecord dataRecord = DataStoreFactory.createStorageRecord(
+                    "Data", points.getDataURI(), buffer.array());
+            if (data.getOffset() != null || data.getScale() != null) {
+                Map<String, Object> dataAttributes = new HashMap<>();
+                if (data.getOffset() != null) {
+                    dataAttributes.put(PointSetConstants.ADD_OFFSET,
+                            data.getOffset());
+                }
+                if (data.getScale() != null) {
+                    dataAttributes.put(PointSetConstants.SCALE_FACTOR,
+                            data.getScale());
+                }
+                dataRecord.setDataAttributes(dataAttributes);
+            }
+            dataStore.addDataRecord(dataRecord);
         } else if (obj != null) {
             throw new IllegalArgumentException("Cannot handle "
                     + obj.getClass().getSimpleName());
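
Taken together, the dao now does two things before storage: `validateParameter` reconciles the decoded Parameter with the database entry, converting the data (or just its scale/offset) when the units differ, and `populateDataStore` records any remaining scale/offset as data attributes on the stored "Data" record. A rough sketch of how a retrieval-side consumer could recover physical values from those attributes; it assumes a `getDataAttributes()` accessor mirroring the setter used above and a short[] payload, both of which are assumptions for illustration rather than part of this commit:

```java
import java.util.Map;

import com.raytheon.uf.common.dataplugin.pointset.PointSetConstants;
import com.raytheon.uf.common.datastorage.records.IDataRecord;

public class StoredPointSetUnpackExample {

    /**
     * Recover physical values from a retrieved "Data" record. The
     * getDataAttributes() call and the short[] payload are assumed here for
     * illustration; only the attribute keys come from this commit.
     */
    static float[] unpack(IDataRecord dataRecord, short[] raw) {
        float scale = 1.0f;
        float offset = 0.0f;
        Map<String, Object> attrs = dataRecord.getDataAttributes();
        if (attrs != null) {
            Number s = (Number) attrs.get(PointSetConstants.SCALE_FACTOR);
            Number o = (Number) attrs.get(PointSetConstants.ADD_OFFSET);
            if (s != null) {
                scale = s.floatValue();
            }
            if (o != null) {
                offset = o.floatValue();
            }
        }
        float[] values = new float[raw.length];
        for (int i = 0; i < raw.length; i++) {
            values[i] = raw[i] * scale + offset;
        }
        return values;
    }
}
```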