Issue #2667 added bin lightning data access factory

Renamed the lightning record's lightSource field to source, added a delta script for the db column change, and added attribute method comments for IData and DefaultGeometryData.
Parent: 4f4038d22f
Commit: e2c863a88d

10 changed files with 427 additions and 21 deletions
@@ -93,6 +93,7 @@ import com.raytheon.uf.viz.core.rsc.capabilities.MagnificationCapability;
  * Sep 4, 2012  15335      kshresth    Will now display lightning/wind
  *                                     fields when magnification set to 0
  * Feb 27, 2013 DCS 152    jgerth/elau Support for WWLLN and multiple sources
+ * Jan 21, 2014 2667       bclement    renamed record's lightSource field to source
  *
  * </pre>
  *
@@ -487,7 +488,7 @@ public class LightningResource extends
         Map<DataTime, List<BinLightningRecord>> recordMap = new HashMap<DataTime, List<BinLightningRecord>>();

         for (BinLightningRecord obj : objs) {
-            if (obj.getLightSource().equals(this.lightSource) || this.lightSource.isEmpty()) {
+            if (obj.getSource().equals(this.lightSource) || this.lightSource.isEmpty()) {
                 DataTime time = new DataTime(obj.getStartTime());
                 DataTime end = new DataTime(obj.getStopTime());
                 time = this.getResourceData().getBinOffset()
deltaScripts/14.3.1/renameLightningSource.sh (new executable file, 9 additions)

@@ -0,0 +1,9 @@
#!/bin/bash
# DR #2667 Add binlightning support to Data Access Framework

PSQL="/awips2/psql/bin/psql"

SQL_COMMAND="
ALTER TABLE binlightning RENAME COLUMN lightsource TO source;
"
${PSQL} -U awips -d metadata -c "${SQL_COMMAND}"
@@ -36,6 +36,7 @@ import com.raytheon.uf.common.time.DataTime;
  * ------------ ---------- ----------- --------------------------
  * Oct 10, 2012            njensen     Initial creation
  * Jun 03, 2013 #2023      dgilling    Add getAttributes().
+ * Jan 21, 2014 2667       bclement    attribute method comments
  *
  * </pre>
  *
@@ -46,7 +47,8 @@ import com.raytheon.uf.common.time.DataTime;
 public interface IData {

     /**
-     * Gets an attribute of the data based on the key.
+     * Gets an attribute of the data based on the key. Attributes are metadata
+     * providing additional information on the dataset.
      *
      * @param key
      * @return the attribute
@@ -54,7 +56,8 @@ public interface IData {
     public Object getAttribute(String key);

     /**
-     * Gets the list of attributes associated with this data.
+     * Gets the list of attributes associated with this data. Attributes are
+     * metadata providing additional information on the dataset.
      *
      * @return the attributes
      */
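The expanded javadoc clarifies that attributes are dataset-level metadata rather than per-parameter values. As a minimal sketch (class and variable names are illustrative, not part of the commit), a caller might read the attribute that the new binlightning factory attaches to each element:

import com.raytheon.uf.common.dataaccess.geom.IGeometryData;

public class LightningAttributeExample {
    /**
     * Print the "source" attribute of each returned element. The binlightning
     * factory added in this commit stores the lightning source under that key.
     */
    public static void printSources(IGeometryData[] results) {
        for (IGeometryData element : results) {
            Object source = element.getAttribute("source");
            System.out.println(element.getDataTime() + " source=" + source);
        }
    }
}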
@@ -43,6 +43,7 @@ import com.vividsolutions.jts.geom.Geometry;
  * ------------ ---------- ----------- --------------------------
  * Nov 09, 2012            njensen     Initial creation
  * Jun 03, 2013 #2023      dgilling    Implement getAttributes().
+ * Jan 21, 2014 2667       bclement    attribute method comments
  *
  * </pre>
  *
@@ -277,6 +278,13 @@ public class DefaultGeometryData implements IGeometryData {
         this.dataMap.put(parameter, data);
     }

+    /**
+     * Add a key/value pair to the attributes map. Attributes are metadata
+     * providing additional information on the dataset.
+     *
+     * @param key
+     * @param value
+     */
     public void addAttribute(String key, Object value) {
         attributes.put(key, value);
     }
@@ -297,6 +305,12 @@ public class DefaultGeometryData implements IGeometryData {
         this.locationName = locationName;
     }

+    /**
+     * Replace the attribute map with attrs. Attributes are metadata providing
+     * additional information on the dataset.
+     *
+     * @param attrs
+     */
     public void setAttributes(Map<String, Object> attrs) {
         this.attributes = attrs;
     }
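The two new javadoc blocks describe complementary mutators: addAttribute adds a single key/value pair, while setAttributes swaps in an entire map. A small illustrative sketch (the attribute values are examples only):

import java.util.HashMap;
import java.util.Map;

import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;

public class GeometryAttributeExample {
    public static DefaultGeometryData build() {
        DefaultGeometryData data = new DefaultGeometryData();
        // add one metadata entry to the existing attribute map
        data.addAttribute("source", "NLDN");

        // or replace the whole attribute map at once
        Map<String, Object> attrs = new HashMap<String, Object>();
        attrs.put("source", "WWLLN");
        data.setAttributes(attrs);
        return data;
    }
}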
@@ -53,6 +53,7 @@ import com.raytheon.uf.common.localization.IPathManager;
  * ------------ ---------- ----------- --------------------------
  * Jan 03, 2013            bkowal      Initial creation
  * Jan 31, 2013 #1555      bkowal      Made hdf5 variable generic
+ * Jan 21, 2014 2667       bclement    added getHDF5File method
  *
  * </pre>
  *
@@ -67,7 +68,13 @@ public final class PDOUtil {
     private PDOUtil() {
     }

-    public static IDataStore getDataStore(PluginDataObject pdo) {
+    /**
+     * Get the hdf5 file for the data object
+     *
+     * @param pdo
+     * @return
+     */
+    public static File getHDF5File(PluginDataObject pdo){
         final String pluginName = pdo.getPluginName();
         final IPersistable persistable = (IPersistable) pdo;

@@ -76,10 +83,19 @@ public final class PDOUtil {
                 persistable);
         String hdf5File = pathProvider.getHDFFileName(pluginName,
                 persistable);
-        File file = new File(pluginName + IPathManager.SEPARATOR
+        return new File(pluginName + IPathManager.SEPARATOR
                 + hdf5Path + IPathManager.SEPARATOR
                 + hdf5File);
-        return DataStoreFactory.getDataStore(file);
     }
+
+    /**
+     * Get the datastore for the data object
+     *
+     * @param pdo
+     * @return
+     */
+    public static IDataStore getDataStore(PluginDataObject pdo) {
+        return DataStoreFactory.getDataStore(getHDF5File(pdo));
+    }

     /**
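The refactor splits path resolution out of getDataStore so callers can obtain the backing HDF5 File directly, which the new lightning factory uses to group records by file. A hedged usage sketch (class and method names outside PDOUtil are illustrative):

import java.io.File;

import com.raytheon.uf.common.dataaccess.util.PDOUtil;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.datastorage.IDataStore;

public class PdoUtilExample {
    /**
     * Resolve the HDF5 file backing a record, then open its datastore.
     * getDataStore(pdo) is now a thin wrapper around getHDF5File(pdo).
     */
    public static IDataStore open(PluginDataObject pdo) {
        File hdf5File = PDOUtil.getHDF5File(pdo); // new in this commit
        System.out.println(pdo.getDataURI() + " is stored in " + hdf5File);
        return PDOUtil.getDataStore(pdo);
    }
}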
@@ -7,12 +7,17 @@ Bundle-Vendor: RAYTHEON
 Eclipse-RegisterBuddy: com.raytheon.uf.common.serialization
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
 Export-Package: com.raytheon.uf.common.dataplugin.binlightning,
+ com.raytheon.uf.common.dataplugin.binlightning.dataaccess,
  com.raytheon.uf.common.dataplugin.binlightning.impl
 Require-Bundle: com.raytheon.uf.common.serialization,
  org.geotools,
  javax.persistence,
  javax.measure,
  com.raytheon.uf.common.dataplugin,
- com.raytheon.uf.common.datastorage
-Import-Package: com.raytheon.uf.edex.decodertools.core,
+ com.raytheon.uf.common.datastorage,
+ com.raytheon.uf.common.dataaccess;bundle-version="1.0.0"
+Import-Package: com.raytheon.uf.common.dataquery.requests,
+ com.raytheon.uf.common.dataquery.responses,
+ com.raytheon.uf.common.status,
+ com.raytheon.uf.edex.decodertools.core,
  com.raytheon.uf.edex.decodertools.time
@@ -1,4 +1,5 @@
 source.. = src/
 output.. = bin/
 bin.includes = META-INF/,\
-               .
+               .,\
+               res/
@@ -0,0 +1,12 @@
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">

    <bean id="binLightningDataAccessFactory" class="com.raytheon.uf.common.dataplugin.binlightning.dataaccess.BinLightingAccessFactory" />

    <bean factory-bean="dataAccessRegistry" factory-method="register">
        <constructor-arg value="binlightning"/>
        <constructor-arg ref="binLightningDataAccessFactory"/>
    </bean>

</beans>
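The Spring fragment registers the new factory with the shared dataAccessRegistry bean under the "binlightning" datatype. The wiring amounts to a single register(datatype, factory) call; the sketch below mirrors it against a hypothetical registry interface, since the registry's concrete type is not shown in this diff:

import com.raytheon.uf.common.dataplugin.binlightning.dataaccess.BinLightingAccessFactory;

public class RegisterBinLightningFactory {

    /** Hypothetical stand-in for whatever type the dataAccessRegistry bean exposes. */
    public interface Registry {
        Object register(String datatype, Object factory);
    }

    // Equivalent of the <bean factory-bean="dataAccessRegistry" factory-method="register"> wiring
    public static void wire(Registry dataAccessRegistry) {
        dataAccessRegistry.register("binlightning", new BinLightingAccessFactory());
    }
}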
@@ -79,6 +79,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
  *                                     PluginDataObject.
  * Aug 30, 2013 2298       rjpeter     Make getPluginName abstract
  * Oct 22, 2013 2361       njensen     Removed XML annotations
+ * Jan 21, 2014 2667       bclement    renamed record's lightSource field to source
  *
  * </pre>
  *
@@ -157,7 +158,7 @@ public class BinLightningRecord extends PersistablePluginDataObject implements
     @Column(length = 5)
     @DataURI(position = 3)
     @DynamicSerializeElement
-    private String lightSource;
+    private String source;

     // Used to track
     @Transient
@@ -231,19 +232,19 @@ public class BinLightningRecord extends PersistablePluginDataObject implements
      */
     public void addStrike(LightningStrikePoint strike) {
         // jjg add
-        if (lightSource == null) {
+        if (source == null) {
             if (strike.getLightSource() == null) {
-                lightSource = "NLDN";
+                source = "NLDN";
             } else if (strike.getLightSource().isEmpty()) {
-                lightSource = "UNKN";
+                source = "UNKN";
             } else {
-                lightSource = strike.getLightSource();
+                source = strike.getLightSource();
             }
         } else {
             if (strike.getLightSource() == null) {
-                lightSource = "NLDN";
-            } else if (!lightSource.equals(strike.getLightSource())) {
-                lightSource = "UNKN";
+                source = "NLDN";
+            } else if (!source.equals(strike.getLightSource())) {
+                source = "UNKN";
             }
         }
         // end
@@ -426,8 +427,8 @@ public class BinLightningRecord extends PersistablePluginDataObject implements
      *
      * @return
      */
-    public String getLightSource() {
-        return lightSource;
+    public String getSource() {
+        return source;
     }

     /**
@@ -435,8 +436,8 @@ public class BinLightningRecord extends PersistablePluginDataObject implements
      *
      * @param lightSource
      */
-    public void setLightSource(String lightSource) {
-        this.lightSource = lightSource;
+    public void setSource(String lightSource) {
+        this.source = lightSource;
     }

     /**
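Beyond the rename, addStrike keeps the record's source consistent across strikes: a null strike source defaults to NLDN, an empty one to UNKN, and mixed sources collapse to UNKN. A standalone sketch of those rules, separated from LightningStrikePoint purely for illustration:

public class LightningSourceDefaulting {
    /**
     * Mirrors the defaulting rules in BinLightningRecord.addStrike():
     * recordSource is the value accumulated so far (null for a new record),
     * strikeSource is the source reported by the incoming strike.
     */
    public static String nextSource(String recordSource, String strikeSource) {
        if (recordSource == null) {
            if (strikeSource == null) {
                return "NLDN";
            } else if (strikeSource.isEmpty()) {
                return "UNKN";
            } else {
                return strikeSource;
            }
        }
        if (strikeSource == null) {
            return "NLDN";
        } else if (!recordSource.equals(strikeSource)) {
            return "UNKN";
        }
        return recordSource;
    }
}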
@@ -0,0 +1,344 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.dataplugin.binlightning.dataaccess;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.exception.IncompatibleRequestException;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData.Type;
import com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory;
import com.raytheon.uf.common.dataaccess.impl.DefaultGeometryData;
import com.raytheon.uf.common.dataaccess.util.PDOUtil;
import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
import com.raytheon.uf.common.dataquery.responses.DbQueryResponse;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.IntegerDataRecord;
import com.raytheon.uf.common.datastorage.records.LongDataRecord;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.time.DataTime;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;

/**
 * Data access framework factory for bin lightning
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jan 21, 2014 2667       bclement    Initial creation
 *
 * </pre>
 *
 * @author bclement
 * @version 1.0
 */
public class BinLightingAccessFactory extends AbstractDataPluginFactory {

    private static final String sourceKey = "source";

    private static final IUFStatusHandler log = UFStatus
            .getHandler(BinLightningRecord.class);

    private static final GeometryFactory geomFactory = new GeometryFactory();

    private static final String timeKey = "obsTime";

    private static final String latKey = "latitude";

    private static final String lonKey = "longitude";

    private static final String[] requiredKeys = { timeKey, latKey, lonKey };

    /*
     * (non-Javadoc)
     *
     * @see
     * com.raytheon.uf.common.dataaccess.IDataFactory#getAvailableLocationNames
     * (com.raytheon.uf.common.dataaccess.IDataRequest)
     */
    @Override
    public String[] getAvailableLocationNames(IDataRequest request) {
        throw new IncompatibleRequestException(this.getClass()
                + " does not support location names");
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.dataaccess.impl.AbstractDataFactory#
     * getRequiredIdentifiers()
     */
    @Override
    public String[] getRequiredIdentifiers() {
        return new String[] { sourceKey };
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.dataaccess.impl.AbstractDataFactory#
     * getValidIdentifiers()
     */
    @Override
    public String[] getValidIdentifiers() {
        return new String[] { sourceKey };
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory#
     * buildConstraintsFromRequest
     * (com.raytheon.uf.common.dataaccess.IDataRequest)
     */
    @Override
    protected Map<String, RequestConstraint> buildConstraintsFromRequest(
            IDataRequest request) {
        Map<String, RequestConstraint> rcMap = new HashMap<String, RequestConstraint>();

        Map<String, Object> identifiers = request.getIdentifiers();
        if (identifiers != null) {
            for (Entry<String, Object> entry : identifiers.entrySet()) {
                rcMap.put(entry.getKey(), new RequestConstraint(entry
                        .getValue().toString()));
            }
        }
        return rcMap;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.raytheon.uf.common.dataaccess.impl.AbstractDataPluginFactory#
     * getGeometryData(com.raytheon.uf.common.dataaccess.IDataRequest,
     * com.raytheon.uf.common.dataquery.responses.DbQueryResponse)
     */
    @Override
    protected IGeometryData[] getGeometryData(IDataRequest request,
            DbQueryResponse dbQueryResponse) {
        Map<File, List<BinLightningRecord>> results = unpackResults(dbQueryResponse);

        List<IGeometryData> rval = new ArrayList<IGeometryData>();
        for (Entry<File, List<BinLightningRecord>> resultEntry : results
                .entrySet()) {
            Map<String, List<String>> srcDatasets = getSourceDatasets(request,
                    resultEntry.getValue());
            IDataStore ds = DataStoreFactory.getDataStore(resultEntry.getKey());
            for (Entry<String, List<String>> groupEntry : srcDatasets
                    .entrySet()) {
                addGeometryData(rval, ds, groupEntry.getKey(),
                        groupEntry.getValue());
            }
        }
        return rval.toArray(new IGeometryData[rval.size()]);
    }

    /**
     * Add geometry data elements to dataList from data store
     *
     * @param dataList
     *            target result list
     * @param ds
     *            datastore
     * @param source
     *            lightning source value from metadata
     * @param datasets
     *            requested datasets from datastore
     */
    private void addGeometryData(List<IGeometryData> dataList, IDataStore ds,
            String source, List<String> datasets) {
        // Go fetch data
        try {
            IDataRecord[] records = ds.retrieveDatasets(
                    datasets.toArray(new String[datasets.size()]), Request.ALL);

            Map<String, List<IDataRecord>> recordMap = new HashMap<String, List<IDataRecord>>();
            // Throw in a map for easy accessibility
            for (IDataRecord rec : records) {
                List<IDataRecord> recordList = recordMap.get(rec.getName());
                if (recordList == null) {
                    recordList = new ArrayList<IDataRecord>();
                    recordMap.put(rec.getName(), recordList);
                }
                recordList.add(rec);
            }

            // remove required records from map so they won't be used again when
            // we look for optional records
            List<IDataRecord> times = recordMap.remove(timeKey);
            List<IDataRecord> lats = recordMap.remove(latKey);
            List<IDataRecord> lons = recordMap.remove(lonKey);

            int k = 0;
            for (IDataRecord timeRec : times) {
                LongDataRecord time = (LongDataRecord) timeRec;

                long[] timeData = time.getLongData();
                float[] latitudeData = ((FloatDataRecord) lats.get(k))
                        .getFloatData();
                float[] longitudeData = ((FloatDataRecord) lons.get(k))
                        .getFloatData();

                for (int i = 0; i < timeData.length; i++) {
                    DataTime dt = new DataTime(new Date(timeData[i]));
                    DefaultGeometryData data = new DefaultGeometryData();
                    data.setDataTime(dt);
                    data.addAttribute(sourceKey, source);
                    data.setGeometry(geomFactory.createPoint(new Coordinate(
                            longitudeData[i], latitudeData[i])));
                    // add the optional parameter records
                    addParameterData(data, recordMap, k, i);
                    dataList.add(data);
                }
                k++;
            }
        } catch (StorageException e) {
            log.error("Storage error retrieving lightning data", e);
        } catch (FileNotFoundException e) {
            log.error("Unable to open lightning file", e);
        }
    }

    /**
     * Add parameters from record map to data
     *
     * @param data
     *            target geometry data
     * @param recordMap
     *            map of parameter names to list of data records
     * @param recordIndex
     *            index into list of data records
     * @param valueIndex
     *            index into the target data record's value array
     */
    private void addParameterData(DefaultGeometryData data,
            Map<String, List<IDataRecord>> recordMap, int recordIndex,
            int valueIndex) {
        for (Entry<String, List<IDataRecord>> entry : recordMap.entrySet()) {
            String parameterName = entry.getKey();
            IDataRecord record = entry.getValue().get(recordIndex);
            if (record instanceof IntegerDataRecord) {
                int value = ((IntegerDataRecord) record).getIntData()[valueIndex];
                data.addData(parameterName, value, Type.INT);
            } else if (record instanceof ByteDataRecord) {
                int value = ((ByteDataRecord) record).getByteData()[valueIndex];
                data.addData(parameterName, value, Type.INT);
            } else {
                // lightning only uses ints and bytes, we can add support for
                // more types if needed
                log.warn("Unsupported parameter record type for lightning: "
                        + record.getClass());
            }
        }
    }

    /**
     * Return mapping of lightning data source to list of datasets
     *
     * @param recList
     * @return
     */
    private Map<String, List<String>> getSourceDatasets(IDataRequest request,
            List<BinLightningRecord> recList) {
        List<String> includedDatasets = getIncludedDatasets(request);

        Map<String, List<String>> rval = new HashMap<String, List<String>>();
        for (BinLightningRecord record : recList) {
            String src = record.getSource();
            List<String> groups = rval.get(src);
            if (groups == null) {
                groups = new ArrayList<String>();
                rval.put(src, groups);
            }
            for (String dataset : includedDatasets) {
                groups.add(record.getDataURI() + DataStoreFactory.DEF_SEPARATOR
                        + dataset);
            }
        }
        return rval;
    }

    /**
     * Get a list of HDF5 datasets to request
     *
     * @param request
     * @return
     */
    private List<String> getIncludedDatasets(IDataRequest request){
        String[] parameters = request.getParameters();
        List<String> rval = new ArrayList<String>(parameters.length
                + requiredKeys.length);
        rval.addAll(Arrays.asList(requiredKeys));
        rval.addAll(Arrays.asList(parameters));
        return rval;
    }

    /**
     * Unpack records from response and group by HDF5 file
     *
     * @param dbQueryResponse
     * @return
     */
    private Map<File, List<BinLightningRecord>> unpackResults(
            DbQueryResponse dbQueryResponse) {
        // Bin up requests to the same hdf5
        Map<File, List<BinLightningRecord>> fileMap = new HashMap<File, List<BinLightningRecord>>();

        for (Map<String, Object> result : dbQueryResponse.getResults()) {
            Object object = result.get(null);
            if (object == null || !(object instanceof BinLightningRecord)) {
                log.warn("Unexpected result for bin lightning: " + object);
                continue;
            }
            BinLightningRecord record = (BinLightningRecord) object;
            File hdf5File = PDOUtil.getHDF5File(record);
            List<BinLightningRecord> recList = fileMap.get(hdf5File);
            if (recList == null) {
                recList = new ArrayList<BinLightningRecord>();
                fileMap.put(hdf5File, recList);
            }
            recList.add(record);
        }
        return fileMap;
    }

}
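With the factory registered, binlightning becomes reachable through the Data Access Framework. A hedged end-to-end sketch, assuming the standard DataAccessLayer entry point (newDataRequest, getAvailableTimes, getGeometryData), which is not part of this diff; "intensity" stands in for an optional HDF5 dataset name, while obsTime, latitude and longitude are always fetched by the factory:

import com.raytheon.uf.common.dataaccess.DataAccessLayer;
import com.raytheon.uf.common.dataaccess.IDataRequest;
import com.raytheon.uf.common.dataaccess.geom.IGeometryData;
import com.raytheon.uf.common.time.DataTime;

public class BinLightningDafExample {
    public static void main(String[] args) {
        // The factory requires the "source" identifier (see getRequiredIdentifiers).
        IDataRequest request = DataAccessLayer.newDataRequest();
        request.setDatatype("binlightning");
        request.addIdentifier("source", "NLDN");
        // "intensity" is an illustrative optional dataset name
        request.setParameters("intensity");

        DataTime[] times = DataAccessLayer.getAvailableTimes(request);
        IGeometryData[] strikes = DataAccessLayer.getGeometryData(request, times);
        for (IGeometryData strike : strikes) {
            System.out.println(strike.getDataTime() + " " + strike.getGeometry()
                    + " source=" + strike.getAttribute("source"));
        }
    }
}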