Issue #798 - Code review changes

Issue #798 - Code review updates
Change-Id: I5447574392a42c79fa4295ce4f9bb5ac295d06bb

Former-commit-id: cb05a9f95384806537106dc804833331d6c7db48
This commit is contained in:
James Korman 2012-07-13 10:04:26 -05:00
parent 97a1d083cd
commit 38a2e426d9
22 changed files with 1769 additions and 727 deletions

View file

@ -28,6 +28,7 @@ import org.apache.commons.collections.keyvalue.MultiKey;
import org.geotools.coverage.grid.GridGeometry2D;
import org.opengis.referencing.datum.PixelInCell;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.viz.core.IGraphicsTarget;
import com.raytheon.uf.viz.core.data.IDataPreparer;
@ -53,7 +54,8 @@ import com.raytheon.uf.viz.core.rsc.capabilities.ColorMapCapability;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 15, 2007 chammack Initial Creation.
*
* - AWIPS2 Baseline Repository --------
* Jul 18, 2012 798 jkorman Modified {@link #createTile} to remove hard-coded interpolation groups.
* </pre>
*
* @author chammack
@ -97,7 +99,7 @@ public class FileBasedTileSet extends AbstractTileSet {
this.dataset = dataset;
}
/*
/**
* (non-Javadoc)
*
* @see
@ -109,10 +111,10 @@ public class FileBasedTileSet extends AbstractTileSet {
throws VizException {
IImage raster = target.getExtension(IColormappedImageExtension.class)
.initializeRaster(
new HDF5DataRetriever(new File(this.hdf5File), "/"
+ this.group + "/" + this.dataset
+ "-interpolated/" + level, this.tileSet
.getTile(level, i, j).getRectangle()),
new HDF5DataRetriever(new File(this.hdf5File),
DataStoreFactory.createDataSetName(group,
dataset, level), this.tileSet.getTile(
level, i, j).getRectangle()),
rsc.getCapability(ColorMapCapability.class)
.getColorMapParameters());
return raster;

View file

@ -23,6 +23,7 @@ import java.awt.Rectangle;
import java.nio.ByteBuffer;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
@ -44,7 +45,8 @@ import com.raytheon.uf.viz.core.datastructure.VizDataCubeException;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Oct 28, 2009 mschenke Initial creation
*
* - AWIPS2 Baseline Repository --------
* Jul 18, 2012 798 jkorman Modified constructor to remove hard-coded dataset name.
* </pre>
*
* @author mschenke
@ -69,7 +71,9 @@ public class SatDataRetriever implements IColorMapDataRetrievalCallback {
Rectangle dataSetBounds, boolean signed, ByteBuffer retreivedBuffer) {
this.pdo = pdo;
this.datasetBounds = dataSetBounds;
this.dataset = "Data" + "-interpolated/" + level;
dataset = DataStoreFactory.createDataSetName(null, DataStoreFactory.DEF_DATASET_NAME, level);
this.signed = signed;
this.retreivedBuffer = retreivedBuffer;
}
@ -88,12 +92,7 @@ public class SatDataRetriever implements IColorMapDataRetrievalCallback {
retreivedBuffer = null;
} else {
try {
byte[] data = getRawData();
if (data != null) {
satBuffer = ByteBuffer.wrap(data);
} else {
System.out.println("Problem!");
}
satBuffer = ByteBuffer.wrap(getRawData());
} catch (Exception e) {
statusHandler.handle(Priority.SIGNIFICANT,
"Error retrieving satellite data", e);
@ -108,6 +107,8 @@ public class SatDataRetriever implements IColorMapDataRetrievalCallback {
}
public byte[] getRawData() {
byte [] retData = null;
Request req = Request.buildSlab(new int[] { this.datasetBounds.x,
this.datasetBounds.y }, new int[] {
this.datasetBounds.x + this.datasetBounds.width,
@ -116,14 +117,14 @@ public class SatDataRetriever implements IColorMapDataRetrievalCallback {
try {
dataRecord = DataCubeContainer
.getDataRecord(pdo, req, this.dataset);
if (dataRecord != null && dataRecord.length == 1) {
retData = ((ByteDataRecord) dataRecord[0]).getByteData();
}
} catch (VizDataCubeException e) {
statusHandler.handle(Priority.SIGNIFICANT,
"Error retrieving satellite data", e);
}
if (dataRecord != null && dataRecord.length == 1) {
return ((ByteDataRecord) dataRecord[0]).getByteData();
}
return null;
return retData;
}
/*

View file

@ -45,6 +45,7 @@ import com.raytheon.uf.common.dataplugin.satellite.units.counts.DerivedWVPixel;
import com.raytheon.uf.common.dataplugin.satellite.units.generic.GenericPixel;
import com.raytheon.uf.common.dataplugin.satellite.units.goes.PolarPrecipWaterPixel;
import com.raytheon.uf.common.dataplugin.satellite.units.water.BlendedTPWPixel;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.geospatial.ISpatialObject;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.ReferencedCoordinate;
@ -94,7 +95,9 @@ import com.raytheon.viz.satellite.SatelliteConstants;
* 03/25/2009 2086 jsanchez Mapped correct converter to parameter type.
* Updated the call to ColormapParametersFactory.build
* 03/30/2009 2169 jsanchez Updated numLevels handling.
*
* - AWIPS2 Baseline Repository --------
* 07/17/2012 798 jkorman Use decimationLevels from SatelliteRecord. Removed hard-coded
* data set names.
* </pre>
*
* @author chammack
@ -266,15 +269,8 @@ public class SatResource extends
getCapability(ColorMapCapability.class).setColorMapParameters(
colorMapParameters);
numLevels = 1;
int newSzX = record.getSpatialObject().getNx();
int newSzY = record.getSpatialObject().getNy();
while ((newSzX > 512 && newSzY > 512)) {
newSzX /= 2;
newSzY /= 2;
numLevels++;
}
// number of interpolation levels plus the base level!
numLevels = record.getInterpolationLevels() + 1;
}
@Override
@ -490,13 +486,13 @@ public class SatResource extends
}
if (baseTile == null) {
tile = baseTile = new SatFileBasedTileSet(record, "Data",
tile = baseTile = new SatFileBasedTileSet(record, DataStoreFactory.DEF_DATASET_NAME,
numLevels, 256,
MapUtil.getGridGeometry(((SatelliteRecord) record)
.getSpatialObject()), this,
PixelInCell.CELL_CORNER, viewType);
} else {
tile = new SatFileBasedTileSet(record, "Data", baseTile);
tile = new SatFileBasedTileSet(record, DataStoreFactory.DEF_DATASET_NAME, baseTile);
}
tile.addMeshCallback(this);
tile.setMapDescriptor(this.descriptor);

View file

@ -0,0 +1,143 @@
#!/usr/bin/python
#
"""
Convert the satellite datauris to append the coverageid, and
modify the associated satellite hdf5 group names to append the
coverageid. The new groups are added as aliases to the existing
datasets.
Date Ticket# Engineer Description
------------ ---------- ----------- --------------------------
20120711 798 jkorman Initial Development
"""
from subprocess import Popen, PIPE
import sys
from time import time
import h5py
POSTGRES_CMD = "psql -U awips -d metadata -t -q -A -c "
HDF5_LOC = "/awips2/edex/data/hdf5"
DATAURI_IDX = 1
COVERAGE_IDX = 2
def update_satellite_table():
"""
Add the interpolationLevels column to the satellite table.
"""
result = queryPostgres("select count(*) from information_schema.columns where table_name='satellite' and column_name='interpolationlevels';")
if(result[0][0] == '0'):
result = queryPostgres("alter table satellite add column interpolationlevels integer;")
print "Adding interpolationlevels column to satellite table"
def formatFileTime(refTime):
"""
Extract and format the year (YYYY), month (MM), day (DD), and hour (HH)
from the reference time. The output is formatted as YYYY-MM-DD-HH
"""
return refTime[0:4] + "-" + refTime[5:7] + "-" + refTime[8:10] + "-" + refTime[11:13]
def getFilename(refTime):
"""
Create the satellite data hdf filename corresponding to the given reference time.
"""
return "satellite-" + formatFileTime(refTime) + ".h5"
def queryPostgres(sql):
"""
Execute the given SQL against the metadata database using psql and
return the result rows, each row split on the '|' delimiter.
"""
result = Popen(POSTGRES_CMD + "\"" + sql + "\"", stdout=PIPE, shell=True)
retVal = []
for line in result.stdout:
retVal.append(line.strip().split("|"))
return retVal
def get_sectorids():
"""
Get a list of unique sector identifiers from the satellite table.
"""
return queryPostgres("select distinct sectorid from satellite;")
def get_satellite_rows(sectorid):
"""
Query the satellite rows for the given sector id and group them by the
hdf5 file (key) that contains their data.
"""
keys = {}
rows = queryPostgres("select id, dataURI, coverage_gid, sectorid, physicalelement, reftime from satellite where sectorid=" + repr(sectorid) + ";")
for row in rows:
# updateSql = "update satellite set datauri='" + row[DATAURI_IDX] + "/" + row[COVERAGE_IDX] + "' where id=" + row[0] + ";"
# queryPostgres(updateSql)
# create the key for this entry.
key = "/satellite/" + row[3] + "/" + row[4] + "/" + getFilename(row[5])
# have we found this key already?
if(key in keys):
# if so, get the row list for this key
rowList = keys[key]
else:
# otherwise create an empty list to put the row in
rowList = []
# add it to the collection
keys[key] = rowList
# and add the row to the list
rowList.append(row)
return keys
def process_all_satellite():
"""
Process all entries in the satellite table.
Do one sector id at a time.
"""
sectorids = get_sectorids()
if(sectorids):
for sectorid in sectorids:
print "Processing sector " + sectorid[0]
keys = get_satellite_rows(sectorid[0])
if(keys):
for key in keys:
print "=========================================================="
print " Processing key = " + key
fname = HDF5_LOC + key
try:
f = h5py.File(fname,'r+')
for row in keys[key]:
newGroupName = row[DATAURI_IDX] + "/" + row[COVERAGE_IDX]
group = f.create_group(newGroupName)
group = f.create_group(newGroupName + "/Data-interpolated")
oldds = row[DATAURI_IDX] + "/Data"
newds = newGroupName + "/Data"
# Link to the old data set
f[newds] = h5py.SoftLink(oldds)
group = f[row[DATAURI_IDX] + "/Data-interpolated"]
numLevels = 1
for n in group.keys():
numLevels += 1
newds = newGroupName + "/Data-interpolated/" + n
if (n == '0'):
# special case for this link.
# dataset /Data-interpolated/0 points to /Data
oldds = row[DATAURI_IDX] + "/Data"
else:
oldds = row[DATAURI_IDX] + "/Data-interpolated/" + n
f[newds] = h5py.SoftLink(oldds)
# back up one level to account for the Data-interpolated/0 entry, which is just a link to /Data
numLevels -= 1
updateSql = "update satellite set datauri='" + row[DATAURI_IDX] + "/" + row[COVERAGE_IDX] + "'"
updateSql += ", interpolationlevels=" + repr(numLevels)
updateSql += " where id=" + row[0] + ";"
queryPostgres(updateSql)
f.close()
except:
print "Error occurred processing file " + fname
else:
print "No keys found for the sector id " + sectorid[0]
else:
print "No sector identifiers found in the satellite table"
if __name__ == '__main__':
t = time()
update_satellite_table()
process_all_satellite()
print "Total Conversion time %ds" % (time() - t)

View file

@ -23,3 +23,4 @@
# Java Library Path (location of Wrapper.DLL or libwrapper.so)
wrapper.java.library.path.1=%EDEX_HOME%/bin/linux-x86-32/

View file

@ -32,6 +32,9 @@ export RADAR_SERVER=tcp://localhost:8813
# set the AWIPS II shared directory
export SHARE_DIR=/awips2/edex/data/share
# set the AWIPS II temporary directory
export TEMP_DIR=/awips2/edex/data/tmp
# set hydroapps directory path
export apps_dir=${SHARE_DIR}/hydroapps
# site identifier for hydroapps

View file

@ -123,6 +123,7 @@ wrapper.java.additional.42=-Dweb.port=8080
wrapper.java.additional.43=-Dconfidential.port=8443
wrapper.java.additional.44=-Dhttp.port=%HTTP_PORT%
wrapper.java.additional.45=-Dedex.arch=%EDEX_ARCH%
wrapper.java.additional.46=-Dedex.tmp=%TEMP_DIR%
# Initial Java Heap Size (in MB)
wrapper.java.initmemory=%INIT_MEM%

View file

@ -38,7 +38,9 @@ import com.raytheon.edex.util.satellite.SatellitePosition;
import com.raytheon.edex.util.satellite.SatelliteUnit;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.time.DataTime;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.wmo.message.WMOHeader;
@ -50,7 +52,7 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
*
* OFTWARE HISTORY
*
* ate Ticket# Engineer Description
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 006 garmenda Initial Creation
* /14/2007 139 Phillippe Modified to follow refactored plugin pattern
@ -65,7 +67,8 @@ import com.raytheon.uf.edex.wmo.message.WMOHeader;
* 02/05/2010 4120 jkorman Modified removeWmoHeader to handle WMOHeader in
* various start locations.
* 04/17/2012 14724 kshresth This is a temporary workaround - Projection off CONUS
*
* - AWIPS2 Baseline Repository --------
* 06/27/2012 798 jkorman Using SatelliteMessageData to "carry" the decoded image.
* </pre>
*
* @author bphillip
@ -242,8 +245,7 @@ public class SatelliteDecoder extends AbstractDecoder {
SatellitePosition position = dao
.getSatellitePosition(record.getCreatingEntity());
if (position == null) {
logger
.info("Unable to determine geostationary location of ["
logger.info("Unable to determine geostationary location of ["
+ record.getCreatingEntity()
+ "]. Zeroing out fields.");
} else {
@ -265,7 +267,7 @@ public class SatelliteDecoder extends AbstractDecoder {
/*
* Rotate image if necessary
*/
// TODO: Can these numbers be an enum or constants?
switch (scanMode) {
case 1:
Util.flipHoriz(tempBytes, ny, nx);
@ -280,8 +282,8 @@ public class SatelliteDecoder extends AbstractDecoder {
default:
break;
}
record.setMessageData(tempBytes);
SatelliteMessageData messageData = new SatelliteMessageData(
tempBytes, nx, ny);
// get the latitude of the first point
byteBuffer.position(20);
@ -315,7 +317,8 @@ public class SatelliteDecoder extends AbstractDecoder {
float dx = 0.0f, dy = 0.0f, lov = 0.0f, lo2 = 0.0f, la2 = 0.0f;
// Do specialized decoding and retrieve spatial data for Lambert
// Conformal and Polar Stereographic projections
if ((mapProjection == 3) || (mapProjection == 5)) {
if ((mapProjection == SatMapCoverage.PROJ_LAMBERT)
|| (mapProjection == SatMapCoverage.PROJ_POLAR_STEREO)) {
byteBuffer.position(30);
byteBuffer.get(threeBytesArray, 0, 3);
dx = byteArrayToFloat(threeBytesArray) / 10;
@ -332,7 +335,7 @@ public class SatelliteDecoder extends AbstractDecoder {
// Do specialized decoding and retrieve spatial data for
// Mercator
// projection
else if (mapProjection == 1) {
else if (mapProjection == SatMapCoverage.PROJ_MERCATOR) {
dx = byteBuffer.getShort(33);
dy = byteBuffer.getShort(35);
@ -353,12 +356,14 @@ public class SatelliteDecoder extends AbstractDecoder {
try {
/**
* This is a temporary workaround for DR14724, hopefully to be removed after NESDIS changes
* the product header
* This is a temporary workaround for DR14724, hopefully to
* be removed after NESDIS changes the product header
*/
if ((mapProjection == 3)
&& (record.getPhysicalElement().equalsIgnoreCase("Imager 13 micron (IR)") )
&& (record.getSectorID().equalsIgnoreCase("West CONUS"))){
if ((mapProjection == SatMapCoverage.PROJ_LAMBERT)
&& (record.getPhysicalElement()
.equalsIgnoreCase("Imager 13 micron (IR)"))
&& (record.getSectorID()
.equalsIgnoreCase("West CONUS"))) {
nx = 1100;
ny = 1280;
dx = 4063.5f;
@ -374,8 +379,7 @@ public class SatelliteDecoder extends AbstractDecoder {
latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer();
buf
.append(
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("mapProjection=" + mapProjection).append("\n\t");
@ -399,8 +403,11 @@ public class SatelliteDecoder extends AbstractDecoder {
.getTime());
record.setPluginName("satellite");
record.constructDataURI();
// Create the data record.
IDataRecord dataRec = messageData.getStorageRecord(record,
SatelliteRecord.SAT_DATASET_NAME);
record.setMessageData(dataRec);
}
}
}
if (record == null) {
@ -435,16 +442,15 @@ public class SatelliteDecoder extends AbstractDecoder {
}
String msgStr = new String(message);
Matcher matcher = null;
if (msgStr != null) {
matcher = Pattern.compile(WMOHeader.WMO_HEADER).matcher(msgStr);
if (matcher.find()) {
int headerStart = matcher.start();
if (SAT_HDR_TT.equals(msgStr.substring(headerStart,
headerStart + 2))) {
if (SAT_HDR_TT.equals(msgStr
.substring(headerStart, headerStart + 2))) {
int startOfSatellite = matcher.end();
retMessage = new byte[messageData.length - startOfSatellite];
System.arraycopy(messageData, startOfSatellite, retMessage,
0, retMessage.length);
System.arraycopy(messageData, startOfSatellite, retMessage, 0,
retMessage.length);
} else {
throw new DecoderException(
"First character of the WMO header must be 'T'");
@ -452,10 +458,6 @@ public class SatelliteDecoder extends AbstractDecoder {
} else {
throw new DecoderException("Cannot decode an empty WMO header");
}
} else {
throw new DecoderException(
"Could not create data for WMO header search");
}
return retMessage;
}

View file

@ -40,6 +40,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 7/24/07 353 bphillip Initial Check in
* - AWIPS2 Baseline Repository --------
* 06/27/2012 798 jkorman Corrected id query type.
*
* </pre>
*
@ -64,7 +66,7 @@ public class SatMapCoverageDao extends CoreDao {
* @return A SatelliteMapCoverage object with the corresponding ID. Null if
* not found.
*/
public SatMapCoverage queryByMapId(String mapId) {
public SatMapCoverage queryByMapId(Integer mapId) {
return (SatMapCoverage) this.queryById(mapId);
}
@ -152,7 +154,7 @@ public class SatMapCoverageDao extends CoreDao {
query.addQueryParam("la1",la1);
query.addQueryParam("lo1",lo1);
if (mapProjection == 1) {
if (mapProjection == SatMapCoverage.PROJ_MERCATOR) {
query.addQueryParam("la2",la2);
query.addQueryParam("lo2",lo2);
}

View file

@ -19,9 +19,13 @@
**/
package com.raytheon.edex.plugin.satellite.dao;
import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.opengis.referencing.operation.TransformException;
import com.raytheon.edex.util.satellite.SatelliteCreatingEntity;
import com.raytheon.edex.util.satellite.SatellitePhysicalElement;
@ -32,14 +36,23 @@ import com.raytheon.edex.util.satellite.SatelliteUnit;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.StorageProperties;
import com.raytheon.uf.common.datastorage.records.AbstractStorageRecord;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.ShortDataRecord;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.geospatial.interpolation.GridDownscaler;
import com.raytheon.uf.common.geospatial.interpolation.data.AbstractDataWrapper;
import com.raytheon.uf.common.geospatial.interpolation.data.ByteArrayWrapper;
import com.raytheon.uf.common.geospatial.interpolation.data.DataDestination;
import com.raytheon.uf.common.geospatial.interpolation.data.ShortArrayWrapper;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.plugin.PluginDao;
@ -55,7 +68,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Feb 11, 2009 bphillip Initial creation
*
* - AWIPS2 Baseline Repository --------
* 07/09/2012 798 jkorman Modified datastore population.
* </pre>
*
* @author bphillip
@ -99,19 +113,22 @@ public class SatelliteDao extends PluginDao {
super(pluginName);
}
/**
* Populate a given IDataStore object with the data record to be persisted.
*
* @param dataStore
* Storage object to be populated.
* @param record
* The persistable record containing the data to be persisted.
* @return The populated data storage object.
*/
@Override
protected IDataStore populateDataStore(IDataStore dataStore,
IPersistable record) throws StorageException {
SatelliteRecord satRecord = (SatelliteRecord) record;
AbstractStorageRecord storageRecord = null;
long nx = satRecord.getCoverage().getNx();
long ny = satRecord.getCoverage().getNy();
long[] sizes = new long[] { nx, ny };
storageRecord = new ByteDataRecord("Data", satRecord.getDataURI(),
(byte[]) satRecord.getMessageData(), 2, sizes);
IDataRecord storageRecord = (IDataRecord) satRecord.getMessageData();
if (storageRecord != null) {
StorageProperties props = new StorageProperties();
String compression = PluginRegistry.getInstance()
.getRegisteredObject(pluginName).getCompression();
@ -119,11 +136,54 @@ public class SatelliteDao extends PluginDao {
props.setCompression(StorageProperties.Compression
.valueOf(compression));
}
props.setDownscaled(true);
props.setDownscaled(false);
storageRecord.setProperties(props);
storageRecord.setCorrelationObject(satRecord);
// Store the base record.
dataStore.addDataRecord(storageRecord);
Map<String, Object> attributes = storageRecord.getDataAttributes();
Float fillValue = getAttribute(attributes,
SatelliteRecord.SAT_FILL_VALUE, 0.0f);
SatMapCoverage coverage = satRecord.getCoverage();
GridDownscaler downScaler = createDownscaler(coverage,
storageRecord, fillValue);
// How many interpolation levels do we need for this data?
// Subtract one for the base level data.
int levels = downScaler.getNumberOfDownscaleLevels() - 1;
// set the number of levels in the 'parent' satellite data.
satRecord.setInterpolationLevels(levels);
if (DataStoreFactory.isInterpolated(levels)) {
for (int downscaleLevel = 1; downscaleLevel <= levels; downscaleLevel++) {
Rectangle size = downScaler
.getDownscaleSize(downscaleLevel);
AbstractDataWrapper dest = getDestination(storageRecord,
size);
dest.setFillValue(fillValue);
try {
downScaler.downscale(downscaleLevel, dest);
IDataRecord dr = createDataRecord(satRecord, dest,
downscaleLevel, size);
// Set the attributes and properties from the parent
// data.
dr.setDataAttributes(attributes);
dr.setProperties(props);
dataStore.addDataRecord(dr);
} catch (TransformException e) {
throw new StorageException(
"Error creating downscaled data",
storageRecord, e);
}
}
}
}
return dataStore;
}
@ -166,11 +226,12 @@ public class SatelliteDao extends PluginDao {
query.addQueryParam("dataTime.refTime", theDate);
query.addOrder("dataTime.refTime", true);
try {
PluginDataObject[] pdos = this.getFullRecord(query, -1);
PluginDataObject[] pdos = this.getFullRecord(query, 0);
for (int i = 0; i < pdos.length; i++) {
satRecords.add((SatelliteRecord) pdos[i]);
satRecords.get(i).setMessageData(
(ByteDataRecord) ((IDataRecord[]) satRecords.get(i)
satRecords.get(i)
.setMessageData(
((IDataRecord[]) satRecords.get(i)
.getMessageData())[0]);
}
} catch (Exception e) {
@ -328,4 +389,130 @@ public class SatelliteDao extends PluginDao {
this.positionDao = positionDao;
}
/**
* Create an {@link AbstractDataWrapper} destination from the supplied
* {@link IDataRecord} with given dimensions.
*
* @param rec
* The record containing data to be wrapped.
* @param size
* A {@link Rectangle} containing the size of the input data.
* @return The wrapped data.
*/
private AbstractDataWrapper getDestination(IDataRecord rec, Rectangle size) {
AbstractDataWrapper dest = null;
if (rec instanceof ByteDataRecord) {
dest = new ByteArrayWrapper(size.width, size.height);
} else if (rec instanceof ShortDataRecord) {
dest = new ShortArrayWrapper(size.width, size.height);
}
return dest;
}
/**
* Create an {@link AbstractDataWrapper} source from the supplied
* {@link IDataRecord} with given dimensions.
*
* @param rec
* The record containing data to be wrapped.
* @param nx
* Number of items on the x axis.
* @param ny
* Number of items on the y axis.
* @return The wrapped data.
*/
private AbstractDataWrapper getSource(IDataRecord rec, int nx, int ny) {
AbstractDataWrapper source = null;
if (rec instanceof ByteDataRecord) {
byte[] b = ((ByteDataRecord) rec).getByteData();
source = new ByteArrayWrapper(b, nx, ny);
} else if (rec instanceof ShortDataRecord) {
short[] s = ((ShortDataRecord) rec).getShortData();
source = new ShortArrayWrapper(s, nx, ny);
}
return source;
}
/**
* Create the {@link IDataRecord} from the {@link DataDestination} using the
* original satellite data, the downscale level, and the target size.
*
* @param satRec
* The original satellite data record.
* @param data
* The down-scaled data.
* @param downscaleLevel
* The level identifier for this data.
* @param size
* Size of the down-scaled data.
* @return The created data record to be stored.
*/
private IDataRecord createDataRecord(SatelliteRecord satRec,
DataDestination data, int downscaleLevel, Rectangle size) {
SatelliteMessageData msgData = null;
Object o = null;
if (data instanceof ByteArrayWrapper) {
o = ((ByteArrayWrapper) data).getArray();
} else if (data instanceof ShortArrayWrapper) {
o = ((ShortArrayWrapper) data).getArray();
}
if (o != null) {
msgData = new SatelliteMessageData(o, size.width, size.height);
}
IDataRecord rec = msgData.getStorageRecord(satRec,
String.valueOf(downscaleLevel));
rec.setCorrelationObject(satRec);
rec.setGroup(DataStoreFactory.createGroupName(satRec.getDataURI(),
SatelliteRecord.SAT_DATASET_NAME, true));
return rec;
}
/**
* Create a down scaler for the given data.
*
* @param coverage
* Satellite Map Coverage for the source data.
* @param rec
* The original data that will be down-scaled.
* @param fillValue
* The declared fill value for the data.
* @return A {@link GridDownscaler} configured for the given coverage and data.
*/
private GridDownscaler createDownscaler(SatMapCoverage coverage,
IDataRecord rec, double fillValue) {
GridDownscaler downScaler = null;
AbstractDataWrapper dataSource = getSource(rec, coverage.getNx(),
coverage.getNy());
dataSource.setFillValue(fillValue);
downScaler = new GridDownscaler(MapUtil.getGridGeometry(coverage),
dataSource);
return downScaler;
}
/**
* Get the value of a named attribute.
*
* @param attrs
* Attributes that contain the value.
* @param attrName
* Name of the attribute.
* @param defValue
* A default value.
* @return The named attribute value, or the default value if the attributes
* map or attribute name is null.
*/
public static Float getAttribute(Map<String, Object> attrs,
String attrName, Float defValue) {
Float retValue = defValue;
if ((attrs != null) && (attrName != null)) {
retValue = (Float) attrs.get(attrName);
}
return retValue;
}
}
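
A minimal sketch (not commit code) of the downscale sequence used in populateDataStore above, assuming a GridDownscaler already built as in createDownscaler; the class and method names below are illustrative.

import java.awt.Rectangle;
import org.opengis.referencing.operation.TransformException;
import com.raytheon.uf.common.geospatial.interpolation.GridDownscaler;
import com.raytheon.uf.common.geospatial.interpolation.data.ByteArrayWrapper;

public class DownscaleSketch {
    /** Produce every downscale level from an already-built downscaler. */
    public static int downscaleAll(GridDownscaler downScaler, double fillValue)
            throws TransformException {
        // Subtract one for the base (full resolution) level.
        int levels = downScaler.getNumberOfDownscaleLevels() - 1;
        for (int level = 1; level <= levels; level++) {
            Rectangle size = downScaler.getDownscaleSize(level);
            ByteArrayWrapper dest = new ByteArrayWrapper(size.width, size.height);
            dest.setFillValue(fillValue);
            downScaler.downscale(level, dest);
            // dest.getArray() now holds this level's data, sized width x height
        }
        return levels;
    }
}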

View file

@ -42,7 +42,8 @@ import com.vividsolutions.jts.io.WKTReader;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 12/19/07 439 bphillip Initial creation
*
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
*
* </pre>
*/
@ -172,7 +173,7 @@ public class SatSpatialFactory {
ProjectedCRS crs = null;
// Get the correct CRS
if (mapProjection == 1) {
if (mapProjection == SatMapCoverage.PROJ_MERCATOR) {
double cm = 0.0;
if ((lo1 > 0.0) && (lo2 < 0.0)) {
cm = 180.0;
@ -200,7 +201,7 @@ public class SatSpatialFactory {
* Projection is Mercator. Determine corner points from la1,lo1,la2,lo2
* provided in the satellite file
*/
if (mapProjection == 1) {
if (mapProjection == SatMapCoverage.PROJ_MERCATOR) {
logger.debug("Determining corner points for Mercator projection");
corner1.x = lo1;
corner1.y = la1;

View file

@ -21,6 +21,7 @@
package com.raytheon.uf.common.dataplugin.satellite;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@ -35,6 +36,7 @@ import org.hibernate.annotations.Type;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.geospatial.CRSCache;
import com.raytheon.uf.common.geospatial.ISpatialObject;
@ -57,7 +59,8 @@ import com.vividsolutions.jts.geom.Polygon;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 7/24/07 353 bphillip Initial Checkin
*
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
*
* </pre>
*/
@ -65,13 +68,23 @@ import com.vividsolutions.jts.geom.Polygon;
@Table(name = "satellite_spatial")
@XmlAccessorType(XmlAccessType.NONE)
@DynamicSerialize
@Embeddable
public class SatMapCoverage extends PersistableDataObject implements
ISpatialObject {
private static final long serialVersionUID = 1L;
public static final int PROJ_MERCATOR = 1;
public static final int PROJ_LAMBERT = 3;
public static final int PROJ_POLAR_STEREO = 5;
public static final int PROJ_CYLIN_EQUIDISTANT = 7;
@Id
@DynamicSerializeElement
@DataURI(position = 0)
private int gid;
/**
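
Illustrative only (not commit code): the new SatMapCoverage constants replace the former magic numbers 1, 3, 5 and 7, so projection checks can read like the decoder changes above; the helper below is a hypothetical example.

import com.raytheon.uf.common.dataplugin.satellite.SatMapCoverage;

public class ProjectionNames {
    /** Map a satellite projection code to a readable name. */
    public static String name(int mapProjection) {
        switch (mapProjection) {
        case SatMapCoverage.PROJ_MERCATOR:
            return "Mercator";
        case SatMapCoverage.PROJ_LAMBERT:
            return "Lambert Conformal";
        case SatMapCoverage.PROJ_POLAR_STEREO:
            return "Polar Stereographic";
        case SatMapCoverage.PROJ_CYLIN_EQUIDISTANT:
            return "Cylindrical Equidistant";
        default:
            return "Unknown projection (" + mapProjection + ")";
        }
    }
}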

View file

@ -26,9 +26,13 @@ import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.ShortDataRecord;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
/**
* TODO Add Description
* Encapsulate satellite image data as well as the dimensions of
* the image grid. Attributes about the data may also be added; for
* example, these attributes could include "scale factor" and/or "fill_value".
*
* <pre>
*
@ -36,31 +40,39 @@ import com.raytheon.uf.common.datastorage.records.ShortDataRecord;
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 27, 2012 jkorman Initial creation
* Jun 27, 2012 798 jkorman Initial creation
*
* </pre>
*
* @author jkorman
* @version 1.0
*/
@DynamicSerialize
public class SatelliteMessageData {
private static final int DATA_DIMS = 2;
// Number of columns in the image data
@DynamicSerializeElement
private int nx;
// Number of rows in the image data
@DynamicSerializeElement
private int ny;
// The image grid data - Usually some type (T [])
@DynamicSerializeElement
private Object messageData;
@DynamicSerializeElement
private Map<String, Object> dataAttributes;
/**
*
* @param messageData
* @param numCols
* @param numRows
* Create a message object containing the gridded image data as well as
* its dimensions.
* @param messageData The image grid data - Usually some type (T [])
* @param numCols Number of columns in the image grid.
* @param numRows Number of rows in the image grid.
*/
public SatelliteMessageData(Object messageData, int numCols, int numRows) {
this.messageData = messageData;
@ -69,7 +81,8 @@ public class SatelliteMessageData {
}
/**
*
* Set the gridded image data.
* @param data The image grid data - Usually some type (T [])
* @see com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData#setMessageData(java.lang.Object)
*/
public void setMessageData(Object data) {
@ -77,7 +90,9 @@ public class SatelliteMessageData {
}
/**
*
* Set an attribute associated with the image data.
* @param key Name to store the information against.
* @param value The value to store against the given key.
*/
public void setDataAttribute(String key, Object value) {
if (dataAttributes == null) {
@ -87,7 +102,9 @@ public class SatelliteMessageData {
}
/**
*
* Set the dimensions of the data.
* @param nx Number of columns in the image grid.
* @param ny Number of rows in the image grid.
* @see com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData#setDimensions(int,
* int)
*/
@ -97,7 +114,11 @@ public class SatelliteMessageData {
}
/**
*
* Create a data record that encapsulates the data in this class.
* @param dataRec A satellite record that will supply the information needed to
* populate the data record being built.
* @param dataSetName The name that will be used to identify the data set.
* @return The created data record.
* @see com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData#getStorageRecord()
*/
public IDataRecord getStorageRecord(SatelliteRecord dataRec, String dataSetName) {
@ -120,4 +141,59 @@ public class SatelliteMessageData {
return storageRecord;
}
/**
* Get the number of columns in the image grid.
* @return The number of columns in the image grid.
*/
public int getNx() {
return nx;
}
/**
* Set the number of columns in the image grid.
* @param nx Number of columns in the image grid.
*/
public void setNx(int nx) {
this.nx = nx;
}
/**
* Get the number of rows in the image grid.
* @return The number of rows in the image grid.
*/
public int getNy() {
return ny;
}
/**
* Set the number of rows in the image grid.
* @param ny Number of rows in the image grid.
*/
public void setNy(int ny) {
this.ny = ny;
}
/**
* Get the data attributes.
* @return The data attributes.
*/
public Map<String, Object> getDataAttributes() {
return dataAttributes;
}
/**
* Set the data attributes.
* @param dataAttributes The data attributes.
*/
public void setDataAttributes(Map<String, Object> dataAttributes) {
this.dataAttributes = dataAttributes;
}
/**
* Get the underlying message data object.
* @return The underlying message data object.
*/
public Object getMessageData() {
return messageData;
}
}
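
A short usage sketch (not commit code) of the decode-side pattern shown in SatelliteDecoder above: wrap the decoded image bytes, build the storage record under the default dataset name, and attach it to the record. Class and method names are illustrative.

import com.raytheon.uf.common.dataplugin.satellite.SatelliteMessageData;
import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;

public class MessageDataUsage {
    public static void attachImage(SatelliteRecord record, byte[] imageBytes,
            int nx, int ny) {
        // Carry the decoded image along with its grid dimensions.
        SatelliteMessageData messageData = new SatelliteMessageData(imageBytes,
                nx, ny);
        // Build the storage record under the default satellite dataset name.
        IDataRecord dataRec = messageData.getStorageRecord(record,
                SatelliteRecord.SAT_DATASET_NAME);
        record.setMessageData(dataRec);
    }
}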

View file

@ -35,6 +35,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import com.raytheon.uf.common.dataplugin.IDecoderGettable;
import com.raytheon.uf.common.dataplugin.annotations.DataURI;
import com.raytheon.uf.common.dataplugin.persist.ServerSpecificPersistablePluginDataObject;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.geospatial.ISpatialEnabled;
import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
@ -69,6 +70,14 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
private static final long serialVersionUID = 1L;
public static final String SAT_DATASET_NAME = DataStoreFactory.DEF_DATASET_NAME;
public static final String SAT_FILL_VALUE = "FILL_VALUE";
public static final String SAT_ADD_OFFSET = "ADD_OFFSET";
public static final String SAT_SCALE_FACTOR = "SCALE_FACTOR";
/**
* The source of the data - NESDIS
*/
@ -148,6 +157,13 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
@DynamicSerializeElement
private String units;
/** Number of interpolation levels in the data store */
@Column
@XmlAttribute
@DynamicSerializeElement
private Integer interpolationLevels;
@DataURI(position = 5, embedded=true)
@ManyToOne
@PrimaryKeyJoinColumn
@XmlElement
@ -300,4 +316,26 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject
this.physicalElement = physicalElement;
}
/**
* Get the number of interpolation levels in the data store.
* @return The number of interpolation levels. For data that is not
* interpolated this will be 0.
*/
public Integer getInterpolationLevels() {
return interpolationLevels;
}
/**
* Set the number of interpolation levels in the data store. If the data
* are not interpolated a value of 0 should be used.
* @param levels The number of interpolation levels in the data. Any value less than
* zero is set to zero.
*/
public void setInterpolationLevels(Integer levels) {
if(!DataStoreFactory.isInterpolated(levels)) {
levels = 0;
}
interpolationLevels = levels;
}
}
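
A small sketch (not commit code) of the clamping behaviour of setInterpolationLevels shown above, assuming the usual no-argument constructor on SatelliteRecord.

import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;

public class InterpolationLevelsClamping {
    public static void main(String[] args) {
        SatelliteRecord record = new SatelliteRecord();
        record.setInterpolationLevels(-3);
        // Values that are not valid interpolation levels are stored as 0.
        System.out.println(record.getInterpolationLevels()); // 0
        record.setInterpolationLevels(4);
        System.out.println(record.getInterpolationLevels()); // 4
    }
}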

View file

@ -58,6 +58,7 @@ import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
import ncsa.hdf.hdf5lib.exceptions.HDF5SymbolTableException;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.DuplicateRecordStorageException;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
@ -90,6 +91,8 @@ import com.raytheon.uf.common.util.FileUtil;
* Sep 25, 2007 chammack Added replace record functionality
* Apr 01, 2008 1041 chammack Added delete functionality
* Jun 30, 2008 2538 jsanchez Update readProperties for Strings.
* - AWIPS2 Baseline Repository --------
* Jul 18, 2012 798 jkorman Removed some hard-coded interpolation code/constants.
* </pre>
*
* @author chammack
@ -438,7 +441,7 @@ public class HDF5DataStore implements IDataStore {
String[] datasets = getDatasets(group);
for (String ds : datasets) {
if (includeInterpolated && ds.endsWith("-interpolated")) {
if (includeInterpolated && ds.endsWith(DataStoreFactory.DEF_INTERPOLATED_GROUP)) {
IDataRecord[] subresults;
subresults = this.retrieve(group + "/" + ds, false);
@ -447,7 +450,7 @@ public class HDF5DataStore implements IDataStore {
records.add(result);
}
}
} else if (!ds.endsWith("-interpolated")) {
} else if (!ds.endsWith(DataStoreFactory.DEF_INTERPOLATED_GROUP)) {
IDataRecord record = this.retrieve(group, ds, Request.ALL);
records.add(record);
}
@ -1626,9 +1629,8 @@ public class HDF5DataStore implements IDataStore {
.getData());
}
rec.setName("" + level);
rec.setGroup(originalGroup + "/" + originalDatasetName
+ "-interpolated");
rec.setName(String.valueOf(level));
rec.setGroup(DataStoreFactory.createGroupName(originalGroup, originalDatasetName, true));
rec.setSizes(new long[] { w / 2, h / 2 });

View file

@ -43,6 +43,9 @@ import com.raytheon.uf.common.datastorage.records.StringDataRecord;
* Feb 12, 2007 chammack Initial Creation.
* 20070914 379 jkorman Added createStorageRecord factory methods.
* Refactored from HDFDataStore.
* - AWIPS2 Baseline Repository --------
* Jul 18, 2012 798 jkorman Extracted methods {@link #createDataSetName}, {@link #createGroupName}, and
* {@link #isInterpolated} from various classes.
* </pre>
*
* @author chammack
@ -50,6 +53,28 @@ import com.raytheon.uf.common.datastorage.records.StringDataRecord;
*/
public class DataStoreFactory {
/**
* Default data set name; {@value #DEF_DATASET_NAME}.
*/
public static final String DEF_DATASET_NAME = "Data";
/**
* Default interpolation suffix ({@value #DEF_INTERPOLATED_GROUP}) for
* interpolated groups.
*/
public static final String DEF_INTERPOLATED_GROUP = "-interpolated";
/**
* Default group element separator; {@value #DEF_SEPARATOR}.
*/
public static final String DEF_SEPARATOR = "/";
/**
* Base interpolation level. Any interpolation level greater than this value
* ({@value #BASE_LEVEL}) is considered to be a valid interpolation level.
*/
public static final int BASE_LEVEL = 0;
private static final DataStoreFactory instance = new DataStoreFactory();
private IDataStoreFactory underlyingFactory;
@ -160,4 +185,190 @@ public class DataStoreFactory {
return record;
}
/**
* Create a storage dataset name using the group and data set name and an
* interpolation level. Any interpolation level less than or equal to zero
* generates the base dataset name with no interpolation. Any interpolation
* levels greater than zero are considered to be decimated levels of the
* original data.
* <table>
* <tr>
* <th>base group</th>
* <th>dataset</th>
* <th>interpolation</th>
* <th>result</th>
* </tr>
* <tr>
* <td>null</td>
* <td>null</td>
* <td>-1</td>
* <td>/Data</td>
* </tr>
* <tr>
* <td>null</td>
* <td>null</td>
* <td>0</td>
* <td>/Data</td>
* </tr>
* <tr>
* <td>null</td>
* <td>null</td>
* <td>4</td>
* <td>/Data-interpolated/4</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>null</td>
* <td>0</td>
* <td>/data/group/Data</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>null</td>
* <td>3</td>
* <td>/data/group/Data-interpolated/3</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>dsname</td>
* <td>-1</td>
* <td>/data/group/dsname</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>dsname</td>
* <td>2</td>
* <td>/data/group/dsname-interpolated/2</td>
* </tr>
* </table>
*
* @param groupName
* The group name this data set belongs to. If null, an empty
* group name is generated.
* @param baseDataSet
* The dataset name. This name and the
* {@link #DEF_INTERPOLATED_GROUP} suffix are used to create the
* interpolated group name. If null and
* interpolation is requested, the default value
* {@link #DEF_DATASET_NAME} will be used.
* @param interpolatedLevel
* The interpolation level data set numeric identifier.
* @return The generated fully qualified dataset name.
*/
public static String createDataSetName(String groupName,
String baseDataSet, int interpolatedLevel) {
boolean interpolated = isInterpolated(interpolatedLevel);
StringBuilder interpolatedGroup = new StringBuilder(createGroupName(
groupName, baseDataSet, interpolated));
interpolatedGroup.append(DEF_SEPARATOR);
if (interpolated) {
interpolatedGroup.append(String.valueOf(interpolatedLevel));
} else {
if (baseDataSet != null) {
interpolatedGroup.append(baseDataSet);
} else {
interpolatedGroup.append(DEF_DATASET_NAME);
}
}
return interpolatedGroup.toString();
}
/**
* Create a hierarchical group name, given a base group name, a dataset name,
* and a flag indicating whether interpolated levels are being created. If
* interpolation is not requested then the base group name is returned
* unchanged, or an empty string if it is null. When interpolation is
* requested, the dataset name is appended with an interpolation identifier to
* create an interpolation level group name.
* <table>
* <tr>
* <th>base group</th>
* <th>dataset</th>
* <th>interpolation</th>
* <th>result</th>
* </tr>
* <tr>
* <td>null</td>
* <td>null</td>
* <td>false</td>
* <td>zero length string</td>
* </tr>
* <tr>
* <td>null</td>
* <td>null</td>
* <td>true</td>
* <td>/Data-interpolated</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>null</td>
* <td>false</td>
* <td>/data/group</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>null</td>
* <td>true</td>
* <td>/data/group/Data-interpolated</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>dsname</td>
* <td>false</td>
* <td>/data/group</td>
* </tr>
* <tr>
* <td>/data/group</td>
* <td>dsname</td>
* <td>true</td>
* <td>/data/group/dsname-interpolated</td>
* </tr>
* </table>
*
* @param groupName
* The base group name.
* @param baseDataSet
* The dataset name. This name and the
* {@link #DEF_INTERPOLATED_GROUP} suffix are used to create the
* interpolated group name. If null and
* interpolation is requested, the default value
* {@link #DEF_DATASET_NAME} will be used.
* @param interpolated
* True to create an interpolated group name.
* @return The generated group name.
*/
public static String createGroupName(String groupName, String baseDataSet,
boolean interpolated) {
StringBuilder interpolatedGroup = new StringBuilder();
if (groupName != null) {
interpolatedGroup.append(groupName);
}
if (interpolated) {
interpolatedGroup.append(DEF_SEPARATOR);
if (baseDataSet != null) {
if (baseDataSet.length() > BASE_LEVEL) {
interpolatedGroup.append(baseDataSet);
} else {
interpolatedGroup.append(DEF_DATASET_NAME);
}
} else {
interpolatedGroup.append(DEF_DATASET_NAME);
}
interpolatedGroup.append(DEF_INTERPOLATED_GROUP);
}
return interpolatedGroup.toString();
}
/**
* Is the specified interpolation level greater than the {@link #BASE_LEVEL}?
*
* @param interpolatedLevel
* An interpolation level.
* @return True if the level is greater than {@link #BASE_LEVEL}.
*/
public static boolean isInterpolated(int interpolatedLevel) {
return (interpolatedLevel > BASE_LEVEL);
}
}
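
A few examples (not commit code) exercising the new naming helpers; the expected outputs are taken from the javadoc tables above.

import com.raytheon.uf.common.datastorage.DataStoreFactory;

public class NamingHelpersExample {
    public static void main(String[] args) {
        // Base dataset, no interpolation requested.
        System.out.println(DataStoreFactory.createDataSetName("/data/group",
                null, 0)); // /data/group/Data
        // Interpolation level 3 with the default dataset name.
        System.out.println(DataStoreFactory.createDataSetName("/data/group",
                null, 3)); // /data/group/Data-interpolated/3
        // Interpolated group name for an explicit dataset name.
        System.out.println(DataStoreFactory.createGroupName("/data/group",
                "dsname", true)); // /data/group/dsname-interpolated
        // Level 0 is the base level, so it is not considered interpolated.
        System.out.println(DataStoreFactory.isInterpolated(0)); // false
        System.out.println(DataStoreFactory.isInterpolated(2)); // true
    }
}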

View file

@ -0,0 +1,116 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.common.geospatial.interpolation.data;
import org.geotools.coverage.grid.GeneralGridGeometry;
/**
* {@link AbstractDataWrapper} implementation for byte array data.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 13, 2012 jkorman Initial creation
*
* </pre>
*
* @author jkorman
* @version 1.0
*/
public class ByteArrayWrapper extends DataWrapper1D {
// The wrapped byte array data.
protected final byte[] array;
/**
* Wrap a byte array using a specified geometry.
* @param array Byte array data to be wrapped.
* @param geometry A {@link GeneralGridGeometry} that will be used to discover
* the shape of the input data.
*/
public ByteArrayWrapper(byte[] array, GeneralGridGeometry geometry) {
super(geometry);
this.array = array;
}
/**
* Wrap a byte array using given x and y axis dimensions.
* @param array Byte array data to be wrapped.
* @param nx Number of elements on the x axis.
* @param ny Number of elements on the y axis.
*/
public ByteArrayWrapper(byte[] array, int nx, int ny) {
super(nx, ny);
this.array = array;
}
/**
* Create an instance with a byte array using given x and y axis dimensions.
* @param nx Number of elements on the x axis.
* @param ny Number of elements on the y axis.
*/
public ByteArrayWrapper(int nx, int ny) {
this(new byte[nx * ny], nx, ny);
}
/**
* Create an instance with a byte array using a specified geometry.
* @param geometry A {@link GeneralGridGeometry} that will be used to discover
* the shape of the input data.
*/
public ByteArrayWrapper(GeneralGridGeometry geometry) {
// assume this is going to be a destination and avoid passing
// geometry to super to save time on checking for wrapping.
this(geometry.getGridRange().getSpan(0), geometry.getGridRange()
.getSpan(1));
}
/**
* Get a reference to the internal wrapped data.
* @return The internal byte array data.
*/
public byte[] getArray() {
return array;
}
/**
* Get the value of the internal data at a specified position.
* @param index Position within the internal data to get.
* @return The value of the internal data.
*/
@Override
protected double getDataValueInternal(int index) {
return array[index];
}
/**
* Set the value of the internal data at a specified position.
* @param dataValue A value to set at the given index.
* @param index Position within the internal data to set.
*/
@Override
public void setDataValueInternal(double dataValue, int index) {
array[index] = (byte) dataValue;
}
}

View file

@ -21,6 +21,7 @@
package com.raytheon.uf.common.util;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
@ -28,6 +29,7 @@ import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
@ -47,6 +49,9 @@ import java.util.zip.GZIPOutputStream;
* return false when unable to
* obtain directory listing.
* Sep 16, 2008 1250 jelkins Added join function
* - AWIPS2 Baseline Repository --------
* Jul 06, 2012 798 jkorman Added more robust {@link #copyFile}. Added methods
* to create temporary directories and files.
*
* </pre>
*
@ -311,29 +316,23 @@ public class FileUtil {
return unmangled.toString();
}
/**
* Copy a file to another file.
*
* @param fileToCopy
* The source file. This file reference must exist.
* @param outputFile
* The destination file. This file may exist, if so it will be
* overwritten.
* @throws IOException
* An error occurred while copying the data.
* @throws NullPointerException
* Either the source or target file references are null.
*/
public static void copyFile(File fileToCopy, File outputFile)
throws IOException {
FileInputStream fis = null;
FileOutputStream fos = null;
try {
fis = new FileInputStream(fileToCopy);
outputFile.getParentFile().mkdirs();
fos = new FileOutputStream(outputFile);
byte[] bytes = new byte[2048];
int len = fis.read(bytes);
while (len > -1) {
fos.write(bytes, 0, len);
len = fis.read(bytes);
}
} finally {
if (fos != null) {
fos.close();
}
if (fis != null) {
fis.close();
}
}
// Copy the entire file.
copyFile(fileToCopy, outputFile, 0);
}
public static String file2String(File file) throws IOException {
@ -621,4 +620,215 @@ public class FileUtil {
return VALID_FILENAME.matcher(fileName).matches();
}
/**
* Copy a file from one location to another. The file copy may begin at some
* specified position within the source file.
*
* @param source
* The source file. This file reference must exist.
* @param target
* The destination file. This file may exist, if so it will be
* overwritten.
* @param position
* The start position within the source file where the copy
* operation will begin. The position must be greater than or
* equal to zero, and less than the file length of the source.
* @return Was the required data copied to the target file.
* @throws IOException
* An error occurred while copying the data.
* @throws IllegalArgumentException
* The position is less than zero or not less than the length of
* the source file, or either the source or target file reference
* is null.
*/
public static boolean copyFile(File source, File target, int position)
throws IOException {
boolean status = false;
if (source != null) {
if (target != null) {
if ((position >= 0) && (position < source.length())) {
FileInputStream fis = null;
FileOutputStream fos = null;
try {
fis = new FileInputStream(source);
FileChannel fci = fis.getChannel();
fos = new FileOutputStream(target);
FileChannel fco = fos.getChannel();
long count = source.length() - position;
long transfered = fci.transferTo(position, count, fco);
// ensure we copied all of the data.
status = (transfered == count);
} finally {
String cause = null;
try {
close(fis);
} catch (IOException e) {
cause = String.format(
"copyFile.source.close[%s][%s]", e
.getClass().getName(), e
.getMessage());
}
try {
close(fos);
} catch (IOException e) {
if (cause == null) {
cause = String.format(
"copyFile.target.close[%s][%s]", e
.getClass().getName(), e
.getMessage());
} else {
cause = String.format(
"%s copyFile.target.close[%s][%s]",
cause, e.getClass().getName(),
e.getMessage());
}
}
// One or more closes failed. Construct and throw an
// exception.
if (cause != null) {
throw new IOException(cause);
}
}
} else {
String msg = String.format(
"position [%d] is out of range. Max is [%d]",
position, source.length());
throw new IllegalArgumentException(msg);
}
} else {
throw new IllegalArgumentException("target file reference is null");
}
} else {
throw new IllegalArgumentException("source file reference is null");
}
return status;
}
/**
* Attempt to create a temporary directory under a given base directory for
* temporary directories. If the directory already exists it is returned,
* otherwise it is created.
*
* @param tempPath
* The base path for temporary directories.
* @param componentName
* The component requesting a temporary directory. If this is
* null the tempPath will be used.
* @return The file reference to the created or existing temporary
* directory.
* @throws IOException
* The attempt to create the temporary directory failed.
* @throws IllegalArgumentException
* The temporary directory path is null.
*/
public static File createTempDir(String tempPath, String componentName)
throws IOException {
File tempDir = null;
if (tempPath != null) {
if (componentName == null) {
tempDir = new File(tempPath);
} else {
tempDir = new File(tempPath, componentName);
}
try {
// Check if the directory already exists...
if (!tempDir.exists()) {
// it doesn't, so create it.
if (!tempDir.mkdirs()) {
throw new IOException(
"Could not create temporary directory "
+ tempDir.getAbsolutePath());
}
} else {
if (!tempDir.isDirectory()) {
String msg = String
.format("Path [%s] is not a directory, cannot create temporary directory",
tempDir.getAbsolutePath());
throw new IOException(msg);
}
}
} catch (SecurityException se) {
throw new IOException("Could not create temporary directory "
+ tempDir.getAbsolutePath(), se);
}
} else {
throw new IllegalArgumentException("Temporary path is null");
}
return tempDir;
}
/**
* Create an empty temporary file. The file is created in the directory
* referenced by tempPath. The file created will be named
*
* <pre>
* tempPath / namePrefix_tempFileUniquePart.nameSuffix
* </pre>
*
* @param tempPath
* Base path to the temporary directory.
* @param namePrefix
* The temporary filename prefix. If this is null a default
* prefix of "<strong>tempFile</strong>" will be used.
* @param nameSuffix
* The temporary filename suffix. If this is null the default
* suffix "<strong>.tmp</strong>" will be used.
* @return The File reference to the created temporary file.
* @throws IOException
* The tempPath does not exist and could not be created or an
* error occurred while creating the temporary file.
* @throws IllegalArgumentException
* The temporary path was null.
*/
public static File createTempFile(File tempPath, String namePrefix,
String nameSuffix) throws IOException {
String defaultPrefix = "tempFile";
String prefixFiller = "xxx";
File tempFile = null;
if (tempPath != null) {
if (!tempPath.exists()) {
if (!tempPath.mkdirs()) {
throw new IOException(
"Could not create temporary directory "
+ tempPath.getAbsolutePath());
}
}
// isDirectory will not work until we actually have a path that
// exists!
if (!tempPath.isDirectory()) {
String msg = String
.format("Path [%s] is not a directory, cannot create temporary file",
tempPath.getAbsolutePath());
throw new IOException(msg);
}
if (namePrefix == null) {
namePrefix = defaultPrefix;
} else if (namePrefix.length() < 3) {
namePrefix += prefixFiller;
}
namePrefix += "_";
tempFile = File.createTempFile(namePrefix, nameSuffix, tempPath);
} else {
throw new IllegalArgumentException("Temporary path is null");
}
return tempFile;
}
/**
* Attempt to close a {@link Closeable} object.
*
* @param c
* An object that needs to be closed.
* @throws IOException
* An error occurred attempting to close the object.
*/
public static void close(Closeable c) throws IOException {
if (c != null) {
c.close();
}
}
}
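
A usage sketch (not commit code) of the new FileUtil helpers; the component name and source path below are hypothetical, while the temporary path matches the TEMP_DIR added to setup.env above.

import java.io.File;
import java.io.IOException;
import com.raytheon.uf.common.util.FileUtil;

public class FileUtilUsage {
    public static void main(String[] args) throws IOException {
        // Create (or reuse) a per-component directory under the edex temp area.
        File tempDir = FileUtil.createTempDir("/awips2/edex/data/tmp",
                "satellite"); // component name is hypothetical
        // Create an empty, uniquely named temporary file in that directory.
        File tempFile = FileUtil.createTempFile(tempDir, "decode", ".dat");
        // Copy a source file into it, starting at byte offset 0.
        boolean complete = FileUtil.copyFile(new File("/tmp/input.dat"), // hypothetical path
                tempFile, 0);
        System.out.println("copied completely: " + complete);
    }
}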

View file

@ -344,7 +344,8 @@ public abstract class PluginDao extends CoreDao {
* @param objects
* The objects to retrieve the HDF5 component for
* @param tileSet
* The tile set to retrieve. Currently unimplemented
* The tile set to retrieve. Any value less than or equal
* to zero returns the "base" data only.
* @return The HDF5 data records
* @throws StorageException
* If problems occur while interacting with HDF5 data stores
@ -355,33 +356,30 @@ public abstract class PluginDao extends CoreDao {
List<IDataRecord[]> retVal = new ArrayList<IDataRecord[]>();
for (PluginDataObject obj : objects) {
IDataRecord[] record = null;
if (obj instanceof IPersistable) {
/* connect to the data store and retrieve the data */
IDataStore dataStore = getDataStore((IPersistable) obj);
if (tileSet != -1) {
record = new IDataRecord[tileSet + 1];
for (int i = 0; i <= tileSet; i++) {
try {
record[i] = dataStore.retrieve(obj.getDataURI()
+ File.separator + "Data-interpolated"
+ File.separator, String.valueOf(tileSet),
Request.ALL);
} catch (Exception e) {
throw new PluginException(
"Error getting HDF5 data", e);
boolean interpolated = DataStoreFactory.isInterpolated(tileSet);
if(!interpolated) {
tileSet = 0;
}
}
} else {
record = new IDataRecord[1];
IDataRecord[] record = new IDataRecord[tileSet + 1];
try {
String group = DataStoreFactory.createGroupName(
obj.getDataURI(),
DataStoreFactory.DEF_DATASET_NAME, interpolated);
// Retrieve the base record.
record[0] = dataStore.retrieve(obj.getDataURI(),
"Data", Request.ALL);
DataStoreFactory.DEF_DATASET_NAME, Request.ALL);
// Now get the interpolated data, if any!
for (int tile = 1; tile < record.length; tile++) {
record[tile] = dataStore.retrieve(group,
String.valueOf(tile), Request.ALL);
}
} catch (Exception e) {
throw new PluginException("Error getting HDF5 data", e);
}
}
retVal.add(record);
}
}

View file

@ -40,9 +40,11 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
import com.raytheon.uf.edex.plugin.satellite.mcidas.util.McidasSatelliteLookups;
import com.raytheon.uf.edex.plugin.satellite.mcidas.util.McidasSatelliteLookups.PhysicalElementValue;
/** McIDAS AREA Decoder
/**
* McIDAS AREA Decoder
*
* <p>Implemented:
* <p>
* Implemented:
* <ul>
* <li>Mercator projection</li>
* <li>Multiple bands</li>
@ -55,32 +57,53 @@ import com.raytheon.uf.edex.plugin.satellite.mcidas.util.McidasSatelliteLookups.
* <li>Calibration block</li>
* <li>Non-byte data types</li>
* </ul>
* <pre>
*
 * SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* No previous history
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
* </pre>
*
* @author
* @version
*/
public class McidasSatelliteDecoder {
private static final IUFStatusHandler theHandler = UFStatus
.getHandler(McidasSatelliteDecoder.class);
private static final String UNEXPECTED_HEADER_VALUE = "Unexpected value in format";
private static final int EXPECTED_IMAGE_TYPE_LE = 4;
private static final int EXPECTED_IMAGE_TYPE_BE = 0x04000000;
private static final double HALFPI = Math.PI / 2.;
private static final double RTD = 180. / Math.PI;
private static final double DTR = Math.PI / 180.;
private SatelliteDao dao;
private String traceId;
public McidasSatelliteDecoder() {
}
public PluginDataObject[] decode(byte[] data, Headers headers) throws Exception {
public PluginDataObject[] decode(byte[] data, Headers headers)
throws Exception {
traceId = (String) headers.get("traceId");
try {
return decodeMcidasArea(data);
} catch (DecoderException e) {
            // Any DecoderExceptions thrown by this decoder do not need a stack trace
            // Any DecoderExceptions thrown by this decoder do not need a stack
            // trace
theHandler.error(e.getMessage(), e);
return new PluginDataObject[0];
}
@ -159,15 +182,14 @@ public class McidasSatelliteDecoder {
// Decode the navigation block
buf.position(navBlockOffset);
SatMapCoverage coverage = decodeNavigation(elementResolution, lineResolution,
ulImageElement, ulImageLine,
nElementsPerLine, nLines, buf);
SatMapCoverage coverage = decodeNavigation(elementResolution,
lineResolution, ulImageElement, ulImageLine, nElementsPerLine,
nLines, buf);
// Decode the data block, creating a SatelliteRecord for each band.
PluginDataObject[] result = new PluginDataObject[nBands];
int bitIndex = 0;
RECORD:
for (int ri = 0; ri < nBands; ++ri) {
RECORD: for (int ri = 0; ri < nBands; ++ri) {
while ((bandBits & (1L << bitIndex)) == 0)
if (++bitIndex >= 64)
break RECORD; // shouldn't happen
@ -216,12 +238,12 @@ public class McidasSatelliteDecoder {
}
/**
* Reference: http://www.ssec.wisc.edu/mcidas/doc/prog_man/2006/formats-13a.html
* Reference:
* http://www.ssec.wisc.edu/mcidas/doc/prog_man/2006/formats-13a.html
*
*/
private SatMapCoverage decodeNavigation(int xImgRes, int yImgRes,
int ulX, int ulY,
int nx, int ny, ByteBuffer buf) throws Exception {
private SatMapCoverage decodeNavigation(int xImgRes, int yImgRes, int ulX,
int ulY, int nx, int ny, ByteBuffer buf) throws Exception {
SatMapCoverage result = new SatMapCoverage();
String navType = get4cc(buf);
if (navType.equals("MERC")) {
@ -239,8 +261,10 @@ public class McidasSatelliteDecoder {
boolean westPositive = buf.getInt() >= 0;
float la1, lo1, la2, lo2;
/* The following is based on
* gov.noaa.nws.ncep.edex.plugin.mcidas/src/gov/noaa/nws/ncep/edex/plugin/mcidas/decoder/McidasDecoder.java
/*
* The following is based on
* gov.noaa.nws.ncep.edex.plugin.mcidas/src
* /gov/noaa/nws/ncep/edex/plugin/mcidas/decoder/McidasDecoder.java
*/
double clon = flipLon(unpackDdmmss(nrmlLonDDMMSS), westPositive);
@ -269,9 +293,9 @@ public class McidasSatelliteDecoder {
lo2 = (float) prnlon(lo2);
}
result = SatSpatialFactory.getInstance().getMapCoverage(1, nx, ny,
(float) dx, (float) dy, (float) clon, (float) clat,
la1, lo1, la2, lo2);
result = SatSpatialFactory.getInstance().getMapCoverage(
SatMapCoverage.PROJ_MERCATOR, nx, ny, (float) dx,
(float) dy, (float) clon, (float) clat, la1, lo1, la2, lo2);
} else
unimplemented(String.format("navigation type \"%s\"", navType));
@ -282,8 +306,7 @@ public class McidasSatelliteDecoder {
double dlon = lon - (int) (lon / 360.f) * 360.f;
if (lon < -180.) {
dlon = lon + 360.f;
}
else if (lon > 180.) {
} else if (lon > 180.) {
dlon = (double) (lon - 360.);
}
return dlon;
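The packed angle fields used above (for example nrmlLonDDMMSS) appear to follow the McIDAS convention of degrees, minutes and seconds packed into a single integer. The helper below is only a sketch of that interpretation; the decoder's actual unpackDdmmss is not shown in this hunk and may differ.

    // Sketch: assumes value = degrees * 10000 + minutes * 100 + seconds,
    // with the sign applied to the whole packed integer.
    private static double unpackDdmmssSketch(int ddmmss) {
        int sign = (ddmmss < 0) ? -1 : 1;
        int v = Math.abs(ddmmss);
        int degrees = v / 10000;
        int minutes = (v / 100) % 100;
        int seconds = v % 100;
        return sign * (degrees + minutes / 60.0 + seconds / 3600.0);
    }
    // Example: 1223045 -> 122 degrees, 30 minutes, 45 seconds -> 122.5125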
@ -322,18 +345,22 @@ public class McidasSatelliteDecoder {
}
private String getCreatingEntity(int sensorSourceNumber) {
String value = McidasSatelliteLookups.getInstance().getCreatingEntity(sensorSourceNumber);
return value != null ? value : String.format("Unknown-%d", sensorSourceNumber);
String value = McidasSatelliteLookups.getInstance().getCreatingEntity(
sensorSourceNumber);
return value != null ? value : String.format("Unknown-%d",
sensorSourceNumber);
}
private PhysicalElementValue getPhysicalElement(int ssn, int bandIndex) {
PhysicalElementValue value = McidasSatelliteLookups.getInstance().getPhysicalElement(ssn, bandIndex);
return value != null ? value :
new PhysicalElementValue(String.format("Unknown-%d", bandIndex), null);
PhysicalElementValue value = McidasSatelliteLookups.getInstance()
.getPhysicalElement(ssn, bandIndex);
return value != null ? value : new PhysicalElementValue(String.format(
"Unknown-%d", bandIndex), null);
}
private String getAreaName(int areaNumber) {
String value = McidasSatelliteLookups.getInstance().getAreaName(areaNumber);
String value = McidasSatelliteLookups.getInstance().getAreaName(
areaNumber);
return value != null ? value : String.format("AREA%04d", areaNumber);
}
@ -342,7 +369,8 @@ public class McidasSatelliteDecoder {
}
protected void unimplemented(String feature) throws DecoderException {
throw new DecoderException(String.format("%s: unimplemented: %s", traceId, feature));
throw new DecoderException(String.format("%s: unimplemented: %s",
traceId, feature));
}
/**
@ -353,7 +381,8 @@ public class McidasSatelliteDecoder {
}
/**
* @param dao the dao to set
* @param dao
* the dao to set
*/
public void setDao(SatelliteDao dao) {
this.dao = dao;

View file

@ -29,38 +29,23 @@ import com.raytheon.uf.common.dataplugin.satellite.SatelliteRecord;
import ucar.nc2.Attribute;
import ucar.nc2.NetcdfFile;
/**
* Decoder implementation for alaska and regional satellite plugin.
* This decoder ingests netcdf3 files generated by the Alaska Region and GOES-R Proving Ground
* for their satellite data.
* Decoder implementation for alaska and regional satellite plugin. This decoder
* ingests netcdf3 files generated by the Alaska Region and GOES-R Proving
* Ground for their satellite data.
*
* The following are the relevant elements in the netcdf3 files being used by the decoder
* dimensions:
* y = 1024 ;
* x = 1280 ;
* The following are the relevant elements in the netcdf3 files being used by
* the decoder dimensions: y = 1024 ; x = 1280 ;
*
* variables:
* byte image(y, x) ;
* double validTime ;
* validTime:units = "seconds since 1970-1-1 00:00:00.00 0:00" ;
* validTime:long_name = "Valid Time" ;
* variables: byte image(y, x) ; double validTime ; validTime:units =
* "seconds since 1970-1-1 00:00:00.00 0:00" ; validTime:long_name =
* "Valid Time" ;
*
* global attributes:
* :channel = "0.58 - 0.68 micron VISL" ;
* :depictorName = "AkSec1a1" ;
* :satelliteName = "HRPT" ;
* :projName = "STEREOGRAPHIC" ;
* :centralLat = 90.f ;
* :centralLon = -156.f ;
* :lat00 = 62.057667f ;
* :lon00 = -168.81633f ;
* :latNxNy = 52.910168f ;
* :lonNxNy = -146.53101f ;
* :dxKm = 1.0164f ;
* :dyKm = 1.0164f ;
* :latDxDy = 58.f ;
* :lonDxDy = -156.f ;
* global attributes: :channel = "0.58 - 0.68 micron VISL" ; :depictorName =
* "AkSec1a1" ; :satelliteName = "HRPT" ; :projName = "STEREOGRAPHIC" ;
* :centralLat = 90.f ; :centralLon = -156.f ; :lat00 = 62.057667f ; :lon00 =
* -168.81633f ; :latNxNy = 52.910168f ; :lonNxNy = -146.53101f ; :dxKm =
* 1.0164f ; :dyKm = 1.0164f ; :latDxDy = 58.f ; :lonDxDy = -156.f ;
*
* <pre>
*
@ -69,7 +54,8 @@ import ucar.nc2.NetcdfFile;
* date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 7/15/11 tk Initial Creation
*
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
* </pre>
*
* @author tk
@ -87,18 +73,21 @@ public class RegionalSatDecoder extends AbstractDecoder {
private String filename;
/**
* The decoder method uses the NetcdfFile API to retrieve the attributes and satellite image data
* from the Alaska Region and GOES-R Proving Ground netcdf3 files. These netcdf3 files are generated
* for use in Alaska and the metadata and data are specified by the requirements for the Alaska Region.
* Once the netcdf3 file is decoded, the metadata is stored in the Satellite table in Postgres and
* the image data is stored in the HDF5 repository as Satellite records. The GIS map metadata is stored
* in the satellite_spatial table by creating a SatMapCoverage object.
* The decoder method uses the NetcdfFile API to retrieve the attributes and
* satellite image data from the Alaska Region and GOES-R Proving Ground
* netcdf3 files. These netcdf3 files are generated for use in Alaska and
* the metadata and data are specified by the requirements for the Alaska
* Region. Once the netcdf3 file is decoded, the metadata is stored in the
* Satellite table in Postgres and the image data is stored in the HDF5
* repository as Satellite records. The GIS map metadata is stored in the
* satellite_spatial table by creating a SatMapCoverage object.
*
     * The following parameters are set in the spring configuration file alaskasat-ingest.xml and the
     * dao and source members are set when the RegionalSatDecoder instance is initialized:
     * The following parameters are set in the spring configuration file
* alaskasat-ingest.xml and the dao and source members are set when the
* RegionalSatDecoder instance is initialized:
*
* * @param dao the data access object for satellite records
* * @param source the source of the satellite images (Alaska Region)
* * @param dao the data access object for satellite records * @param source
* the source of the satellite images (Alaska Region)
*/
public PluginDataObject[] decode(byte[] data) throws Exception {
@ -115,13 +104,14 @@ public class RegionalSatDecoder extends AbstractDecoder {
record = new SatelliteRecord();
//String filename = "alaska_netcdf3"; // dummy filename; TODO: get filename from camel context?
// String filename = "alaska_netcdf3"; // dummy filename; TODO: get
// filename from camel context?
netCdfFile = NetcdfFile.openInMemory(filename, data);
// set the source; Alaska Region
if (source == null) {
source = "Source"; // use to look up source value; default of Source
source = "Source"; // use to look up source value; default of
// Source
}
record.setSource(getSource(source)); // lookup source value
@ -137,17 +127,17 @@ public class RegionalSatDecoder extends AbstractDecoder {
String parsed = getCreatingEntity(entity);
if (parsed != null && parsed.length() > 0) {
record.setCreatingEntity(parsed);
}
else {
} else {
record.setCreatingEntity(entity);
}
}
else {
} else {
record.setCreatingEntity("Unknown");
} // end of error block
// read the sector ID, may need to change to use satelliteSector attribute?
String sector = netCdfFile.findGlobalAttribute("depictorName").getStringValue().trim();
// read the sector ID, may need to change to use satelliteSector
// attribute?
String sector = netCdfFile.findGlobalAttribute("depictorName")
.getStringValue().trim();
record.setSectorID(sector);
// read and set the physical element
@ -169,8 +159,7 @@ public class RegionalSatDecoder extends AbstractDecoder {
// read and set the units (IRPixel, GenericPixel, ...)
// defined in physicalElements.xml lookup file
if(pev != null)
{
if (pev != null) {
String units = pev.units;
if (pev.units != null) {
record.setUnits(units);
@ -187,41 +176,45 @@ public class RegionalSatDecoder extends AbstractDecoder {
record.setSizeRecords(recordSize);
// read the valid time in seconds and store the time in milliseconds
long time = netCdfFile.findVariable("validTime").readScalarLong(); // time in seconds
calendar.setTimeInMillis(time * 1000); // need to convert seconds to milliseconds
long time = netCdfFile.findVariable("validTime").readScalarLong(); // time
// in
// seconds
calendar.setTimeInMillis(time * 1000); // need to convert seconds to
// milliseconds
/*
Date date = new Date(); // used for setting the test data time
long time = date.getTime();
             calendar.setTimeInMillis(time); // need to convert seconds to milliseconds
* Date date = new Date(); // used for setting the test data time
* long time = date.getTime(); calendar.setTimeInMillis(time); //
             * need to convert seconds to milliseconds
*/
record.setDataTime(new DataTime(calendar));
// set lov to central lon
float lov = netCdfFile.findGlobalAttribute("centralLon").getNumericValue().floatValue();
float lov = netCdfFile.findGlobalAttribute("centralLon")
.getNumericValue().floatValue();
int mapProjection = 5; // STEREOGRAPHIC projection default
int mapProjection = SatMapCoverage.PROJ_POLAR_STEREO; // STEREOGRAPHIC
// projection
// default
float latin = 0.0f; // set to zero for Stereographic projections
float rotation = 0.0f;
// read the projection
String projection = netCdfFile.findGlobalAttribute("projName").getStringValue().trim();
if(!projection.equalsIgnoreCase("STEREOGRAPHIC"))
{
String projection = netCdfFile.findGlobalAttribute("projName")
.getStringValue().trim();
if (!projection.equalsIgnoreCase("STEREOGRAPHIC")) {
// get latin for projection from data
latin = netCdfFile.findGlobalAttribute("centralLat").getNumericValue().floatValue();
if(projection.equalsIgnoreCase("LAMBERT") || projection.equalsIgnoreCase("LAMBERT_CONFORMAL"))
{
mapProjection = 3;
}
else if(projection.equalsIgnoreCase("MERCATOR"))
{
mapProjection = 1;
}
else if(projection.equalsIgnoreCase("CYLINDRICAL_EQUIDISTANT"))
{
mapProjection = 7;
latin = netCdfFile.findGlobalAttribute("centralLat")
.getNumericValue().floatValue();
if (projection.equalsIgnoreCase("LAMBERT")
|| projection.equalsIgnoreCase("LAMBERT_CONFORMAL")) {
mapProjection = SatMapCoverage.PROJ_LAMBERT;
} else if (projection.equalsIgnoreCase("MERCATOR")) {
mapProjection = SatMapCoverage.PROJ_MERCATOR;
} else if (projection
.equalsIgnoreCase("CYLINDRICAL_EQUIDISTANT")) {
mapProjection = SatMapCoverage.PROJ_CYLIN_EQUIDISTANT;
}
} else {
@ -238,7 +231,10 @@ public class RegionalSatDecoder extends AbstractDecoder {
int nx = 0, ny = 0;
// Do specialized decoding and retrieve spatial data for projections
if ((mapProjection == 3) || (mapProjection == 5) || (mapProjection == 1) || (mapProjection == 7)) {
if ((mapProjection == SatMapCoverage.PROJ_MERCATOR)
|| (mapProjection == SatMapCoverage.PROJ_LAMBERT)
|| (mapProjection == SatMapCoverage.PROJ_POLAR_STEREO)
|| (mapProjection == SatMapCoverage.PROJ_CYLIN_EQUIDISTANT)) {
// set number of points along x-axis
nx = recordSize;
@ -246,33 +242,42 @@ public class RegionalSatDecoder extends AbstractDecoder {
ny = numRecords;
// read the image as byte data and store as byte array
record.setMessageData((byte []) netCdfFile.readSection("image").get1DJavaArray(Class.forName("java.lang.Byte")));
record.setMessageData((byte[]) netCdfFile.readSection("image")
.get1DJavaArray(Class.forName("java.lang.Byte")));
// get the latitude of the first point, upper left corner
la1 = netCdfFile.findGlobalAttribute("lat00").getNumericValue().floatValue();
la1 = netCdfFile.findGlobalAttribute("lat00").getNumericValue()
.floatValue();
// get longitude of the first point, upper left corner
lo1 = (netCdfFile.findGlobalAttribute("lon00").getNumericValue().floatValue());
lo1 = (netCdfFile.findGlobalAttribute("lon00")
.getNumericValue().floatValue());
// get the pixel spacing
dx = netCdfFile.findGlobalAttribute("dxKm").getNumericValue().floatValue();
dx = netCdfFile.findGlobalAttribute("dxKm").getNumericValue()
.floatValue();
dx *= 1000f; // convert to meters from km
dy = netCdfFile.findGlobalAttribute("dyKm").getNumericValue().floatValue();
dy = netCdfFile.findGlobalAttribute("dyKm").getNumericValue()
.floatValue();
dy *= 1000f; // convert to meters from km
la2 = netCdfFile.findGlobalAttribute("latNxNy").getNumericValue().floatValue();
lo2 = netCdfFile.findGlobalAttribute("lonNxNy").getNumericValue().floatValue();
la2 = netCdfFile.findGlobalAttribute("latNxNy")
.getNumericValue().floatValue();
lo2 = netCdfFile.findGlobalAttribute("lonNxNy")
.getNumericValue().floatValue();
} else {
throw new DecoderException(
"Unable to decode Satellite: Encountered Unknown projection");
} // end of if map projection block
// Get latitude of upper right hand corner
float urLat = 0; // not used so set to zero, if required get and set value
float urLat = 0; // not used so set to zero, if required get and set
// value
record.setUpperRightLat(urLat);
// Get longitude of upper right hand corner
float urLon = 0; // not used so set to zero, if required get and set value
float urLon = 0; // not used so set to zero, if required get and set
// value
record.setUpperRightLon(urLon);
SatMapCoverage mapCoverage = null;
@ -283,8 +288,7 @@ public class RegionalSatDecoder extends AbstractDecoder {
latin, la1, lo1, la2, lo2);
} catch (Exception e) {
StringBuffer buf = new StringBuffer();
buf
.append(
buf.append(
"Error getting or constructing SatMapCoverage for values: ")
.append("\n\t");
buf.append("mapProjection=" + mapProjection).append("\n\t");
@ -320,20 +324,25 @@ public class RegionalSatDecoder extends AbstractDecoder {
return retData;
}
// uses lookup map instead of database to store creating entity parameter configuration
// uses lookup map instead of database to store creating entity parameter
// configuration
private String getCreatingEntity(String name) {
String value = RegionalSatLookups.getInstance().getCreatingEntity(name);
return value != null ? value : String.format("Unknown-%s", name);
}
// uses lookup map instead of database to store physical element parameter configuration
private PhysicalElementValue getPhysicalElement(String satName, String channel) {
PhysicalElementValue value = RegionalSatLookups.getInstance().getPhysicalElement(satName, channel);
return value != null ? value :
new PhysicalElementValue(String.format("Unknown-%s", channel), null);
// uses lookup map instead of database to store physical element parameter
// configuration
private PhysicalElementValue getPhysicalElement(String satName,
String channel) {
PhysicalElementValue value = RegionalSatLookups.getInstance()
.getPhysicalElement(satName, channel);
return value != null ? value : new PhysicalElementValue(String.format(
"Unknown-%s", channel), null);
}
// uses lookup map instead of database to store source parameter configuration
// uses lookup map instead of database to store source parameter
// configuration
private String getSource(String name) {
String value = RegionalSatLookups.getInstance().getSource(name);
return value != null ? value : String.format("Unknown-%s", name);

View file

@ -37,7 +37,8 @@ import com.vividsolutions.jts.io.WKTReader;
* date Ticket# Engineer Description
* ----------- ---------- ----------- --------------------------
* 7/15/11 tk Initial Creation
*
* - AWIPS2 Baseline Repository --------
* 07/12/2012 798 jkorman Changed projection "magic" numbers
* </pre>
*
* @author tk
@ -179,13 +180,13 @@ public class RegionalSatSpatialFactory {
ProjectedCRS crs = null;
// Get the correct CRS
if (mapProjection == 1) {
if (mapProjection == SatMapCoverage.PROJ_MERCATOR) {
crs = MapUtil.constructMercator(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, lov);
} else if (mapProjection == 3) {
} else if (mapProjection == SatMapCoverage.PROJ_LAMBERT) {
crs = MapUtil.constructLambertConformal(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, latin, latin, lov);
} else if (mapProjection == 7) {
} else if (mapProjection == SatMapCoverage.PROJ_CYLIN_EQUIDISTANT) {
crs = MapUtil.constructEquidistantCylindrical(MapUtil.AWIPS_EARTH_RADIUS,
MapUtil.AWIPS_EARTH_RADIUS, lov, latin);
} else {
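Since the point of this change is to replace projection "magic" numbers, the values the new SatMapCoverage constants are assumed to carry can be read off the literals they replace in the hunks above. The sketch below records that inferred mapping; the field names come from the diff, while the numeric values are an inference from the replaced literals, not from the SatMapCoverage source itself.

// Inferred from the one-for-one replacements in this change set.
public final class ProjectionConstantsSketch {
    public static final int PROJ_MERCATOR = 1;          // replaced literal 1
    public static final int PROJ_LAMBERT = 3;           // replaced literal 3
    public static final int PROJ_POLAR_STEREO = 5;      // replaced literal 5 (STEREOGRAPHIC default)
    public static final int PROJ_CYLIN_EQUIDISTANT = 7; // replaced literal 7

    private ProjectionConstantsSketch() {
    }
}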