Merge branch 'development' into development_on_ss_builds
Former-commit-id: 24799cd93d [formerly fa3dac43f4e945514906fb7844f10fb3782bcead]
Former-commit-id: a1071e3204
commit 8f4ddad22c
22 changed files with 259 additions and 169 deletions
@@ -489,17 +489,17 @@ public abstract class AbstractTileSet implements IRenderable, IMeshCallback {
}

drawableImages.add(di);
} else {
rsc.issueRefresh();
}

if (image == null || image.getStatus() != Status.LOADED
|| tile.coverage.getMesh() == null) {
if (image == null || image.getStatus() != Status.LOADED) {
rsc.issueRefresh();
needDrawLower = true;
}

if (tile.coverage.getMesh() == null) {
tile.coverage.setMesh(target.getExtension(
IMapMeshExtension.class).constructMesh(
tile.imageGeometry, mapDescriptor.getGridGeometry()));
target.setNeedsRefresh(true);
}
}

@@ -307,11 +307,12 @@ public class GridResource extends
.getLocation().getGridGeometry();
GridGeometry2D expectedGridGeometry = this.gridGeometry[0];
if (!realGridGeometry.equals(expectedGridGeometry)) {
GridReprojection reproj = new GridReprojection(realGridGeometry,
expectedGridGeometry);
com.raytheon.uf.common.geospatial.interpolation.Interpolation interp;
GridReprojection reproj = new GridReprojection(
realGridGeometry, expectedGridGeometry);
com.raytheon.uf.common.geospatial.interpolation.Interpolation interp;

if (getCapability(ImagingCapability.class).isInterpolationState()) {
if (getCapability(ImagingCapability.class)
.isInterpolationState()) {
BilinearInterpolation blInterp = new BilinearInterpolation();
blInterp.setMissingThreshold(1.0f);
interp = blInterp;

@@ -849,11 +850,11 @@ public class GridResource extends
baseTileEntry.setValue(combinedResourceData);
}
}
DataTime[] primaryDataTimes = descriptor.getTimeMatchingMap().get(
this);
for (int i = 0; i < primaryDataTimes.length; i++) {
DataTime[] primaryDataTimes = tileSet.keySet().toArray(
new DataTime[0]);
for (DataTime primaryDataTime : primaryDataTimes) {
Map<Float, GridMemoryBasedTileSet> map = tileSet
.get(primaryDataTimes[i]);
.get(primaryDataTime);
if (map != null) {
for (Map.Entry<Float, GridMemoryBasedTileSet> tile : map
.entrySet()) {

@@ -861,7 +862,7 @@ public class GridResource extends
if (combineResources) {
Map<Float, GridMemoryBasedTileSet> map2 = new HashMap<Float, GridResource.GridMemoryBasedTileSet>();
map2.put(tile.getKey(), tile.getValue());
combinedSet.put(primaryDataTimes[i], map2);
combinedSet.put(primaryDataTime, map2);
}
continue;
}

@@ -873,7 +874,7 @@ public class GridResource extends
.getValue());
Map<Float, GridMemoryBasedTileSet> map2 = new HashMap<Float, GridResource.GridMemoryBasedTileSet>();
map2.put(tile.getKey(), combinedResourceData);
combinedSet.put(primaryDataTimes[i], map2);
combinedSet.put(primaryDataTime, map2);
}
}
}

@@ -1520,7 +1521,8 @@ public class GridResource extends
// TODO: check if interpolation state really changed
try {
if (descriptor != null)
project(descriptor.getGridGeometry().getCoordinateReferenceSystem());
project(descriptor.getGridGeometry()
.getCoordinateReferenceSystem());
} catch (VizException e) {
statusHandler.handle(Priority.PROBLEM,
"Error updating grid resource imaging", e);
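The first GridResource hunk above switches to a BilinearInterpolation with a missing-data threshold whenever the ImagingCapability reports interpolation enabled, then reprojects between the real and expected grid geometries. As a plain-Java illustration of the bilinear step only (this is not the AWIPS GridReprojection/BilinearInterpolation API, and treating any missing neighbor as missing is an assumed stand-in for the threshold behavior), a minimal sketch:

// BilinearSample.java: illustrative only; class and method names are hypothetical.
public final class BilinearSample {

    /**
     * Sample grid[y][x] at fractional coordinates with bilinear weights.
     * Returns NaN when any contributing cell holds the fill value, one
     * (assumed) way a "missing threshold" could behave.
     */
    public static double sample(float[][] grid, double x, double y, float fill) {
        int x0 = (int) Math.floor(x), y0 = (int) Math.floor(y);
        int x1 = Math.min(x0 + 1, grid[0].length - 1);
        int y1 = Math.min(y0 + 1, grid.length - 1);
        double fx = x - x0, fy = y - y0;

        float v00 = grid[y0][x0], v10 = grid[y0][x1];
        float v01 = grid[y1][x0], v11 = grid[y1][x1];
        if (v00 == fill || v10 == fill || v01 == fill || v11 == fill) {
            return Double.NaN; // treat any missing neighbor as missing
        }
        return (1 - fy) * ((1 - fx) * v00 + fx * v10)
                + fy * ((1 - fx) * v01 + fx * v11);
    }

    public static void main(String[] args) {
        float[][] grid = { { 0f, 1f }, { 2f, 3f } };
        System.out.println(sample(grid, 0.5, 0.5, -9999f)); // 1.5
    }
}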

@@ -359,57 +359,59 @@ public class LightningResource extends
.get(this.lastPaintedTime);

if (cacheObject != null) {
LightningFrame bundle = cacheObject.getObjectAsync();
if (bundle == null) {
needsUpdate = true;
issueRefresh();
} else {
if (needsUpdate) {
needsUpdate = false;
currNegList = new ArrayList<double[]>(
bundle.posLatLonList.size());
currPosList = new ArrayList<double[]>(
bundle.negLatLonList.size());
synchronized (cacheObject.getMetadata()) {
LightningFrame bundle = cacheObject.getObjectAsync();
if (bundle == null) {
needsUpdate = true;
issueRefresh();
} else {
if (needsUpdate) {
needsUpdate = false;
currNegList = new ArrayList<double[]>(
bundle.posLatLonList.size());
currPosList = new ArrayList<double[]>(
bundle.negLatLonList.size());

if (resourceData.isHandlingPositiveStrikes()) {
for (double[] pos : bundle.posLatLonList) {
currPosList.add(descriptor.worldToPixel(pos));
}
}
if (resourceData.isHandlingNegativeStrikes()) {
for (double[] neg : bundle.negLatLonList) {
currNegList.add(descriptor.worldToPixel(neg));
}
}
}

if (resourceData.isHandlingPositiveStrikes()) {
for (double[] pos : bundle.posLatLonList) {
currPosList.add(descriptor.worldToPixel(pos));
List<double[]> positive = new ArrayList<double[]>(
currPosList.size());
for (double[] pos : currPosList) {
if (extent.contains(pos)) {
positive.add(pos);
}
}
posCount = positive.size();

target.drawPoints(positive, color, PointStyle.CROSS,
magnification);
}

if (resourceData.isHandlingNegativeStrikes()) {
for (double[] neg : bundle.negLatLonList) {
currNegList.add(descriptor.worldToPixel(neg));
List<double[]> negative = new ArrayList<double[]>(
currPosList.size());
for (double[] neg : currNegList) {
if (extent.contains(neg)) {
negative.add(neg);
}
}
negCount = negative.size();

target.drawPoints(negative, color, PointStyle.DASH,
magnification);
}
}

if (resourceData.isHandlingPositiveStrikes()) {
List<double[]> positive = new ArrayList<double[]>(
currPosList.size());
for (double[] pos : currPosList) {
if (extent.contains(pos)) {
positive.add(pos);
}
}
posCount = positive.size();

target.drawPoints(positive, color, PointStyle.CROSS,
magnification);
}

if (resourceData.isHandlingNegativeStrikes()) {
List<double[]> negative = new ArrayList<double[]>(
currPosList.size());
for (double[] neg : currNegList) {
if (extent.contains(neg)) {
negative.add(neg);
}
}
negCount = negative.size();

target.drawPoints(negative, color, PointStyle.DASH,
magnification);
}
}
}
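The LightningResource change moves the frame handling inside synchronized (cacheObject.getMetadata()) and, before calling target.drawPoints(...), keeps only the pixel coordinates that fall inside the current extent, so posCount and negCount reflect what is actually drawn. A self-contained sketch of that clip-then-count step (the Extent type and point layout below are simplified stand-ins, not the AWIPS IExtent API):

import java.util.ArrayList;
import java.util.List;

public final class ExtentFilterDemo {

    /** Minimal stand-in for a rectangular paint extent. */
    record Extent(double minX, double maxX, double minY, double maxY) {
        boolean contains(double[] p) {
            return p[0] >= minX && p[0] <= maxX && p[1] >= minY && p[1] <= maxY;
        }
    }

    /** Keep only the points inside the extent, mirroring the loop in the hunk. */
    static List<double[]> clip(List<double[]> points, Extent extent) {
        List<double[]> visible = new ArrayList<>(points.size());
        for (double[] p : points) {
            if (extent.contains(p)) {
                visible.add(p);
            }
        }
        return visible;
    }

    public static void main(String[] args) {
        List<double[]> strikes = List.of(new double[] { 10, 10 }, new double[] { 500, 500 });
        Extent screen = new Extent(0, 100, 0, 100);
        List<double[]> positive = clip(strikes, screen);
        System.out.println("posCount = " + positive.size()); // 1
    }
}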

@@ -85,6 +85,9 @@ import com.raytheon.uf.viz.derivparam.library.IDerivParamField;
* ------------ ---------- ----------- --------------------------
* Jul 27, 2009 jsanchez Initial creation
* Nov 21, 2009 #3576 rjpeter Refactored use of DerivParamDesc.
* - AWIPS2 Baseline Repository --------
* 08/03/2012 798 jkorman Explicitly set interpolationLevels
* from "source" record.
* </pre>
*
* @author jsanchez

@@ -166,6 +169,9 @@ public class SatelliteDataCubeAdapter implements IDataCubeAdapter {
listOfRequests.add(request);
SatelliteRecord derivedRecord = new SatelliteRecord(
record.getDataURI());
// Make sure to get the number of interpolation levels!
derivedRecord.setInterpolationLevels(record.getInterpolationLevels());

derivedRecord.setPhysicalElement(originalQuery.get(PE)
.getConstraintValue());
derivedRecord.setMessageData(request);

@@ -112,34 +112,10 @@
<else>
<var name="plugin.utility"
value="${plugin.base}/utility" />

<if>
<available file="${plugin.bin}" type="dir" />
<then>
<jar destfile="${jar.destfile}"
manifest="${plugin.base}/META-INF/MANIFEST.MF">
<fileset dir="${plugin.bin}"
includes="**/*.class" />
<fileset dir="${plugin.base}"
includes="res/**/*" />
<fileset dir="${plugin.base}"
includes="META-INF/**/*" />
<fileset dir="${plugin.base}"
includes="*.py" />
</jar>
</then>
<else>
<jar destfile="${jar.destfile}"
manifest="${plugin.base}/META-INF/MANIFEST.MF">
<fileset dir="${plugin.base}"
includes="res/**/*" />
<fileset dir="${plugin.base}"
includes="META-INF/**/*" />
<fileset dir="${plugin.base}"
includes="*.py" />
</jar>
</else>
</if>

<jarPlugin
plugin.directory323="${plugin.base}"
jar.file323="${jar.destfile}" />

<!-- copy the utility (localization) files. -->
<if>

@@ -157,4 +133,80 @@
</if>
</sequential>
</macrodef>

<macrodef name="jarPlugin">
<!--
random character suffixes added to
maintain uniqueness of variable names
-->
<attribute name="plugin.directory323" />
<attribute name="jar.file323" />

<sequential>
<!--
initial jar file: currently we make the assumption
every plugin has a manifest file.
-->
<jar destfile="@{jar.file323}"
manifest="@{plugin.directory323}/META-INF/MANIFEST.MF" />

<!-- scan build.properties -->
<var file="@{plugin.directory323}/build.properties" />

<!--
* iterate through bin.includes
* '.' is seen as the ${output..} directory, itself
* artifacts are only deployed if they are present
-->
<for list="${bin.includes}" param="include"
delimiter="," trim="true">
<sequential>
<if>
<available file="@{plugin.directory323}/@{include}"
type="dir" />
<then>
<!-- deploy a directory -->

<!-- is this the output directory? -->
<if>
<equals arg1="@{include}" arg2="." />
<then>
<jar destfile="@{jar.file323}" update="true">
<fileset
dir="@{plugin.directory323}/${output..}"
includes="**" />
</jar>
</then>
<else>
<jar destfile="@{jar.file323}" update="true">
<fileset
dir="@{plugin.directory323}"
includes="@{include}/**" />
</jar>
</else>
</if>
</then>
<else>
<!-- are we deploying a file? -->
<if>
<available file="@{plugin.directory323}/@{include}"
type="file" />
<then>
<jar destfile="@{jar.file323}" update="true">
<fileset
file="@{plugin.directory323}/@{include}" />
</jar>
</then>
<else>
<!-- fail: unknown deployment artifact -->
<fail
message="Unable to deploy '@{include}' specified in @{plugin.directory323}/build.properties; unable to find the file / directory." />
</else>
</if>
</else>
</if>
</sequential>
</for>
</sequential>
</macrodef>
</project>

@@ -148,33 +148,48 @@ public class SatelliteDao extends PluginDao {
SatelliteRecord.SAT_FILL_VALUE, 0.0f);

SatMapCoverage coverage = satRecord.getCoverage();
GridDownscaler downScaler = createDownscaler(coverage,
storageRecord, fillValue);
AbstractDataWrapper dataSource = getSource(storageRecord,
coverage.getNx(), coverage.getNy());
dataSource.setFillValue(fillValue);
GridDownscaler downScaler = new GridDownscaler(
MapUtil.getGridGeometry(coverage));

// How many interpolation levels do we need for this data?
int levels = downScaler.getNumberOfDownscaleLevels();
// set the number of levels in the 'parent' satellite data.
// Subtract one for the base level data.
satRecord.setInterpolationLevels(levels - 1);

// How many interpolation levels do we need for this data? Includes
// the base level!
// Subtract one for the base level data.
int downScaleLevels = downScaler.getNumberOfDownscaleLevels() - 1;
// set the number of downscale levels in the satellite metadata.
satRecord.setInterpolationLevels(downScaleLevels);
if (DataStoreFactory.isInterpolated(levels)) {
for (int downscaleLevel = 1; downscaleLevel <= levels; downscaleLevel++) {
for (int level = 0; level < downScaleLevels; level++) {
int downScaleLevel = level + 1;
Rectangle size = downScaler
.getDownscaleSize(downscaleLevel);
.getDownscaleSize(downScaleLevel);

AbstractDataWrapper dest = getDestination(storageRecord,
size);
dest.setFillValue(fillValue);
try {
downScaler.downscale(downscaleLevel, dest);
// Downscale from previous level
downScaler.downscale(downScaleLevel - 1,
downScaleLevel, dataSource, dest);

IDataRecord dr = createDataRecord(satRecord, dest,
downscaleLevel, size);
downScaleLevel, size);
// Set the attributes and properties from the parent
// data.
dr.setDataAttributes(attributes);
dr.setProperties(props);
dataStore.addDataRecord(dr);

// Set source to current level
dataSource = dest;
} catch (TransformException e) {
throw new StorageException(
"Error creating downscaled data",

@@ -470,31 +485,6 @@ public class SatelliteDao extends PluginDao {
return rec;
}

/**
* Create a down scaler for the given data.
*
* @param coverage
* Satellite Map Coverage for the source data.
* @param rec
* The original data that will be down-scaled.
* @param fillValue
* The declared fill value for the data.
* @return
*/
private GridDownscaler createDownscaler(SatMapCoverage coverage,
IDataRecord rec, double fillValue) {
GridDownscaler downScaler = null;

AbstractDataWrapper dataSource = getSource(rec, coverage.getNx(),
coverage.getNy());
dataSource.setFillValue(fillValue);

downScaler = new GridDownscaler(MapUtil.getGridGeometry(coverage),
dataSource);

return downScaler;
}

/**
* Get the value of an named attribute.
*
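The storage loop in SatelliteDao now builds each level from the previous one: downscale(downScaleLevel - 1, downScaleLevel, dataSource, dest) followed by dataSource = dest, rather than always resampling from the full-resolution grid. A stand-alone sketch of that cascade using plain 2x2 block averaging (the real GridDownscaler works on grid geometries and DataSource/DataDestination wrappers; only the control flow is illustrated here):

public final class DownscaleCascadeDemo {

    /** Halve each dimension by averaging 2x2 blocks: a trivial "downscale one level". */
    static float[][] downscaleOnce(float[][] src) {
        int ny = src.length / 2;
        int nx = src[0].length / 2;
        float[][] dest = new float[ny][nx];
        for (int y = 0; y < ny; y++) {
            for (int x = 0; x < nx; x++) {
                dest[y][x] = (src[2 * y][2 * x] + src[2 * y][2 * x + 1]
                        + src[2 * y + 1][2 * x] + src[2 * y + 1][2 * x + 1]) / 4f;
            }
        }
        return dest;
    }

    public static void main(String[] args) {
        float[][] source = new float[512][512]; // pretend full-resolution satellite tile
        int downScaleLevels = 3;                // levels beyond the base, as in the hunk

        for (int level = 0; level < downScaleLevels; level++) {
            float[][] dest = downscaleOnce(source); // downscale from the previous level
            System.out.println("level " + (level + 1) + ": "
                    + dest.length + "x" + dest[0].length);
            source = dest; // the destination becomes the source for the next level
        }
    }
}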

@@ -19,5 +19,16 @@
further_licensing_information.
-->
<requestPatterns xmlns:ns2="group">
<regex>TI.... ....</regex>
<!--
This pattern excludes the current "prototype" WMO headers for future GOES-R data.
Point of contact
Brian M. Rapp
Communications Software Team Lead
Raytheon AWIPS Team

Brian.Rapp@noaa.gov
Office: (301) 495-2221
Cell: (301) 787-9925
-->
<regex>TI[^RS]... ....</regex>
</requestPatterns>
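The new <regex> keeps the TTAAii CCCC shape of the original pattern but rejects headings whose third character is R or S, which is how the prototype GOES-R headers mentioned in the comment are excluded. A quick check of the expression itself (the sample strings are made-up, header-shaped values, not real WMO headings):

import java.util.regex.Pattern;

public final class RequestPatternDemo {
    public static void main(String[] args) {
        // Same expression as the new <regex> element.
        Pattern p = Pattern.compile("TI[^RS]... ....");

        System.out.println(p.matcher("TIAB01 XXXX").matches()); // true  - third char not R/S
        System.out.println(p.matcher("TIRB01 XXXX").matches()); // false - 'R' is excluded
        System.out.println(p.matcher("TISB01 XXXX").matches()); // false - 'S' is excluded
    }
}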

@@ -1,5 +1,4 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
plugin.xml
.

@@ -1,5 +1,4 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
res
.

@@ -55,6 +55,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* getPersistenceTime() from new IPersistable
* 20071129 472 jkorman Added IDecoderGettable interface.
* 20081106 1515 jkorman Changed units length from 16 to 26
* - AWIPS2 Baseline Repository --------
* 07/30/2012 798 jkorman Support for common satellite data.
* </pre>
*
* @author bphillip

@@ -70,13 +72,26 @@ public class SatelliteRecord extends ServerSpecificPersistablePluginDataObject

private static final long serialVersionUID = 1L;

/**
* The default dataset name to use for persisted satellite data.
*/
public static final String SAT_DATASET_NAME = DataStoreFactory.DEF_DATASET_NAME;

public static final String SAT_FILL_VALUE = "FILL_VALUE";
/**
* The attribute name for a value that will be used to "fill" undefined
* data.
*/
public static final String SAT_FILL_VALUE = "_FillValue";

public static final String SAT_ADD_OFFSET = "ADD_OFFSET";
/**
* The attribute name for the data additive offset value.
*/
public static final String SAT_ADD_OFFSET = "add_offset";

public static final String SAT_SCALE_FACTOR = "SCALE_FACTOR";
/**
* The attribute name for the data scale factor value..
*/
public static final String SAT_SCALE_FACTOR = "scale_factor";

/**
* The source of the data - NESDIS
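The attribute-name constants move from ad-hoc upper-case keys to the conventional netCDF-style names "_FillValue", "add_offset" and "scale_factor". Under that convention a stored value is unpacked as raw * scale_factor + add_offset, with the fill value marking undefined cells. A small sketch of how such attributes are typically consumed (the map and the unpack helper are illustrative, not part of the SatelliteRecord API):

import java.util.HashMap;
import java.util.Map;

public final class SatAttributeDemo {
    // Same attribute names the record now declares.
    static final String SAT_FILL_VALUE = "_FillValue";
    static final String SAT_ADD_OFFSET = "add_offset";
    static final String SAT_SCALE_FACTOR = "scale_factor";

    /** Apply the conventional scale/offset packing; fill values stay undefined. */
    static double unpack(short raw, Map<String, Number> attrs) {
        if (raw == attrs.get(SAT_FILL_VALUE).shortValue()) {
            return Double.NaN;
        }
        return raw * attrs.get(SAT_SCALE_FACTOR).doubleValue()
                + attrs.get(SAT_ADD_OFFSET).doubleValue();
    }

    public static void main(String[] args) {
        Map<String, Number> attrs = new HashMap<>();
        attrs.put(SAT_FILL_VALUE, (short) -32768);
        attrs.put(SAT_SCALE_FACTOR, 0.01);
        attrs.put(SAT_ADD_OFFSET, 200.0);

        System.out.println(unpack((short) 1500, attrs));   // 215.0
        System.out.println(unpack((short) -32768, attrs)); // NaN
    }
}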

@@ -103,39 +103,33 @@ public class GridDownscaler {
return downscaleSizes.toArray(new Rectangle[downscaleSizes.size()]);
}

private DataSource dataSource;

private GeneralGridGeometry sourceGeometry;
private Envelope sourceEnvelope;

private Rectangle[] downscaleGeometries;

private Interpolation interpolation;

/**
* Constructs a GridDownscaler for the given source geometry and data source
* using the default interpolation method
* Constructs a GridDownscaler for the given source geometry using the
* default interpolation method
*
* @param sourceGeometry
* @param dataSource
*/
public GridDownscaler(GeneralGridGeometry sourceGeometry,
DataSource dataSource) {
this(sourceGeometry, dataSource, new NearestNeighborInterpolation());
public GridDownscaler(GeneralGridGeometry sourceGeometry) {
this(sourceGeometry, new NearestNeighborInterpolation());
}

/**
* Constructs a GridDownscaler for the given source
* {@link GeneralGridGeometry} and {@link DataSource} using the specified
* {@link Interpolation}
* {@link GeneralGridGeometry} using the specified {@link Interpolation}
*
* @param sourceGeometry
* @param dataSource
* @param interpolation
*/
public GridDownscaler(GeneralGridGeometry sourceGeometry,
DataSource dataSource, Interpolation interpolation) {
this.sourceGeometry = sourceGeometry;
this.dataSource = dataSource;
Interpolation interpolation) {
this.sourceEnvelope = sourceGeometry.getEnvelope();
this.downscaleGeometries = getDownscaleSizes(sourceGeometry);
this.interpolation = interpolation;
}

@@ -176,16 +170,18 @@ public class GridDownscaler {
* @param destination
* @throws TransformException
*/
public void downscale(int downscaleLevel, DataDestination destination)
throws TransformException {
Rectangle destSize = getDownscaleSize(downscaleLevel);
public void downscale(int fromLevel, int toLevel, DataSource source,
DataDestination destination) throws TransformException {
Rectangle sourceSize = getDownscaleSize(fromLevel);
GeneralGridGeometry sourceGeometry = new GeneralGridGeometry(
new GridEnvelope2D(sourceSize), sourceEnvelope);
Rectangle destSize = getDownscaleSize(toLevel);
GeneralGridGeometry destGeometry = new GeneralGridGeometry(
new GridEnvelope2D(destSize), sourceGeometry.getEnvelope());
new GridEnvelope2D(destSize), sourceEnvelope);
GridReprojection reprojection = new GridReprojection(sourceGeometry,
destGeometry);
try {
reprojection
.reprojectedGrid(interpolation, dataSource, destination);
reprojection.reprojectedGrid(interpolation, source, destination);
} catch (FactoryException e) {
throw new TransformException(
"Error creating transforms required for downscaling", e);
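After this change a GridDownscaler is bound only to a source geometry and an Interpolation; the DataSource and DataDestination are supplied to each downscale(fromLevel, toLevel, ...) call, so one instance can drive a whole cascade of levels. The sketch below only illustrates how a list of per-level sizes could be produced by repeated halving down to a minimum dimension; the actual stopping rule of getDownscaleSizes() is not shown in this hunk, so it is an assumption:

import java.awt.Rectangle;
import java.util.ArrayList;
import java.util.List;

public final class DownscaleSizesDemo {

    /** Halve the grid until either dimension would drop below minSize (assumed rule). */
    static List<Rectangle> downscaleSizes(int nx, int ny, int minSize) {
        List<Rectangle> sizes = new ArrayList<>();
        sizes.add(new Rectangle(nx, ny)); // level 0: the full-resolution grid
        while (nx / 2 >= minSize && ny / 2 >= minSize) {
            nx /= 2;
            ny /= 2;
            sizes.add(new Rectangle(nx, ny));
        }
        return sizes;
    }

    public static void main(String[] args) {
        List<Rectangle> sizes = downscaleSizes(5120, 3072, 512);
        // numberOfDownscaleLevels would be sizes.size(); levels beyond the base: size() - 1
        for (int level = 0; level < sizes.size(); level++) {
            System.out.println("level " + level + ": " + sizes.get(level).width
                    + "x" + sizes.get(level).height);
        }
    }
}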

@@ -86,7 +86,7 @@ public abstract class AbstractDataWrapper implements DataSource,
public double getDataValue(int x, int y) {
if (y < 0 || y > ny - 1) {
// outside y range
return Float.NaN;
return Double.NaN;
} else if (x < 0 || x > nx - 1) {
// outside x range
if (wrapX > 0) {
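The AbstractDataWrapper fix returns Double.NaN, matching the double return type, when y falls outside the grid, while an out-of-range x may wrap when wrapX is positive. A stripped-down version of that accessor pattern (the class is a stand-in, and treating any positive wrapX as "wrap the whole row" is a simplification):

public final class WrappedGrid {
    private final int nx = 8;
    private final int ny = 4;
    private final int wrapX = nx;          // > 0 means the grid wraps in x (simplified)
    private final double[] data = new double[nx * ny];

    public double getDataValue(int x, int y) {
        if (y < 0 || y > ny - 1) {
            // outside y range
            return Double.NaN;
        } else if (x < 0 || x > nx - 1) {
            // outside x range: wrap if allowed, otherwise undefined
            if (wrapX > 0) {
                x = ((x % nx) + nx) % nx;  // bring x back into [0, nx)
            } else {
                return Double.NaN;
            }
        }
        return data[y * nx + x];
    }

    public static void main(String[] args) {
        WrappedGrid g = new WrappedGrid();
        System.out.println(g.getDataValue(9, 1));  // wraps to x = 1
        System.out.println(g.getDataValue(0, 9));  // NaN: y out of range
    }
}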

@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>nativeLib</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
</buildSpec>
<natures>
</natures>
</projectDescription>

@@ -1,12 +1,27 @@
ARCHFLAGS =
# This script will currently only influence how various cdt
# project dependencies are built rather than the cdt builds
# themselves.

# leave blank to let the OS determine the architecture or
# set to one of: {-m32, -m64} to specify the architecture.
# setting the flag to anything other than -m32 or -m64 will
# cause unknown, potentially chaotic, build results.
ARCHFLAGS = -m32

# ARCHFLAGS is set into the build environment so that scripts
# that utilize this script can execute their own architecture
# specific steps.

ifndef ARCHFLAGS
ARCH ?= $(shell uname -i)
ifeq ($(ARCH),x86_64)
export CFLAGS := $(CFLAGS) -m64
export ARCHFLAGS := -m64
else
export CFLAGS := $(CFLAGS) -m32
export ARCHFLAGS := -m32
endif
else
export CFLAGS := $(CFLAGS) $(ARCHFLAGS)
export ARCHFLAGS := $(ARCHFLAGS)
endif

@@ -45,7 +45,7 @@
</option>
<option id="gnu.c.compiler.option.misc.other.426315830" name="Other flags" superClass="gnu.c.compiler.option.misc.other" value="-c -fmessage-length=0 -fPIC -m32" valueType="string"/>
<option id="gnu.c.compiler.option.include.files.1528986146" name="Include files (-include)" superClass="gnu.c.compiler.option.include.files"/>
<option id="gnu.c.compiler.option.preprocessor.def.symbols.2138840959" name="Defined symbols (-D)" superClass="gnu.c.compiler.option.preprocessor.def.symbols" valueType="definedSymbols"/>
<option id="gnu.c.compiler.option.preprocessor.def.symbols.2138840959" name="Defined symbols (-D)" superClass="gnu.c.compiler.option.preprocessor.def.symbols"/>
<inputType id="cdt.managedbuild.tool.gnu.c.compiler.input.431036435" superClass="cdt.managedbuild.tool.gnu.c.compiler.input"/>
</tool>
<tool command="gcc" commandLinePattern="${COMMAND} ${FLAGS} ${OUTPUT_FLAG}${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="cdt.managedbuild.tool.gnu.c.linker.so.debug.1356804857" name="GCC C Linker" superClass="cdt.managedbuild.tool.gnu.c.linker.so.debug">

@@ -22,6 +22,9 @@ INC=-I../../include \
#CFLAGS=-q64 -O3 -qarch=auto -qcpluscmt $(INC) $(DEFS)
CFLAGS = $(INC) $(DEFS) -O3 -fPIC
-include ../../../../build.native/makefile.arch
ifeq ($(ARCHFLAGS),-m64)
export CFLAGS := $(CFLAGS) -D__64BIT__
endif

CC=gcc
LIB=libgrib2c.a

@@ -38,6 +38,7 @@

#include <Python.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "numpy/arrayobject.h"
#include "grib2.h"

@@ -45,7 +46,7 @@
static PyObject *Grib2FileError;

int getRecord(FILE * fptr, gribfield ** gfld, int recordNumber,
int fieldNumber, int unpack) {
g2int fieldNumber, g2int unpack) {

unsigned char *cgrib;
g2int listsec0[3], listsec1[13];

@@ -72,6 +73,10 @@ int getRecord(FILE * fptr, gribfield ** gfld, int recordNumber,

// Pull out the data
cgrib = (unsigned char *) malloc(lgrib);
if (cgrib == NULL) {
printf("getRecord: failed to malloc cgrib\n");
return -1;
}
ret = fseek(fptr, lskip, SEEK_SET);
lengrib = fread(cgrib, sizeof(unsigned char), lgrib, fptr);
iseek = lskip + lgrib;

@@ -121,7 +126,7 @@ static PyObject * grib2_getData(PyObject *self, PyObject* args)
PyObject * fileInfo;
FILE * fptr;
int recordNumber;
int fieldNumber;
g2int fieldNumber;
Py_ssize_t sizeSection = 0;
int sectionCounter = 0;

@@ -414,7 +414,7 @@
</command>

</menu>
<menu
<!--menu
id="NCHelp"
label="Help"
mnemonic="H">

@@ -442,7 +442,7 @@
name="aboutAdditions"
visible="false">
</separator>
</menu>
</menu-->
</menuContribution>
<menuContribution
locationURI="menu:NCHelp?after=additions">

@@ -917,10 +917,11 @@
pattern="com\.raytheon\.viz\.ui\.tools\.nav/.*"> <!-- Raytheons Pan and Zoom -->
</activityPatternBinding>

<activityPatternBinding
<!-- Raytheons CAVE Menu -->
<!--activityPatternBinding
activityId="gov.noaa.nws.ncep.NC.activity"
pattern="com\.raytheon\.viz\.ui\.personalities\.awips/.*"> <!-- Raytheons CAVE Menu -->
</activityPatternBinding>
pattern="com\.raytheon\.viz\.ui\.personalities\.awips/.*">
</activityPatternBinding-->

<!-- this gets rid of all Raytheon menus and toolbars!!!! activityPatternBinding
activityId="gov.noaa.nws.ncep.NC.activity"

@@ -6,6 +6,10 @@
# This script is started by the awips2.cave rpm build.sh script; so, it is able to get the workspace
# directory from the environment as well as the build architecture.

if [ "${UFRAME_ECLIPSE}" = "" ]; then
export UFRAME_ECLIPSE="/opt/uframe-eclipse"
fi

CAVE_RPM_DIST_DIR="${WORKSPACE}/rpms/awips2.cave/setup/dist"
if [ ! -d ${CAVE_RPM_DIST_DIR} ]; then
echo "ERROR: ${CAVE_RPM_DIST_DIR} does not exist."

@@ -33,7 +37,7 @@ fi

# Execute the CAVE PDE Build.
# The Sun JDK Build.
time /bin/bash build.sh -eclipse=/opt/uframe-eclipse
time /bin/bash build.sh -eclipse=${UFRAME_ECLIPSE}
RC=$?

if [ ${RC} -ne 0 ]; then

@@ -67,6 +71,7 @@ fi
# Execute the P2 Repo PDE Build.
# The Sun JDK Build.
time /awips2/ant/bin/ant -f p2-build.xml \
-Declipse.dir=${UFRAME_ECLIPSE} \
-Dbuild.version=${AWIPSII_VERSION} \
-Dbuild.arch=${CAVE_BUILD_ARCH}
RC=$?

@@ -7,7 +7,7 @@
Name: awips2-python
Summary: AWIPS II Python Distribution
Version: 2.7.1
Release: 5
Release: 6
Group: AWIPSII
BuildRoot: %{_build_root}
BuildArch: %{_build_arch}
Binary file not shown.
Binary file not shown.