Merge branch 'omaha_13.3.1' into development
Conflicts:
	cave/com.raytheon.viz.ui/src/com/raytheon/viz/ui/perspectives/AbstractVizPerspectiveManager.java
	edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/decoderpostprocessors/EnsembleGridAssembler.java
	edexOsgi/com.raytheon.uf.edex.common.core.feature/feature.xml
	edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java
	edexOsgi/com.raytheon.uf.edex.datadelivery.feature/feature.xml
	edexOsgi/com.raytheon.uf.edex.registry.feature/feature.xml

Former-commit-id: d05c9f3a6a4b080b0da71c856f1ba6cd0d6d264e
commit 9a321b6ac8

53 changed files with 1278 additions and 344 deletions

@@ -440,13 +440,13 @@ class SmartScript(BaseTool.BaseTool):
         if timeRangeList is not None:
             retVal = {}
             for i in xrange(len(timeRangeList)):
-                iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, result[i])
+                iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, exprName, result[i])
                 retVal[timeRangeList[i]] = iresult
             return retVal
         else:
-            return self._getGridsResult(timeRange, noDataError, mode, result)
+            return self._getGridsResult(timeRange, noDataError, mode, exprName, result)
 
-    def _getGridsResult(self, timeRange, noDataError, mode, result):
+    def _getGridsResult(self, timeRange, noDataError, mode, exprName, result):
         retVal = None
         if result is not None:
             if len(result) == 0:

@@ -25,11 +25,11 @@ import java.util.concurrent.Executors;
 import com.raytheon.uf.common.datadelivery.bandwidth.data.BandwidthGraphData;
 import com.raytheon.uf.common.datadelivery.bandwidth.request.GraphDataRequest;
 import com.raytheon.uf.common.datadelivery.bandwidth.response.GraphDataResponse;
+import com.raytheon.uf.common.datadelivery.request.DataDeliveryConstants;
+import com.raytheon.uf.common.serialization.comm.RequestRouter;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.viz.core.exception.VizException;
-import com.raytheon.uf.viz.core.requests.ThriftClient;
 
 /**
  *
@@ -45,6 +45,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
 * ------------ ---------- ----------- --------------------------
 * Dec 12, 2012 1269       lvenable    Initial creation
 * Feb 14, 2013 1596       djohnson    Remove sysouts, correct statusHandler class, handle null response.
+* Mar 26, 2013 1827       djohnson    Graph data should be requested from data delivery.
 *
 * </pre>
 *
@@ -146,8 +147,9 @@ public class GraphDataUtil implements Runnable {
      */
     private GraphDataResponse sendRequest(GraphDataRequest req) {
         try {
-            return (GraphDataResponse) ThriftClient.sendRequest(req);
-        } catch (VizException e) {
+            return (GraphDataResponse) RequestRouter.route(req,
+                    DataDeliveryConstants.DATA_DELIVERY_SERVER);
+        } catch (Exception e) {
             statusHandler.handle(Priority.ERROR, "Error Requesting Data", e);
         }
 

@@ -392,6 +392,7 @@ public class MPEDisplayManager {
         }
         return null;
     }
 
+    private final Set<IEditTimeChangedListener> timeChangedListeners = new LinkedHashSet<IEditTimeChangedListener>();
 
     private final Set<IDisplayFieldChangedListener> fieldChangedListeners = new LinkedHashSet<IDisplayFieldChangedListener>();
@@ -706,6 +707,7 @@ public class MPEDisplayManager {
             // Remove old resource
             list.removeRsc(displayedFieldResource);
         }
 
+        fieldResourceData.setFieldData(fieldToDisplay);
         fieldResourceData.setArealDisplay(arealDisplay);
         fieldResourceData.setAccumulationInterval(accumulationHrs);
@@ -723,6 +725,7 @@ public class MPEDisplayManager {
                 listener.displayFieldChanged(oldField, fieldToDisplay);
             }
         }
 
+        // reset gages
         List<MPEGageResource> rscs = display.getDescriptor()
                 .getResourceList()

@@ -109,8 +109,6 @@ public class MPEGageResource extends AbstractMPEInputResource implements
 
     private static final double POINT_RADIUS = 2;
 
-    private static final RGB WHITE = new RGB(255, 255, 255);
-
     private final SimpleDateFormat sdf;
 
     private final Object mutex = new Object();
@@ -363,10 +361,7 @@ public class MPEGageResource extends AbstractMPEInputResource implements
         for (Coordinate point : dataMap.keySet()) {
             if (extent.contains(new double[] { point.x, point.y })) {
                 MPEGageData gageData = dataMap.get(point);
-                RGB gageColor = WHITE;
-                if (displayIsEdit) {
-                    gageColor = getGageColor(gageData);
-                }
+                RGB gageColor = getGageColor(gageData);
 
                 boolean isReportedMissing = gageData.isReported_missing();
                 boolean isMissing = ((gageData.getGval() == -999.f || gageData

@@ -297,7 +297,6 @@ public class RegenHrFlds {
             e.printStackTrace();
         }
-
         /* Clear gage edits */
         MPEDataManager.getInstance().clearEditGages();
         shell.setCursor(null);
     }

@@ -72,6 +72,7 @@ import com.raytheon.viz.ui.tools.ModalToolManager;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 22, 2008            randerso    Initial creation
+* Mar 26, 2013 1799       bsteffen    Fix pan/zoom when in views.
 *
 * </pre>
 *
@@ -143,6 +144,9 @@ public abstract class AbstractVizPerspectiveManager implements
-            if (newPart instanceof IEditorPart) {
                 AbstractVizPerspectiveManager mgr = VizPerspectiveListener
                         .getCurrentPerspectiveManager();
+                IWorkbenchPart newPart = part.getSite().getPage()
+                        .getActivePart();
+                if (newPart instanceof IEditorPart) {
                     if (mgr != null) {
                         for (AbstractModalTool tool : mgr.getToolManager()
                                 .getSelectedModalTools()) {
@@ -154,6 +158,7 @@ public abstract class AbstractVizPerspectiveManager implements
                     }
                 }
             }
+        }
 
         @Override
         public void partOpened(IWorkbenchPart part) {

@@ -11,6 +11,7 @@ import java.util.Map;
 import java.util.Set;
 
+import javax.measure.converter.UnitConverter;
 import javax.measure.unit.SI;
 
 import org.apache.commons.lang.StringUtils;
 import org.geotools.geometry.jts.JTS;
@@ -19,6 +20,7 @@ import org.opengis.referencing.operation.MathTransform;
 
 import com.raytheon.uf.common.dataplugin.warning.config.PathcastConfiguration;
 import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration;
+import com.raytheon.uf.common.dataplugin.warning.config.PointSourceConfiguration.PointType;
 import com.raytheon.uf.common.dataplugin.warning.config.WarngenConfiguration;
 import com.raytheon.uf.common.dataquery.requests.RequestConstraint;
 import com.raytheon.uf.common.geospatial.ISpatialQuery.SearchMode;
@@ -50,6 +52,7 @@ import com.vividsolutions.jts.geom.Point;
 * Oct 17, 2012            jsanchez    Added pathcast algorithm.
 * Feb 12, 2013 1600       jsanchez    Used adjustAngle method from AbstractStormTrackResource.
 * Mar  5, 2013 1600       jsanchez    Used AdjustAngle instead of AbstractStormTrackResource to handle angle adjusting.
+* Mar 26, 2013 1819       jsanchez    Allowed points to not be based on point source inclusion constraints.
 *
 * </pre>
 *
@@ -62,6 +65,9 @@ abstract public class AbstractDbSourceDataAdaptor {
 
     private static final String GEOM_FIELD = "the_geom";
 
+    private static UnitConverter meterSqToKmSq = SI.METRE.times(SI.METRE)
+            .getConverterTo(SI.KILOMETRE.times(SI.KILOMETRE));
+
     protected Set<String> undatabasedSortableFields = new HashSet<String>(
             Arrays.asList(new String[] {
                     ClosestPointComparator.Sort.DISTANCE.toString(),
@@ -209,14 +215,51 @@ abstract public class AbstractDbSourceDataAdaptor {
                     ClosestPoint cp = createClosestPoint(pointField, ptFields,
                             ptRslt);
                     cp.setGid(getGid(ptFields, ptRslt.attributes));
+                    if (pointConfig.getType() == PointType.POINT
+                            || includeArea(pointConfig, ptRslt.geometry)) {
                         points.add(cp);
+                    }
                 }
             }
         }
 
         return points;
     }
 
+    /**
+     * Determines if the geom surpasses the inclusion percent and/or inclusion
+     * area configurations.
+     *
+     * @param pointConfig
+     * @param geom
+     * @return
+     */
+    private boolean includeArea(PointSourceConfiguration pointConfig,
+            Geometry geom) {
+        String inclusionAndOr = pointConfig.getInclusionAndOr();
+        double inclusionPercent = pointConfig.getInclusionPercent();
+        double inclusionArea = pointConfig.getInclusionArea();
+
+        Geometry intersection = searchArea.intersection(geom);
+        double ratio = intersection.getArea() / geom.getArea();
+        double ratioInPercent = ratio * 100;
+        double areaOfGeom = geom.getArea();
+        double areaInKmSqOfIntersection = meterSqToKmSq.convert(areaOfGeom
+                * ratio);
+
+        boolean includeArea = false;
+        if (inclusionAndOr.equalsIgnoreCase("AND")
+                && ratioInPercent >= inclusionPercent
+                && areaInKmSqOfIntersection > inclusionArea) {
+            includeArea = true;
+        } else if (inclusionAndOr.equalsIgnoreCase("OR")
+                && (ratioInPercent >= inclusionPercent || areaInKmSqOfIntersection > inclusionArea)) {
+            includeArea = true;
+        }
+
+        return includeArea;
+    }
+
     /**
      * Returns a list of impacted points/areas that are relative to the
     * centroid.

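Reviewer note: the new includeArea() combines a percent-of-overlap threshold with an absolute km² threshold under the configured AND/OR connective. A minimal standalone sketch of the same predicate (method and parameter names here are hypothetical, not part of WarnGen):

    // Mirrors inclusionAndOr / inclusionPercent / inclusionArea semantics:
    // AND requires both thresholds to pass; OR requires either; any other
    // connective excludes the point, matching the new code's default of false.
    static boolean include(String andOr, double overlapPct, double overlapKmSq,
            double pctThreshold, double areaThreshold) {
        if ("AND".equalsIgnoreCase(andOr)) {
            return overlapPct >= pctThreshold && overlapKmSq > areaThreshold;
        } else if ("OR".equalsIgnoreCase(andOr)) {
            return overlapPct >= pctThreshold || overlapKmSq > areaThreshold;
        }
        return false;
    }

With the configuration defaults (inclusionPercent = 0, inclusionArea = 0, AND), any geometry with a positive intersection area should pass, so existing templates keep their behavior.
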
@@ -53,6 +53,7 @@ import com.raytheon.viz.warngen.gis.AffectedAreas;
 *                        moved the following methods from InitialLockingBehavior to this class:
 *                        bulletIndices(), header(), firstBullet(), secondBullet(), getImmediateCausesPtrn();
 *                        updated body(), header(), and secondBullet();
+* Mar 13, 2013 DR 15892  D. Friedman Fix bullet parsing.
 *
 * </pre>
 *
@@ -141,10 +142,13 @@ abstract public class AbstractLockingBehavior implements ICommonPatterns {
     private Integer[] bulletIndices() {
         List<Integer> bulletIndices = new ArrayList<Integer>();
 
-        int index = text.indexOf("* ");
+        /* Assumes first line cannot be a bullet and that the '*' is
+         * at the start of a line.
+         */
+        int index = text.indexOf("\n* ");
         while (index >= 0) {
-            bulletIndices.add(index);
-            index = text.indexOf("* ", index + 2);
+            bulletIndices.add(index + 1);
+            index = text.indexOf("\n* ", index + 3);
         }
 
         return bulletIndices.toArray(new Integer[bulletIndices.size()]);

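Reviewer note: anchoring the search on "\n* " (and compensating with index + 1) is what fixes the bullet parsing — a bare "* " can also occur mid-line. A quick illustration (sample product text hypothetical):

    String text = "BULLETIN\n* FIRST BULLET\nHAIL TO 1.00 IN* POSSIBLE\n* SECOND BULLET";
    // Old: text.indexOf("* ") also hits the mid-line asterisk in "IN* POSSIBLE".
    // New: text.indexOf("\n* ") only matches bullets that begin a line, and
    // index + 1 points back at the '*' itself rather than at the newline.
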
@@ -39,6 +39,8 @@ import com.raytheon.viz.warngen.gis.AffectedAreas;
 * ------------ ---------- ----------- --------------------------
 * Sep 24, 2012 15322      jsanchez    Initial creation
 * Jan  8, 2013 15664      Qinglu Lin  Updated body().
+* Mar 13, 2013 15892      D. Friedman Fix headline locking. Do not
+*                                     lock "AND" or "FOR".
 *
 * </pre>
 *
@@ -51,9 +53,7 @@ public class FollowUpLockingBehavior extends AbstractLockingBehavior {
      */
     @Override
     public void body() {
-        if (action != WarningAction.COR)
-            headlines();
-        else
+        headlines();
         super.body();
     }
 
@@ -66,7 +66,7 @@ public class FollowUpLockingBehavior extends AbstractLockingBehavior {
         // should be blank.
         Pattern headlinePtrn = Pattern
                 .compile(
-                        "^\\.\\.\\.(A|THE) (.*) (WARNING|ADVISORY) .*(REMAINS|EXPIRE|CANCELLED).*(\\.\\.\\.)$",
+                        "^\\.\\.\\.(AN?|THE) (.*) (WARNING|ADVISORY) .*(REMAINS|EXPIRE|CANCELLED).*(\\.\\.\\.)$",
                         Pattern.MULTILINE);
         Matcher m = headlinePtrn.matcher(text);
 
@@ -187,16 +187,8 @@ public class FollowUpLockingBehavior extends AbstractLockingBehavior {
                     + LOCK_START + "..." + LOCK_END;
         }
         // Locks warning type (i.e. SEVERE THUNDERSTORM)
-        headline = headline.replaceAll("(A|THE) (" + warningType + ")",
-                LOCK_START + "$0" + LOCK_END);
-
-        // Locks the 'FOR' in the headline
-        headline = headline.replaceFirst(" FOR ", " " + LOCK_START + "FOR"
-                + LOCK_END + " ");
-
-        // Locks the 'AND' in the headline
-        headline = headline.replaceFirst(" AND ", " " + LOCK_START + "AND"
-                + LOCK_END + " ");
+        headline = headline.replaceAll("(AN?|THE)( [\\w\\s]*?)(" + warningType + ")",
+                LOCK_START + "$1" + LOCK_END + "$2" + LOCK_START + "$3" + LOCK_END);
 
         return headline;
     }

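Reviewer note: the single replaceAll now locks the article and the warning type as two separate spans while leaving the words between them editable, which is how "AND"/"FOR" stop being locked. A rough example with <L>/</L> standing in for LOCK_START/LOCK_END (sample headline hypothetical):

    String warningType = "SEVERE THUNDERSTORM";
    String headline = "A SEVERE THUNDERSTORM WARNING REMAINS IN EFFECT";
    headline = headline.replaceAll("(AN?|THE)( [\\w\\s]*?)(" + warningType + ")",
            "<L>$1</L>$2<L>$3</L>");
    // -> "<L>A</L> <L>SEVERE THUNDERSTORM</L> WARNING REMAINS IN EFFECT"

The "AN?" alternative also pairs with the headline-pattern fix above, so headlines beginning with "AN" are recognized too.
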
@@ -32,6 +32,7 @@ import java.util.regex.Pattern;
 * ------------ ---------- ----------- --------------------------
 * Sep 24, 2012 15332      jsanchez    Initial creation
 * Oct 18, 2012 15332      jsanchez    Replaced listOfAreaNamesPtrn with String pattern.
+* Mar 13, 2013 DR 15892   D. Friedman Allow some punctuation in area names.
 *
 * </pre>
 *
@@ -55,7 +56,7 @@ public interface ICommonPatterns {
     // LOCK_END can be added at the start of the line if a previous line has
     // been locked.
     public static final String listOfAreaName = "^((" + LOCK_END
-            + "){0,1}(((\\w+\\s{1})+\\w{2}-)*((\\w+\\s{1})+\\w{2}-)))";
+            + "){0,1}((([\\?\\(\\)\\w\\.,/'-]+\\s{1})+\\w{2}-)*(([\\?\\(\\)\\w\\.,/'-]+\\s{1})+\\w{2}-)))";
 
     // LOCK_END should not be found at the beginning of a first bullet since the
     // previous line should be blank.

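Reviewer note: the widened character class means county/zone names containing punctuation now match the per-line area-name pattern; the old \w-only class rejected them. A small check (sample names hypothetical):

    Pattern p = Pattern.compile(listOfAreaName, Pattern.MULTILINE);
    // Old class (\\w+ only) failed on punctuation; the new class accepts:
    //   "ST. LUCIE FL-"   (period)
    //   "O'BRIEN IA-"     (apostrophe)
    //   "MIAMI-DADE FL-"  (embedded hyphen)
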
@@ -346,7 +346,7 @@ public class FipsUtil {
      * @param fips
      * @return
      */
-    private static ArrayList<String> getListCounties(String fips) {
+    public static ArrayList<String> getListCounties(String fips) {
         ArrayList<String> rval = new ArrayList<String>();
         String matchStr = "";
 

@@ -28,6 +28,7 @@ import com.raytheon.viz.warngen.text.ICommonPatterns;
 * ------------ ---------- ----------- --------------------------
 * Jul 22, 2008 #1284      bwoodle     Initial creation
 * Oct 18, 2012 15332      jsanchez    Fixed refactor bugs.
+* Mar 13, 2013 DR 15892   D. Friedman Handle SMW format in canceledAreasFromText
 *
 * </pre>
 *
@@ -43,6 +44,8 @@ public class FollowUpUtil {
     public static final Pattern vtecPtrn = Pattern
             .compile("/[OTEX]\\.([A-Z]{3})\\.[A-Za-z0-9]{4}\\.[A-Z]{2}\\.[WAYSFON]\\.\\d{4}\\.\\d{6}T\\d{4}Z-\\d{6}T\\d{4}Z/");
 
+    private static final String SMW_CANCELED_AREAS_HEADER = "THE AFFECTED AREAS WERE...";
+
     /**
      * This method checks whether a particular followup should be available
      * given a Warning Record, a vtec Action, and a template configuration
@@ -173,7 +176,8 @@ public class FollowUpUtil {
         String headline = "";
         Pattern listOfAreaNamePtrn = Pattern
                 .compile(ICommonPatterns.listOfAreaName);
-        for (String line : originalText.trim().split("\n")) {
+        String[] splitLines = originalText.trim().split("\n");
+        for (String line : splitLines) {
             if (line.contains("TEST") || line.trim().length() == 0) {
                 continue;
             }
@@ -198,8 +202,15 @@ public class FollowUpUtil {
                 headline += line;
             }
         }
-        String[] ugcs = ugcLine.split("-");
-        String[] names = namesLine.split("-");
+        String[] ugcs = FipsUtil.getListCounties(ugcLine).toArray(new String[0]);
+        String[] names;
+        boolean smwAreas = false;
+        if (namesLine.length() > 0)
+            names = namesLine.split("-");
+        else {
+            names = parseSMWCanceledAreas(splitLines);
+            smwAreas = true;
+        }
         String[] areas = headline.split("\\.\\.\\.");
 
         ArrayList<AffectedAreas> al = new ArrayList<AffectedAreas>();
@@ -223,12 +234,20 @@ public class FollowUpUtil {
                 }
             }
 
+            if (ugc.length() < 3)
+                continue; // TODO: log?
+
             fips = ugc.substring(ugc.length() - 3);
 
             if (i < names.length) {
-                name = names[i].substring(0, names[i].length() - 3);
-                stateAbbreviation = names[i].substring(names[i].length() - 2);
+                if (!smwAreas && names[i].length() >= 3) {
+                    name = names[i].substring(0, names[i].length() - 3);
+                    stateAbbreviation = names[i].substring(names[i].length() - 2);
+                } else {
+                    name = names[i];
+                }
             } else
                 break;
 
             if (name != null) {
                 for (String area : areas) {
@@ -334,4 +353,32 @@ public class FollowUpUtil {
 
         return rval;
     }
+
+    /** Parses the canceled areas of an SMW, which have a different format
+     * from other products.
+     */
+    private static String[] parseSMWCanceledAreas(String[] splitLines) {
+        StringBuilder text = new StringBuilder(64);
+        boolean inAreas = false;
+        for (String line : splitLines) {
+            String trimmedLine = line.trim();
+            if (SMW_CANCELED_AREAS_HEADER.equals(trimmedLine))
+                inAreas = true;
+            else if (inAreas) {
+                if (trimmedLine.length() > 0) {
+                    text.append(trimmedLine);
+                    text.append('\n');
+                } else
+                    break;
+            }
+        }
+        int len = text.length();
+        if (len >= 4 && "...\n".equals(text.substring(len - 4)))
+            text.delete(len - 4, len);
+        String[] areas = text.toString().split("\\.\\.\\.\\n");
+        // Unwrap lines.
+        for (int i = 0; i < areas.length; ++i)
+            areas[i] = areas[i].replace("\n", " ");
+        return areas;
+    }
 }

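Reviewer note: for a Special Marine Warning the canceled segment carries a "THE AFFECTED AREAS WERE..." block instead of a dashed names line, which is why namesLine comes back empty and the new parser walks the raw lines instead. A sketch of what it extracts (product text hypothetical):

    String[] lines = {
        "THE AFFECTED AREAS WERE...",
        "COASTAL WATERS FROM A TO B OUT",   // wrapped line
        "20 NM...",
        "WATERS FROM C TO D...",
        ""                                   // blank line ends the block
    };
    // parseSMWCanceledAreas(lines) appends the block, trims the trailing
    // "...\n", splits on "...\n", then unwraps embedded newlines:
    // -> { "COASTAL WATERS FROM A TO B OUT 20 NM", "WATERS FROM C TO D" }
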
@@ -86,6 +86,14 @@
             <param name="feature"
                 value="com.raytheon.uf.edex.npp.feature" />
         </antcall>
+        <antcall target="build">
+            <param name="feature"
+                value="com.raytheon.uf.edex.registry.client.feature" />
+        </antcall>
+        <antcall target="build">
+            <param name="feature"
+                value="com.raytheon.uf.edex.datadelivery.client.feature" />
+        </antcall>
         <antcall target="build">
             <param name="feature"
                 value="com.raytheon.uf.edex.registry.feature" />

@@ -168,6 +168,24 @@
       <appender-ref ref="TextLog"/>
    </appender>
 
+   <appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
+      <rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
+         <param name="FileNamePattern" value="${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log"/>
+      </rollingPolicy>
+      <layout class="org.apache.log4j.PatternLayout">
+         <param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>
+      </layout>
+   </appender>
+
+   <appender name="PerformanceLogAsync" class="org.apache.log4j.AsyncAppender">
+      <appender-ref ref="PerformanceLog" />
+   </appender>
+
+   <logger name="PerformanceLogger" additivity="false">
+      <level value="DEBUG"/>
+      <appender-ref ref="PerformanceLogAsync" />
+   </logger>
+
    <logger name="com.raytheon">
      <level value="INFO"/>
    </logger>

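Reviewer note: the performance log is wired by logger name, so EDEX code can emit to it without any new plumbing — resolving the named logger is enough. A minimal usage sketch (the timing variable is hypothetical):

    // org.apache.log4j.Logger; the name must match the <logger> element above.
    Logger perfLog = Logger.getLogger("PerformanceLogger");
    perfLog.debug("GridAssembler: merge took " + elapsedMs + " ms");

The AsyncAppender in between keeps slow disk I/O off the decode threads.
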
@@ -44,7 +44,7 @@
    <!-- Performance log -->
    <appender name="PerformanceLog" class="org.apache.log4j.rolling.RollingFileAppender">
       <rollingPolicy class="org.apache.log4j.rolling.TimeBasedRollingPolicy">
-         <param name="FileNamePattern" value="${edex.home}/logs/edex-request-performance-%d{yyyyMMdd}.log"/>
+         <param name="FileNamePattern" value="${edex.home}/logs/edex-${edex.run.mode}-performance-%d{yyyyMMdd}.log"/>
      </rollingPolicy>
      <layout class="org.apache.log4j.PatternLayout">
         <param name="ConversionPattern" value="%-5p %d [%t] %c{1}: %m%n"/>

@@ -5,6 +5,7 @@
 ##### Qinglu Lin   08-13-2012 DR 14493. Use corToNewMarker and corEventtime.
 ##### D. Friedman  11-09-2012 DR 15430. Rework included watches.
 ##### QINGLU LIN   12-27-2012 DR 15594. Added $lock to headlineLocList.
+##### D. Friedman  03-13-2013 DR 15892. Do not lock portion of state in firstBullet.
 ####################################################################################################
 Mile Marker Test Code
 macro "mmarkers" use (called out of VM_global_library.vm):
@@ -750,7 +751,7 @@ THE ${area.name}##
 #if(${intFIPS.parseInt($FIPS)} < 500 || ${area.stateabbr} == "TX")
 <L>${area.name} ${area.areaNotation}</L> IN #areaFormat(${area.partOfParentRegion} true false) <L>${area.parentRegion}...</L>
 #else
-<L>${area.name}</L> IN <L>#areaFormat(${area.partOfParentRegion} true false) ${area.parentRegion}...</L>
+<L>${area.name}</L> IN #areaFormat(${area.partOfParentRegion} true false) <L>${area.parentRegion}...</L>
 #end
 #end
 ## COMMENTED OUT 5 LINES BELOW THIS IS GENERALLY NOT UTILIZED - you can unREMARK if desired

@@ -4,6 +4,7 @@
 ## Evan Bookbinder 4-25-2012 for OB 12.3.1 (corText)
 ## QINGLU LIN 7-31-2012 DR 15217 use roundAndPad
 ## Qinglu Lin 12-27-2012 DR 15594. Appended true to headlineLocList's parameter list.
+## D. Friedman 03-13-2013 DR 15892. Use printcoords.
 ################################################
 ##
 ### CREATE PHRASING DEPENDING ON WHETHER WE ISSUE EXP PRIOR TO EXPIRATION TIME OR NOT
@@ -482,10 +483,7 @@ THIS IS A TEST MESSAGE.##
 THIS IS A TEST MESSAGE. DO NOT TAKE ACTION BASED ON THIS MESSAGE.
 
 #end
-LAT...LON ##
-#foreach(${coord} in ${areaPoly})
-#llFormat(${coord.y}) #llFormat(${coord.x}) ##
-#end
+#printcoords(${areaPoly}, ${list})
 
 TIME...MOT...LOC ##
 ${dateUtil.format(${event}, ${timeFormat.time})}Z ##

@@ -8,6 +8,7 @@
 ## Evan Bookbinder 4-25-2012 for OB 12.3.1 (MND)
 ## QINGLU LIN 7-31-2012 DR 15217 use roundAndPad ##
 ## Qinglu Lin 12-27-2012 DR 15594. Appended true to headlineLocList's parameter list.
+## D. Friedman 03-13-2013 DR 15892. Do not lock locations in headline.
 ######################################################
 ##
 ##SET SOME INITIAL VARIABLES
@@ -120,7 +121,7 @@ THIS IS A TEST MESSAGE. ##
 #end
 #if(${windSpeed} >= 40 || ${hailSize} >= 0.70)
 ...SIGNIFICANT WEATHER ADVISORY FOR ##
-#headlineLocList(${areas} true false true false true) #secondBullet(${dateUtil},${expire},${timeFormat},${localtimezone},${secondtimezone})
+#headlineLocList(${areas} true false true false false) #secondBullet(${dateUtil},${expire},${timeFormat},${localtimezone},${secondtimezone})
 ...##
 #elseif(${windSpeed} == 0 && ${hailSize} == 0)
 !** YOU DID NOT SELECT ANY WIND OR HAIL THREATS. PLEASE RE-GENERATE THIS ADVISORY **!

@@ -108,6 +108,14 @@
          id="com.raytheon.uf.edex.npp.feature"
          version="0.0.0"/>
 
+   <includes
+         id="com.raytheon.uf.edex.datadelivery.client.feature"
+         version="0.0.0"/>
+
+   <includes
+         id="com.raytheon.uf.edex.registry.client.feature"
+         version="0.0.0"/>
+
    <includes
          id="com.raytheon.uf.edex.datadelivery.feature"
          version="0.0.0"/>

@@ -22,6 +22,7 @@ package com.raytheon.edex.plugin.grib.decoderpostprocessors;
 
 import java.io.File;
 import java.io.FilenameFilter;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
@@ -38,7 +39,6 @@ import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.datastorage.StorageStatus;
 import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
 import com.raytheon.uf.common.gridcoverage.GridCoverage;
-import com.raytheon.uf.common.gridcoverage.LatLonGridCoverage;
 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
@@ -55,6 +55,7 @@ import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
 import com.raytheon.uf.edex.database.cluster.ClusterTask;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;
+import com.raytheon.uf.edex.database.query.DatabaseQuery;
 import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
 
 /**
@@ -70,6 +71,8 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
 * ------------ ---------- ----------- --------------------------
 * 4/09/10      4638       bphillip    Initial Creation
 * Mar 14, 2013 1794       djohnson    FileUtil.listFiles now returns List.
+* Mar 27, 2013 1821       bsteffen    Reduce db and pypies requests in grid
+*                                     assembler.
 *
 * </pre>
 *
@@ -126,10 +129,8 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
 
     @Override
     public GridRecord[] process(GridRecord rec) throws GribException {
-        Map<Integer, GridRecord> newRecords = new HashMap<Integer, GridRecord>();
         String compositeModel = getCompositeModel(rec.getDatasetId());
         if (compositeModel != null) {
-            GridRecord newRec = null;
             String lockName = compositeModel + "_"
                     + rec.getParameter().getAbbreviation() + "_"
                     + rec.getLevel().toString();
@@ -146,9 +147,7 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
                 ct = ClusterLockUtils.lock(CLUSTER_TASK_NAME, lockName,
                         120000, true);
             }
-            newRec = processGrid(rec,
-                    getCompositeModelObject(compositeModel));
-            newRecords.put(newRec.getId(), newRec);
+            processGrid(rec, getCompositeModelObject(compositeModel));
         } catch (Exception e) {
             clearTime = true;
             throw new GribException("Error processing ensemble grid", e);
@@ -199,34 +198,86 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
      * @return The new grib record
      * @throws Exception
      */
-    private GridRecord processGrid(GridRecord record, CompositeModel thinned)
+    private void processGrid(GridRecord record, CompositeModel thinned)
             throws Exception {
 
         GridDao dao = (GridDao) PluginFactory.getInstance().getPluginDao(
                 GridConstants.GRID);
-        String modelName = record.getDatasetId();
-        String oldGrid = record.getLocation().getId().toString();
-        String newGrid = GribSpatialCache.getInstance()
-                .getGridByName(thinned.getGrid()).getId().toString();
-        String dataURI = record.getDataURI();
-        String assembledDataURI = dataURI.replace(modelName,
-                thinned.getModelName()).replace(oldGrid, newGrid);
-
-        List<?> result = dao.queryBySingleCriteria("dataURI", assembledDataURI);
-        GridRecord assembledRecord = null;
+        GridRecord assembledRecord = createAssembledRecord(record, thinned);
+        DatabaseQuery query = new DatabaseQuery(GridRecord.class);
+        query.addReturnedField("dataURI");
+        query.addQueryParam("dataURI", assembledRecord.getDataURI());
+        List<?> result = dao.queryByCriteria(query);
         if (result.isEmpty()) {
-            assembledRecord = createRecord(record, dao, thinned);
+            persistNewRecord(record, assembledRecord, thinned, dao);
         } else {
-            assembledRecord = (GridRecord) result.get(0);
-            FloatDataRecord rec = (FloatDataRecord) dao.getHDF5Data(
-                    assembledRecord, -1)[0];
-            assembledRecord.setMessageData(rec);
-            assembledRecord.setPluginName(GridConstants.GRID);
+            updateExistingRecord(record, assembledRecord, thinned, dao);
         }
+        EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
+                new String[] { assembledRecord.getDataURI() });
+    }
 
-        mergeData(record, assembledRecord, dao, thinned);
-        return assembledRecord;
+    private GridRecord createAssembledRecord(GridRecord record,
+            CompositeModel thinned) throws GribException {
+        GridRecord newRecord = new GridRecord();
+
+        GridCoverage coverage = GribSpatialCache.getInstance().getGridByName(
+                thinned.getGrid());
+
+        newRecord.setLocation(coverage);
+        newRecord.setDatasetId(thinned.getModelName());
+        newRecord.setLevel(record.getLevel());
+        newRecord.setParameter(record.getParameter());
+        newRecord.setEnsembleId(record.getEnsembleId());
+        newRecord.setDataTime(record.getDataTime());
+        newRecord.setDataURI(null);
+        newRecord.setPluginName(GridConstants.GRID);
+        newRecord.setInsertTime(Calendar.getInstance());
+        try {
+            newRecord.constructDataURI();
+        } catch (PluginException e) {
+            throw new GribException(
+                    "Error constructing DataURI for grib record", e);
+        }
+        return newRecord;
+    }
+
+    private void persistNewRecord(GridRecord record,
+            GridRecord assembledRecord, CompositeModel thinned, GridDao dao)
+            throws GribException {
+        GridCoverage coverage = assembledRecord.getLocation();
+        float[] data = new float[coverage.getNx() * coverage.getNy()];
+        Arrays.fill(data, Util.GRID_FILL_VALUE);
+        assembledRecord.setMessageData(data);
+        mergeData(record, assembledRecord, thinned);
+        try {
+            StorageStatus ss = dao.persistToHDF5(assembledRecord);
+            StorageException[] exceptions = ss.getExceptions();
+            // Only one record is stored, so logically there should only be one
+            // possible exception in the exception array
+            if (exceptions.length > 0) {
+                throw new GribException("Error storing new record to HDF5",
+                        exceptions[0]);
+            }
+            dao.persistToDatabase(assembledRecord);
+        } catch (PluginException e) {
+            throw new GribException("Error storing new record to HDF5", e);
+        }
+    }
+
+    private void updateExistingRecord(GridRecord record,
+            GridRecord assembledRecord, CompositeModel thinned, GridDao dao)
+            throws GribException {
+        try {
+            FloatDataRecord rec = (FloatDataRecord) dao.getHDF5Data(
+                    assembledRecord, -1)[0];
+            assembledRecord.setMessageData(rec.getFloatData());
+            mergeData(record, assembledRecord, thinned);
+            assembledRecord.setOverwriteAllowed(true);
+            dao.persistToHDF5(assembledRecord);
+        } catch (PluginException e) {
+            throw new GribException("Error storing assembled grid to HDF5", e);
+        }
+    }
 
     /**
@@ -236,25 +287,19 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
      *            The GridRecord containing the data to add
      * @param assembledRecord
      *            The composite GridRecord
-     * @param dao
-     *            An instance of the grib data access object
      * @param thinned
      *            The composite model definition
-     * @return The composite GridRecord
-     * @throws Exception
+     * @throws GribException
      */
-    private GridRecord mergeData(GridRecord record, GridRecord assembledRecord,
-            GridDao dao, CompositeModel thinned) throws Exception {
-
+    private void mergeData(GridRecord record, GridRecord assembledRecord,
+            CompositeModel thinned) throws GribException {
         String modelName = record.getDatasetId();
         GridCoverage coverage = record.getLocation();
 
-        long[] sizes = ((FloatDataRecord) assembledRecord.getMessageData())
-                .getSizes();
+        GridCoverage assembledCoverage = assembledRecord.getLocation();
 
         float[][] assembledData = Util.resizeDataTo2D(
-                ((FloatDataRecord) assembledRecord.getMessageData())
-                        .getFloatData(), (int) sizes[0], (int) sizes[1]);
+                (float[]) assembledRecord.getMessageData(),
+                assembledCoverage.getNx(), assembledCoverage.getNy());
 
         int nx = coverage.getNx();
         int ny = coverage.getNy();
@@ -278,79 +323,6 @@ public class EnsembleGridAssembler implements IDecoderPostProcessor {
         }
 
         assembledRecord.setMessageData(Util.resizeDataTo1D(assembledData,
-                (int) sizes[1], (int) sizes[0]));
-        assembledRecord.setOverwriteAllowed(true);
-        try {
-            dao.persistToHDF5(assembledRecord);
-        } catch (PluginException e) {
-            throw new GribException("Error storing assembled grid to HDF5", e);
-        }
-        EDEXUtil.getMessageProducer().sendAsync("notificationAggregation",
-                new String[] { assembledRecord.getDataURI() });
-        assembledRecord.setMessageData(null);
-        return assembledRecord;
-
-    }
-
-    /**
-     * Creates the composite grib record and stores it to the HDF5 repository
-     *
-     * @param record
-     *            The recieved GridRecord used to initialize the composite grid
-     *            with
-     * @param dao
-     *            An instance of the grib data access object
-     * @param thinned
-     *            The composite grid definition
-     * @return The composite record
-     * @throws GribException
-     */
-    private GridRecord createRecord(GridRecord record, GridDao dao,
-            CompositeModel thinned) throws GribException {
-        LatLonGridCoverage coverage = (LatLonGridCoverage) GribSpatialCache
-                .getInstance().getGridByName(thinned.getGrid());
-
-        float[] data = new float[coverage.getNx() * coverage.getNy()];
-        for (int i = 0; i < data.length; i++) {
-            data[i] = Util.GRID_FILL_VALUE;
-        }
-        GridRecord newRecord = new GridRecord();
-
-        newRecord.setLocation(coverage);
-        newRecord.setDatasetId(thinned.getModelName());
-        newRecord.setLevel(record.getLevel());
-        newRecord.setParameter(record.getParameter());
-        newRecord.setEnsembleId(record.getEnsembleId());
-        newRecord.setMessageData(data);
-        newRecord.setDataTime(record.getDataTime());
-        newRecord.setDataURI(null);
-        newRecord.setPluginName(GridConstants.GRID);
-        newRecord.setInsertTime(Calendar.getInstance());
-        newRecord.getInfo().setId(null);
-        try {
-            newRecord.constructDataURI();
-        } catch (PluginException e) {
-            throw new GribException(
-                    "Error constructing DataURI for grib record", e);
-        }
-        try {
-            StorageStatus ss = dao.persistToHDF5(newRecord);
-            StorageException[] exceptions = ss.getExceptions();
-            // Only one record is stored, so logically there should only be one
-            // possible exception in the exception array
-            if (exceptions.length > 0) {
-                throw new GribException("Error storing new record to HDF5",
-                        exceptions[0]);
-            }
-            dao.persistToDatabase(newRecord);
-            newRecord = (GridRecord) dao.getMetadata(newRecord.getDataURI());
-            FloatDataRecord rec = (FloatDataRecord) dao.getHDF5Data(newRecord,
-                    -1)[0];
-            newRecord.setMessageData(rec);
-            newRecord.setPluginName(GridConstants.GRID);
-        } catch (PluginException e) {
-            throw new GribException("Error storing new record to HDF5", e);
-        }
-        return newRecord;
+                assembledCoverage.getNy(), assembledCoverage.getNx()));
     }
 }

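Reviewer note: the insert-or-update decision now builds the assembled record first and queries only the dataURI column, instead of fetching and deserializing the entire GridRecord (and, before, re-reading HDF5) just to learn whether it exists. The pattern in isolation:

    DatabaseQuery query = new DatabaseQuery(GridRecord.class);
    query.addReturnedField("dataURI");                        // project one column
    query.addQueryParam("dataURI", assembledRecord.getDataURI());
    boolean exists = !dao.queryByCriteria(query).isEmpty();

Only in the update path does updateExistingRecord() pull the stored FloatDataRecord back out of HDF5 for the merge, and the notification is now sent once from processGrid().
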
@@ -1,7 +0,0 @@
-<project basedir="." default="deploy" name="com.raytheon.uf.common.datadelivery.request">
-    <available file="../build.edex" property="build.dir.location" value="../build.edex"/>
-    <available file="../../../../../build.edex" property="build.dir.location" value="../../../../../build.edex"/>
-
-    <import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />
-
-</project>

@@ -27,6 +27,7 @@ import com.raytheon.uf.common.dataquery.requests.RequestableMetadataMarshaller;
 * ------------ ---------- ----------- --------------------------
 * Aug 17, 2011            mschenke    Initial creation
 * Jan 31, 2013 1557       jsanchez    Added the XMLElement allowDuplicates.
+* Mar 26, 2013 1819       jsanchez    Added inclusionPercent, inclusionArea, inclusionAndOr.
 *
 * </pre>
 *
@@ -88,6 +89,15 @@ public class PointSourceConfiguration {
     @XmlElement(name = "sort")
     private String[] sortBy;
 
+    @XmlElement
+    private double inclusionPercent = 0.00;
+
+    @XmlElement
+    private String inclusionAndOr = "AND";
+
+    @XmlElement
+    private double inclusionArea = 0.00;
+
     public String getVariable() {
         return variable;
     }
@@ -185,4 +195,28 @@ public class PointSourceConfiguration {
         this.allowDuplicates = allowDuplicates;
     }
 
+    public double getInclusionPercent() {
+        return inclusionPercent;
+    }
+
+    public void setInclusionPercent(double inclusionPercent) {
+        this.inclusionPercent = inclusionPercent;
+    }
+
+    public String getInclusionAndOr() {
+        return inclusionAndOr;
+    }
+
+    public void setInclusionAndOr(String inclusionAndOr) {
+        this.inclusionAndOr = inclusionAndOr;
+    }
+
+    public double getInclusionArea() {
+        return inclusionArea;
+    }
+
+    public void setInclusionArea(double inclusionArea) {
+        this.inclusionArea = inclusionArea;
+    }
+
 }

@@ -365,4 +365,26 @@ public final class SerializationUtil {
             }
         }
     }
+
+    /**
+     * Transforms the thrift protocol byte data from an InputStream into an
+     * object using DynamicSerialize.
+     *
+     * @param is
+     *            the input stream to read from
+     * @return the Java object
+     * @throws SerializationException
+     *             if a serialization or class cast exception occurs
+     */
+    public static <T> T transformFromThrift(Class<T> clazz, InputStream is)
+            throws SerializationException {
+        DynamicSerializationManager dsm = DynamicSerializationManager
+                .getManager(SerializationType.Thrift);
+        try {
+            return clazz.cast(dsm.deserialize(is));
+        } catch (ClassCastException cce) {
+            throw new SerializationException(cce);
+        }
+    }
 }

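Reviewer note: the new stream overload lets callers deserialize straight from a (possibly gzip'd) file stream instead of loading the whole byte[] first — that is exactly how DatabaseArchiver uses it below. Typical call shape:

    InputStream is = new GZIPInputStream(new FileInputStream(file), 8192);
    try {
        List<?> pdos = SerializationUtil.transformFromThrift(List.class, is);
    } finally {
        is.close();
    }

Note that clazz.cast(...) only checks the outer type (List here), not the element type, which is why the call sites carry @SuppressWarnings.
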
@@ -92,6 +92,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 *                                     from the environment.
 * Feb 12, 2013 #1608      randerso    Changed to call deleteDatasets
 * Feb 26, 2013 1638       mschenke    Moved OGC specific functions to OGC project
+* Mar 27, 2013 1821       bsteffen    Remove extra store in persistToHDF5 for
+*                                     replace only operations.
 *
 * </pre>
 *
@@ -247,7 +249,7 @@ public abstract class PluginDao extends CoreDao {
         // directory.mkdirs();
         // }
 
-        IDataStore dataStore = DataStoreFactory.getDataStore(file);
+        IDataStore dataStore = null;
         IDataStore replaceDataStore = null;
 
         for (IPersistable persistable : persistables) {
@@ -261,6 +263,9 @@ public abstract class PluginDao extends CoreDao {
 
                     populateDataStore(replaceDataStore, persistable);
                 } else {
+                    if (dataStore == null) {
+                        dataStore = DataStoreFactory.getDataStore(file);
+                    }
                     populateDataStore(dataStore, persistable);
                 }
             } catch (Exception e) {
@@ -268,6 +273,7 @@ public abstract class PluginDao extends CoreDao {
             }
         }
 
+        if (dataStore != null) {
             try {
                 StorageStatus s = dataStore.store();
                 // add exceptions to a list for aggregation
@@ -275,7 +281,7 @@ public abstract class PluginDao extends CoreDao {
             } catch (StorageException e) {
                 logger.error("Error persisting to HDF5", e);
             }
-
+        }
         if (replaceDataStore != null) {
             try {
                 StorageStatus s = replaceDataStore.store(StoreOp.REPLACE);

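Reviewer note: dataStore is now created lazily, so a batch consisting solely of replace-only persistables never opens or stores to the regular data store — that unconditional store() was the "extra store" the ticket names. Condensed, the control flow is:

    IDataStore dataStore = null;               // was: created unconditionally
    // per persistable, non-replace path only:
    if (dataStore == null) {
        dataStore = DataStoreFactory.getDataStore(file);
    }
    // after the loop:
    if (dataStore != null) {
        dataStore.store();                     // skipped for replace-only batches
    }
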
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+    <name>com.raytheon.uf.edex.datadelivery.client.feature</name>
+    <comment></comment>
+    <projects>
+    </projects>
+    <buildSpec>
+        <buildCommand>
+            <name>org.eclipse.pde.FeatureBuilder</name>
+            <arguments>
+            </arguments>
+        </buildCommand>
+    </buildSpec>
+    <natures>
+        <nature>org.eclipse.pde.FeatureNature</nature>
+    </natures>
+</projectDescription>

@@ -0,0 +1 @@
+bin.includes = feature.xml

@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<feature
+      id="com.raytheon.uf.edex.datadelivery.client.feature"
+      label="EDEX DataDelivery Client Feature"
+      version="1.0.0.qualifier"
+      provider-name="RAYTHEON">
+
+   <description url="http://www.example.com/description">
+      [Enter Feature Description here.]
+   </description>
+
+   <copyright url="http://www.example.com/copyright">
+      [Enter Copyright Description here.]
+   </copyright>
+
+   <license url="http://www.example.com/license">
+      [Enter License Description here.]
+   </license>
+
+   <requires>
+      <import feature="com.raytheon.uf.edex.common.core.feature" version="0.0.0"/>
+      <import feature="com.raytheon.uf.edex.core.feature" version="0.0.0"/>
+      <import feature="com.raytheon.uf.edex.registry.client.feature" version="0.0.0"/>
+   </requires>
+
+   <plugin
+         id="com.raytheon.uf.common.datadelivery.request"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="com.raytheon.uf.edex.datadelivery.request"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="com.raytheon.uf.common.datadelivery.registry"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+   <plugin
+         id="com.raytheon.uf.common.datadelivery.event"
+         download-size="0"
+         install-size="0"
+         version="0.0.0"
+         unpack="false"/>
+
+</feature>

@@ -22,15 +22,9 @@
       <import feature="com.raytheon.uf.edex.dataplugins.feature" version="1.0.0.qualifier"/>
       <import feature="com.raytheon.uf.edex.registry.feature" version="0.0.0"/>
       <import feature="com.raytheon.uf.edex.core.feature" version="0.0.0"/>
+      <import feature="com.raytheon.uf.edex.datadelivery.client.feature" version="0.0.0"/>
    </requires>
 
-   <plugin
-         id="com.raytheon.uf.common.datadelivery.registry"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
    <plugin
          id="org.codehaus.jackson"
          download-size="0"
@@ -78,13 +72,6 @@
          version="0.0.0"
          unpack="false"/>
 
-   <plugin
-         id="com.raytheon.uf.common.datadelivery.request"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
    <plugin
          id="com.raytheon.uf.common.datadelivery.service"
          download-size="0"
@@ -99,13 +86,6 @@
          version="0.0.0"
          unpack="false"/>
 
-   <plugin
-         id="com.raytheon.uf.common.datadelivery.event"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
    <plugin
          id="net.dods"
          download-size="0"

@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <projectDescription>
-    <name>com.raytheon.uf.edex.datadelivery.registry</name>
+    <name>com.raytheon.uf.edex.datadelivery.request</name>
     <comment></comment>
     <projects>
     </projects>

@@ -0,0 +1,8 @@
+#Thu Apr 12 13:31:11 CDT 2012
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6

@@ -0,0 +1,7 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Edex DataDelivery Request
+Bundle-SymbolicName: com.raytheon.uf.edex.datadelivery.request
+Bundle-Version: 1.0.0.qualifier
+Export-Package: 
+ com.raytheon.uf.edex.datadelivery.request

@@ -0,0 +1,5 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .,\
+               res/

@@ -0,0 +1,41 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name:        Raytheon Company
+ * Contractor Address:     6825 Pine Street, Suite 340
+ *                         Mail Stop B8
+ *                         Omaha, NE 68106
+ *                         402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+package com.raytheon.uf.edex.datadelivery.request;
+
+/**
+ * Marker class because this plugin is only required for the Spring files at
+ * this time, but a class is required for the including feature to not have a
+ * warning.
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Mar 26, 2013 1827       djohnson    Initial creation
+ *
+ * </pre>
+ *
+ * @author djohnson
+ * @version 1.0
+ */
+public class MarkerClass {
+}

@@ -71,6 +71,7 @@ import com.raytheon.uf.edex.plugin.grid.dao.GridDao;
 * Dec  3, 2010            rjpeter     Initial creation
 * Feb 15, 2013 1638       mschenke    Moved DataURINotificationMessage to uf.common.dataplugin
 * Mar 07, 2013 1587       bsteffen    rewrite static data generation.
+* Mar 14, 2013 1587       bsteffen    Fix persisting to datastore.
 *
 * </pre>
 *
@@ -268,7 +269,7 @@ public class StaticDataGenerator {
             for (GridRecord staticRecord : datastoreRecords) {
                 populateMessageData(staticRecord);
             }
-            dao.persistToHDF5(databaseRecords.toArray(new PluginDataObject[0]));
+            dao.persistToHDF5(datastoreRecords.toArray(new PluginDataObject[0]));
         }
         if (!databaseRecords.isEmpty()) {
             dao.persistToDatabase(databaseRecords
@@ -424,7 +425,8 @@ public class StaticDataGenerator {
                     datasets = Collections.emptyList();
                 }
             }
-            if (datasets.contains(missing)) {
+            if (!datasets.contains(staticRecord.getParameter()
+                    .getAbbreviation())) {
                 missing.add(staticRecord);
             }
         }

@@ -19,10 +19,13 @@
 **/
 package com.raytheon.uf.edex.maintenance.archive;
 
+import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -31,11 +34,11 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
+import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
 import com.raytheon.uf.common.dataplugin.PluginDataObject;
@@ -127,6 +130,7 @@ public class DatabaseArchiver implements IPluginArchiver {
         }
     }
 
+    @SuppressWarnings("rawtypes")
     public boolean archivePluginData(String pluginName, String archivePath,
             DataArchiveConfig conf) {
         // set archive time
@@ -288,6 +292,7 @@ public class DatabaseArchiver implements IPluginArchiver {
         return true;
     }
 
+    @SuppressWarnings("rawtypes")
     protected int savePdoMap(String pluginName, String archivePath,
             Map<String, List<PersistableDataObject>> pdoMap,
             boolean compressMetadata) throws SerializationException,
@@ -312,34 +317,62 @@ public class DatabaseArchiver implements IPluginArchiver {
 
             if (file.exists()) {
                 // read previous list in from disk (in gz format)
-                byte[] data = FileUtil.file2bytes(file, compressMetadata);
+                InputStream is = null;
 
-                // debug transform back for object inspection
                 try {
+                    // created gzip'd stream
+                    is = (compressMetadata ? new GZIPInputStream(
+                            new FileInputStream(file), 8192)
+                            : new BufferedInputStream(
+                                    new FileInputStream(file), 8192));
 
                     // transform back for list append
                     @SuppressWarnings("unchecked")
-                    List<PersistableDataObject> prev = (List<PersistableDataObject>) SerializationUtil
-                            .transformFromThrift(data);
+                    List<PersistableDataObject<Object>> prev = SerializationUtil
+                            .transformFromThrift(List.class, is);
 
-                    statusHandler.debug(pluginName + ": Read in " + prev.size()
-                            + " records from disk");
+                    statusHandler.info(pluginName + ": Read in " + prev.size()
+                            + " records from file " + file.getAbsolutePath());
 
-                    // merge records by data URI
-                    int mapInitialSize = (int) (1.3f * (prev.size() + pdosToSerialize
-                            .size()));
-                    Map<Object, PersistableDataObject> dataMap = new LinkedHashMap<Object, PersistableDataObject>(
-                            mapInitialSize);
-                    for (PersistableDataObject pdo : prev) {
-                        dataMap.put(pdo.getIdentifier(), pdo);
-                    }
+                    List<PersistableDataObject> newList = new ArrayList<PersistableDataObject>(
+                            prev.size() + pdosToSerialize.size());
 
+                    // get set of new identifiers
+                    Set<Object> identifierSet = new HashSet<Object>(
+                            pdosToSerialize.size(), 1);
                     for (PersistableDataObject pdo : pdosToSerialize) {
-                        dataMap.put(pdo.getIdentifier(), pdo);
+                        identifierSet.add(pdo.getIdentifier());
                     }
 
-                    pdosToSerialize = new ArrayList<PersistableDataObject>(
-                            dataMap.values());
+                    // merge records by Identifier, to remove old duplicate
+                    for (PersistableDataObject pdo : prev) {
+                        if (!identifierSet.contains(pdo.getIdentifier())) {
+                            newList.add(pdo);
+                        }
+                    }
 
-                    statusHandler.debug(pluginName + ": Serializing "
-                            + pdosToSerialize.size() + " records");
+                    // release prev
+                    prev = null;
+
+                    newList.addAll(pdosToSerialize);
+                    pdosToSerialize = newList;
+                } finally {
+                    if (is != null) {
+                        try {
+                            is.close();
+                        } catch (IOException e) {
+                            statusHandler.error(pluginName
+                                    + ": Error occurred closing input stream",
+                                    e);
+                        }
+                    }
+                }
             }
 
+            statusHandler.info(pluginName + ": Serializing "
+                    + pdosToSerialize.size() + " records to file "
+                    + file.getAbsolutePath());
+
             OutputStream os = null;
 

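Reviewer note: the merge now keeps insertion order with a plain list and drops superseded records up front, instead of funneling everything through one large LinkedHashMap. Reduced to its core (the incoming/previous/merged names are hypothetical):

    // Newest copy of each identifier wins; order of surviving old records is kept.
    Set<Object> newIds = new HashSet<Object>();
    for (PersistableDataObject pdo : incoming) {
        newIds.add(pdo.getIdentifier());
    }
    List<PersistableDataObject> merged = new ArrayList<PersistableDataObject>();
    for (PersistableDataObject pdo : previous) {
        if (!newIds.contains(pdo.getIdentifier())) {
            merged.add(pdo);               // keep old record only if not superseded
        }
    }
    merged.addAll(incoming);
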
@@ -8,8 +8,8 @@
 
    <bean id="ffmpThreadPool"
       class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
-      <property name="corePoolSize" value="2" />
-      <property name="maxPoolSize" value="4" />
+      <property name="corePoolSize" value="1" />
+      <property name="maxPoolSize" value="1" />
       <property name="keepAliveSeconds" value="60000" />
    </bean>
 

@@ -734,11 +734,21 @@ public class FFMPGenerator extends CompositeProductGenerator implements
                                 }
                             }
                         }
 
+                        if (!source.getSourceType().equals(
+                                SOURCE_TYPE.GUIDANCE.getSourceType())) {
+                            String sourceSiteDataKey = getSourceSiteDataKey(source,
+                                    dataKey, ffmpRec);
+                            ffmpData.remove(sourceSiteDataKey);
+                            statusHandler.info("Removing from memory: " + sourceSiteDataKey);
+
+                        }
                     } // ffmp.isFFTI
                 } // record not null
             } // end sitekey for loop
         } // end datakey loop
     } // end process
 
     }
 
     /**
@@ -1820,6 +1830,7 @@ public class FFMPGenerator extends CompositeProductGenerator implements
         }
 
         ffmpData.remove(siteDataKey);
+        statusHandler.info("Removing from memory: " + siteDataKey);
         accumulator.setReset(false);
         writeFFTIData(siteDataKey, accumulator);
     }

@@ -31,4 +31,5 @@ Require-Bundle: com.raytheon.uf.common.parameter;bundle-version="1.0.0",
 org.springframework;bundle-version="2.5.6",
 javax.measure;bundle-version="1.0.0",
 com.raytheon.uf.common.status;bundle-version="1.12.1174",
-org.apache.commons.logging;bundle-version="1.1.1"
+org.apache.commons.logging;bundle-version="1.1.1",
+com.raytheon.uf.common.comm;bundle-version="1.12.1174"

@@ -50,6 +50,7 @@ import com.raytheon.uf.common.gridcoverage.GridCoverage;
 import com.raytheon.uf.common.gridcoverage.lookup.GridCoverageLookup;
 import com.raytheon.uf.common.parameter.Parameter;
 import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
+import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.edex.core.EDEXUtil;
 import com.raytheon.uf.edex.core.EdexException;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
@@ -66,6 +67,8 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * 4/7/09       1994       bphillip    Initial Creation
+* Mar 14, 2013 1587       bsteffen    Fix static data persisting to datastore.
+* Mar 27, 2013 1821       bsteffen    Speed up GridInfoCache.
 *
 * </pre>
 *
@@ -99,10 +102,13 @@ public class GridDao extends PluginDao {
             long[] sizes = new long[] { location.getNx(), location.getNy() };
             String abbrev = gridRec.getParameter().getAbbreviation();
             String group = gridRec.getDataURI();
+            String datasetName = "Data";
             if (GridPathProvider.STATIC_PARAMETERS.contains(abbrev)) {
                 group = "/" + location.getId();
+                datasetName = abbrev;
             }
-            AbstractStorageRecord storageRecord = new FloatDataRecord("Data",
+            AbstractStorageRecord storageRecord = new FloatDataRecord(
+                    datasetName,
                     group, (float[]) messageData, 2, sizes);
 
             storageRecord.setCorrelationObject(gridRec);
@@ -231,8 +237,15 @@ public class GridDao extends PluginDao {
         if (!validateCoverage(record)) {
             return false;
         }
-        record.setInfo(GridInfoCache.getInstance()
-                .getGridInfo(record.getInfo()));
+        try {
+            record.setInfo(GridInfoCache.getInstance().getGridInfo(
+                    record.getInfo()));
+        } catch (DataAccessLayerException e) {
+            statusHandler.handle(Priority.PROBLEM,
+                    "Cannot load GridInfoRecord from DB for: "
+                            + record.getDataURI(), e);
+            return false;
+        }
         return true;
 
     }

@ -19,21 +19,26 @@
 **/
package com.raytheon.uf.edex.plugin.grid.dao;

import java.lang.ref.SoftReference;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;

import com.raytheon.uf.common.comm.CommunicationException;
import com.raytheon.uf.common.dataplugin.grid.GridInfoConstants;
import com.raytheon.uf.common.dataplugin.grid.GridInfoRecord;
import com.raytheon.uf.common.dataplugin.persist.PersistableDataObject;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.gridcoverage.lookup.GridCoverageLookup;
import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.database.query.DatabaseQuery;

/**
 * Cache the gridInfo objects from the database to avoid repeated lookups.

@ -45,6 +50,7 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * May 21, 2012            bsteffen    Initial creation
 * Mar 27, 2013 1821       bsteffen    Speed up GridInfoCache.
 *
 * </pre>
 *

@ -54,24 +60,111 @@ import com.raytheon.uf.edex.database.dao.DaoConfig;

public class GridInfoCache {

    // 6 hours
    private static final int ROTATION_INTERVAL = 6 * 60 * 60 * 1000;

    private static GridInfoCache instance = new GridInfoCache();

    public static GridInfoCache getInstance() {
        return instance;
    }

    private final CoreDao dao;
    private static final CoreDao dao = new CoreDao(
            DaoConfig.forClass(GridInfoRecord.class));

    // A weak hashmap of soft references is used as a SoftSet.
    private Map<GridInfoRecord, SoftReference<GridInfoRecord>> cache = null;
    private Map<String, DatasetCache> cache = null;

    private long lastRotationTime;

    private GridInfoCache() {
        cache = Collections
                .synchronizedMap(new WeakHashMap<GridInfoRecord, SoftReference<GridInfoRecord>>());
        dao = new CoreDao(DaoConfig.forClass(GridInfoRecord.class));
                .synchronizedMap(new HashMap<String, DatasetCache>());
        lastRotationTime = System.currentTimeMillis();
    }

    public GridInfoRecord getGridInfo(GridInfoRecord record) {
    public GridInfoRecord getGridInfo(GridInfoRecord record)
            throws DataAccessLayerException {
        DatasetCache dCache = cache.get(record.getDatasetId());
        if (dCache == null) {
            dCache = createDatasetCache(record.getDatasetId());
        }
        GridInfoRecord result = dCache.getGridInfo(record);
        if (System.currentTimeMillis() > lastRotationTime + ROTATION_INTERVAL) {
            rotateCache();
        }
        return result;
    }

    /**
     * Remove the info records with the specified ids from the cache.
     *
     * @param infoKeys
     */
    public void purgeCache(Collection<Integer> infoKeys) {
        synchronized (cache) {
            Iterator<DatasetCache> it = cache.values().iterator();
            while (it.hasNext()) {
                DatasetCache next = it.next();
                next.purgeCache(infoKeys);
                if (next.isEmpty()) {
                    it.remove();
                }
            }
        }
    }

    private DatasetCache createDatasetCache(String datasetId)
            throws DataAccessLayerException {
        synchronized (cache) {
            DatasetCache dCache = cache.get(datasetId);
            if (dCache == null) {
                dCache = new DatasetCache(datasetId);
                cache.put(datasetId, dCache);
            }
            return dCache;
        }
    }

    private void rotateCache() {
        synchronized (cache) {
            if (System.currentTimeMillis() > lastRotationTime
                    + ROTATION_INTERVAL) {
                Iterator<DatasetCache> it = cache.values().iterator();
                while (it.hasNext()) {
                    DatasetCache next = it.next();
                    next.rotateCache();
                    if (next.isEmpty()) {
                        it.remove();
                    }
                }
            }
            lastRotationTime = System.currentTimeMillis();
        }
    }

    /**
     *
     * A second chance cache for all GridInfoRecords for a single datasetid.
     *
     */
    private static class DatasetCache {

        private Map<GridInfoRecord, GridInfoRecord> primaryCache;

        private Map<GridInfoRecord, GridInfoRecord> secondChanceCache;

        public DatasetCache(String datasetid) throws DataAccessLayerException {
            primaryCache = Collections
                    .synchronizedMap(new HashMap<GridInfoRecord, GridInfoRecord>());
            secondChanceCache = Collections
                    .synchronizedMap(new HashMap<GridInfoRecord, GridInfoRecord>());
            DatabaseQuery query = new DatabaseQuery(GridInfoRecord.class);
            query.addQueryParam(GridInfoConstants.DATASET_ID, datasetid);
            queryAndAdd(query);
        }

        public GridInfoRecord getGridInfo(GridInfoRecord record)
                throws DataAccessLayerException {
            GridInfoRecord result = checkLocalCache(record);
            if (result == null) {
                result = query(record);

@ -82,11 +175,28 @@ public class GridInfoCache {
            return result;
        }

        public void purgeCache(Collection<Integer> infoKeys) {
            purgeCache(infoKeys, primaryCache);
            purgeCache(infoKeys, secondChanceCache);
        }

        public void rotateCache() {
            secondChanceCache = primaryCache;
            primaryCache = Collections
                    .synchronizedMap(new HashMap<GridInfoRecord, GridInfoRecord>());
        }

        public boolean isEmpty() {
            return primaryCache.isEmpty() && secondChanceCache.isEmpty();
        }

        private GridInfoRecord checkLocalCache(GridInfoRecord record) {
            GridInfoRecord result = null;
            SoftReference<GridInfoRecord> ref = cache.get(record);
            if (ref != null) {
                result = ref.get();
            GridInfoRecord result = primaryCache.get(record);
            if (result == null) {
                result = secondChanceCache.get(record);
                if (result != null) {
                    addToCache(result);
                }
            }
            return result;
        }
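
DatasetCache above is a two-generation, "second chance" cache: reads check primaryCache, fall back to secondChanceCache, and promote hits back into the primary map; rotateCache() then discards whatever went a full interval without being touched. A self-contained sketch of the pattern with generic keys and values (the real cache keys on GridInfoRecord itself):

import java.util.HashMap;
import java.util.Map;

/** Sketch of the two-generation "second chance" cache used by DatasetCache. */
public class SecondChanceCache<K, V> {

    private Map<K, V> primary = new HashMap<>();
    private Map<K, V> secondChance = new HashMap<>();

    public synchronized V get(K key) {
        V value = primary.get(key);
        if (value == null) {
            value = secondChance.get(key);
            if (value != null) {
                // Promote: anything used recently survives the next rotation.
                primary.put(key, value);
            }
        }
        return value;
    }

    public synchronized void put(K key, V value) {
        primary.put(key, value);
    }

    /** Entries untouched for two rotations are dropped here. */
    public synchronized void rotate() {
        secondChance = primary;
        primary = new HashMap<>();
    }

    public static void main(String[] args) {
        SecondChanceCache<String, Integer> cache = new SecondChanceCache<>();
        cache.put("GFS", 1);
        cache.rotate();                       // "GFS" now only in the second-chance map
        System.out.println(cache.get("GFS")); // hit; promoted back to primary
        cache.rotate();
        cache.rotate();                       // never touched since, so it is gone
        System.out.println(cache.get("GFS")); // null
    }
}
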
@ -97,44 +207,71 @@ public class GridInfoCache {
         *
         * @param record
         * @return
         * @throws DataAccessLayerException
         */
        private GridInfoRecord query(GridInfoRecord record) {
            // It is possible that this query will return multiple
            // results, for example if the record we are looking for has
            // a null secondaryId but some db entries have a secondaryId
            // set then this query will return all matching models
            // ignoring secondaryId. In general these cases should be
            // rare and small. So we handle it by caching everything
            // returned and then double checking the cache.
            List<PersistableDataObject<Integer>> dbList = dao
                    .queryByExample(record);
            if (dbList != null && !dbList.isEmpty()) {
                for (PersistableDataObject<Integer> pdo : dbList) {
                    GridInfoRecord gir = (GridInfoRecord) pdo;
                    // if we don't remove then when an entry exists already the key
                    // and value become references to different objects which is not
                    // what we want.
                    cache.remove(gir);
                    cache.put(gir, new SoftReference<GridInfoRecord>(gir));
                }
            }
        private GridInfoRecord query(GridInfoRecord record)
                throws DataAccessLayerException {
            DatabaseQuery query = new DatabaseQuery(GridInfoRecord.class);
            query.addQueryParam(GridInfoConstants.DATASET_ID,
                    record.getDatasetId());
            query.addQueryParam(GridInfoConstants.PARAMETER_ABBREVIATION,
                    record.getParameter().getAbbreviation());
            query.addQueryParam(GridInfoConstants.LEVEL_ID, record.getLevel()
                    .getId());
            query.addQueryParam(GridInfoConstants.LOCATION_ID, record
                    .getLocation().getId());
            queryAndAdd(query);
            return checkLocalCache(record);
        }

        private void queryAndAdd(DatabaseQuery query)
                throws DataAccessLayerException {
            List<?> dbList = dao.queryByCriteria(query);
            if (dbList != null && !dbList.isEmpty()) {
                for (Object pdo : dbList) {
                    addToCache((GridInfoRecord) pdo);
                }
            }
        }

        /**
         * Insert the record into the database if there is no current record that
         * equals this one. This method uses a fairly broad cluster lock so only one
         * thread at a time across all clustered edices can insert at a time. This
         * method should not be used much on running systems since gridded models
         * maintain fairly consistent info records over time.
         * Replace several fields with cached versions to save memory and then
         * add to the primaryCache.
         *
         * @param record
         */
        private void addToCache(GridInfoRecord record) {
            record.setLocation(GridCoverageLookup.getInstance().getCoverage(
                    record.getLocation().getId()));
            record.setParameter(ParameterLookup.getInstance().getParameter(
                    record.getParameter().getAbbreviation()));
            try {
                record.setLevel(LevelFactory.getInstance().getLevel(
                        record.getLevel().getId()));
            } catch (CommunicationException e) {
                // This should never hit, and if it does, ignore it; the only side
                // effect is that the level in the record will not be the same as
                // the other records on the same level.
            }
            primaryCache.put(record, record);
        }

        /**
         * Insert the record into the database if there is no current record
         * that equals this one. This method uses a fairly broad cluster lock so
         * only one thread at a time across all clustered edices can insert at a
         * time. This method should not be used much on running systems since
         * gridded models maintain fairly consistent info records over time.
         *
         * @param record
         * @return
         */
        private GridInfoRecord insert(GridInfoRecord record) {
        private GridInfoRecord insert(GridInfoRecord record)
                throws DataAccessLayerException {
            ClusterTask ct = null;
            do {
                ct = ClusterLockUtils.lock("grid_info", "newEntry", 30000, true);
                ct = ClusterLockUtils.lock("grid_info_create",
                        record.getDatasetId(), 30000, true);
            } while (!LockState.SUCCESSFUL.equals(ct.getLockState()));
            try {
                GridInfoRecord existing = query(record);
@ -145,16 +282,12 @@ public class GridInfoCache {
            } finally {
                ClusterLockUtils.unlock(ct, false);
            }
            cache.put(record, new SoftReference<GridInfoRecord>(record));
            addToCache(record);
            return record;
        }

        /**
         * Remove the info records with the specified ids from the cache.
         *
         * @param infoKeys
         */
        public void purgeCache(Collection<Integer> infoKeys) {
        private void purgeCache(Collection<Integer> infoKeys,
                Map<GridInfoRecord, GridInfoRecord> cache) {
            synchronized (cache) {
                Iterator<GridInfoRecord> it = cache.keySet().iterator();
                while (it.hasNext()) {

@ -167,3 +300,5 @@ public class GridInfoCache {
        }

    }

}
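
Note that nothing in GridInfoCache runs on a timer: getGridInfo() checks the clock after each lookup and calls rotateCache() only once ROTATION_INTERVAL has elapsed, so expiry piggybacks on the read path. A compact sketch of that lazy-rotation idea; the injectable clock is an assumption added for testability, not part of the original:

import java.util.function.LongSupplier;

/** Sketch: lazy, read-triggered rotation with no background thread. */
public class LazyRotator {

    private final LongSupplier clock; // injectable for tests
    private final long intervalMs;
    private long lastRotation;

    public LazyRotator(LongSupplier clock, long intervalMs) {
        this.clock = clock;
        this.intervalMs = intervalMs;
        this.lastRotation = clock.getAsLong();
    }

    /** Call on every cache read; returns true when a rotation just ran. */
    public synchronized boolean maybeRotate(Runnable rotation) {
        long now = clock.getAsLong();
        if (now > lastRotation + intervalMs) {
            rotation.run();
            lastRotation = now;
            return true;
        }
        return false;
    }

    public static void main(String[] args) throws InterruptedException {
        LazyRotator r = new LazyRotator(System::currentTimeMillis, 50);
        System.out.println(r.maybeRotate(() -> {})); // false: interval not elapsed
        Thread.sleep(60);
        System.out.println(r.maybeRotate(() -> System.out.println("rotating"))); // true
    }
}
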
@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>com.raytheon.uf.edex.registry.client.feature</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.pde.FeatureBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.pde.FeatureNature</nature>
    </natures>
</projectDescription>

@ -0,0 +1 @@
bin.includes = feature.xml
@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<feature
      id="com.raytheon.uf.edex.registry.client.feature"
      label="EDEX Registry Client Feature"
      version="1.0.0.qualifier"
      provider-name="RAYTHEON">

   <description url="http://www.example.com/description">
      [Enter Feature Description here.]
   </description>

   <copyright url="http://www.example.com/copyright">
      [Enter Copyright Description here.]
   </copyright>

   <license url="http://www.example.com/license">
      [Enter License Description here.]
   </license>

   <requires>
      <import feature="com.raytheon.uf.edex.common.core.feature" version="1.0.0.qualifier"/>
   </requires>

   <plugin
         id="com.raytheon.uf.common.registry.ebxml"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.common.registry.schemas.ebxml"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.common.registry.event"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

</feature>
@ -17,26 +17,7 @@
      [Enter License Description here.]
   </license>

   <plugin
         id="com.raytheon.uf.common.registry.ebxml"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.common.registry.event"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>

   <plugin
         id="com.raytheon.uf.common.registry.schemas.ebxml"
         download-size="0"
         install-size="0"
         version="0.0.0"
         unpack="false"/>
   <import feature="com.raytheon.uf.edex.registry.client.feature" version="1.0.0.qualifier"/>

   <plugin
         id="com.raytheon.uf.common.registry.schemas.iso19115"
@ -37,6 +37,7 @@ import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.localization.exception.LocalizationException;
import com.raytheon.uf.common.serialization.JAXBManager;
import com.raytheon.uf.common.stats.xml.StatisticsAggregate;
import com.raytheon.uf.common.stats.xml.StatisticsConfig;

@ -44,6 +45,7 @@ import com.raytheon.uf.common.stats.xml.StatisticsEvent;
import com.raytheon.uf.common.stats.xml.StatisticsGroup;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.common.util.ReflectionException;
import com.raytheon.uf.common.util.ReflectionUtil;

@ -59,6 +61,8 @@ import com.raytheon.uf.common.util.ReflectionUtil;
 * Nov 07, 2012 1317       mpduff      Update config files.
 * Nov 29, 2012 1350       rjpeter     Updated to static, fixed localization, increased validation.
 * Jan 15, 2013 1487       djohnson    Make validate() static and public, so it can be run independently.
 * Mar 27, 2013 1834       mpduff      Filter for xml files on localization file read; wrap unmarshal
 *                                     and log an error if one occurs.
 * </pre>
 *
 * @author jsanchez

@ -123,11 +127,12 @@ public class ConfigLoader {
        LocalizationContext[] searchContext = pm
                .getLocalSearchHierarchy(LocalizationType.COMMON_STATIC);
        Map<String, LocalizationFile> statConfs = new HashMap<String, LocalizationFile>();
        String[] extensions = new String[] { ".xml" };

        // grab all stats from contexts, allowing overwrite by name
        for (LocalizationContext ctx : searchContext) {
            LocalizationFile[] localizationFiles = pm.listFiles(ctx, STATS_DIR,
                    null, false, true);
                    extensions, false, true);
            for (LocalizationFile lf : localizationFiles) {
                String name = lf.getName();
                if (!statConfs.containsKey(name)) {

@ -142,6 +147,7 @@ public class ConfigLoader {
        Map<String, StatisticsEvent> myEvents = new HashMap<String, StatisticsEvent>();

        for (LocalizationFile lf : statConfs.values()) {
            try {
                StatisticsConfig config = lf.jaxbUnmarshal(
                        StatisticsConfig.class, jaxbManager);
                if (config != null) {

@ -150,6 +156,10 @@ public class ConfigLoader {
                    myConfigurations.add(config);
                }
            }
            } catch (LocalizationException e) {
                statusHandler.handle(Priority.PROBLEM,
                        "Unable to open file [" + lf.getName() + "]", e);
            }
        }

        configurations = myConfigurations;
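
The ConfigLoader change above does two things: it filters localization listings to .xml up front, and it wraps each unmarshal so one malformed file is logged and skipped instead of aborting the whole load. A minimal sketch of that per-file error isolation, using plain files and a caller-supplied parser as stand-ins for the LocalizationFile and JAXBManager plumbing:

import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

/** Sketch: per-file error isolation when loading a directory of XML configs. */
public class TolerantConfigLoader {

    interface Parser<T> {
        T parse(Path file) throws Exception; // stand-in for lf.jaxbUnmarshal(...)
    }

    static <T> List<T> loadAll(Path dir, Parser<T> parser) throws Exception {
        List<T> configs = new ArrayList<>();
        // Filter to *.xml up front, as the ConfigLoader change does with extensions.
        try (DirectoryStream<Path> files = Files.newDirectoryStream(dir, "*.xml")) {
            for (Path file : files) {
                try {
                    T config = parser.parse(file);
                    if (config != null) {
                        configs.add(config);
                    }
                } catch (Exception e) {
                    // One malformed file is logged and skipped; the rest still load.
                    System.err.println("Unable to open file [" + file + "]: " + e);
                }
            }
        }
        return configs;
    }
}
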
@ -768,7 +768,7 @@ static PyObject* pyjobject_numpy(PyJobject_Object *obj) {

    getMethod = (*env)->GetMethodID(env, numpyable, "getNumPy", "()[Ljava/lang/Object;");
    objarray = (jobjectArray) (*env)->CallObjectMethod(env, obj->object, getMethod);
    if(objarray == NULL)
    if(process_java_exception(env) || !objarray)
    {
        Py_INCREF(Py_None);
        return Py_None;
@ -31,6 +31,7 @@
# combining similar patterns.
#20121009    1252       jkorman     Corrections and additions from Dale Morris.
#20121211    1411       rferrel     More corrections and additions from Dale Morris.
#20130326    1828       rferrel     Fix patterns for intlsigmets and text pattern not placing files in proper directory.
#***************************************************************
# AWIPS 1 PATTERN GRAPHIC ^[PQ].* /redbook/Raw
# PGNA00 KWNS 010001 !redbook 1_1/NMCGPHMCD/MCDSUM/PXSF001CN/20110201 0001

@ -376,7 +377,7 @@ IDS|DDPLUS ^(M[A-Z]{3}[0-9]{2}) ([KPTMC].{3}) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/text/(\3:yyyy)(\3:mm)\3/\4/\1_\2_\3\4\5_(seq).%Y%m%d%H

IDS|DDPLUS ^(T[BCHPRTWXY][A-Z]{2}[0-9]{2}) ([A-Z]{4}) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/text/\3/\4/\1_\2_\3\4\5_(seq).%Y%m%d%H
	FILE	-overwrite -log -close -edex	/data_store/text/(\3:yyyy)(\3:mm)\3/\4/\1_\2_\3\4\5_(seq).%Y%m%d%H

# summaries
#IDS|DDPLUS ^(A.{5}) (.{4}) (..)(..)(..)

@ -564,9 +565,9 @@ IDS|DDPLUS ^(WAUS4[1-6]) (.{4}) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/airmet/(\3:yyyy)(\3:mm)\3/\4/\1_\2_\3\4\5_(seq).%Y%m%d%H
# intlsigmets
IDS|DDPLUS ^(W[CSV]PA((0[1-9])|(1[1-3]))) (PHFO) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/intlsigmet/(\6:yyyy)(\6:mm)\6/\7/\4_\1_\5_\6\7\8_(seq).%Y%m%d%H
	FILE	-overwrite -log -close -edex	/data_store/intlsigmet/(\6:yyyy)(\6:mm)\6/\7/\1_\5_\6\7\8_(seq).%Y%m%d%H
IDS|DDPLUS ^(W[CSV]NT((0[1-9])|(1[1-3]))) (KKCI) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/intlsigmet/(\6:yyyy)(\6:mm)\6/\7/\4_\1_\5_\6\7\8_(seq).%Y%m%d%H
	FILE	-overwrite -log -close -edex	/data_store/intlsigmet/(\6:yyyy)(\6:mm)\6/\7/\1_\5_\6\7\8_(seq).%Y%m%d%H
IDS|DDPLUS ^(WAAK4[789]) (PAWU) (..)(..)(..)
	FILE	-overwrite -log -close -edex	/data_store/intlsigmet/(\3:yyyy)(\3:mm)\3/\4/\1_\2_\3\4\5_(seq).%Y%m%d%H
IDS|DDPLUS ^(W[CSV]PN0[1-6]) (KKCI) (..)(..)(..)
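
For readers unfamiliar with pqact entries: the regex groups capture the WMO heading (TTAAii, CCCC) plus day, hour, and minute, and the fixed text-pattern line now builds the directory from (\3:yyyy)(\3:mm)\3, which lets pqact derive year and month from the day-of-month group. A sketch of what those groups capture, in Java regex form; the year and month are hard-coded here because the (\3:yyyy) expansion is done by LDM at file-write time, not by the regex:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/** Sketch: what the corrected pqact text pattern captures from a WMO heading. */
public class PqactPatternDemo {

    // Same groups as the pqact entry: TTAAii (1), CCCC (2), day (3), hour (4), minute (5).
    private static final Pattern TEXT_PRODUCT =
            Pattern.compile("^(T[BCHPRTWXY][A-Z]{2}[0-9]{2}) ([A-Z]{4}) (..)(..)(..)");

    public static void main(String[] args) {
        Matcher m = TEXT_PRODUCT.matcher("TBUS31 KWNO 261200");
        if (m.find()) {
            // pqact's (\3:yyyy)(\3:mm) derives these from the product's arrival
            // time; fixed values here purely for the demo.
            String yyyy = "2013", mm = "03";
            String path = "/data_store/text/" + yyyy + mm + m.group(3) + "/"
                    + m.group(4) + "/" + m.group(1) + "_" + m.group(2) + "_"
                    + m.group(3) + m.group(4) + m.group(5)
                    + "_(seq).%Y%m%d%H"; // (seq) and %-codes expanded by pqact
            System.out.println(path);
            // -> /data_store/text/20130326/12/TBUS31_KWNO_261200_(seq).%Y%m%d%H
        }
    }
}
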
@ -344,6 +344,7 @@ if [ "${1}" = "-viz" ]; then
    buildRPM "awips2"
    buildRPM "awips2-common-base"
    buildCAVE
    buildRPM "awips2-rcm"
    if [ $? -ne 0 ]; then
        exit 1
    fi
rpms/build/i386/build.sh_preRCM (new file, 418 lines)
@ -0,0 +1,418 @@
#!/bin/bash

function buildRPM()
{
    # Arguments:
    #   ${1} == the name of the rpm.
    lookupRPM "${1}"
    if [ $? -ne 0 ]; then
        echo "ERROR: '${1}' is not a recognized AWIPS II RPM."
        exit 1
    fi

    /usr/bin/rpmbuild -ba \
        --define '_topdir %(echo ${AWIPSII_TOP_DIR})' \
        --define '_baseline_workspace %(echo ${WORKSPACE})' \
        --define '_uframe_eclipse %(echo ${UFRAME_ECLIPSE})' \
        --define '_awipscm_share %(echo ${AWIPSCM_SHARE})' \
        --define '_build_root %(echo ${AWIPSII_BUILD_ROOT})' \
        --define '_component_version %(echo ${AWIPSII_VERSION})' \
        --define '_component_release %(echo ${AWIPSII_RELEASE})' \
        --define '_component_build_date %(echo ${COMPONENT_BUILD_DATE})' \
        --define '_component_build_time %(echo ${COMPONENT_BUILD_TIME})' \
        --define '_component_build_system %(echo ${COMPONENT_BUILD_SYSTEM})' \
        --buildroot ${AWIPSII_BUILD_ROOT} \
        ${RPM_SPECIFICATION}/component.spec
    if [ $? -ne 0 ]; then
        echo "ERROR: Failed to build RPM ${1}."
        exit 1
    fi

    return 0
}

# This script will build all of the 32-bit rpms.
# Ensure that we are on a machine with the correct architecture.

architecture=`uname -i`
if [ ! "${architecture}" = "i386" ]; then
    echo "ERROR: This build can only be performed on a 32-bit Operating System."
    exit 1
fi

# Determine which directory we are running from.
path_to_script=`readlink -f $0`
dir=$(dirname $path_to_script)

common_dir=`cd ${dir}/../common; pwd;`
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to find the common functions directory."
    exit 1
fi
# source the common functions.
source ${common_dir}/lookupRPM.sh
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to source the common functions."
    exit 1
fi
source ${common_dir}/usage.sh
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to source the common functions."
    exit 1
fi
source ${common_dir}/rpms.sh
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to source the common functions."
    exit 1
fi
source ${common_dir}/systemInfo.sh
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to retrieve the system information."
    exit 1
fi

# prepare the build environment.
source ${dir}/buildEnvironment.sh
if [ $? -ne 0 ]; then
    echo "ERROR: Unable to prepare the build environment."
    exit 1
fi

export LIGHTNING=true
# Determine if the optional '-nobinlightning' argument has been specified.
if [ "${2}" = "-nobinlightning" ]; then
    LIGHTNING=false
fi

if [ "${1}" = "-python-qpid" ]; then
    buildRPM "awips2"
    buildRPM "awips2-python-qpid"
    buildRPM "awips2-python"
    buildRPM "awips2-python-cherrypy"
    buildRPM "awips2-python-dynamicserialize"
    buildRPM "awips2-python-nose"
    buildRPM "awips2-python-numpy"
    buildRPM "awips2-python-h5py"
    buildRPM "awips2-python-jimporter"
    buildRPM "awips2-python-matplotlib"
    buildRPM "awips2-python-pil"
    buildRPM "awips2-python-pmw"
    buildRPM "awips2-python-pupynere"
    buildRPM "awips2-python-scientific"
    buildRPM "awips2-python-scipy"
    buildRPM "awips2-python-tables"
    buildRPM "awips2-python-thrift"
    buildRPM "awips2-python-tpg"
    buildRPM "awips2-python-ufpy"
    buildRPM "awips2-python-werkzeug"
    buildRPM "awips2-python-pygtk"
    buildRPM "awips2-python-pycairo"
    if [ $? -ne 0 ]; then
        exit 1
    fi

    buildQPID
    if [ $? -ne 0 ]; then
        exit 1
    fi

    #buildRPM "awips2-ant"
    #unpackHttpdPypies
    if [ $? -ne 0 ]; then
        exit 1
    fi
    #buildRPM "awips2-httpd-pypies"
    #buildRPM "awips2-java"
    #buildRPM "awips2-ldm"
    #buildRPM "awips2-postgresql"
    #buildRPM "awips2-psql"
    #buildRPM "awips2-tools"
    buildRPM "awips2-python-shapely"

    exit 0
fi


if [ "${1}" = "-delta" ]; then
    buildCAVE
    if [ $? -ne 0 ]; then
        exit 1
    fi
    buildRPM "awips2-alertviz"
    buildEDEX
    if [ $? -ne 0 ]; then
        exit 1
    fi

    buildRPM "awips2"
    buildRPM "Installer.ncep-database"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
    buildRPM "awips2-python-dynamicserialize"
    buildRPM "awips2-python-ufpy"
    buildRPM "awips2-python-qpid"

    buildRPM "awips2-adapt-native"
    buildRPM "awips2-aviation-shared"
    buildRPM "awips2-cli"
    buildRPM "awips2-database"
    buildRPM "awips2-database-server-configuration"
    buildRPM "awips2-database-standalone-configuration"
    buildRPM "awips2-data.hdf5-gfe.climo"
    buildRPM "awips2-hydroapps-shared"
    buildRPM "awips2-localapps-environment"
    buildRPM "awips2-maps-database"
    buildRPM "awips2-notification"
    buildRPM "awips2-pypies"
    buildRPM "awips2-data.hdf5-topo"
    buildRPM "awips2-data.gfe"
    buildRPM "awips2-rcm"
    buildRPM "awips2-edex-environment"
    buildLocalizationRPMs
    if [ $? -ne 0 ]; then
        exit 1
    fi

    exit 0
fi

if [ "${1}" = "-full" ]; then
    buildCAVE
    if [ $? -ne 0 ]; then
        exit 1
    fi
    buildRPM "Installer.ncep-database"
    buildRPM "awips2-alertviz"
    buildEDEX
    if [ $? -ne 0 ]; then
        exit 1
    fi
    buildRPM "awips2-python"
    buildRPM "awips2-python-cherrypy"
    buildRPM "awips2-python-dynamicserialize"
    buildRPM "awips2-python-h5py"
    buildRPM "awips2-python-jimporter"
    buildRPM "awips2-python-matplotlib"
    buildRPM "awips2-python-nose"
    buildRPM "awips2-python-numpy"
    buildRPM "awips2-python-pil"
    buildRPM "awips2-python-pmw"
    buildRPM "awips2-python-pupynere"
    buildRPM "awips2-python-qpid"
    buildRPM "awips2-python-scientific"
    buildRPM "awips2-python-scipy"
    buildRPM "awips2-python-tables"
    buildRPM "awips2-python-thrift"
    buildRPM "awips2-python-tpg"
    buildRPM "awips2-python-ufpy"
    buildRPM "awips2-python-werkzeug"
    buildRPM "awips2-python-pygtk"
    buildRPM "awips2-python-pycairo"

    buildRPM "awips2"
    buildRPM "awips2-adapt-native"
    buildRPM "awips2-aviation-shared"
    buildRPM "awips2-cli"
    buildRPM "awips2-database"
    buildRPM "awips2-database-server-configuration"
    buildRPM "awips2-database-standalone-configuration"
    buildRPM "awips2-data.hdf5-gfe.climo"
    buildRPM "awips2-data.gfe"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
    buildRPM "awips2-hydroapps-shared"
    buildRPM "awips2-localapps-environment"
    buildRPM "awips2-maps-database"
    buildRPM "awips2-notification"
    buildRPM "awips2-pypies"
    buildRPM "awips2-data.hdf5-topo"
    buildRPM "awips2-rcm"
    buildLocalizationRPMs
    if [ $? -ne 0 ]; then
        exit 1
    fi

    buildQPID
    if [ $? -ne 0 ]; then
        exit 1
    fi

    buildRPM "awips2-ant"
    unpackHttpdPypies
    if [ $? -ne 0 ]; then
        exit 1
    fi
    buildRPM "awips2-httpd-pypies"
    buildRPM "awips2-java"
    #buildRPM "awips2-ldm"
    buildRPM "awips2-postgresql"
    buildRPM "awips2-psql"
    buildRPM "awips2-tools"
    buildRPM "awips2-edex-environment"
    buildRPM "awips2-openfire"
    buildRPM "awips2-httpd-collaboration"
    buildRPM "awips2-python-shapely"

    exit 0
fi

if [ "${1}" = "-ade" ]; then
    buildRPM "awips2-eclipse"
    buildRPM "awips2-java"
    buildRPM "awips2-ant"
    buildRPM "awips2-python"
    buildRPM "awips2-python-cherrypy"
    buildRPM "awips2-python-dynamicserialize"
    buildRPM "awips2-python-h5py"
    buildRPM "awips2-python-jimporter"
    buildRPM "awips2-python-matplotlib"
    buildRPM "awips2-python-nose"
    buildRPM "awips2-python-numpy"
    buildRPM "awips2-python-pil"
    buildRPM "awips2-python-pmw"
    buildRPM "awips2-python-pupynere"
    buildRPM "awips2-python-qpid"
    buildRPM "awips2-python-scientific"
    buildRPM "awips2-python-scipy"
    buildRPM "awips2-python-tables"
    buildRPM "awips2-python-thrift"
    buildRPM "awips2-python-tpg"
    buildRPM "awips2-python-ufpy"
    buildRPM "awips2-python-werkzeug"
    buildRPM "awips2-python-pygtk"
    buildRPM "awips2-python-pycairo"
    buildRPM "awips2-python-shapely"
    buildQPID -ade
    if [ $? -ne 0 ]; then
        exit 1
    fi

    # Package the ade.
    # Create the containing directory.
    ade_directory="awips2-ade-${AWIPSII_VERSION}-${AWIPSII_RELEASE}"
    if [ -d ${WORKSPACE}/${ade_directory} ]; then
        rm -rf ${WORKSPACE}/${ade_directory}
        if [ $? -ne 0 ]; then
            exit 1
        fi
    fi
    mkdir -p ${WORKSPACE}/${ade_directory}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    # Copy the rpms to the directory.
    cp -v ${AWIPSII_TOP_DIR}/RPMS/i386/* \
        ${AWIPSII_TOP_DIR}/RPMS/noarch/* \
        ${WORKSPACE}/${ade_directory}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    awips2_ade_directory="${WORKSPACE}/rpms/awips2.ade"
    # Copy the install and uninstall script to the directory.
    cp -v ${awips2_ade_directory}/tar.ade/scripts/*.sh \
        ${WORKSPACE}/${ade_directory}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    # Tar the directory.
    pushd . > /dev/null 2>&1
    cd ${WORKSPACE}
    tar -cvf ${ade_directory}.tar ${ade_directory}
    popd > /dev/null 2>&1
    RC=$?
    if [ ${RC} -ne 0 ]; then
        exit 1
    fi

    exit 0
fi

if [ "${1}" = "-viz" ]; then
    buildRPM "awips2"
    buildCAVE
    if [ $? -ne 0 ]; then
        exit 1
    fi
    buildRPM "awips2-alertviz"

    exit 0
fi

if [ "${1}" = "-edex" ]; then
    buildRPM "awips2"
    buildRPM "awips2-cli"
    buildRPM "awips2-gfesuite-client"
    buildRPM "awips2-gfesuite-server"
    buildRPM "Installer.ncep-database"
    buildEDEX
    if [ $? -ne 0 ]; then
        exit 1
    fi

    exit 0
fi

if [ "${1}" = "-qpid" ]; then
    buildQPID
    if [ $? -ne 0 ]; then
        exit 1
    fi

    exit 0
fi

if [ "${1}" = "-ldm" ]; then
    # Ensure that the user has root privileges.
    if [ ! ${UID} = 0 ]; then
        echo "ERROR: You must have root privileges to build ldm."
        exit 1
    fi
    buildRPM "awips2-ldm"

    exit 0
fi

if [ "${1}" = "-package" ]; then
    repository_directory="awips2-repository-${AWIPSII_VERSION}-${AWIPSII_RELEASE}"
    if [ -d ${WORKSPACE}/${repository_directory} ]; then
        rm -rf ${WORKSPACE}/${repository_directory}
        if [ $? -ne 0 ]; then
            exit 1
        fi
    fi
    mkdir -p ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    cp -r ${AWIPSII_TOP_DIR}/RPMS/* \
        ${WORKSPACE}/${repository_directory}/${AWIPSII_VERSION}-${AWIPSII_RELEASE}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    rpms_directory="${WORKSPACE}/rpms"
    comps_xml="${rpms_directory}/common/yum/arch.x86/comps.xml"
    cp -v ${comps_xml} ${WORKSPACE}/${repository_directory}
    if [ $? -ne 0 ]; then
        exit 1
    fi

    pushd . > /dev/null
    cd ${WORKSPACE}
    tar -cvf ${repository_directory}.tar ${repository_directory}
    RC=$?
    popd > /dev/null
    if [ ${RC} -ne 0 ]; then
        exit 1
    fi

    exit 0
fi


usage
exit 0