Merge branch 'master_14.1.1' into omaha_14.1.1
commit 2ed2e93b03
24 changed files with 1418 additions and 1279 deletions
@@ -24,6 +24,10 @@ import java.util.Calendar;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
 import org.eclipse.jface.dialogs.MessageDialog;
 import org.eclipse.swt.SWT;
 import org.eclipse.swt.events.SelectionAdapter;
@@ -71,6 +75,7 @@ import com.raytheon.viz.ui.dialogs.CaveSWTDialog;
  * Jul 24, 2013 2220 rferrel     Changes to queue size request for all data.
  * Aug 01, 2013 2221 rferrel     Changes for select configuration.
  * Aug 06, 2013 2222 rferrel     Changes to display all selected data.
+ * Nov 14, 2013 2549 rferrel     Get category data moved off the UI thread.
  * </pre>
  *
  * @author bgonzale
@@ -452,25 +457,57 @@ public abstract class AbstractArchiveDlg extends CaveSWTDialog implements
      * adjust sizes on the display table.
      */
     protected void populateTableComp() {
-        String archiveName = getSelectedArchiveName();
-        String categoryName = getSelectedCategoryName();
+        final String archiveName = getSelectedArchiveName();
+        final String categoryName = getSelectedCategoryName();
 
         setCursorBusy(true);
 
-        try {
-            setShowingSelected(false);
-
-            List<DisplayData> displayDatas = sizeJob.changeDisplay(archiveName,
-                    categoryName);
-            if (displayDatas != null) {
-                tableComp
-                        .populateTable(archiveName, categoryName, displayDatas);
-            } else {
-                tableComp.refresh();
-            }
-        } finally {
-            setCursorBusy(false);
-        }
-    }
+        setShowingSelected(false);
+        tableComp.populateTable(archiveName, categoryName,
+                new ArrayList<DisplayData>(0));
+        tableComp.refresh();
+
+        Job job = new Job("populate category table") {
+
+            @Override
+            protected IStatus run(IProgressMonitor monitor) {
+                getCategoryTableData(archiveName, categoryName);
+                return Status.OK_STATUS;
+            }
+        };
+        job.schedule();
+    }
+
+    /**
+     * This gets the desired categories data. Assumed called from non-UI thread
+     * since it is possible getting the data may take time which would hang up
+     * the UI thread.
+     *
+     * @param archiveName
+     * @param categoryName
+     */
+    private void getCategoryTableData(final String archiveName,
+            final String categoryName) {
+
+        final List<DisplayData> displayDatas = sizeJob.changeDisplay(
+                archiveName, categoryName);
+
+        VizApp.runAsync(new Runnable() {
+
+            @Override
+            public void run() {
+                try {
+                    if (displayDatas != null) {
+                        tableComp.populateTable(archiveName, categoryName,
+                                displayDatas);
+                    } else {
+                        tableComp.refresh();
+                    }
+                } finally {
+                    setCursorBusy(false);
+                }
+            }
+        });
+    }
 
     /**
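The hunk above is the heart of ticket 2549: the dialog now paints an empty table immediately, pushes the slow sizeJob.changeDisplay() call onto an Eclipse Job, and hands the result back to the UI thread via VizApp.runAsync. A minimal standalone sketch of the same shape, using plain java.util.concurrent in place of the Eclipse Job and VizApp APIs (only the names mentioned in the diff are real; everything in the sketch is a stand-in):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class OffUiThreadFetch {
    // Stand-in for the UI-thread dispatcher (VizApp.runAsync in the hunk above).
    static void runOnUiThread(Runnable r) {
        r.run(); // real code would post to the SWT Display event queue
    }

    public static void main(String[] args) {
        ExecutorService background = Executors.newSingleThreadExecutor();
        // 1. Show an empty table immediately so the dialog stays responsive.
        System.out.println("table cleared, cursor busy");
        // 2. Do the slow fetch off the UI thread (the Job in the hunk above).
        background.execute(() -> {
            String data = "category sizes"; // slow changeDisplay(...) stand-in
            // 3. Hand the result back to the UI thread, always clearing the cursor.
            runOnUiThread(() -> {
                try {
                    System.out.println("table populated with " + data);
                } finally {
                    System.out.println("cursor restored");
                }
            });
        });
        background.shutdown();
    }
}

The try/finally in the callback mirrors the hunk: the busy cursor is cleared even if populating the table throws.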
@@ -312,9 +312,24 @@ public class CurrentWarnings {
     public AbstractWarningRecord getNewestByTracking(String etn, String phensig) {
         AbstractWarningRecord rval = null;
         synchronized (officeId) {
-            List<AbstractWarningRecord> warnings = warningMap.get(toKey(
+            List<AbstractWarningRecord> keyWarnings = warningMap.get(toKey(
                     phensig, etn));
-            if (warnings != null) {
+            if (keyWarnings != null) {
+                // filter out "future" warnings.
+                List<AbstractWarningRecord> warnings = null;
+                if (SimulatedTime.getSystemTime().isRealTime()) {
+                    warnings = keyWarnings;
+                } else {
+                    warnings = new ArrayList<AbstractWarningRecord>(
+                            keyWarnings.size());
+                    long currentTime = TimeUtil.newCalendar().getTimeInMillis();
+                    for (AbstractWarningRecord warning : keyWarnings) {
+                        if (warning.getIssueTime().getTimeInMillis() <= currentTime) {
+                            warnings.add(warning);
+                        }
+                    }
+                }
+
                 // See if we have a NEW warning
                 for (AbstractWarningRecord warning : warnings) {
                     if (getAction(warning.getAct()) == WarningAction.NEW) {
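Ticket 2439 context for this hunk: under DRT (displaced real time), the warning map can hold records whose issue time is later than the simulated clock, so the method now drops them before looking for a NEW action. A toy sketch of that filter (the Warning record and values here are hypothetical):

import java.util.ArrayList;
import java.util.List;

public class FutureFilter {
    record Warning(String id, long issueMillis) {}

    /** Keep only warnings issued at or before 'now' (the simulated clock in DRT). */
    static List<Warning> dropFuture(List<Warning> all, long nowMillis) {
        List<Warning> kept = new ArrayList<>(all.size());
        for (Warning w : all) {
            if (w.issueMillis() <= nowMillis) {
                kept.add(w);
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        long simulatedNow = 1_000L;
        List<Warning> all = List.of(new Warning("past", 900L),
                new Warning("future", 1_100L));
        System.out.println(dropFuture(all, simulatedNow)); // only "past" survives
    }
}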
@@ -399,8 +414,7 @@ public class CurrentWarnings {
         if (warnings != null) {
             Calendar c = TimeUtil.newCalendar();
             c.add(Calendar.MINUTE, -10);
-            TimeRange t = new TimeRange(c.getTime(), SimulatedTime
-                    .getSystemTime().getTime());
+            TimeRange t = new TimeRange(c.getTime(), TimeUtil.newDate());
 
             for (AbstractWarningRecord warning : warnings) {
                 if (t.contains(warning.getIssueTime().getTime())) {
@@ -438,8 +452,7 @@ public class CurrentWarnings {
         ArrayList<AbstractWarningRecord> conProds = new ArrayList<AbstractWarningRecord>();
         Calendar c = TimeUtil.newCalendar();
         c.add(Calendar.MINUTE, -10);
-        TimeRange t = new TimeRange(c.getTime(), SimulatedTime
-                .getSystemTime().getTime());
+        TimeRange t = new TimeRange(c.getTime(), TimeUtil.newDate());
         for (AbstractWarningRecord warning : warnings) {
             WarningAction action = getAction(warning.getAct());
             if (t.contains(warning.getIssueTime().getTime())
@@ -545,12 +558,20 @@ public class CurrentWarnings {
         List<AbstractWarningRecord> records = new ArrayList<AbstractWarningRecord>(
                 recordsMap.values());
 
-        // Sort by insert time
+        // Sort by issue time when null fall back to insert time.
         Collections.sort(records, new Comparator<AbstractWarningRecord>() {
             @Override
             public int compare(AbstractWarningRecord o1,
                     AbstractWarningRecord o2) {
-                return o1.getInsertTime().compareTo(o2.getInsertTime());
+                Calendar c1 = o1.getIssueTime();
+                if (c1 == null) {
+                    c1 = o1.getInsertTime();
+                }
+                Calendar c2 = o2.getIssueTime();
+                if (c2 == null) {
+                    c2 = o2.getInsertTime();
+                }
+                return c1.compareTo(c2);
             }
         });
 
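The new comparator prefers issue time and falls back to insert time when issue time is null. On Java 8+ the same ordering can be expressed with a key extractor; a hedged sketch (Rec and effectiveTime are illustrative, not part of the commit):

import java.util.Calendar;
import java.util.Comparator;

public class FallbackSort {
    interface Rec {
        Calendar getIssueTime();
        Calendar getInsertTime();
    }

    /** Issue time when present, otherwise insert time. */
    static Calendar effectiveTime(Rec r) {
        Calendar issue = r.getIssueTime();
        return issue != null ? issue : r.getInsertTime();
    }

    // Equivalent ordering to the anonymous Comparator in the hunk above.
    static final Comparator<Rec> BY_EFFECTIVE_TIME =
            Comparator.comparing(FallbackSort::effectiveTime);
}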
@@ -602,7 +623,10 @@ public class CurrentWarnings {
 
         Map<String, List<AbstractWarningRecord>> recordMap = new HashMap<String, List<AbstractWarningRecord>>();
         for (AbstractWarningRecord rec : newRecords) {
-            List<AbstractWarningRecord> recs = recordMap.get(rec.getOfficeid());
+            // This used the key rec.getOfficeid() which can be null; which
+            // can drop alerts when more then one new Record.
+            // Changed to use the same key as the put.
+            List<AbstractWarningRecord> recs = recordMap.get(rec.getXxxid());
             if (recs == null) {
                 recs = new ArrayList<AbstractWarningRecord>();
                 recordMap.put(rec.getXxxid(), recs);
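The comment in the hunk explains the fix: the lookup used rec.getOfficeid() while the store used rec.getXxxid(), so the lookup always missed and each new record replaced the previous list. A toy reproduction of that get/put key mismatch (keys here are placeholders):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class KeyMismatch {
    public static void main(String[] args) {
        Map<String, List<String>> byKey = new HashMap<>();
        // Mismatched keys (the pre-fix bug): get() never sees what put() stored,
        // so the second record replaces the first instead of joining it.
        for (String rec : new String[] { "warning A", "warning B" }) {
            List<String> recs = byKey.get("officeId");   // always null here
            if (recs == null) {
                recs = new ArrayList<>();
                byKey.put("xxxId", recs);                // different key!
            }
            recs.add(rec);
        }
        System.out.println(byKey.get("xxxId")); // [warning B]: warning A was dropped
    }
}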
@@ -26,6 +26,7 @@ import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.time.DataTime;
 import com.raytheon.uf.common.time.SimulatedTime;
 import com.raytheon.uf.common.time.TimeRange;
+import com.raytheon.uf.common.time.util.TimeUtil;
 import com.raytheon.uf.viz.core.DrawableString;
 import com.raytheon.uf.viz.core.IGraphicsTarget;
 import com.raytheon.uf.viz.core.IGraphicsTarget.HorizontalAlignment;
@@ -82,6 +83,7 @@ import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
  *                                     Check if geometry is null when inspecting.
  * Jul 22, 2013 2176 jsanchez    Updated the wire frame and text for EMERGENCY warnings.
  * Sep 4, 2013  2176 jsanchez    Made the polygon line width thicker and made regular text not bold.
+ * Nov 11, 2013 2439 rferrel     Changes to prevent getting future warning when in DRT mode.
  * </pre>
  *
  * @author jsanchez
@@ -128,7 +130,7 @@ public abstract class AbstractWWAResource extends
     protected static PreparedGeometryFactory pgf = new PreparedGeometryFactory();
 
     /** one hour ahead, entirely arbitrary/magic **/
-    private static final long LAST_FRAME_ADJ = (60 * 60 * 1000);
+    private static final long LAST_FRAME_ADJ = TimeUtil.MILLIS_PER_HOUR;
 
     protected String resourceName;
 
@@ -465,13 +467,20 @@ public abstract class AbstractWWAResource extends
 
         if (lastFrame) {
             // use current system time to determine what to display
-            Date timeToDisplay = SimulatedTime.getSystemTime().getTime();
+            Date timeToDisplay = TimeUtil.newDate();
             // change frame time
             frameTime = timeToDisplay;
             // point paint time to different time
             paintTime = new DataTime(timeToDisplay);
             // point framePeriod to new frame
-            framePeriod = new TimeRange(frameTime, LAST_FRAME_ADJ);
+            if (SimulatedTime.getSystemTime().isRealTime()) {
+                framePeriod = new TimeRange(frameTime, LAST_FRAME_ADJ);
+            } else {
+                // Prevent getting "future" records by keeping interval in the
+                // same minute.
+                framePeriod = new TimeRange(frameTime,
+                        30 * TimeUtil.MILLIS_PER_SECOND);
+            }
         }
 
         // check if the warning is cancelled
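Same clock-mode split as in CurrentWarnings: a one-hour look-ahead window is harmless against the real clock, but under a simulated clock it can sweep in records that have not "happened" yet, so the window shrinks to 30 seconds. A trivial sketch of the selection (constants inlined; everything here is a stand-in):

public class FrameWindow {
    static final long MILLIS_PER_SECOND = 1000L;
    static final long MILLIS_PER_HOUR = 60 * 60 * MILLIS_PER_SECOND;

    /** Window length for the last frame, by clock mode. */
    static long frameWindowMillis(boolean realTime) {
        // Real time: an hour of look-ahead cannot match anything not yet issued.
        // Simulated (DRT): stay within roughly the current minute so replayed
        // "future" records are not swept into the frame.
        return realTime ? MILLIS_PER_HOUR : 30 * MILLIS_PER_SECOND;
    }

    public static void main(String[] args) {
        System.out.println(frameWindowMillis(true));  // 3600000
        System.out.println(frameWindowMillis(false)); // 30000
    }
}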
@@ -147,4 +147,4 @@ if [ $DEBUG_FLAG == "on" ]; then
   echo "To Debug ... Connect to Port: ${EDEX_DEBUG_PORT}."
 fi
 
-java -jar ${EDEX_HOME}/bin/yajsw/wrapper.jar -c ${EDEX_HOME}/conf/${CONF_FILE} ${WRAPPER_ARGS}
+java -Xmx32m -XX:MaxPermSize=12m -XX:ReservedCodeCacheSize=4m -jar ${EDEX_HOME}/bin/yajsw/wrapper.jar -c ${EDEX_HOME}/conf/${CONF_FILE} ${WRAPPER_ARGS}
@@ -5,79 +5,87 @@
     xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
     http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd
     http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd">
 
     <bean id="uriAggregator" class="com.raytheon.uf.edex.esb.camel.DataUriAggregator" />
     <bean id="toDataURI" class="com.raytheon.uf.edex.esb.camel.ToDataURI" />
 
-    <bean id="persist" class="com.raytheon.edex.services.PersistSrv" factory-method="getInstance"/>
-    <bean id="index" class="com.raytheon.edex.services.IndexSrv"/>
-    <bean id="persistCamelRegistered" factory-bean="contextManager"
-        factory-method="register">
-        <constructor-arg ref="persist-camel"/>
+    <bean id="dupElim" class="com.raytheon.edex.ingestsrv.DupElimSrv"/>
+    <bean id="persist" class="com.raytheon.edex.services.PersistSrv" factory-method="getInstance" />
+    <bean id="index" class="com.raytheon.edex.services.IndexSrv" />
+    <bean id="persistCamelRegistered" factory-bean="contextManager" factory-method="register">
+        <constructor-arg ref="persist-camel" />
     </bean>
 
-    <camelContext id="persist-camel" xmlns="http://camel.apache.org/schema/spring" errorHandlerRef="errorHandler">
-        <!-- Generic persist and indexing
-            Intended for routes that need persisting to HDF5,
-            Indexing but no alert processing
-        -->
+    <camelContext id="persist-camel" xmlns="http://camel.apache.org/schema/spring"
+        errorHandlerRef="errorHandler">
+
+        <!-- Generic persist and indexing
+            Intended for routes that need persisting to HDF5,
+            Indexing but no alert processing -->
         <route id="persistIndex">
-            <from uri="direct-vm:persistIndex"/>
-            <bean ref="persist" method="persist"/>
-            <bean ref="index" method="index"/>
-            <bean ref="processUtil" method="log"/>
+            <from uri="direct-vm:persistIndex" />
+            <doTry>
+                <bean ref="persist" method="persist" />
+                <bean ref="index" method="index" />
+                <bean ref="processUtil" method="log" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to uri="log:persist?level=ERROR" />
+                </doCatch>
+            </doTry>
         </route>
 
         <!-- Generic persist, index and alert route
            Intended for routes that need persisting to HDF5,
            Indexing and Alerting
         -->
         <route id="persistIndexAlert">
-            <from uri="direct-vm:persistIndexAlert"/>
-            <bean ref="persist" method="persist"/>
-            <bean ref="index" method="index"/>
-            <bean ref="processUtil" method="log"/>
-            <bean ref="toDataURI" method="toDataURI"/>
-            <to uri="vm:stageNotification"/>
+            <from uri="direct-vm:persistIndexAlert" />
+            <doTry>
+                <bean ref="persist" method="persist" />
+                <bean ref="index" method="index" />
+                <bean ref="processUtil" method="log" />
+                <bean ref="toDataURI" method="toDataURI" />
+                <to uri="vm:stageNotification" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to uri="log:persist?level=ERROR" />
+                </doCatch>
+            </doTry>
         </route>
 
         <!-- Generic index and alert route
            Intended for routes that need Indexing and Alerting
         -->
         <route id="indexAlert">
-            <from uri="direct-vm:indexAlert"/>
-            <bean ref="index" method="index"/>
-            <bean ref="processUtil" method="log"/>
-            <bean ref="toDataURI" method="toDataURI"/>
-            <to uri="vm:stageNotification"/>
+            <from uri="direct-vm:indexAlert" />
+            <doTry>
+                <bean ref="index" method="index" />
+                <bean ref="processUtil" method="log" />
+                <bean ref="toDataURI" method="toDataURI" />
+                <to uri="vm:stageNotification" />
+                <doCatch>
+                    <exception>java.lang.Throwable</exception>
+                    <to uri="log:persist?level=ERROR" />
+                </doCatch>
+            </doTry>
         </route>
 
         <route id="notificationAggregation">
-            <from uri="vm:stageNotification"/>
+            <from uri="vm:stageNotification" />
             <bean ref="uriAggregator" method="addDataUris" />
-            <!--
-            <multicast>
-                <pipeline>
-                    <bean ref="uriAggregator" method="addDataUris" />
-                </pipeline>
-                <pipeline>
-                    <to uri="jms-generic:queue:subscriptions" />
-                </pipeline>
-            </multicast>
-            -->
         </route>
 
         <route id="notificationTimer">
             <from uri="timer://notificationTimer?fixedRate=true&amp;period=5000" />
             <filter>
                 <method bean="uriAggregator" method="hasUris" />
                 <bean ref="uriAggregator" method="sendQueuedUris" />
                 <bean ref="serializationUtil" method="transformToThrift" />
-                <to uri="jms-generic:topic:edex.alerts?timeToLive=60000"/>
+                <to uri="jms-generic:topic:edex.alerts?timeToLive=60000" />
             </filter>
         </route>
     </camelContext>
 </beans>
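The XML change wraps each route body in doTry/doCatch so a persistence failure is logged instead of escaping the route. For comparison, the same shape in Camel's Java DSL (a sketch, not part of the commit; endpoint and bean names are copied from the XML above):

import org.apache.camel.builder.RouteBuilder;

public class PersistIndexRoute extends RouteBuilder {
    @Override
    public void configure() {
        // Same shape as the persistIndex route above: run the pipeline,
        // and log (rather than lose) anything it throws.
        from("direct-vm:persistIndex")
            .doTry()
                .to("bean:persist?method=persist")
                .to("bean:index?method=index")
                .to("bean:processUtil?method=log")
            .doCatch(Throwable.class)
                .to("log:persist?level=ERROR")
            .end();
    }
}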
@@ -0,0 +1,131 @@
+/**
+ * This software was developed and / or modified by Raytheon Company,
+ * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+ *
+ * U.S. EXPORT CONTROLLED TECHNICAL DATA
+ * This software product contains export-restricted data whose
+ * export/transfer/disclosure is restricted by U.S. law. Dissemination
+ * to non-U.S. persons whether in the United States or abroad requires
+ * an export license or other authorization.
+ *
+ * Contractor Name:        Raytheon Company
+ * Contractor Address:     6825 Pine Street, Suite 340
+ *                         Mail Stop B8
+ *                         Omaha, NE 68106
+ *                         402.291.0100
+ *
+ * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+ * further licensing information.
+ **/
+package com.raytheon.edex.ingestsrv;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import com.raytheon.uf.common.dataplugin.PluginDataObject;
+import com.raytheon.uf.common.dataplugin.annotations.DataURIUtil;
+import com.raytheon.uf.common.status.IPerformanceStatusHandler;
+import com.raytheon.uf.common.status.IUFStatusHandler;
+import com.raytheon.uf.common.status.PerformanceStatus;
+import com.raytheon.uf.common.status.UFStatus;
+import com.raytheon.uf.common.time.util.ITimer;
+import com.raytheon.uf.common.time.util.TimeUtil;
+import com.raytheon.uf.common.util.CollectionUtil;
+import com.raytheon.uf.edex.database.plugin.PluginDao;
+import com.raytheon.uf.edex.database.plugin.PluginFactory;
+import com.raytheon.uf.edex.database.query.DatabaseQuery;
+
+/**
+ * Checks database for duplicates of data. Does not account for clustering.
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Nov 11, 2013 2478       rjpeter     Initial creation
+ * </pre>
+ *
+ * @author rjpeter
+ * @version 1.0
+ */
+public class DupElimSrv {
+    private static final IUFStatusHandler statusHandler = UFStatus
+            .getHandler(DupElimSrv.class);
+
+    private final IPerformanceStatusHandler perfLog = PerformanceStatus
+            .getHandler("DupElim:");
+
+    /**
+     * Checks the passed pdos against database for existence. If duplicates
+     * found returns a new array containing only the new plugin data objects. If
+     * an errors occurs the original pdos array will be returned.
+     *
+     * @param pluginName
+     * @param pdos
+     * @return
+     */
+    public PluginDataObject[] dupElim(PluginDataObject[] pdos) {
+        if ((pdos == null) || (pdos.length == 0)) {
+            return new PluginDataObject[0];
+        }
+
+        ITimer dupCheckTimer = TimeUtil.getTimer();
+        dupCheckTimer.start();
+
+        int numBefore = pdos.length;
+        String pluginName = pdos[0].getPluginName();
+
+        try {
+
+            PluginDao dao = PluginFactory.getInstance()
+                    .getPluginDao(pluginName);
+            List<PluginDataObject> newPdos = new ArrayList<PluginDataObject>(
+                    pdos.length);
+
+            // TODO: Bulk querying, groups of 100 using IN lists?
+            for (PluginDataObject pdo : pdos) {
+                DatabaseQuery dbQuery = new DatabaseQuery(pdo.getClass());
+                Map<String, Object> dataUriFields = DataURIUtil
+                        .createDataURIMap(pdo);
+                for (Map.Entry<String, Object> field : dataUriFields.entrySet()) {
+                    String fieldName = field.getKey();
+                    // ignore pluginName
+                    if (!DataURIUtil.PLUGIN_NAME_KEY.equals(fieldName)) {
+                        dbQuery.addQueryParam(field.getKey(), field.getValue());
+                    }
+                }
+
+                @SuppressWarnings("unchecked")
+                List<PluginDataObject> dbPdos = (List<PluginDataObject>) dao
+                        .queryByCriteria(dbQuery);
+                if (CollectionUtil.isNullOrEmpty(dbPdos)) {
+                    newPdos.add(pdo);
+                } else {
+                    // shouldn't be more than 1
+                    PluginDataObject dbPdo = dbPdos.get(1);
+                    if ((dbPdo == null)
+                            || !pdo.getDataURI().equals(dbPdo.getDataURI())) {
+                        newPdos.add(pdo);
+                    }
+                }
+            }
+            if (pdos.length != newPdos.size()) {
+                pdos = newPdos.toArray(new PluginDataObject[newPdos.size()]);
+            }
+        } catch (Exception e) {
+            statusHandler
+                    .error("Error occurred during duplicate elimination processing",
+                            e);
+        }
+        dupCheckTimer.stop();
+
+        perfLog.logDuration(pluginName + ": Eliminated "
+                + (numBefore - pdos.length) + " of " + numBefore
+                + " record(s): Time to process", dupCheckTimer.getElapsedTime());
+        return pdos;
+    }
+}
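One detail worth flagging in the new DupElimSrv: the comment says the query result "shouldn't be more than 1", yet the code reads dbPdos.get(1), the second element, which throws IndexOutOfBoundsException on a single-element list; get(0) looks like the intent. A standalone sketch of the per-record duplicate check with a zero-based lookup (the in-memory map stands in for the criteria query; all names are hypothetical):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class DupElimSketch {
    /** Keep only records whose URI is not already stored (toy in-memory "database"). */
    static List<String> dedup(List<String> incomingUris, Map<String, String> stored) {
        List<String> fresh = new ArrayList<>(incomingUris.size());
        for (String uri : incomingUris) {
            String match = stored.get(uri); // stands in for the per-PDO criteria query
            if (match == null || !match.equals(uri)) {
                fresh.add(uri); // no duplicate on file: let it through
            }
        }
        return fresh;
    }

    public static void main(String[] args) {
        Map<String, String> db = Map.of("/obs/A", "/obs/A");
        System.out.println(dedup(List.of("/obs/A", "/obs/B"), db)); // [/obs/B]
    }
}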
@@ -96,6 +96,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
  * 08/08/13     DR16485   ryu          Remove call to getDatabaseId() from getMaxInsertTimeByDbId()
  *                                     so new GFE databases aren't accidentally created.
  * 09/30/2013   #2147     rferrel      Changes to archive hdf5 files.
+ * 11/13/2013   #2517     randerso     Added ORDER BY clause to getOverlappingTimes
  * </pre>
  *
  * @author bphillip
@@ -814,7 +815,8 @@ public class GFEDao extends DefaultPluginDao {
                         .find("SELECT dataTime.validPeriod"
                                 + " FROM GFERecord WHERE parmId = ?"
                                 + " AND dataTime.validPeriod.start < ?"
-                                + " AND dataTime.validPeriod.end > ?",
+                                + " AND dataTime.validPeriod.end > ?"
+                                + " ORDER BY dataTime.validPeriod.start",
                                 new Object[] { parmId, tr.getEnd(),
                                         tr.getStart() });
         return rval;
@@ -83,6 +83,10 @@ import com.raytheon.uf.common.util.Pair;
  *                                     Scalar/VectorGridSlices, refactor
  *                                     Discrete/WeatherGridSlices builders.
  * Jun 05, 2013  #2063     dgilling    Port history() from A1.
+ * Nov 11, 2013  #2517     randerso    Changed put() to support multiple discontiguous saves
+ *                                     Added getKeys(tr) to get grid times overlapping a time range
+ *                                     Removed caching of inventory as it was not being updated when
+ *                                     grids were updated/deleted
  *
  * </pre>
  *
@@ -103,8 +107,6 @@ public class IFPWE {
 
     private final GridParmInfo gpi;
 
-    private List<TimeRange> availableTimes;
-
     private final WsId wsId;
 
     /**
@@ -126,23 +128,41 @@ public class IFPWE {
     }
 
     /**
-     * Returns the available times of data for the parm
+     * Returns all available times of data for the parm
      *
-     * @return
+     * @return the time ranges of all available data for the parm
      */
     public List<TimeRange> getKeys() {
-        if (availableTimes == null) {
-            availableTimes = new ArrayList<TimeRange>();
-            List<TimeRange> times = GridParmManager.getGridInventory(parmId)
-                    .getPayload();
-            if (times != null) {
-                Collections.sort(times);
-                availableTimes.addAll(times);
-            }
+        List<TimeRange> availableTimes;
+        ServerResponse<List<TimeRange>> sr = GridParmManager
+                .getGridInventory(parmId);
+        if (sr.isOkay()) {
+            availableTimes = sr.getPayload();
+        } else {
+            availableTimes = Collections.emptyList();
         }
         return availableTimes;
     }
 
     /**
+     * Returns available times of data for the parm that overlap a time range
+     *
+     * @param tr
+     *            the desired time range
+     * @return the time ranges of data that overlap the desired time range
+     */
+    public List<TimeRange> getKeys(TimeRange tr) {
+        List<TimeRange> overlappingTimes;
+        ServerResponse<List<TimeRange>> sr = GridParmManager.getGridInventory(
+                parmId, tr);
+        if (sr.isOkay()) {
+            overlappingTimes = sr.getPayload();
+        } else {
+            overlappingTimes = Collections.emptyList();
+        }
+        return overlappingTimes;
+    }
+
+    /**
      * Returns the grid parm info
      *
|
||||||
* storage.
|
* storage.
|
||||||
*
|
*
|
||||||
* @param inventory
|
* @param inventory
|
||||||
* A Map of TimeRanges to IGridSlices to be saved. Time is the
|
* A Map of TimeRanges to List of IGridSlices. TimeRange is the
|
||||||
* slice's valid time.
|
* replacement time range
|
||||||
* @param timeRangeSpan
|
|
||||||
* The replacement time range of grids to be saved. Must cover
|
|
||||||
* each individual TimeRange in inventory.
|
|
||||||
* @throws GfeException
|
* @throws GfeException
|
||||||
* If an error occurs while trying to obtain a lock on the
|
* If an error occurs while trying to obtain a lock on the
|
||||||
* destination database.
|
* destination database.
|
||||||
*/
|
*/
|
||||||
public void put(LinkedHashMap<TimeRange, IGridSlice> inventory,
|
public void put(LinkedHashMap<TimeRange, List<IGridSlice>> inventory)
|
||||||
TimeRange timeRangeSpan) throws GfeException {
|
throws GfeException {
|
||||||
statusHandler.debug("Getting lock for ParmID: " + parmId + " TR: "
|
|
||||||
+ timeRangeSpan);
|
|
||||||
ServerResponse<List<LockTable>> lockResponse = LockManager
|
|
||||||
.getInstance().requestLockChange(
|
|
||||||
new LockRequest(parmId, timeRangeSpan, LockMode.LOCK),
|
|
||||||
wsId, siteId);
|
|
||||||
if (lockResponse.isOkay()) {
|
|
||||||
statusHandler.debug("LOCKING: Lock granted for: " + wsId
|
|
||||||
+ " for time range: " + timeRangeSpan);
|
|
||||||
} else {
|
|
||||||
statusHandler.error("Could not lock TimeRange " + timeRangeSpan
|
|
||||||
+ " for parm [" + parmId + "]: " + lockResponse.message());
|
|
||||||
throw new GfeException("Request lock failed. "
|
|
||||||
+ lockResponse.message());
|
|
||||||
}
|
|
||||||
|
|
||||||
List<GFERecord> records = new ArrayList<GFERecord>(inventory.size());
|
for (Entry<TimeRange, List<IGridSlice>> entry : inventory.entrySet()) {
|
||||||
for (Entry<TimeRange, IGridSlice> entry : inventory.entrySet()) {
|
TimeRange timeRangeSpan = entry.getKey();
|
||||||
GFERecord rec = new GFERecord(parmId, entry.getKey());
|
statusHandler.debug("Getting lock for ParmID: " + parmId + " TR: "
|
||||||
rec.setGridHistory(entry.getValue().getHistory());
|
+ timeRangeSpan);
|
||||||
rec.setMessageData(entry.getValue());
|
ServerResponse<List<LockTable>> lockResponse = LockManager
|
||||||
records.add(rec);
|
|
||||||
}
|
|
||||||
SaveGridRequest sgr = new SaveGridRequest(parmId, timeRangeSpan,
|
|
||||||
records);
|
|
||||||
|
|
||||||
try {
|
|
||||||
ServerResponse<?> sr = GridParmManager.saveGridData(
|
|
||||||
Arrays.asList(sgr), wsId, siteId);
|
|
||||||
if (sr.isOkay()) {
|
|
||||||
SendNotifications.send(sr.getNotifications());
|
|
||||||
} else {
|
|
||||||
statusHandler.error("Unable to save grids for parm [" + parmId
|
|
||||||
+ "] over time range " + timeRangeSpan + ": "
|
|
||||||
+ sr.message());
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
ServerResponse<List<LockTable>> unLockResponse = LockManager
|
|
||||||
.getInstance().requestLockChange(
|
.getInstance().requestLockChange(
|
||||||
new LockRequest(parmId, timeRangeSpan,
|
new LockRequest(parmId, timeRangeSpan,
|
||||||
LockMode.UNLOCK), wsId, siteId);
|
LockMode.LOCK), wsId, siteId);
|
||||||
if (unLockResponse.isOkay()) {
|
if (lockResponse.isOkay()) {
|
||||||
statusHandler.debug("LOCKING: Unlocked for: " + wsId + " TR: "
|
statusHandler.debug("LOCKING: Lock granted for: " + wsId
|
||||||
+ timeRangeSpan);
|
+ " for time range: " + timeRangeSpan);
|
||||||
} else {
|
} else {
|
||||||
statusHandler.error("Could not unlock TimeRange "
|
statusHandler.error("Could not lock TimeRange " + timeRangeSpan
|
||||||
+ timeRangeSpan + " for parm [" + parmId + "]: "
|
+ " for parm [" + parmId + "]: "
|
||||||
+ lockResponse.message());
|
+ lockResponse.message());
|
||||||
throw new GfeException("Request unlock failed. "
|
throw new GfeException("Request lock failed. "
|
||||||
+ unLockResponse.message());
|
+ lockResponse.message());
|
||||||
|
}
|
||||||
|
|
||||||
|
List<IGridSlice> gridSlices = entry.getValue();
|
||||||
|
List<GFERecord> records = new ArrayList<GFERecord>(
|
||||||
|
gridSlices.size());
|
||||||
|
for (IGridSlice slice : gridSlices) {
|
||||||
|
GFERecord rec = new GFERecord(parmId, slice.getValidTime());
|
||||||
|
rec.setGridHistory(slice.getHistory());
|
||||||
|
rec.setMessageData(slice);
|
||||||
|
records.add(rec);
|
||||||
|
}
|
||||||
|
SaveGridRequest sgr = new SaveGridRequest(parmId, timeRangeSpan,
|
||||||
|
records);
|
||||||
|
|
||||||
|
try {
|
||||||
|
ServerResponse<?> sr = GridParmManager.saveGridData(
|
||||||
|
Arrays.asList(sgr), wsId, siteId);
|
||||||
|
if (sr.isOkay()) {
|
||||||
|
SendNotifications.send(sr.getNotifications());
|
||||||
|
} else {
|
||||||
|
statusHandler.error("Unable to save grids for parm ["
|
||||||
|
+ parmId + "] over time range " + timeRangeSpan
|
||||||
|
+ ": " + sr.message());
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
ServerResponse<List<LockTable>> unLockResponse = LockManager
|
||||||
|
.getInstance().requestLockChange(
|
||||||
|
new LockRequest(parmId, timeRangeSpan,
|
||||||
|
LockMode.UNLOCK), wsId, siteId);
|
||||||
|
if (unLockResponse.isOkay()) {
|
||||||
|
statusHandler.debug("LOCKING: Unlocked for: " + wsId
|
||||||
|
+ " TR: " + timeRangeSpan);
|
||||||
|
} else {
|
||||||
|
statusHandler.error("Could not unlock TimeRange "
|
||||||
|
+ timeRangeSpan + " for parm [" + parmId + "]: "
|
||||||
|
+ lockResponse.message());
|
||||||
|
throw new GfeException("Request unlock failed. "
|
||||||
|
+ unLockResponse.message());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
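put() now iterates the inventory map and runs one lock, save, unlock cycle per replacement range; the part worth copying is that the unlock sits in a finally block, so a failed save cannot leak the lock. A minimal sketch of that discipline (the ReentrantLock and map are stand-ins for LockManager and the grid inventory):

import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

public class PerRangeSave {
    static final ReentrantLock LOCK = new ReentrantLock(); // LockManager stand-in

    static void put(Map<String, String> inventory) {
        // One lock/save/unlock cycle per replacement range, as in put() above.
        for (Map.Entry<String, String> entry : inventory.entrySet()) {
            LOCK.lock();
            try {
                System.out.println("saving " + entry.getValue()
                        + " over range " + entry.getKey());
            } finally {
                LOCK.unlock(); // always released, even if the save throws
            }
        }
    }

    public static void main(String[] args) {
        put(Map.of("00Z-06Z", "gridA", "06Z-12Z", "gridB"));
    }
}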
File diff suppressed because it is too large
@@ -1,30 +1,30 @@
 ##
 # This software was developed and / or modified by Raytheon Company,
 # pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 #
 # U.S. EXPORT CONTROLLED TECHNICAL DATA
 # This software product contains export-restricted data whose
 # export/transfer/disclosure is restricted by U.S. law. Dissemination
 # to non-U.S. persons whether in the United States or abroad requires
 # an export license or other authorization.
 #
 # Contractor Name:        Raytheon Company
 # Contractor Address:     6825 Pine Street, Suite 340
 #                         Mail Stop B8
 #                         Omaha, NE 68106
 #                         402.291.0100
 #
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
 
-import string, IrtAccess, JUtil
+import string, IrtAccess, JUtil, logging
 import xml, pickle, tempfile, os
 from xml.etree import ElementTree
 from xml.etree.ElementTree import Element, SubElement
 import LogStream
 from datetime import datetime
-from time import gmtime,strftime
+from time import gmtime, strftime
 from java.io import File
 from com.raytheon.uf.common.time import TimeRange
 from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation
@@ -34,68 +34,69 @@ from com.raytheon.edex.plugin.gfe.config import IFPServerConfigManager
 from com.raytheon.uf.common.serialization import SerializationUtil
 from com.raytheon.uf.common.localization import LocalizationFile
 from com.raytheon.uf.common.localization import PathManagerFactory
 from com.raytheon.uf.common.localization import LocalizationContext
 from com.raytheon.uf.common.localization import LocalizationContext_LocalizationType as LocalizationType
 from com.raytheon.uf.common.localization import LocalizationContext_LocalizationLevel as LocalizationLevel
 
 #
 # Utility module of isc functions
 #
 # SOFTWARE HISTORY
 #
 # Date            Ticket#       Engineer       Description
 # ------------    ----------    -----------    --------------------------
 # 07/06/09        1995          bphillip       Initial Creation.
 # 02/19/13        1637          randerso       Removed unused import
 # 03/11/13        1759          dgilling       Move siteConfig import into
 #                                              methods where it's needed.
-#
+# 11/07/13        2517          randerso       Allow getLogger to override logLevel
 #
+#
 #
 
 def getEditArea(name, siteID):
 
     pathMgr = PathManagerFactory.getPathManager();
 
     commonStaticConfig = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.SITE)
     commonStaticConfig.setContextName(siteID)
-    file = pathMgr.getFile(commonStaticConfig,"gfe/editAreas"+ File.separator + name + ".xml")
+    file = pathMgr.getFile(commonStaticConfig, "gfe/editAreas" + File.separator + name + ".xml")
 
     if not os.path.exists(file.getPath()):
         commonStaticConfig = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED)
         commonStaticConfig.setContextName(siteID)
-        file = pathMgr.getFile(commonStaticConfig,"gfe/editAreas"+ File.separator + name + ".xml")
+        file = pathMgr.getFile(commonStaticConfig, "gfe/editAreas" + File.separator + name + ".xml")
 
     refData = None
 
     try:
         if os.path.exists(file.getPath()):
             refData = SerializationUtil.jaxbUnmarshalFromXmlFile(file.getPath());
         else:
-            LogStream.logProblem("EDIT AREA NOT FOUND: ",name," for site ",siteID)
+            LogStream.logProblem("EDIT AREA NOT FOUND: ", name, " for site ", siteID)
     except:
         LogStream.logProblem("Unable to unmarshal " + name + " in iscExtract")
 
     return refData
 
 def saveEditAreaGrid(maskName, iscMask, siteID):
     iscMask.getPolygons(CoordinateType.LATLON);
 
     pathMgr = PathManagerFactory.getPathManager();
     commonStaticConfig = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED)
     commonStaticConfig.setContextName(siteID)
-    sitePath = pathMgr.getFile(commonStaticConfig,"gfe/editAreas").getPath()
+    sitePath = pathMgr.getFile(commonStaticConfig, "gfe/editAreas").getPath()
     editAreaPath = str(sitePath) + "/" + maskName + ".xml"
     SerializationUtil.jaxbMarshalToXmlFile(iscMask, editAreaPath)
 
 def deleteEditArea(name, siteID):
     pathMgr = PathManagerFactory.getPathManager()
     commonStaticConfig = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.CONFIGURED)
     commonStaticConfig.setContextName(siteID)
-    file = pathMgr.getFile(commonStaticConfig,"gfe/editAreas"+ File.separator + name + ".xml")
+    file = pathMgr.getFile(commonStaticConfig, "gfe/editAreas" + File.separator + name + ".xml")
     file.delete()
 
 def transformTime(tr):
     return (int(tr.getStart().getTime() / 1000), int(tr.getEnd().getTime() / 1000))
 
 def toJavaTimeRange(tr):
@@ -106,7 +107,7 @@ def swapCoord(coord):
     coord.y = coord.x
     coord.x = temp
     return coord
 
 def serverBoxText(server):
     #returns text based on the server dictionary that should be placed
     #into the dialog
@@ -117,13 +118,13 @@ def serverBoxText(server):
         hostport = server['host'] + "-primary"
     elif server['port'] == "98000001":
         hostport = server['host'] + "-svcbu"
 
     if hostport is None:
         hostport = server['host'] + "/" + server['port']
 
     return server['site'] + "-> " + hostport + "@" + \
       server['mhsid'].lower()
 
 def sortServers(a, b):
     # sort function for the list of servers.  Sorts in priority order for
     # most likely to have the data.  Order is:
@@ -135,15 +136,15 @@ def sortServers(a, b):
     sameSiteA = (a['mhsid'] == a['site'])
     sameSiteB = (b['mhsid'] == b['site'])
     if sameSiteA and not sameSiteB:
-        return - 1
+        return -1
     elif not sameSiteA and sameSiteB:
         return 1
     #both are same sites, check for host next
     else:
         regHostA = (a['host'][0:3] in ['dx4', 'px3'])
         regHostB = (b['host'][0:3] in ['dx4', 'px3'])
         if regHostA and not regHostB:
-            return - 1
+            return -1
         elif not regHostA and regHostB:
             return 1
         # same host, but not preferred host
@@ -151,11 +152,11 @@ def sortServers(a, b):
             regPortA = (a['port'] == "98000000")
             regPortB = (b['port'] == "98000000")
             if regPortA and not regPortB:
-                return - 1
+                return -1
             elif not regPortA and regPortB:
                 return 1
     return 1   #must be non-standard, put at end of list
 
 def createDomainDict(xml):
     irt = IrtAccess.IrtAccess("")
     #decodes the packet of information from the ISC_REQUEST_QUERY call
@@ -171,7 +172,7 @@ def createDomainDict(xml):
         return None
     if serversE.tag != "servers":
         LogStream.logEvent('servers tag not found in createDomainDict')
         return None   #invalid xml
 
     #decode XML and create dictionary and parms list
     domains = {}
@@ -185,7 +186,7 @@ def createDomainDict(xml):
             if name == "site":
                 site = value
                 break
         if site is None:
             LogStream.logProblem('Malformed domain site XML')
             continue
         for addressE in domainE.getchildren():
@@ -196,62 +197,62 @@ def createDomainDict(xml):
             list.append(info)
             guiText = serverBoxText(info)
             serverDictT2S[guiText] = info
             serverDictS2T[str(info)] = guiText
         list.sort(sortServers)
         domains[site] = list
 
     elif domainE.tag == "welist":
         for parmE in domainE.getchildren():
             welist.append(parmE.text)
         welist.sort()
 
     retVal = {}
     retVal['serverDictS2T'] = serverDictS2T
     retVal['serverDictT2S'] = serverDictT2S
     retVal['domains'] = domains
 
     tempfile.tempdir = "/tmp/"
     fname = tempfile.mktemp(".bin")
     FILE = open(fname, "w")
     pickle.dump(retVal, FILE)
     FILE.close()
 
     FILE = open(fname, "r")
     lines = FILE.readlines()
     FILE.close()
     os.remove(fname)
 
     pickledFile = ""
     for line in lines:
         pickledFile += line
 
     return pickledFile
 
 def unPickle(str):
-    import pickle,tempfile,os,JUtil
+    import pickle, tempfile, os, JUtil
     tempfile.tempdir = "/tmp/"
     fname = tempfile.mktemp(".bin")
-    FILE = open(fname,"w")
+    FILE = open(fname, "w")
     FILE.write(str)
     FILE.close()
 
-    FILE = open(fname,"r")
+    FILE = open(fname, "r")
     retVal = pickle.load(FILE)
     FILE.close()
     return retVal
 
 
-def getRequestXML(xml,selectedServers, selectedWEList):
+def getRequestXML(xml, selectedServers, selectedWEList):
     irt = IrtAccess.IrtAccess("")
     selectedServers = JUtil.javaStringListToPylist(selectedServers)
     selectedWElist = JUtil.javaStringListToPylist(selectedWEList)
 
     response = unPickle(createDomainDict(xml))
     serverDictT2S = response['serverDictT2S']
     domainDict = response['domains']
 
     iscReqE = Element('iscrequest')
     servers = []
     for serverT in selectedServers:
         server = serverDictT2S[serverT]
         servers.append(server)
@@ -275,46 +276,44 @@ def getRequestXML(xml, selectedServers, selectedWEList):
 
     # send to ifpServer
     xmlreq = ElementTree.tostring(iscReqE)
 
     return xmlreq;
 
-def getLogger(scriptName, logName=None):
-    import logging
+def getLogger(scriptName, logName=None, logLevel=logging.INFO):
     # be relocating this import here we allow
     # com.raytheon.edex.plugin.gfe.isc.IscScript to dynamically
     # modify its include path with the proper siteConfig just before
     # execution time
     import siteConfig
 
     if logName is None:
-        logPath=siteConfig.GFESUITE_LOGDIR+"/"+strftime("%Y%m%d", gmtime())
-        logName=scriptName+".log"
+        logPath = siteConfig.GFESUITE_LOGDIR + "/" + strftime("%Y%m%d", gmtime())
+        logName = scriptName + ".log"
     else:
-        logPath=os.path.dirname(logName)
-        if len(logPath)==0:
-            logPath=siteConfig.GFESUITE_LOGDIR+"/"+strftime("%Y%m%d", gmtime())
-            logName=os.path.basename(logName)
+        logPath = os.path.dirname(logName)
+        if len(logPath) == 0:
+            logPath = siteConfig.GFESUITE_LOGDIR + "/" + strftime("%Y%m%d", gmtime())
+            logName = os.path.basename(logName)
 
-    logFile=logPath+"/"+logName
+    logFile = logPath + "/" + logName
 
     if not os.path.exists(logPath):
         os.makedirs(logPath)
 
     theLog = logging.getLogger(scriptName)
-    theLog.setLevel(logging.INFO)
+    theLog.setLevel(logLevel)
     ch = logging.FileHandler(logFile)
 
-    ch.setLevel(logging.INFO)
+    ch.setLevel(logLevel)
     formatter = logging.Formatter("%(levelname)s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s")
     ch.setFormatter(formatter)
     for h in theLog.handlers:
         theLog.removeHandler(h)
     theLog.addHandler(ch)
     return theLog
 
 def tupleToString(*msg):
-    concatMsg=""
+    concatMsg = ""
     for m in msg:
-        concatMsg=concatMsg+" "+str(m)
+        concatMsg = concatMsg + " " + str(m)
     return concatMsg
 
@@ -1,19 +1,19 @@
 ##
 # This software was developed and / or modified by Raytheon Company,
 # pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 #
 # U.S. EXPORT CONTROLLED TECHNICAL DATA
 # This software product contains export-restricted data whose
 # export/transfer/disclosure is restricted by U.S. law. Dissemination
 # to non-U.S. persons whether in the United States or abroad requires
 # an export license or other authorization.
 #
 # Contractor Name:        Raytheon Company
 # Contractor Address:     6825 Pine Street, Suite 340
 #                         Mail Stop B8
 #                         Omaha, NE 68106
 #                         402.291.0100
 #
 # See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 # further licensing information.
 ##
@@ -30,14 +30,15 @@ import LogStream, fcntl
 # Vector: ((magGrid, dirGrid), history)
 # Weather: ((byteGrid, key), history)
 # Discrete: ((byteGrid, key), history)
 #
 # SOFTWARE HISTORY
 #
 # Date            Ticket#       Engineer       Description
 # ------------    ----------    -----------    --------------------------
 # 07/06/09        1995          bphillip       Initial Creation.
-#
+# 11/05/13        2517          randerso       Improve memory utilization
 #
+#
 #
 
 
@ -54,7 +55,7 @@ class MergeGrid:
|
||||||
# gridType = 'SCALAR', 'VECTOR', 'WEATHER', 'DISCRETE'
|
# gridType = 'SCALAR', 'VECTOR', 'WEATHER', 'DISCRETE'
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def __init__(self, creationTime, siteID, inFillValue, outFillValue,
|
def __init__(self, creationTime, siteID, inFillValue, outFillValue,
|
||||||
areaMask, gridType, discreteKeys = None):
|
areaMask, gridType, discreteKeys=None):
|
||||||
self.__creationTime = creationTime
|
self.__creationTime = creationTime
|
||||||
self.__siteID = siteID
|
self.__siteID = siteID
|
||||||
self.__inFillV = inFillValue
|
self.__inFillV = inFillValue
|
||||||
|
@ -91,13 +92,13 @@ class MergeGrid:
|
||||||
gridB = wxB[0]
|
gridB = wxB[0]
|
||||||
key = wxA[1]
|
key = wxA[1]
|
||||||
newGrid = numpy.zeros_like(gridB)
|
newGrid = numpy.zeros_like(gridB)
|
||||||
|
|
||||||
for k in range(len(wxB[1])):
|
for k in range(len(wxB[1])):
|
||||||
index = self.__findKey(wxB[1][k], key)
|
index = self.__findKey(wxB[1][k], key)
|
||||||
newGrid = numpy.where(gridB == k, index, newGrid)
|
newGrid[gridB == k] = index
|
||||||
|
|
||||||
return (key, wxA[0], newGrid)
|
return (key, wxA[0], newGrid)
|
||||||
|
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# update history strings
|
# update history strings
|
||||||
|
@ -107,17 +108,17 @@ class MergeGrid:
|
||||||
# returns None if no history is present.
|
# returns None if no history is present.
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def __updateHistoryStrings(self, historyA, historyB):
|
def __updateHistoryStrings(self, historyA, historyB):
|
||||||
|
|
||||||
out = []
|
out = []
|
||||||
|
|
||||||
# removal any old entry
|
# removal any old entry
|
||||||
if historyB is not None:
|
if historyB is not None:
|
||||||
for h in historyB:
|
for h in historyB:
|
||||||
index = string.find(h, ":"+ self.__siteID + "_GRID")
|
index = string.find(h, ":" + self.__siteID + "_GRID")
|
||||||
if index == -1:
|
if index == -1:
|
||||||
out.append(h)
|
out.append(h)
|
||||||
|
|
||||||
# if add mode, add in new entries
|
# if add mode, add in new entries
|
||||||
if historyA is not None:
|
if historyA is not None:
|
||||||
for h in historyA:
|
for h in historyA:
|
||||||
out.append(h)
|
out.append(h)
|
||||||
|
@ -125,33 +126,33 @@ class MergeGrid:
|
||||||
if len(out) > 0:
|
if len(out) > 0:
|
||||||
return out
|
return out
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# merge scalar grid
|
# merge scalar grid
|
||||||
# Note: gridA can be None, which indicates that the data
|
# Note: gridA can be None, which indicates that the data
|
||||||
# is to be blanked out, i.e., made invalid. gridB can also be
|
# is to be blanked out, i.e., made invalid. gridB can also be
|
||||||
# none, which indicates that there is no destination grid and one must
|
# none, which indicates that there is no destination grid and one must
|
||||||
# be created.
|
# be created.
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def __mergeScalarGrid(self, gridA, gridB):
|
def __mergeScalarGrid(self, gridA, gridB):
|
||||||
if gridA is None and gridB is None:
|
if gridA is None and gridB is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# merge the grids
|
# merge the grids
|
||||||
if gridA is not None:
|
if gridA is not None:
|
||||||
inMask = numpy.not_equal(gridA, self.__inFillV)
|
mask = numpy.not_equal(gridA, self.__inFillV)
|
||||||
mask = numpy.logical_and(inMask, self.__areaMask)
|
numpy.logical_and(mask, self.__areaMask, mask)
|
||||||
|
|
||||||
if gridB is None:
|
if gridB is None:
|
||||||
gridB = numpy.zeros(gridA.shape) + self.__outFillV
|
return numpy.where(mask, gridA, self.__outFillV)
|
||||||
return numpy.where(mask, gridA, gridB)
|
else:
|
||||||
|
return numpy.where(mask, gridA, gridB)
|
||||||
|
|
||||||
# blank out the data
|
# blank out the data
|
||||||
else:
|
else:
|
||||||
blankGrid = numpy.zeros(gridB.shape) + self.__outFillV
|
return numpy.where(self.__areaMask, self.__outFillV, gridB)
|
||||||
return numpy.where(self.__areaMask, blankGrid, gridB)
|
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# merge vector grid
|
# merge vector grid
|
||||||
# Note: gridA can be None, which indicates that the data
|
# Note: gridA can be None, which indicates that the data
|
||||||
|
@ -159,50 +160,47 @@ class MergeGrid:
|
||||||
# none, which indicates that there is no destination grid and one must
|
# none, which indicates that there is no destination grid and one must
|
||||||
# be created.
|
# be created.
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def __mergeVectorGrid(self, gridA, gridB):
|
def __mergeVectorGrid(self, gridA, gridB):
|
||||||
if gridA is None and gridB is None:
|
if gridA is None and gridB is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# merge the grids
|
# merge the grids
|
||||||
if gridA is not None:
|
if gridA is not None:
|
||||||
inMask = numpy.not_equal(gridA[0], self.__inFillV)
|
mask = numpy.not_equal(gridA[0], self.__inFillV)
|
||||||
mask = numpy.logical_and(inMask, self.__areaMask)
|
numpy.logical_and(mask, self.__areaMask, mask)
|
||||||
|
|
||||||
if gridB is None:
|
if gridB is None:
|
||||||
gridSize = gridA[0].shape
|
magGrid = numpy.where(mask, gridA[0], self.__outFillV)
|
||||||
gridB = (numpy.zeros(gridSize) + self.__outFillV,
|
dirGrid = numpy.where(mask, gridA[1], 0.0)
|
||||||
numpy.zeros(gridSize) + 0.0)
|
else:
|
||||||
|
magGrid = numpy.where(mask, gridA[0], gridB[0])
|
||||||
magGrid = numpy.where(mask, gridA[0], gridB[0])
|
dirGrid = numpy.where(mask, gridA[1], gridB[1])
|
||||||
dirGrid = numpy.where(mask, gridA[1], gridB[1])
|
|
||||||
return (magGrid, dirGrid)
|
return (magGrid, dirGrid)
|
||||||
|
|
||||||
# blank out the data
|
# blank out the data
|
||||||
else:
|
else:
|
||||||
blankGrid = numpy.zeros(gridB[0].shape) + self.__outFillV
|
magGrid = numpy.where(self.__areaMask, self.__outFillV, gridB[0])
|
||||||
blankDirGrid = numpy.zeros_like(gridB[1])
|
dirGrid = numpy.where(self.__areaMask, 0.0, gridB[1])
|
||||||
magGrid = numpy.where(self.__areaMask, blankGrid, gridB[0])
|
|
||||||
dirGrid = numpy.where(self.__areaMask, blankDirGrid, gridB[1])
|
|
||||||
return (magGrid, dirGrid)
|
return (magGrid, dirGrid)
|
||||||
|
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# merge weather grid
|
# merge weather grid
|
||||||
#
|
#
|
||||||
# Note the outFillV is ignored for now, all out-of-bounds points will
|
# Note the outFillV is ignored for now, all out-of-bounds points will
|
||||||
# get the <NoWx> value.
|
# get the <NoWx> value.
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def __mergeWeatherGrid(self, gridA, gridB):
|
def __mergeWeatherGrid(self, gridA, gridB):
|
||||||
|
|
||||||
if gridA is None and gridB is None:
|
if gridA is None and gridB is None:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
noWx = "<NoCov>:<NoWx>:<NoInten>:<NoVis>:"
|
noWx = "<NoCov>:<NoWx>:<NoInten>:<NoVis>:"
|
||||||
# merge the grids
|
# merge the grids
|
||||||
if gridA is not None:
|
if gridA is not None:
|
||||||
inMask = numpy.not_equal(gridA[0], self.__inFillV)
|
mask = numpy.not_equal(gridA[0], self.__inFillV)
|
||||||
mask = numpy.logical_and(inMask, self.__areaMask)
|
numpy.logical_and(mask, self.__areaMask, mask)
|
||||||
|
|
||||||
if gridB is None: #make an empty grid
|
if gridB is None: #make an empty grid
|
||||||
noWxKeys = []
|
noWxKeys = []
|
||||||
noWxGrid = numpy.empty_like(gridA[0])
|
noWxGrid = numpy.empty_like(gridA[0])
|
||||||
|
@ -211,15 +209,15 @@ class MergeGrid:
|
||||||
(commonkey, remapG, dbG) = self.__commonizeKey(gridA, gridB)
|
(commonkey, remapG, dbG) = self.__commonizeKey(gridA, gridB)
|
||||||
mergedGrid = numpy.where(mask, remapG, dbG)
|
mergedGrid = numpy.where(mask, remapG, dbG)
|
||||||
return (mergedGrid, commonkey)
|
return (mergedGrid, commonkey)
|
||||||
|
|
||||||
# blank out the data
|
# blank out the data
|
||||||
else:
|
else:
|
||||||
blankGrid = numpy.empty_like(gridB[0])
|
blankGrid = numpy.empty_like(gridB[0])
|
||||||
blankGrid.fill(self.__findKey(noWx, gridB[1]))
|
blankGrid.fill(self.__findKey(noWx, gridB[1]))
|
||||||
key = gridB[1]
|
key = gridB[1]
|
||||||
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
|
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
|
||||||
return (grid, key)
|
return (grid, key)
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# merge discrete grid
|
# merge discrete grid
|
||||||
#
|
#
|
||||||
|
@ -231,23 +229,23 @@ class MergeGrid:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
noKey = self.__discreteKeys[0]
|
noKey = self.__discreteKeys[0]
|
||||||
|
|
||||||
# merge the grids
|
# merge the grids
|
||||||
if gridA is not None:
|
if gridA is not None:
|
||||||
inMask = numpy.not_equal(gridA[0], self.__inFillV)
|
mask = numpy.not_equal(gridA[0], self.__inFillV)
|
||||||
mask = numpy.logical_and(inMask, self.__areaMask)
|
numpy.logical_and(mask, self.__areaMask)
|
||||||
|
|
||||||
if gridB is None: #make an empty grid
|
if gridB is None: #make an empty grid
|
||||||
noKeys = []
|
noKeys = []
|
||||||
noGrid = numpy.empty_like(gridA[0])
|
noGrid = numpy.empty_like(gridA[0])
|
||||||
noGrid.fill(self.__findKey(noKey, noKeys))
|
noGrid.fill(self.__findKey(noKey, noKeys))
|
||||||
gridB = (noGrid, noKeys)
|
gridB = (noGrid, noKeys)
|
||||||
|
|
||||||
(commonkey, remapG, dbG) = \
|
(commonkey, remapG, dbG) = \
|
||||||
self.__commonizeKey(gridA, gridB)
|
self.__commonizeKey(gridA, gridB)
|
||||||
mergedGrid = numpy.where(mask, remapG, dbG)
|
mergedGrid = numpy.where(mask, remapG, dbG)
|
||||||
return (mergedGrid, commonkey)
|
return (mergedGrid, commonkey)
|
||||||
|
|
||||||
# blank out the data
|
# blank out the data
|
||||||
else:
|
else:
|
||||||
blankGrid = numpy.empty_like(gridB[0])
|
blankGrid = numpy.empty_like(gridB[0])
|
||||||
|
@ -255,7 +253,7 @@ class MergeGrid:
|
||||||
key = gridB[1]
|
key = gridB[1]
|
||||||
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
|
grid = numpy.where(self.__areaMask, blankGrid, gridB[0])
|
||||||
return (grid, key)
|
return (grid, key)
|
||||||
|
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
# mergeGrid
|
# mergeGrid
|
||||||
# Merges the grid
|
# Merges the grid
|
||||||
|
@ -270,8 +268,8 @@ class MergeGrid:
|
||||||
# none, which indicates that there is no destination grid and one must
|
# none, which indicates that there is no destination grid and one must
|
||||||
# be created.
|
# be created.
|
||||||
#---------------------------------------------------------------------
|
#---------------------------------------------------------------------
|
||||||
def mergeGrid(self, gridAIn, gridBIn):
|
def mergeGrid(self, gridAIn, gridBIn):
|
||||||
# merge the grids
|
# merge the grids
|
||||||
if gridAIn is not None:
|
if gridAIn is not None:
|
||||||
gridA = gridAIn[0]
|
gridA = gridAIn[0]
|
||||||
historyA = gridAIn[1]
|
historyA = gridAIn[1]
|
||||||
|
@ -279,28 +277,28 @@ class MergeGrid:
|
||||||
gridA = None
|
gridA = None
|
||||||
historyA = None
|
historyA = None
|
||||||
if gridBIn is not None:
|
if gridBIn is not None:
|
||||||
gridB = gridBIn[0]
|
gridB = gridBIn[0]
|
||||||
historyB = gridBIn[1]
|
historyB = gridBIn[1]
|
||||||
else:
|
else:
|
||||||
gridB = None
|
gridB = None
|
||||||
historyB = None
|
historyB = None
|
||||||
|
|
||||||
if self.__gridType == 'SCALAR':
|
if self.__gridType == 'SCALAR':
|
||||||
mergedGrid = self.__mergeScalarGrid(gridA, gridB)
|
mergedGrid = self.__mergeScalarGrid(gridA, gridB)
|
||||||
|
|
||||||
elif self.__gridType == 'VECTOR':
|
elif self.__gridType == 'VECTOR':
|
||||||
mergedGrid = self.__mergeVectorGrid(gridA, gridB)
|
mergedGrid = self.__mergeVectorGrid(gridA, gridB)
|
||||||
|
|
||||||
elif self.__gridType == 'WEATHER':
|
elif self.__gridType == 'WEATHER':
|
||||||
mergedGrid = self.__mergeWeatherGrid(gridA, gridB)
|
mergedGrid = self.__mergeWeatherGrid(gridA, gridB)
|
||||||
|
|
||||||
elif self.__gridType == 'DISCRETE':
|
elif self.__gridType == 'DISCRETE':
|
||||||
mergedGrid = self.__mergeDiscreteGrid(gridA, gridB)
|
mergedGrid = self.__mergeDiscreteGrid(gridA, gridB)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
mergedGrid = None
|
mergedGrid = None
|
||||||
|
|
||||||
# merge History
|
# merge History
|
||||||
history = self.__updateHistoryStrings(historyA, historyB)
|
history = self.__updateHistoryStrings(historyA, historyB)
|
||||||
|
|
||||||
return (mergedGrid, history)
|
return (mergedGrid, history)
|
||||||
|
|
|
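The MergeGrid rewrite above is the "Improve memory utilization" change: the inMask temporary is folded into an in-place numpy.logical_and, numpy.where against a scalar fill value replaces pre-built fill grids, and boolean-index assignment replaces numpy.where in the key-remapping loop. A small self-contained sketch of the pattern (shapes and fill values invented for illustration):

    import numpy

    inFillV = -99999.0
    gridA = numpy.array([[1.0, inFillV], [3.0, 4.0]])
    areaMask = numpy.array([[True, True], [False, True]])

    # Before: two boolean temporaries per merge call.
    inMask = numpy.not_equal(gridA, inFillV)
    oldMask = numpy.logical_and(inMask, areaMask)

    # After: the first mask doubles as the output buffer, so only one
    # boolean array is ever allocated.
    mask = numpy.not_equal(gridA, inFillV)
    numpy.logical_and(mask, areaMask, mask)
    assert (mask == oldMask).all()

    # Boolean-index assignment updates newGrid in place, instead of the
    # full-size copy that numpy.where(gridB == k, index, newGrid) built
    # on every pass through the key loop.
    newGrid = numpy.zeros_like(gridA)
    newGrid[gridA == 3.0] = 7.0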
@@ -1,67 +1,53 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
+<beans
+    xmlns="http://www.springframework.org/schema/beans"
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
     xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
     http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

-    <bean id="obsDecoder" class="com.raytheon.edex.plugin.obs.ObsDecoder"/>
+    <bean id="obsDecoder" class="com.raytheon.edex.plugin.obs.ObsDecoder" />

-    <bean id="metarPointData" class="com.raytheon.edex.plugin.obs.metar.MetarPointDataTransform"/>
+    <bean id="metarPointData" class="com.raytheon.edex.plugin.obs.metar.MetarPointDataTransform" />

     <bean id="obsSeparator" class="com.raytheon.edex.plugin.obs.metar.MetarSeparator" />

-    <bean id="obsDistRegistry" factory-bean="distributionSrv"
-        factory-method="register">
+    <bean id="obsDistRegistry" factory-bean="distributionSrv" factory-method="register">
         <constructor-arg value="obs" />
-        <constructor-arg value="jms-dist:queue:Ingest.obs"/>
+        <constructor-arg value="jms-dist:queue:Ingest.obs" />
     </bean>

-    <bean id="obsCamelRegistered" factory-bean="contextManager"
-        factory-method="register"
+    <bean id="obsCamelRegistered" factory-bean="contextManager" factory-method="register"
         depends-on="persistCamelRegistered,
         shefCamelRegistered,
         metarToHMDBCamelRegistered">
-        <constructor-arg ref="obs-camel"/>
+        <constructor-arg ref="obs-camel" />
     </bean>

-    <camelContext id="obs-camel"
-        xmlns="http://camel.apache.org/schema/spring"
-        errorHandlerRef="errorHandler"
-        autoStartup="false">
-        <!--
-        <endpoint id="metarFileEndpoint" uri="file:${edex.home}/data/sbn/metar?noop=true&amp;idempotent=false" />
-
-        <route id="metarFileConsumerRoute">
-            <from ref="metarFileEndpoint" />
-            <bean ref="fileToString" />
-            <setHeader headerName="pluginName">
-                <constant>obs</constant>
-            </setHeader>
-            <to uri="jms-durable:queue:Ingest.obs" />
-        </route>
-        -->
+    <camelContext id="obs-camel" xmlns="http://camel.apache.org/schema/spring"
+        errorHandlerRef="errorHandler" autoStartup="false">

         <!-- Begin METAR routes -->
         <route id="metarIngestRoute">
-            <from uri="jms-durable:queue:Ingest.obs"/>
+            <from uri="jms-durable:queue:Ingest.obs" />
             <setHeader headerName="pluginName">
                 <constant>obs</constant>
             </setHeader>
             <doTry>
                 <pipeline>
                     <bean ref="stringToFile" />
                     <bean ref="obsDecoder" method="decode" />
+                    <bean ref="dupElim" />
                     <bean ref="metarPointData" method="toPointData" />
                     <multicast>
                         <to uri="direct-vm:persistIndexAlert" />
                         <to uri="direct-vm:metarToShef" />
-                        <to uri="direct-vm:metarToHMDB"/>
+                        <to uri="direct-vm:metarToHMDB" />
                     </multicast>
                 </pipeline>
                 <doCatch>
                     <exception>java.lang.Throwable</exception>
-                    <to uri="log:metar?level=ERROR"/>
+                    <to uri="log:metar?level=ERROR" />
                 </doCatch>
             </doTry>
         </route>
     </camelContext>
 </beans>

@@ -1,68 +1,52 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
+<beans
+    xmlns="http://www.springframework.org/schema/beans"
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
     xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
     http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd">

     <bean id="sfcobsDecoder" class="com.raytheon.edex.plugin.sfcobs.SfcObsDecoder" />
     <bean id="sfcobsSeparator" class="com.raytheon.edex.plugin.sfcobs.SfcObsSeparator" />

     <bean id="sfcobsPointData" class="com.raytheon.uf.common.dataplugin.sfcobs.dao.SfcObsPointDataTransform">
         <constructor-arg ref="sfcobsPluginName" />
     </bean>

-    <bean id="sfcobsDistRegistry" factory-bean="distributionSrv"
-        factory-method="register">
+    <bean id="sfcobsDistRegistry" factory-bean="distributionSrv" factory-method="register">
         <constructor-arg value="sfcobs" />
-        <constructor-arg value="jms-dist:queue:Ingest.sfcobs"/>
+        <constructor-arg value="jms-dist:queue:Ingest.sfcobs" />
     </bean>

-    <bean id="sfcobsCamelRegistered" factory-bean="contextManager"
-        factory-method="register"
+    <bean id="sfcobsCamelRegistered" factory-bean="contextManager" factory-method="register"
         depends-on="persistCamelRegistered,
         shefCamelRegistered">
-        <constructor-arg ref="sfcobs-camel"/>
+        <constructor-arg ref="sfcobs-camel" />
     </bean>

-    <camelContext id="sfcobs-camel"
-        xmlns="http://camel.apache.org/schema/spring"
-        errorHandlerRef="errorHandler"
-        autoStartup="false">
-        <!--
-        <endpoint id="sfcobsFileEndpoint"
-            uri="file:${edex.home}/data/sbn/sfcobs?noop=true&amp;idempotent=false" />
-
-        <route id="sfcobsFileConsumerRoute">
-            <from ref="sfcobsFileEndpoint" />
-            <bean ref="fileToString" />
-            <setHeader headerName="pluginName">
-                <constant>sfcobs</constant>
-            </setHeader>
-            <to uri="jms-durable:queue:Ingest.sfcobs" />
-        </route>
-        -->
+    <camelContext id="sfcobs-camel" xmlns="http://camel.apache.org/schema/spring"
+        errorHandlerRef="errorHandler" autoStartup="false">

         <!-- Begin sfcobs routes -->
         <route id="sfcobsIngestRoute">
-            <from uri="jms-durable:queue:Ingest.sfcobs"/>
+            <from uri="jms-durable:queue:Ingest.sfcobs" />
             <setHeader headerName="pluginName">
                 <constant>sfcobs</constant>
             </setHeader>
             <bean ref="stringToFile" />
             <doTry>
                 <pipeline>
                     <bean ref="sfcobsDecoder" method="decode" />
+                    <bean ref="dupElim" />
                     <bean ref="sfcobsPointData" method="toPointData" />
                     <multicast>
                         <to uri="direct-vm:persistIndexAlert" />
-                        <to uri="direct-vm:synopticToShef"/>
+                        <to uri="direct-vm:synopticToShef" />
                     </multicast>
                 </pipeline>
                 <doCatch>
                     <exception>java.lang.Throwable</exception>
-                    <to uri="log:sfcobs?level=ERROR"/>
+                    <to uri="log:sfcobs?level=ERROR" />
                 </doCatch>
             </doTry>
         </route>
     </camelContext>
 </beans>

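Both ingest routes above gain a dupElim step between the decoder and the point-data transform, so records that were already stored are dropped before being persisted again. The real bean is defined elsewhere in EDEX; the sketch below is only a conceptual stand-in (the class name, dict-shaped records, and the dataURI key are illustrative assumptions, not the actual EDEX implementation):

    class DuplicateEliminator:
        """Pass through only records whose dataURI has not been seen."""

        def __init__(self):
            self.seen = set()

        def filter(self, records):
            fresh = []
            for rec in records:
                uri = rec["dataURI"]
                if uri not in self.seen:
                    self.seen.add(uri)
                    fresh.append(rec)
            return fresh

    dup = DuplicateEliminator()
    batch = [{"dataURI": "/obs/KOMA/1200"}, {"dataURI": "/obs/KOMA/1200"}]
    assert len(dup.filter(batch)) == 1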
@@ -63,7 +63,7 @@ import com.raytheon.uf.common.util.ConvertUtil;
  */
 public class DataURIUtil {

-    private static final String PLUGIN_NAME_KEY = "pluginName";
+    public static final String PLUGIN_NAME_KEY = "pluginName";

     private static final String FIELD_SEPARATOR = ".";

@@ -6,6 +6,8 @@ archive.cron=0+40+*+*+*+?
 archive.purge.enable=true
 # purge archives
 archive.purge.cron=0+5+0/3+*+*+?
+# compress database records
+archive.compression.enable=true

 # to disable a specific archive, use property archive.disable=pluginName,pluginName...
 #archive.disable=grid,text,acars
@@ -19,6 +19,8 @@
 **/
 package com.raytheon.uf.edex.archive;

+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
@@ -80,7 +82,8 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
 * Jan 18, 2013 1469 bkowal  Removed the hdf5 data directory.
 * Oct 23, 2013 2478 rferrel Make date format thread safe.
 *                           Add debug information.
-* Nov 05, 2013 2499 rjpeter Repackaged, removed config files, always compresses.
+* Nov 05, 2013 2499 rjpeter Repackaged, removed config files, always compresses hdf5.
+* Nov 11, 2013 2478 rjpeter Updated data store copy to always copy hdf5.
 * </pre>
 *
 * @author rjpeter
@@ -114,12 +117,17 @@ public class DatabaseArchiver implements IPluginArchiver {
     /** Cluster time out on lock. */
     private static final int CLUSTER_LOCK_TIMEOUT = 60000;

+    /** Chunk size for I/O Buffering and Compression */
+    private static final int CHUNK_SIZE = 8192;
+
     /** Mapping for plug-in formatters. */
     private final Map<String, IPluginArchiveFileNameFormatter> pluginArchiveFormatters;

     /** When true dump the pdos. */
     private final boolean debugArchiver;

+    private final boolean compressDatabaseFiles;
+
     /**
      * The constructor.
      */
@@ -128,6 +136,8 @@ public class DatabaseArchiver implements IPluginArchiver {
         pluginArchiveFormatters.put("default",
                 new DefaultPluginArchiveFileNameFormatter());
         debugArchiver = Boolean.getBoolean("archive.debug.enable");
+        compressDatabaseFiles = Boolean
+                .getBoolean("archive.compression.enable");
     }

     @Override
@@ -259,12 +269,9 @@ public class DatabaseArchiver implements IPluginArchiver {
                     .join(archivePath, pluginName, dataStoreFile));

             try {
-                // data must be older than 30 minutes, and no older than
-                // hours to keep hours need to lookup plugin and see if
-                // compression matches, or embed in configuration the
-                // compression level on archive, but would still need to
-                // lookup plugin
-                ds.copy(outputDir, compRequired, "lastArchived", 0, 0);
+                // copy the changed hdf5 file, does repack if
+                // compRequired, otherwise pure file copy
+                ds.copy(outputDir, compRequired, null, 0, 0);
             } catch (StorageException e) {
                 statusHandler.handle(Priority.PROBLEM,
                         e.getLocalizedMessage());
@@ -325,7 +332,11 @@ public class DatabaseArchiver implements IPluginArchiver {
                 path.setLength(path.length() - 3);
             }
             int pathDebugLength = path.length();
-            path.append(".bin.gz");
+            if (compressDatabaseFiles) {
+                path.append(".bin.gz");
+            } else {
+                path.append(".bin");
+            }

             File file = new File(path.toString());
             List<PersistableDataObject> pdosToSerialize = entry.getValue();
@@ -338,7 +349,13 @@ public class DatabaseArchiver implements IPluginArchiver {
             try {

-                // created gzip'd stream
-                is = new GZIPInputStream(new FileInputStream(file), 8192);
+                // create the input stream, gzip'd when compression is enabled
+                if (compressDatabaseFiles) {
+                    is = new GZIPInputStream(new FileInputStream(file),
+                            CHUNK_SIZE);
+                } else {
+                    is = new BufferedInputStream(new FileInputStream(file),
+                            CHUNK_SIZE);
+                }

                 // transform back for list append
                 @SuppressWarnings("unchecked")
@@ -400,7 +417,12 @@ public class DatabaseArchiver implements IPluginArchiver {
             }

-            // created gzip'd stream
-            os = new GZIPOutputStream(new FileOutputStream(file), 8192);
+            // create the output stream, gzip'd when compression is enabled
+            if (compressDatabaseFiles) {
+                os = new GZIPOutputStream(new FileOutputStream(file), CHUNK_SIZE);
+            } else {
+                os = new BufferedOutputStream(new FileOutputStream(file),
+                        CHUNK_SIZE);
+            }

             // Thrift serialize pdo list
             SerializationUtil.transformToThriftUsingStream(pdosToSerialize,

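The DatabaseArchiver changes make compression of archived database records configurable: when archive.compression.enable is true the archiver writes .bin.gz through a GZIPOutputStream, otherwise plain .bin through a buffered stream, both using the 8192-byte CHUNK_SIZE. A compact Python sketch of the same decision (the file path, record shape, and pickle standing in for Thrift serialization are assumptions for illustration):

    import gzip
    import pickle

    def write_records(path_base, records, compress):
        # Suffix and stream type both follow the compression flag,
        # mirroring path.append(".bin.gz") vs path.append(".bin").
        path = path_base + (".bin.gz" if compress else ".bin")
        opener = gzip.open if compress else open
        with opener(path, "wb") as out:
            pickle.dump(records, out)  # stand-in for Thrift serialization
        return path

    def read_records(path, compress):
        # The reader must agree with the writer's flag, just as the Java
        # side pairs GZIPInputStream with GZIPOutputStream.
        opener = gzip.open if compress else open
        with opener(path, "rb") as f:
            return pickle.load(f)

    p = write_records("/tmp/archive_demo", [{"id": 1}], compress=True)
    assert read_records(p, compress=True) == [{"id": 1}]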
@@ -27,6 +27,7 @@
 * Aug 05, 2013 2224 rferrel Changes to add dataSet tags.
 * Oct 01, 2013 2147 rferrel Date time stamp no longer requires an hour field.
 * Nov 05, 2013 2497 rferrel Change root directory.
+* Nov 13, 2013 2549 rferrel Changes to GFE and modelsounding.
 *
 * @author rferrel
 * @version 1.0
@@ -151,7 +152,7 @@
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
       </dataSet>
       <dataSet>
-        <dirPattern>gfe/(.*)/(Fcst|Official)</dirPattern>
+        <dirPattern>gfe/(.*)/(.*)</dirPattern>
         <filePattern>.*_(\d{4})(\d{2})(\d{2})_.*</filePattern>
         <displayLabel>{1} - {2}</displayLabel>
         <dateGroupIndices>3,4,5</dateGroupIndices>
@@ -177,11 +178,11 @@
         <filePattern>.*-(\d{4})-(\d{2})-(\d{2})-(\d{2})-.*</filePattern>
       </dataSet>
       <dataSet>
-        <dirPattern>(modelsounding)/(.*)</dirPattern>
+        <dirPattern>(modelsounding)/(.*)/.*</dirPattern>
         <dirPattern>(bufrmos)(.*)</dirPattern>
         <displayLabel>{1} - {2}</displayLabel>
         <dateGroupIndices>3,4,5,6</dateGroupIndices>
-        <filePattern>.*(\d{4})-(\d{2})-(\d{2})[-_](\d{2}).*</filePattern>
+        <filePattern>.*(\d{4})-(\d{2})-(\d{2})-(\d{2}).*</filePattern>
       </dataSet>
     </category>
     <category>

@@ -219,6 +219,20 @@
         <dateGroupIndices>1,2,3,4</dateGroupIndices>
       </dataSet>
     </category>
+    <category>
+      <name>Radar (Local)</name>
+      <extRetentionHours>168</extRetentionHours>
+      <dataSet>
+        <dirPattern>radar/([k|t|e|f]\w{3})/.*</dirPattern> <!-- one level like GSM or HI --> <!-- e and f are for FAA ASR and ARSR radars -->
+        <dirPattern>radar/(k...|t...|e...|f...)/.*/.*</dirPattern> <!-- two levels like ML -->
+        <dirPattern>radar/(k...|t...|e...|f...)/.*/.*/.*</dirPattern> <!-- three levels like ML -->
+        <dirPattern>radar/(k...|t...|e...|f...)/.*/.*/.*/.*</dirPattern> <!-- four levels like Z -->
+        <dirPattern>radar/(k...|t...|e...|f...)/.*/.*/.*/.*/.*</dirPattern> <!-- five levels like Z (superres) -->
+        <filePattern>(\w{4}).(\d*).(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(.*)</filePattern>
+        <displayLabel>{1}</displayLabel>
+        <dateGroupIndices>4,5,6,7</dateGroupIndices>
+      </dataSet>
+    </category>
     <category>
       <name>Satellite</name>
       <extRetentionHours>168</extRetentionHours>

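In the new Radar (Local) category, one dirPattern per directory depth feeds a single filePattern, and dateGroupIndices count capture groups across dirPattern plus filePattern combined (the same convention the gfe dataSet above uses, where indices 3,4,5 point past the two directory groups). A quick check of the file pattern against a made-up product name:

    import re

    # filePattern from the new Radar (Local) dataSet
    filePattern = r"(\w{4}).(\d*).(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(.*)"
    m = re.match(filePattern, "koax.153.20131114_0305")  # invented sample name
    assert m is not None
    # Within the filePattern alone, year/month/day/hour are groups 3-6; the
    # dirPattern contributes group 1 (the radar id), which shifts them to
    # 4,5,6,7 in the combined numbering that dateGroupIndices refers to.
    assert (m.group(3), m.group(4), m.group(5), m.group(6)) == ("2013", "11", "14", "03")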
@@ -1,126 +1,126 @@
 #!/bin/bash
 if [ ${#AWIPS_HOME} = 0 ]
 then
     path_to_script=`readlink -f $0`
     export AWIPS_HOME=$(dirname $(dirname $(dirname $(dirname $path_to_script))))
 fi

 . ${AWIPS_HOME}/GFESuite/ServiceBackup/configuration/svcbu.env

 # Create the log file
 logdir=${IFPS_LOG}/`date +%Y%m%d`
 logfil=svcbu_receive_grids_from_bksite`date +%H%M`
 logfile=${logdir}/${logfil}
 [ ! -d ${logdir} ] && (umask 000;mkdir ${logdir})
 touch ${logdir}/${logfil}
 exec 1>${logdir}/${logfil} 2>&1

 # Check the status of the lock file to see if we are OK to proceed
 if [ -f ${LOCK_DIR}/importBkSiteGrids ];
 then
     log_msg Cannot process grids.
     exit 1
 fi

 touch ${LOCK_DIR}/importBkSiteGrids

 log_msg 0

 # Retrieve the name of the site from the tar file.
 import_grd_file=${1}
 if [ -a ${import_grd_file} ]
 then
     log_msg "Import Grids file is ${import_grd_file}"
     mv ${import_grd_file} ${GFESUITE_HOME}/Grd
     cd ${GFESUITE_HOME}

     tar xf Grd
     if [ $? -eq 0 ]; then
         SITE=`cat siteID.txt | tr '[a-z]' '[A-Z]'`
         site=`echo $SITE | tr '[A-Z]' '[a-z]'`
         gunzip -f ${site}Grd.netcdf.gz
     else
         # move the file to appropriate directory.
         mv -f Grd Grd.netcdf.gz
         chmod 777 Grd.netcdf.gz

         log_msg "Gunzipping ${GFESUITE_HOME}/Grd.netcdf.gz"
         gunzip -f Grd.netcdf.gz
         if [ $? -ne 0 ];
         then
             log_msg "ERROR: Could not gunzip ${GFESUITE_HOME}/Grd.netcdf.gz"
             rm -f ${LOCK_DIR}/importBkSiteGrids
             log_msg 100
             exit 1
         fi
         log_msg "Done Gunzipping!"

         log_msg "Finding site-id using ncdump method."
         hdr=`mktemp`
         ncdump -h ${GFESUITE_HOME}/Grd.netcdf > $hdr
         SITE=`grep siteID $hdr | head -1 | cut -d'"' -f2`
         site=`echo $SITE | tr '[A-Z]' '[a-z]'`
         if [ -z $site ]
         then
             log_msg "ERROR: Could not find out the site from ncdump method..."
             rm -f $hdr
             rm -f ${LOCK_DIR}/importBkSiteGrids
             log_msg 100
             exit 1
         fi

         mv -f ${GFESUITE_HOME}/Grd.netcdf ${GFESUITE_HOME}/${site}Grd.netcdf
         rm -f $hdr
     fi
     log_msg "site is $site"

 else
     log_msg "Unable to locate the gridded data of the site, ${import_grd_file}"
     log_msg "You will need to request your backup site to send grids again."
     log_msg 100
     rm -f ${LOCK_DIR}/importBkSiteGrids
     exit 1
 fi

 log_msg 50
 # Check if import file was supplied correctly by msg_send.
 import_file=${GFESUITE_HOME}/${site}Grd.netcdf
 log_msg "import_file=${import_file}"
 SITE=`echo ${SITE} | tr '[a-z]' '[A-Z]'`
 if [ -a ${import_file} ]
 then
     # use iscMosaic to load grids into databases
     log_msg "Running iscMosaic to unpack gridded data..."
     ${GFESUITE_BIN}/iscMosaic -h $SVCBU_HOST -r $CDSPORT -d ${SITE}_GRID__Restore_00000000_0000 -f ${import_file} -n -x
     if [ $? -ne 0 ];
     then
         log_msg "ERROR: iscMosaic failed to run correctly. Please re-run iscMosaic manually."
         log_msg 100
         rm -f ${LOCK_DIR}/importBkSiteGrids
         exit 1
     fi
     log_msg "Finished running iscMosaic..."
     # Generate a GFE message saying new Grids have arrived in Restore database.
     cd ${GFESUITE_BIN}
-    sendGfeMessage -h ${SVCBU_HOST} -p ${CDSPORT} -u -m "Restore database has been populated with new grids."
+    ./sendGfeMessage -h ${SVCBU_HOST} -p ${CDSPORT} -u -m "Restore database has been populated with new grids."
 else
     log_msg "Unable to locate the gridded data of the site, ${import_file} You will need to request your backup site to send grids again."
     log_msg 100
     rm -f ${LOCK_DIR}/importBkSiteGrids
     exit 1
 fi

 # clean-up.
 rm -f ${GFESUITE_HOME}/Grd
 rm -f ${GFESUITE_HOME}/${SITE}Grd* siteID.txt
 log_msg 100
 rm -f ${LOCK_DIR}/importBkSiteGrids
 # All is well, send a msg and get out of here
 log_msg "Importing Grids from backup site is completed. You may start your GFE now."
 log_msg "Grids received from backup site are stored in ${SITE}_GRID__Restore_00000000_0000 database."
 exit 0

@ -1,25 +0,0 @@
|
||||||
Copyright (c) 2009, Swiss AviationSoftware Ltd. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
- Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
- Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
- Neither the name of the Swiss AviationSoftware Ltd. nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from this
|
|
||||||
software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
POSSIBILITY OF SUCH DAMAGE.
|
|
|
@ -1,178 +0,0 @@
|
||||||
#
|
|
||||||
# AWIPS II Eclipse Spec File
|
|
||||||
#
|
|
||||||
|
|
||||||
# --define arguments:
|
|
||||||
# %{_uframe_eclipse}
|
|
||||||
# %{_build_root}
|
|
||||||
# %{_baseline_workspace}
|
|
||||||
|
|
||||||
Name: awips2-eclipse
|
|
||||||
Summary: AWIPS II Eclipse Distribution
|
|
||||||
Version: 3.6.1
|
|
||||||
Release: 1
|
|
||||||
Group: AWIPSII
|
|
||||||
BuildRoot: %{_build_root}
|
|
||||||
URL: N/A
|
|
||||||
License: N/A
|
|
||||||
Distribution: N/A
|
|
||||||
Vendor: Raytheon
|
|
||||||
Packager: Bryan Kowal
|
|
||||||
|
|
||||||
AutoReq: no
|
|
||||||
provides: awips2-eclipse
|
|
||||||
|
|
||||||
%description
|
|
||||||
AWIPS II Eclipse Distribution - Contains the AWIPS II Eclipse Distribution.
|
|
||||||
|
|
||||||
# Turn off the brp-python-bytecompile script
|
|
||||||
%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g')
|
|
||||||
%global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-java-repack-jars[[:space:]].*$!!g')
|
|
||||||
|
|
||||||
%prep
|
|
||||||
# Verify That The User Has Specified A BuildRoot.
|
|
||||||
if [ "%{_build_root}" = "/tmp" ]
|
|
||||||
then
|
|
||||||
echo "An Actual BuildRoot Must Be Specified. Use The --buildroot Parameter."
|
|
||||||
echo "Unable To Continue ... Terminating"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -d %{_build_root} ]; then
|
|
||||||
rm -rf %{_build_root}
|
|
||||||
fi
|
|
||||||
mkdir -p %{_build_root}/awips2/eclipse
|
|
||||||
|
|
||||||
%build
|
|
||||||
|
|
||||||
%install
|
|
||||||
mkdir -p %{_build_root}/awips2/eclipse
|
|
||||||
# The location of the awips2 eclipse source directory will be
|
|
||||||
# specified as a command line argument. Fail if the specified
|
|
||||||
# directory cannot be found.
|
|
||||||
if [ ! -d %{_uframe_eclipse} ]; then
|
|
||||||
echo "ERROR: Unable To Find The AWIPS II Eclipse Distribution."
|
|
||||||
echo "Unable To Continue ... Terminating"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Copy the uframe eclipse distribution.
|
|
||||||
cp -r %{_uframe_eclipse}/* %{_build_root}/awips2/eclipse
|
|
||||||
|
|
||||||
# Copy eclipse.sh to our build-directory.
|
|
||||||
cp %{_baseline_workspace}/rpms/awips2.ade/Installer.eclipse/scripts/* \
|
|
||||||
%{_build_root}/awips2/eclipse
|
|
||||||
|
|
||||||
# delete the basemaps and etc links
|
|
||||||
rm -f %{_build_root}/awips2/eclipse/basemaps
|
|
||||||
rm -f %{_build_root}/awips2/eclipse/etc
|
|
||||||
|
|
||||||
%pre
|
|
||||||
JAVA_INSTALL="<Not Installed>"
|
|
||||||
PYTHON_INSTALL="<Not Installed>"
|
|
||||||
ANT_INSTALL="<Not Installed>"
|
|
||||||
|
|
||||||
INSTALL_PATH="/awips2/java"
|
|
||||||
if [ -d ${INSTALL_PATH} ]; then
|
|
||||||
JAVA_INSTALL=${INSTALL_PATH}
|
|
||||||
fi
|
|
||||||
|
|
||||||
INSTALL_PATH="/awips2/python"
|
|
||||||
if [ -d ${INSTALL_PATH} ]; then
|
|
||||||
PYTHON_INSTALL=${INSTALL_PATH}
|
|
||||||
fi
|
|
||||||
|
|
||||||
INSTALL_PATH="/awips2/ant"
|
|
||||||
if [ -d ${INSTALL_PATH} ]; then
|
|
||||||
ANT_INSTALL=${INSTALL_PATH}
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
|
|
||||||
echo -e "\e[1;34m\| Installing the AWIPS II Eclipse Distribution...\e[m"
|
|
||||||
echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
|
|
||||||
echo -e "\e[1;34m Java Detected At: ${JAVA_INSTALL}\e[m"
|
|
||||||
echo -e "\e[1;34m Python Detected At: ${PYTHON_INSTALL}\e[m"
|
|
||||||
echo -e "\e[1;34m Ant Detected At: ${ANT_INSTALL}\e[m"
|
|
||||||
|
|
||||||
%post
|
|
||||||
echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
|
|
||||||
echo -e "\e[1;34m\| Creating ADE Eclipse Desktop Shortcut...\e[m"
|
|
||||||
echo -e "\e[1;34m--------------------------------------------------------------------------------\e[m"
|
|
||||||
ADE_ECLIPSE_SHORTCUT="ade-eclipse"
|
|
||||||
SHORTCUT_OWNER="${USER}"
|
|
||||||
CREATE_SHORTCUT="true"
|
|
||||||
if [ ! "${SUDO_USER}" = "" ]; then
|
|
||||||
SHORTCUT_OWNER="${SUDO_USER}"
|
|
||||||
fi
|
|
||||||
echo -e "\e[1;34m Creating Shortcut For User: ${SHORTCUT_OWNER}\e[m"
|
|
||||||
|
|
||||||
USER_HOME_DIR="~${SHORTCUT_OWNER}"
|
|
||||||
if [ ! -d ${USER_HOME_DIR} ]; then
|
|
||||||
USER_HOME_DIR="/home/${SHORTCUT_OWNER}"
|
|
||||||
echo " (Assuming User Home Directory Is Under '/home')"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ ! -d ${USER_HOME_DIR}/Desktop ]; then
|
|
||||||
echo -e "\e[1;31m ERROR: Unable To Find The User's Desktop!!!"
|
|
||||||
CREATE_SHORTCUT="false"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "${CREATE_SHORTCUT}" = "true" ]; then
|
|
||||||
SHORTCUT_TMP="${USER_HOME_DIR}/Desktop/${ADE_ECLIPSE_SHORTCUT}.tmp"
|
|
||||||
SHORTCUT="${USER_HOME_DIR}/Desktop/${ADE_ECLIPSE_SHORTCUT}.desktop"
|
|
||||||
|
|
||||||
if [ -f ${SHORTCUT} ]; then
|
|
||||||
echo -n " Attempting To Remove The Existing Shortcut ... "
|
|
||||||
sudo -u ${SHORTCUT_OWNER} rm -f ${SHORTCUT}
|
|
||||||
if [ ! -f ${SHORTCUT} ]; then
|
|
||||||
echo -n "SUCCESS"
|
|
||||||
else
|
|
||||||
echo -n "FAILURE"
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
fi
|
|
||||||
sudo -u ${SHORTCUT_OWNER} touch ${SHORTCUT_TMP}
|
|
||||||
sudo -u ${SHORTCUT_OWNER} chmod 666 ${SHORTCUT_TMP}
|
|
||||||
|
|
||||||
echo "[Desktop Entry]" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Version=1.0" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Encoding=UTF-8" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Name=ADE Eclipse" >> ${SHORTCUT_TMP}
|
|
||||||
echo "GenericName=Eclipse" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Comment=IDE" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Exec=/bin/bash -i -c \"xterm -title 'AWIPS II ADE Eclipse' -e '/awips2/eclipse/eclipseShortcutWrap.sh'\"" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Icon=/awips2/eclipse/icon.xpm" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Terminal=false" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Type=Application" >> ${SHORTCUT_TMP}
|
|
||||||
echo "Categories=Development;IDE;" >> ${SHORTCUT_TMP}
|
|
||||||
|
|
||||||
sudo -u ${SHORTCUT_OWNER} mv ${SHORTCUT_TMP} ${SHORTCUT}
|
|
||||||
sudo -u ${SHORTCUT_OWNER} chmod 644 ${SHORTCUT}
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo -e "\e[1;32m--------------------------------------------------------------------------------\e[m"
|
|
||||||
echo -e "\e[1;32m\| AWIPS II Eclipse Distribution Installation - COMPLETE\e[m"
|
|
||||||
echo -e "\e[1;32m--------------------------------------------------------------------------------\e[m"
|
|
||||||
|
|
||||||
%preun
|
|
||||||
|
|
||||||
%postun
|
|
||||||
|
|
||||||
%clean
|
|
||||||
rm -rf ${RPM_BUILD_ROOT}
|
|
||||||
|
|
||||||
%files
|
|
||||||
%defattr(644,awips,fxalpha,755)
|
|
||||||
%dir /awips2/eclipse
|
|
||||||
/awips2/eclipse/*
|
|
||||||
%defattr(755,awips,fxalpha,755)
|
|
||||||
/awips2/eclipse/about.html
|
|
||||||
/awips2/eclipse/artifacts.xml
|
|
||||||
/awips2/eclipse/eclipse
|
|
||||||
/awips2/eclipse/eclipse.ini
|
|
||||||
/awips2/eclipse/eclipse.sh
|
|
||||||
/awips2/eclipse/eclipseShortcutWrap.sh
|
|
||||||
/awips2/eclipse/epl-v10.html
|
|
||||||
/awips2/eclipse/icon.xpm
|
|
||||||
/awips2/eclipse/libcairo-swt.so
|
|
||||||
/awips2/eclipse/notice.html
|
|
|
@@ -48,4 +48,4 @@ if [ ! -f ${JAVA} ]; then
     exit 1
 fi

-$JAVA -jar ${QPID_HOME}/bin/yajsw/wrapper.jar -c ${QPID_HOME}/conf/${CONF_FILE}
+$JAVA -Xmx32m -XX:MaxPermSize=12m -XX:ReservedCodeCacheSize=4m -jar ${QPID_HOME}/bin/yajsw/wrapper.jar -c ${QPID_HOME}/conf/${CONF_FILE}

@@ -14,7 +14,7 @@ diff -crB a/qpid-java.spec b/qpid-java.spec
 !
 ! Name: awips2-qpid-java
 Version: 0.18
-! Release: 3%{?dist}
+! Release: 4%{?dist}
 Summary: Java implementation of Apache Qpid
 License: Apache Software License
 Group: Development/Java

@@ -400,14 +400,16 @@ if [ "${1}" = "-viz" ]; then
     buildRPM "awips2"
     buildRPM "awips2-common-base"
     #buildRPM "awips2-python-dynamicserialize"
-    buildRPM "awips2-python"
-    buildRPM "awips2-adapt-native"
+    buildRPM "awips2-gfesuite-client"
+    buildRPM "awips2-gfesuite-server"
+    #buildRPM "awips2-python"
+    #buildRPM "awips2-adapt-native"
     #unpackHttpdPypies
     #if [ $? -ne 0 ]; then
     #    exit 1
     #fi
     #buildRPM "awips2-httpd-pypies"
-    buildRPM "awips2-hydroapps-shared"
+    #buildRPM "awips2-hydroapps-shared"
     #buildRPM "awips2-rcm"
     #buildRPM "awips2-tools"
     #buildRPM "awips2-cli"