Merge "Issue #2351 Fixed bugs in Bandwidth and adhoc pointdata flow Change-Id: I6510f8bd685033627b4cdfa53c6269aeee21a4d6" into dd_pre_release_2.1
Former-commit-id: 98c4525dab [formerly 2696f8bc3b [formerly 9cbd8325e0225e1523e0cba20c3c08ded3beb32c]]
Former-commit-id: 2696f8bc3b
Former-commit-id: 5058c9cb9c

commit 915b198377
10 changed files with 267 additions and 123 deletions
@@ -20,7 +20,7 @@
package com.raytheon.uf.viz.datadelivery.subscription.subset;

import java.util.ArrayList;
import java.util.Date;
import java.util.Calendar;
import java.util.List;
import java.util.Map;

@@ -42,6 +42,7 @@ import com.raytheon.uf.common.datadelivery.registry.PointTime;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Time;
import com.raytheon.uf.common.datadelivery.retrieval.util.PointDataSizeUtils;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.SizeUtil;
import com.raytheon.uf.viz.datadelivery.subscription.subset.presenter.PointTimeSubsetPresenter;
import com.raytheon.uf.viz.datadelivery.subscription.subset.xml.PointTimeXML;

@@ -61,6 +62,7 @@ import com.raytheon.uf.viz.datadelivery.subscription.subset.xml.SubsetXML;
* Jun 14, 2013 2108 mpduff Refactored DataSizeUtils and
*                          implement subset size.
* Sep 05, 2013 2335 mpduff Fix times for adhoc point queries.
* Sep 10, 2013 2351 dhladky Finished adhoc queries
*
* </pre>
*

@@ -181,11 +183,13 @@ public class PointSubsetManagerDlg extends
newTimePoint.setFormat(dataSet.getTime().getFormat());
int interval = timingTabControls.getSaveInfo()
        .getDataRetrievalInterval();
Calendar cal = TimeUtil.newGmtCalendar();
newTimePoint.setInterval(interval);
newTimePoint.setStartDate(new Date());
newTimePoint.setEndDate(new Date());
newTimePoint.setInterval(timingTabControls.getDataRetrievalInterval());
newTimePoint.setStartDate(cal.getTime());
cal.add(Calendar.MINUTE, interval * -1);
newTimePoint.setEndDate(cal.getTime());

sub.setLatencyInMinutes(interval);
return newTimePoint;
}
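
For a concrete picture of the window this code builds, a minimal sketch (the interval and timestamps are illustrative, not from this change; note that the "start" ends up being the later of the two instants, which the WFS request builder further below compensates for):

    int interval = 30;                        // minutes, from the timing tab controls
    Calendar cal = TimeUtil.newGmtCalendar(); // e.g. 2013-09-11 12:30Z
    Date start = cal.getTime();               // 12:30Z -> newTimePoint.setStartDate(...)
    cal.add(Calendar.MINUTE, interval * -1);
    Date end = cal.getTime();                 // 12:00Z -> newTimePoint.setEndDate(...)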
@@ -45,7 +45,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Aug 21, 2012 754 dhladky Initial creation
* Sept 11, 2013 2351 dhladky Added more point intervals
*
* </pre>
*

@@ -68,6 +69,11 @@ public class PointTime extends Time implements ISerializableObject,
@XmlElements({ @XmlElement(name = "times", type = Date.class) })
@DynamicSerializeElement
private List<Date> times;

/**
* Intervals for point request
*/
public static final SortedSet<Integer> INTERVALS = Sets.newTreeSet(Arrays.asList(5, 10, 15, 20, 30, 60));

/**
* Default Constructor.

@@ -136,6 +142,6 @@ public class PointTime extends Time implements ISerializableObject,
* @return the allowed refresh intervals
*/
public static SortedSet<Integer> getAllowedRefreshIntervals() {
return Sets.newTreeSet(Arrays.asList(5, 10, 15, 30));
return INTERVALS;
}
}
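
A minimal illustration of the effect of returning the shared INTERVALS constant instead of the old hard-coded list (assumes PointTime is on the classpath; output shown in the comment):

    SortedSet<Integer> intervals = PointTime.getAllowedRefreshIntervals();
    System.out.println(intervals); // [5, 10, 15, 20, 30, 60] instead of the old [5, 10, 15, 30]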
@@ -56,6 +56,7 @@ public class Provider implements ISerializableObject {
* ------------ ---------- ----------- --------------------------
* Feb 16, 2012 dhladky Initial creation
* Aug 16, 2012 1022 djohnson Add bytesPerParameterRequest.
* Sept 10, 2013 2352 dhladky Changed default size for point overhead
*
* </pre>
*

@@ -65,7 +66,7 @@ public class Provider implements ISerializableObject {
public enum ServiceType {

// TODO: Only OPENDAP and WFS have the correct amounts
OPENDAP(5000, BYTES_IN_FLOAT), WCS(5000, BYTES_IN_FLOAT), WFS(1411724,
OPENDAP(5000, BYTES_IN_FLOAT), WCS(5000, BYTES_IN_FLOAT), WFS(711724,
OneByOneBox), WMS(5000, BYTES_IN_FLOAT), WXXM(5000,
BYTES_IN_FLOAT);

@@ -100,7 +101,8 @@ public class Provider implements ISerializableObject {
public long getRequestBytesPerLatLonBoxAndTime(double latSpan,
double lonSpan, int timeSpan) {
// increments are in 5 minutes so 5/5 = 1
return (long) (latSpan * lonSpan * timeSpan / 5 * requestOverheadInBytes);
// 30 min increment would be 30/5 = 6 etc.
return (long) (latSpan * lonSpan * (timeSpan/5) * requestOverheadInBytes);
}
}
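
A worked example of the point-data size estimate, using illustrative spans and the new WFS per-request overhead of 711724 bytes:

    double latSpan = 2.0, lonSpan = 3.0;   // degrees
    int timeSpan = 30;                     // minutes; 30 / 5 = 6 five-minute increments
    long overhead = 711724L;               // ServiceType.WFS request overhead in bytes
    long bytes = (long) (latSpan * lonSpan * (timeSpan / 5) * overhead);
    // 2 * 3 * 6 * 711724 = 25,622,064 bytes, roughly 25 MB per request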
@@ -48,6 +48,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Dec 10, 2012 1259 bsteffen Switch Data Delivery from LatLon to referenced envelopes.
* Jun 11, 2013 2021 dhladky WFS semi-scientific sizing.
* Jun 14, 2013 2108 mpduff Abstracted the class.
* Sept 09, 2013 2351 dhladky Fixed incorrect calculation for default pointdata overhead
*
* </pre>
*

@@ -115,7 +116,6 @@ public abstract class DataSizeUtils<DS extends DataSet> {
PointTime time = (PointTime) ra.getTime();
long l = st.getRequestBytesPerLatLonBoxAndTime(latSpan, lonSpan,
time.getInterval());

return l;
} else {
throw new IllegalStateException(
@@ -34,7 +34,6 @@ import com.raytheon.uf.common.datadelivery.registry.Network;
import com.raytheon.uf.common.datadelivery.registry.PointTime;
import com.raytheon.uf.common.datadelivery.registry.SiteSubscription;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Time;
import com.raytheon.uf.common.datadelivery.registry.handlers.DataDeliveryHandlers;
import com.raytheon.uf.common.event.EventBus;
import com.raytheon.uf.common.registry.handler.RegistryHandlerException;

@@ -115,6 +114,7 @@ import com.raytheon.uf.edex.registry.ebxml.exception.EbxmlRegistryException;
* Jul 11, 2013 2106 djohnson Propose changing available bandwidth returns subscription names.
* Jul 18, 2013 1653 mpduff Added case GET_SUBSCRIPTION_STATUS.
* Aug 06, 2013 1654 bgonzale Added SubscriptionRequestEvents.
* Sep 11, 2013 2351 dhladky Fixed adhoc requests for pointdata
* </pre>
*
* @author dhladky

@@ -440,15 +440,13 @@ public abstract class BandwidthManager extends
// Store the AdhocSubscription with a base time of now..
subscriptions.add(bandwidthDao.newBandwidthSubscription(subscription,
now));

// Check start time in Time, if it is blank, we need to add the most
// recent MetaData for the DataSet subscribed to.
final Time subTime = subscription.getTime();
if (subTime.getStart() == null) {
subscription = bandwidthDaoUtil.setAdhocMostRecentUrlAndTime(
subscription, true);
}

/**
* This check allows for retrieval of data older than current for grid.
* This is not allowed for pointdata types, they must grab current URL
* and time.
*/
subscription = bandwidthDaoUtil.setAdhocMostRecentUrlAndTime(
subscription, true);
// Use SimpleSubscriptionAggregator (i.e. no aggregation) to generate a
// SubscriptionRetrieval for this AdhocSubscription
SimpleSubscriptionAggregator a = new SimpleSubscriptionAggregator(
@@ -21,6 +21,7 @@ package com.raytheon.uf.edex.datadelivery.bandwidth;

import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.List;

@@ -36,6 +37,7 @@ import com.google.common.collect.Lists;
import com.raytheon.uf.common.datadelivery.registry.Network;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.datadelivery.bandwidth.dao.BandwidthBucket;
import com.raytheon.uf.edex.datadelivery.bandwidth.dao.IBandwidthBucketDao;
import com.raytheon.uf.edex.datadelivery.bandwidth.retrieval.RetrievalPlan;

@@ -52,6 +54,7 @@ import com.raytheon.uf.edex.datadelivery.bandwidth.util.BandwidthUtil;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 18, 2013 2106 djohnson Extracted from {@link RetrievalPlan}.
* Sept 08, 2013 2351 dhladky Changed from ascending to descending bandwidth bucket selection
*
* </pre>
*

@@ -179,9 +182,13 @@ public class InMemoryBandwidthBucketDao implements IBandwidthBucketDao {
public SortedSet<BandwidthBucket> getBucketsInWindow(Long startMillis,
Long endMillis, Network network) {
// Get the bucket for the startTime and endTime.
Long startKey = ceilingKey(startMillis, network);
Long endKey = floorBucket(endMillis, network);
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("startMillis: " + new Date(startMillis) + "/n"
+ "endMillis: " + new Date(endMillis));
}

Long startKey = floorBucket(startMillis, network);
Long endKey = ceilingKey(endMillis, network);
// Handle the case where an invalid range was somehow specified
// (shouldn't happen, so just throw an exception with as much
// information as we have)

@@ -194,6 +201,14 @@ public class InMemoryBandwidthBucketDao implements IBandwidthBucketDao {

final NavigableMap<Long, BandwidthBucket> buckets = allBuckets
.get(network);

if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler.debug("startKey: " + new Date(startKey) + "\n"
+ "endKey: " + new Date(endKey) + "\n" + "firstKey: "
+ new Date(buckets.firstKey()) + "\n" + "lastKey: "
+ new Date(buckets.lastKey()));
}

NavigableMap<Long, BandwidthBucket> window = buckets.subMap(startKey,
true, endKey, true);
return new TreeSet<BandwidthBucket>(

@@ -210,7 +225,12 @@ public class InMemoryBandwidthBucketDao implements IBandwidthBucketDao {
private Long floorBucket(long key, Network network) {
final NavigableMap<Long, BandwidthBucket> buckets = allBuckets
.get(network);
return buckets.floorKey(key);
Long firstKey = buckets.floorKey(key);
if (firstKey == null) {
firstKey = buckets.firstKey();
}

return firstKey;
}

/**

@@ -223,7 +243,12 @@ public class InMemoryBandwidthBucketDao implements IBandwidthBucketDao {
private Long ceilingKey(long key, Network network) {
final NavigableMap<Long, BandwidthBucket> buckets = allBuckets
.get(network);
return buckets.ceilingKey(key);
Long lastKey = buckets.ceilingKey(key);
if (lastKey == null) {
lastKey = buckets.lastKey();
}

return lastKey;
}

/**
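
A self-contained sketch (not from this change) of why the floor/ceiling pairing matters, using java.util.TreeMap; bucket keys are illustrative 5-minute bucket start times in milliseconds:

    NavigableMap<Long, String> buckets = new TreeMap<Long, String>();
    buckets.put(0L, "bucket-0");
    buckets.put(300000L, "bucket-1");
    buckets.put(600000L, "bucket-2");

    long startMillis = 450000L;  // window starts inside bucket-1
    long endMillis = 550000L;    // and ends inside it as well

    Long startKey = buckets.floorKey(startMillis);  // 300000: the bucket containing the start
    Long endKey = buckets.ceilingKey(endMillis);    // 600000: first bucket at or after the end
    // subMap(startKey, true, endKey, true) spans bucket-1 and bucket-2, whereas the old
    // ceilingKey(start)/floorKey(end) pairing would give 600000/300000 here, an inverted range.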
@@ -29,8 +29,12 @@ import java.util.TreeSet;

import com.google.common.collect.Sets;
import com.raytheon.uf.common.datadelivery.registry.AdhocSubscription;
import com.raytheon.uf.common.datadelivery.registry.DataSetMetaData;
import com.raytheon.uf.common.datadelivery.registry.DataType;
import com.raytheon.uf.common.datadelivery.registry.Subscription;
import com.raytheon.uf.common.datadelivery.registry.Time;
import com.raytheon.uf.common.datadelivery.registry.handlers.DataDeliveryHandlers;
import com.raytheon.uf.common.registry.handler.RegistryHandlerException;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;

@@ -58,6 +62,7 @@ import com.raytheon.uf.edex.datadelivery.bandwidth.retrieval.RetrievalStatus;
* Feb 14, 2013 1595 djohnson Fix not using calendar copies, and backwards max/min operations.
* Jun 03, 2013 2038 djohnson Add ability to schedule down to minute granularity.
* Jun 04, 2013 223 mpduff Refactor changes.
* Sept 10, 2013 2351 dhladky Made adhoc queries for pointdata work
*
* </pre>
*

@@ -308,46 +313,87 @@ public class BandwidthDaoUtil {
AdhocSubscription adhoc, boolean mostRecent) {
AdhocSubscription retVal = null;

List<BandwidthDataSetUpdate> dataSetMetaDataUpdates = bandwidthDao
.getBandwidthDataSetUpdate(adhoc.getProvider(),
adhoc.getDataSetName());
if (!dataSetMetaDataUpdates.isEmpty()) {
// getDataSetMetaData returns the dataset meta-data in descending
// order of time, so walk the iterator finding the first subscribed
// to cycle
BandwidthDataSetUpdate daoToUse = null;
Time adhocTime = adhoc.getTime();
for (BandwidthDataSetUpdate current : dataSetMetaDataUpdates) {
if (mostRecent
|| adhocTime.getCycleTimes().contains(
current.getDataSetBaseTime().get(
Calendar.HOUR_OF_DAY))) {
daoToUse = current;
break;
}
if (adhoc.getDataSetType() == DataType.POINT) {

List<DataSetMetaData> dataSetMetaDatas = null;
try {
dataSetMetaDatas = DataDeliveryHandlers
.getDataSetMetaDataHandler().getByDataSet(
adhoc.getDataSetName(), adhoc.getProvider());
} catch (RegistryHandlerException e) {
statusHandler.handle(Priority.PROBLEM,
"No DataSetMetaData matching query! DataSetName: "
+ adhoc.getDataSetName() + " Provider: "
+ adhoc.getProvider(), e);
}

if (daoToUse == null) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String
.format("There wasn't applicable most recent dataset metadata to use for the adhoc subscription [%s].",
adhoc.getName()));
}
} else {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String
.format("Found most recent metadata for adhoc subscription [%s], using url [%s]",
adhoc.getName(), daoToUse.getUrl()));
}
adhoc.setUrl(daoToUse.getUrl());
adhocTime.setStartDate(daoToUse.getDataSetBaseTime().getTime());

if (dataSetMetaDatas != null && !dataSetMetaDatas.isEmpty()) {
// just grab the most recent one, all we need is the URL
adhoc.setUrl(dataSetMetaDatas.get(0).getUrl());
retVal = adhoc;
}

} else if (adhoc.getDataSetType() == DataType.GRID) {

// if the start time is there, it already has it, skip
if (adhoc.getTime().getStart() == null) {

List<BandwidthDataSetUpdate> dataSetMetaDataUpdates = bandwidthDao
.getBandwidthDataSetUpdate(adhoc.getProvider(),
adhoc.getDataSetName());

if (dataSetMetaDataUpdates != null
&& !dataSetMetaDataUpdates.isEmpty()) {
// getDataSetMetaData returns the dataset meta-data in
// descending
// order of time, so walk the iterator finding the first
// subscribed
// to cycle
BandwidthDataSetUpdate daoToUse = null;
Time adhocTime = adhoc.getTime();
for (BandwidthDataSetUpdate current : dataSetMetaDataUpdates) {
if (mostRecent
|| adhocTime.getCycleTimes().contains(
current.getDataSetBaseTime().get(
Calendar.HOUR_OF_DAY))) {
daoToUse = current;
break;
}
}

if (daoToUse == null) {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String
.format("There wasn't applicable most recent dataset metadata to use for the adhoc subscription [%s].",
adhoc.getName()));
}
} else {
if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
statusHandler
.debug(String
.format("Found most recent metadata for adhoc subscription [%s], using url [%s]",
adhoc.getName(),
daoToUse.getUrl()));
}

adhoc.setUrl(daoToUse.getUrl());
adhocTime.setStartDate(daoToUse.getDataSetBaseTime()
.getTime());

retVal = adhoc;
}
}
} else {
// Just return the adhoc sub
retVal = adhoc;
}
} else {
throw new IllegalArgumentException("DataType: "
+ adhoc.getDataSetType()
+ " Not yet implemented for adhoc subscriptions");
}

return retVal;
}
}
@@ -23,7 +23,6 @@ Require-Bundle: com.raytheon.uf.common.datadelivery.retrieval;bundle-version="1.
com.raytheon.uf.common.dataplugin.madis;bundle-version="1.0.0",
com.raytheon.uf.edex.plugin.madis.ogc;bundle-version="1.0.0",
com.raytheon.uf.edex.plugin.madis;bundle-version="1.0.0",
com.raytheon.uf.edex.wfs;bundle-version="1.0.0",
org.eclipse.xsd;bundle-version="2.8.1"
com.raytheon.uf.edex.wfs;bundle-version="1.0.0"
Export-Package: com.raytheon.uf.edex.datadelivery.retrieval.wfs,
com.raytheon.uf.edex.datadelivery.retrieval.wfs.metadata
@@ -1,7 +1,6 @@
package com.raytheon.uf.edex.datadelivery.retrieval.wfs;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

import org.geotools.geometry.jts.ReferencedEnvelope;

@@ -33,6 +32,7 @@ import com.vividsolutions.jts.geom.Coordinate;
* Jun 11, 2013 1763 dhladky Made operational.
* Jun 18, 2013 2120 dhladky Added times and max feature processing
* Aug 07, 2013 2097 dhladky Revamped WFS to use POST (still 1.1.0 WFS)
* Sept 11, 2013 2351 dhladky Fixed adhoc request times
*
* </pre>
*

@@ -69,6 +69,50 @@ public class WfsRequestBuilder extends RequestBuilder {

public static final String AMPERSAND = "&";

public static final String SPACE = " ";

public static final String NEW_LINE = "\n";

public static final String PROPERTTY_OPEN = "<ogc:PropertyName>";

public static final String PROPERTTY_CLOSE = "</ogc:PropertyName>";

public static final String PROPRERTYISGREATERTHAN_OPEN = "<ogc:PropertyIsGreaterThan>";

public static final String PROPRERTYISGREATERTHAN_CLOSE = "</ogc:PropertyIsGreaterThan>";

public static final String PROPRERTYISLESSTHAN_OPEN = "<ogc:PropertyIsLessThan>";

public static final String PROPRERTYISLESSTHAN_CLOSE = "</ogc:PropertyIsLessThan>";

public static final String ISLITERAL_OPEN = "<ogc:Literal>";

public static final String ISLITERAL_CLOSE = "</ogc:Literal>";

public static final String LOWER_CORNER_OPEN = "<gml:lowerCorner>";

public static final String LOWER_CORNER_CLOSE = "</gml:lowerCorner>";

public static final String UPPER_CORNER_OPEN = "<gml:upperCorner>";

public static final String UPPER_CORNER_CLOSE = "</gml:upperCorner>";

public static final String WITHIN_OPEN = "<ogc:Within>";

public static final String WITHIN_CLOSE = "</ogc:Within>";

public static final String AND_OPEN = "<ogc:And>";

public static final String AND_CLOSE = "</ogc:And>";

public static final String FILTER_OPEN = "<ogc:Filter>";

public static final String FILTER_CLOSE = "</ogc:Filter>";

public static final String ENVELOPE_OPEN = "<gml:Envelope";

public static final String ENVELOPE_CLOSE = "</gml:Envelope>";

private final String wfsURL;

private String typeName = null;

@@ -80,22 +124,19 @@ public class WfsRequestBuilder extends RequestBuilder {
this.typeName = attXML.getPlugin();
StringBuilder buffer = new StringBuilder(256);
buffer.append(createHeader());
buffer.append("<ogc:Filter>\n");
buffer.append(FILTER_OPEN).append(NEW_LINE);

if (attXML.getCoverage() != null && attXML.getTime() != null) {
buffer.append("<ogc:And>\n");
buffer.append(AND_OPEN).append(NEW_LINE);
}

buffer.append(processTime(attXML.getTime()));
buffer.append(processCoverage());
buffer.append(processTime(attXML.getTime())).append(processCoverage());

if (attXML.getCoverage() != null && attXML.getTime() != null) {
buffer.append("</ogc:And>\n");
buffer.append(AND_CLOSE).append(NEW_LINE);
}

buffer.append("</ogc:Filter>\n");
buffer.append(createFooter());

buffer.append(FILTER_CLOSE).append(NEW_LINE).append(createFooter());
this.wfsURL = buffer.toString().trim();
}

@@ -146,28 +187,48 @@ public class WfsRequestBuilder extends RequestBuilder {
public String processTime(Time inTime) {

try {
if (inTime.getStartDate() != null) {

Date sDate = inTime.getStartDate();
Date eDate = inTime.getEndDate();
String endDateString = ogcDateFormat.get().format(eDate);
String startDateString = ogcDateFormat.get().format(sDate);

StringBuilder sb = new StringBuilder(256);
sb.append("<ogc:PropertyIsGreaterThan>\n");
sb.append("<ogc:PropertyName>").append(typeName).append(":timeObs</ogc:PropertyName>\n");
sb.append("<ogc:Literal>").append(startDateString).append("</ogc:Literal>\n");
sb.append("</ogc:PropertyIsGreaterThan>\n");

if (endDateString != null) {
sb.append("<ogc:PropertyIsLessThan>\n");
sb.append("<ogc:PropertyName>").append(typeName).append(":timeObs</ogc:PropertyName>\n");
sb.append("<ogc:Literal>").append(endDateString).append("</ogc:Literal>\n");
sb.append("</ogc:PropertyIsLessThan>\n");
}
String endDateString = null;
String startDateString = null;

return sb.toString();
if (inTime.getStart() != null && inTime.getEnd() != null) {
/**
* THESE ARE ADHOC requests!!!!! They go backwards from normal
* They are calculated (now - interval)
* Hence the start time is before the end time.
* TODO: We should look into this more. I think more problems exist in BWM
*/
startDateString = inTime.getEnd();
endDateString = inTime.getStart();

} else if (inTime.getStartDate() != null
&& inTime.getEndDate() != null) {
/**
* Normal recurring subscription requests
*/
endDateString = ogcDateFormat.get().format(inTime.getEndDate());
startDateString = ogcDateFormat.get().format(inTime.getStartDate());
}

StringBuilder sb = new StringBuilder(256);
sb.append(PROPRERTYISGREATERTHAN_OPEN).append(NEW_LINE);
sb.append(PROPERTTY_OPEN).append(typeName).append(":timeObs")
.append(PROPERTTY_CLOSE).append(NEW_LINE);
sb.append(ISLITERAL_OPEN).append(startDateString)
.append(ISLITERAL_CLOSE).append(NEW_LINE);
sb.append(PROPRERTYISGREATERTHAN_CLOSE).append(NEW_LINE);

if (endDateString != null) {
sb.append(PROPRERTYISLESSTHAN_OPEN).append(NEW_LINE);
sb.append(PROPERTTY_OPEN).append(typeName).append(":timeObs")
.append(PROPERTTY_CLOSE).append(NEW_LINE);
sb.append(ISLITERAL_OPEN).append(endDateString)
.append(ISLITERAL_CLOSE).append(NEW_LINE);
sb.append(PROPRERTYISLESSTHAN_CLOSE).append(NEW_LINE);
}

return sb.toString();

} catch (Exception e) {
statusHandler.error("Couldn't parse Time object.", e);
}
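
For orientation, the kind of time filter this method assembles (values and the "madis" type name are illustrative only; the actual timestamp format comes from ogcDateFormat, and for adhoc requests the start/end strings are swapped as described in the comment above):

    <ogc:PropertyIsGreaterThan>
    <ogc:PropertyName>madis:timeObs</ogc:PropertyName>
    <ogc:Literal>2013-09-11T12:00:00Z</ogc:Literal>
    </ogc:PropertyIsGreaterThan>
    <ogc:PropertyIsLessThan>
    <ogc:PropertyName>madis:timeObs</ogc:PropertyName>
    <ogc:Literal>2013-09-11T12:30:00Z</ogc:Literal>
    </ogc:PropertyIsLessThan>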

@@ -192,22 +253,23 @@
double upperLon = ur.x;
double upperLat = ur.y;

sb.append("<ogc:Within>\n");
sb.append("<ogc:PropertyName>location/location</ogc:PropertyName>\n");
sb.append("<gml:Envelope srsName=\"").append(CRS)
.append("\">\n");
sb.append("<gml:lowerCorner>");
sb.append(WITHIN_OPEN).append(NEW_LINE);
sb.append(PROPERTTY_OPEN).append("location/location")
.append(PROPERTTY_CLOSE).append(NEW_LINE);
sb.append(ENVELOPE_OPEN).append(" srsName=\"").append(CRS)
.append("\">").append(NEW_LINE);
sb.append(LOWER_CORNER_OPEN);
sb.append(lowerLon);
sb.append(" ");
sb.append(SPACE);
sb.append(lowerLat);
sb.append("</gml:lowerCorner>\n");
sb.append("<gml:upperCorner>");
sb.append(LOWER_CORNER_CLOSE).append(NEW_LINE);
sb.append(UPPER_CORNER_OPEN);
sb.append(upperLon);
sb.append(" ");
sb.append(SPACE);
sb.append(upperLat);
sb.append("</gml:upperCorner>\n");
sb.append("</gml:Envelope>\n");
sb.append("</ogc:Within>\n");
sb.append(UPPER_CORNER_CLOSE).append(NEW_LINE);
sb.append(ENVELOPE_CLOSE).append(NEW_LINE);
sb.append(WITHIN_CLOSE).append(NEW_LINE);

return sb.toString();
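
Likewise, the spatial portion produced from the request envelope looks roughly like this (corner coordinates are illustrative; the srsName value comes from the CRS constant and is shown as a placeholder):

    <ogc:Within>
    <ogc:PropertyName>location/location</ogc:PropertyName>
    <gml:Envelope srsName="[CRS]">
    <gml:lowerCorner>-100.0 35.0</gml:lowerCorner>
    <gml:upperCorner>-95.0 40.0</gml:upperCorner>
    </gml:Envelope>
    </ogc:Within>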
@@ -45,6 +45,7 @@ import com.vividsolutions.jts.geom.Envelope;
* Aug 18, 2013 #2097 dhladky Restored original functionality before renaming of this class
* Aug 30, 2013 #2098 dhladky Incorrect time returned
* Sept 2, 2013 #2098 dhladky Improved time management.
* Sept 9, 2013 #2351 dhladky Speed improvements
*
* </pre>
*

@@ -96,35 +97,36 @@ public class MadisRegistryCollectorAddon extends

Collection<MadisRecord> withInGeoConstraint = new ArrayList<MadisRecord>();
PluginDataObject[] pdor = null;
Envelope e = null;

if (getCoverage() != null) {

e = getCoverage().getEnvelope();

for (PluginDataObject record : pdos) {
for (PluginDataObject record : pdos) {

MadisRecord rec = (MadisRecord) record;
MadisRecord rec = (MadisRecord) record;

if (rec != null) {
if (rec != null && rec.getLocation() != null) {

Envelope e = getCoverage().getEnvelope();
Coordinate c = rec.getLocation().getLocation()
.getCoordinate();

if (rec.getLocation() != null) {
if (c != null) {

Coordinate c = rec.getLocation().getLocation()
.getCoordinate();

if (c != null) {

if (e.contains(c)) {
withInGeoConstraint.add(rec);
} else {
statusHandler.handle(
Priority.DEBUG,
"Madis record discarded: outside of range: "
+ rec.getLatitude() + " "
+ rec.getLongitude());
}
}
}
}
}
if (e.contains(c)) {
withInGeoConstraint.add(rec);
} else {
statusHandler.handle(
Priority.DEBUG,
"Madis record discarded: outside of range: "
+ rec.getLatitude() + " "
+ rec.getLongitude());
}
}
}
}
}

if (!withInGeoConstraint.isEmpty()) {
int size = withInGeoConstraint.size();