Issue #1469 - pypies will now read the hdf5 root directory from configuration. Relative paths can now be passed to pypies. Rebased to fix merge conflict.
Change-Id: I38873e478729a3644db7ce2813e9bd6c15d9bdf3
Former-commit-id: 4db9c6c321 [formerly ed2e78738b391bcb9a40ddde7b7a6acf7ccd1b30]
Former-commit-id: e2a12845d3
parent a3c5aded12
commit fba28ea69b
33 changed files with 302 additions and 328 deletions
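
The shape of the change, in a minimal Python sketch: before this commit, every client and DAO glued the absolute HDF5 root onto the front of the path it handed to pypies; afterward, callers pass plugin-relative paths and pypies alone prepends the root it reads from pypies.cfg. The helper names and the sample root below are hypothetical, for illustration only.

import os

HDF5_ROOT = '/awips2/edex/data/hdf5'   # assumption: the configured hdf5 root

def client_path_before(plugin, path, fileName):
    # old behavior: every caller prepended the absolute root itself
    return os.path.join(HDF5_ROOT, plugin, path, fileName)

def client_path_after(plugin, path, fileName):
    # new behavior: callers build only the plugin-relative portion
    return os.path.join(plugin, path, fileName)

def pypies_resolve(relativePath, hdf5Dir=HDF5_ROOT):
    # pypies now owns the root and joins it onto every incoming request
    return os.path.join(hdf5Dir, relativePath)

assert pypies_resolve(client_path_after('grid', '2013/01/14', 'GFS.h5')) == \
       client_path_before('grid', '2013/01/14', 'GFS.h5')
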
@@ -48,9 +48,8 @@ public class HDF5Util {
         String fileName = pathProvider.getHDFFileName(
                 object.getPluginName(), persistable);

-        file = new File(VizApp.getServerDataDir() + IPathManager.SEPARATOR
-                + object.getPluginName() + IPathManager.SEPARATOR + path
-                + IPathManager.SEPARATOR + fileName);
+        file = new File(object.getPluginName() + IPathManager.SEPARATOR
+                + path + IPathManager.SEPARATOR + fileName);
     }

     return file;

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.core.localization.LocalizationManager;
 * ------------ ---------- ----------- --------------------------
 * 7/1/06                  chammack    Initial Creation.
 * Sep 12, 2012 1167       djohnson    Add datadelivery servers.
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -79,8 +80,6 @@ public final class VizApp {

     private static String dataDeliveryQueryServer;

-    private static String serverDataDir;
-
     static {
         ManagementFactory.getRuntimeMXBean().getName();
     }

@@ -257,14 +256,6 @@ public final class VizApp {
         VizApp.pypiesServer = pypiesServer;
     }

-    public static String getServerDataDir() {
-        return VizApp.serverDataDir;
-    }
-
-    public static void setServerDataDir(String serverDataDir) {
-        VizApp.serverDataDir = serverDataDir;
-    }
-
     private static String host = null;

     /**

@@ -49,6 +49,8 @@ import com.raytheon.uf.viz.core.status.StatusConstants;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jan 16, 2008            njensen     Initial creation
+* Jan 14, 2013 1469       bkowal      The hdf5 root will no longer be appended to the
+*                                     beginning of the file name.
 *
 * </pre>
 *

@@ -77,7 +79,7 @@ public class CubeUtil {
         if (record != null) {
             File file = HDF5Util.findHDF5Location(record);
             if (file != null)
-                filename = file.getAbsolutePath();
+                filename = file.getPath();
         }
         return filename;
     }

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
 * ------------ ---------- ----------- --------------------------
 * Nov 5, 2009             mschenke    Initial creation
 * Sep 12, 2012 1167       djohnson    Add datadelivery servers.
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -126,7 +127,6 @@ public class LocalizationInitializer {
         VizApp.setHttpServer(resp.getHttpServer());
         VizApp.setJmsServer(resp.getJmsServer());
         VizApp.setPypiesServer(resp.getPypiesServer());
-        VizApp.setServerDataDir(resp.getServerDataDir());
         VizServers.getInstance().setServerLocations(resp.getServerLocations());
     }
 }

@@ -91,6 +91,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
 * Mar 26, 2008            njensen     Added rename() and getFileContents().
 * May 19, 2007 #1127      randerso    Implemented error handling
 * Sep 12, 2012 1167       djohnson    Add datadelivery servers.
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -223,7 +224,6 @@ public class LocalizationManager implements IPropertyChangeListener {
             VizApp.setHttpServer(resp.getHttpServer());
             VizApp.setJmsServer(resp.getJmsServer());
             VizApp.setPypiesServer(resp.getPypiesServer());
-            VizApp.setServerDataDir(resp.getServerDataDir());
             VizServers.getInstance().setServerLocations(
                     resp.getServerLocations());
         } catch (VizException e) {

@@ -42,6 +42,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Oct 12, 2009            mschenke    Initial creation
+* Jan 14, 2013 1469       bkowal      The hdf5 root directory is no longer passed
+*                                     as an argument to the common TopoQuery constructor.
 *
 * </pre>
 *

@@ -59,8 +61,7 @@ public class TopoQuery implements ITopoQuery {
      * @return Initialized TopoQuery instance
      */
     public static synchronized ITopoQuery getInstance() {
-        return com.raytheon.uf.edex.topo.TopoQuery.getInstance(
-                VizApp.getServerDataDir(), 0);
+        return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0);
     }

     /**

@@ -68,8 +69,7 @@ public class TopoQuery implements ITopoQuery {
      */
     public static synchronized ITopoQuery getInstance(int topoLevel,
             boolean useCaching) {
-        return com.raytheon.uf.edex.topo.TopoQuery.getInstance(
-                VizApp.getServerDataDir(), 0);
+        return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0);
     }

     private TopoQuery(int level, boolean useCaching) {

@@ -48,6 +48,7 @@ import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 8, 2011             mschenke    Initial creation
+* Jan 14, 2013 1469       bkowal      The hdf5 data directory is no longer a preference.
 *
 * </pre>
 *

@@ -62,8 +63,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {

     private StringFieldEditor servicesServer;

-    private StringFieldEditor serverDataDir;
-
     private Button connectivityButton;

     /**

@@ -104,12 +103,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {
         pypiesServer.setErrorMessage("Cannot connect to Pypies server");
         addField(pypiesServer);

-        serverDataDir = new StringFieldEditor(
-                ThinClientPreferenceConstants.P_SERVER_DATA_DIR,
-                "&Server Data Dir: ", getFieldEditorParent());
-
-        addField(serverDataDir);
-
         addConnectivityButton();
     }

@@ -198,7 +191,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {
         boolean useProxies = this.useProxies.getBooleanValue();
         servicesServer.setEnabled(useProxies, connectivityButton.getParent());
         pypiesServer.setEnabled(useProxies, connectivityButton.getParent());
-        serverDataDir.setEnabled(useProxies, connectivityButton.getParent());
         connectivityButton.setEnabled(useProxies);
     }

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.thinclient.ui.ThinClientConnectivityDialog;
 * ------------ ---------- ----------- --------------------------
 * Nov 23, 2011            bsteffen    Initial creation
 * Dec 06, 2012 1396       njensen     Added setting VizServers
+* Jan 14, 2013 1469       bkowal      Removed setting the hdf5 data directory
 *
 * </pre>
 *

@@ -86,17 +87,10 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer {
         String servicesProxy = store
                 .getString(ThinClientPreferenceConstants.P_SERVICES_PROXY);
         LocalizationManager.getInstance().setCurrentServer(servicesProxy);
-        String dataDir = VizApp.getServerDataDir();
-        if (dataDir == null || dataDir.isEmpty()) {
-            dataDir = store
-                    .getString(ThinClientPreferenceConstants.P_SERVER_DATA_DIR);
-            VizApp.setServerDataDir(dataDir);
-        }
-        if (!disableJMS || dataDir == null || dataDir.isEmpty()) {
+        if (!disableJMS) {
             GetServersRequest req = new GetServersRequest();
             GetServersResponse resp = (GetServersResponse) ThriftClient
                     .sendLocalizationRequest(req);
-            VizApp.setServerDataDir(resp.getServerDataDir());
-            if (!disableJMS) {
-                VizApp.setJmsServer(resp.getJmsServer());
-            }
+            VizApp.setJmsServer(resp.getJmsServer());
         }

@@ -117,8 +111,5 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer {
                 VizApp.setJmsServer(null);
             }
         }
-        store.setValue(ThinClientPreferenceConstants.P_SERVER_DATA_DIR,
-                VizApp.getServerDataDir());
-
     }
 }

@@ -29,6 +29,7 @@ package com.raytheon.uf.viz.thinclient.preferences;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Oct 20, 2011            mschenke    Initial creation
+* Jan 14, 2013 1469       bkowal      The hdf5 data directory is no longer a preference constant.
 *
 * </pre>
 *

@@ -50,8 +51,6 @@ public class ThinClientPreferenceConstants {

     public static String P_PYPIES_PROXY = "pypiesProxyAddress";

-    public static String P_SERVER_DATA_DIR = "serverDataDir";
-
     public static String P_MENU_TIME_UPDATE_INTERVALS = "menuTimeUpdateInterval";

     public static String P_DATA_UPDATE_INTERVALS = "dataUpdateInterval";

@@ -54,6 +54,8 @@ import com.raytheon.viz.core.rsc.hdf5.FileBasedTileSet;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Feb 15, 2007            chammack    Initial Creation.
+* Jan 14, 2013 1469       bkowal      The hdf5 data directory is no longer included in the
+*                                     DATA_FILE
 *
 * </pre>
 *

@@ -65,7 +67,7 @@ public class TopoTileSet extends FileBasedTileSet {
     private static String DATA_FILE = "/topo/srtm30.hdf";

     static {
-        DATA_FILE = new File(VizApp.getServerDataDir(), DATA_FILE)
+        DATA_FILE = new File(DATA_FILE)
                 .getAbsolutePath();
     }

@@ -38,6 +38,12 @@
     <adapterServiceQueue>edex.AdapterSrv</adapterServiceQueue>
     <resFolder>../conf/res</resFolder>
     <pluginScriptFolder>${env:edex.home}/conf/db/commonScripts/</pluginScriptFolder>
+    <!--
+        hdf5Dir now refers only to the local hdf5 directory; pypies keeps track
+        of its own hdf5 directory. The local hdf5 directory will only be used
+        by plugins that do not store / retrieve their data through pypies
+        (ex: QC).
+    -->
     <hdf5Dir>${env:edex.home}/data/hdf5</hdf5Dir>
     <shareDir>${env:edex.home}/data/share</shareDir>
     <utilityDir>${env:edex.home}/data/utility</utilityDir>

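The distinction the comment above draws, a local hdf5Dir for plugins that bypass pypies versus the root pypies reads from its own config, can be sketched as two independent roots. The paths below are the defaults from this commit's config files; the resolver function itself is illustrative only.

import os

# assumption: defaults taken from the edex XML config and pypies.cfg in this commit;
# the two roots happen to match here but are configured independently
LOCAL_HDF5_DIR = '/awips2/edex/data/hdf5'     # used directly by non-pypies plugins (ex: QC)
PYPIES_HDF5_DIR = '/awips2/edex/data/hdf5'    # read by pypies from pypies.cfg [edex_data]

def resolve(relative_path, through_pypies=True):
    # pypies-backed plugins hand pypies a relative path; local-only plugins
    # resolve against the edex-local hdf5Dir themselves
    root = PYPIES_HDF5_DIR if through_pypies else LOCAL_HDF5_DIR
    return os.path.join(root, relative_path)

print resolve('binlightning/lightning.h5')           # via pypies
print resolve('qc/qcdata.h5', through_pypies=False)  # local-only plugin
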
@@ -36,8 +36,6 @@ import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.datastorage.Request;
 import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.datastorage.records.IDataRecord;
-import com.raytheon.uf.edex.core.props.EnvProperties;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;

 /**
  * Data access object for saving and retrieving data from the HDF5 repository.

@@ -51,6 +49,8 @@
 * 7/24/07      353        bphillip    Initial Check in
 * 20070914     379        jkorman     Changed to use IPersistable populateDataStore
 *                                     and getPersistenceTime methods.
+* 01/14/13     1469       bkowal      No longer retrieves the hdf5 data directory from the
+*                                     environment.
 * </pre>
 *
 * @author bphillip

@@ -58,27 +58,18 @@
  */
 public class HDF5Dao {

-    public static final String HDF5DIR = "HDF5DIR";
-
     public static final IHDFFilePathProvider DEFAULT_PATH_PROVIDER = DefaultPathProvider
             .getInstance();

     /** The logger */
     protected Log logger = LogFactory.getLog(getClass());

-    private String hdf5Dir = null;
-
     private IHDFFilePathProvider pathProvider = null;

     /**
      * Construct an instance of the HDFDao using system default values.
      */
     public HDF5Dao() {
-        EnvProperties properties = PropertiesFactory.getInstance()
-                .getEnvProperties();
-        if (properties != null) {
-            hdf5Dir = properties.getEnvValue(HDF5DIR);
-        }
         pathProvider = DEFAULT_PATH_PROVIDER;
     }

@@ -86,32 +77,12 @@ public class HDF5Dao {
      * Construct an instance of the HDFDao using user supplied properties and
      * path provider.
      *
-     * @param properties
-     *            An environment properties instance that must contain an entry
-     *            from the property HDF5DIR.
-     */
-    public HDF5Dao(EnvProperties properties) {
-        if (properties != null) {
-            hdf5Dir = properties.getEnvValue(HDF5DIR);
-        }
-        pathProvider = DEFAULT_PATH_PROVIDER;
-    }
-
-    /**
-     * Construct an instance of the HDFDao using user supplied properties and
-     * path provider.
-     *
-     * @param properties
-     *            An environment properties instance that must contain an entry
-     *            from the property HDF5DIR.
      * @param pathProvider
      *            The path provider to use that creates a path to a specific HDF
-     *            repository. This path provider must not provide the name of
-     *            the repository, and the path must be relative to the base
-     *            directory given in the properties HDF5DIR property.
+     *            repository.
      */
-    public HDF5Dao(EnvProperties properties, IHDFFilePathProvider pathProvider) {
-        this(properties);
+    public HDF5Dao(IHDFFilePathProvider pathProvider) {
         this.pathProvider = pathProvider;
     }

@@ -130,9 +101,8 @@ public class HDF5Dao {
         if (obj instanceof IPersistable) {
             IPersistable persistable = (IPersistable) obj;

-            String persistDir = hdf5Dir
-                    + pathProvider.getHDFPath(obj.getPluginName(), persistable)
-                    + File.separator;
+            String persistDir = pathProvider.getHDFPath(obj.getPluginName(),
+                    persistable) + File.separator;
             String archive = pathProvider.getHDFFileName(obj.getPluginName(),
                     persistable);

@@ -166,9 +136,8 @@ public class HDF5Dao {
         if (obj instanceof PersistablePluginDataObject) {
             IPersistable pRecord = (IPersistable) obj;

-            String persistDir = hdf5Dir
-                    + pathProvider.getHDFPath(obj.getPluginName(), pRecord)
-                    + File.separator;
+            String persistDir = pathProvider.getHDFPath(obj.getPluginName(),
+                    pRecord) + File.separator;
             String archive = pathProvider.getHDFFileName(obj.getPluginName(),
                     pRecord);

@@ -199,19 +168,4 @@ public class HDF5Dao {
     public void setPathProvider(IHDFFilePathProvider pathProvider) {
         this.pathProvider = pathProvider;
     }
-
-    /**
-     * @return the hdf5Dir
-     */
-    public String getHdf5Dir() {
-        return hdf5Dir;
-    }
-
-    /**
-     * @param hdf5Dir
-     *            the hdf5Dir to set
-     */
-    public void setHdf5Dir(String hdf5Dir) {
-        this.hdf5Dir = hdf5Dir;
-    }
 }

@@ -49,8 +49,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.TimeRange;
-import com.raytheon.uf.edex.core.props.EnvProperties;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;

 /**

@@ -73,6 +71,8 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
 * 06/17/08     #940       bphillip    Implemented GFE Locking
 * 06/19/08                njensen     Added retrieval of discrete
 * 05/04/12     #574       dgilling    Update class to better match AWIPS1.
+* 01/14/13     #1469      bkowal      The hdf5 data directory is no longer included
+*                                     in the gfeBaseDataDir.
 *
 * </pre>
 *

@@ -95,9 +95,7 @@ public abstract class GridDatabase {
     protected boolean valid;

     static {
-        EnvProperties env = PropertiesFactory.getInstance().getEnvProperties();
-        gfeBaseDataDir = env.getEnvValue("HDF5DIR") + File.separator + "gfe"
-                + File.separator;
+        gfeBaseDataDir = "gfe" + File.separator;
     }

     /**

@@ -81,6 +81,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 *                                     call updateCaches().
 * 11/05/12     #1310      dgilling    Remove code from updateCatches()
 *                                     that sent notification to D2DParmIdCache.
+* 01/14/13     #1469      bkowal      Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -417,8 +418,7 @@ public class GribDao extends PluginDao {
             persistable.setHdfFileId(EDEXUtil.getServerId());

             // get the directory
-            String directory = HDF5_DIR + File.separator + pdo.getPluginName()
-                    + File.separator
+            String directory = pdo.getPluginName() + File.separator
                     + pathProvider.getHDFPath(pdo.getPluginName(), pdo);
             File dataStoreFile = new File(directory + File.separator
                     + pathProvider.getHDFFileName(pdo.getPluginName(), persistable));

@@ -41,6 +41,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
 * ------------ ---------- ----------- --------------------------
 * Aug 6, 2009             mschenke    Initial creation
 * Sep 12, 2012 1167       djohnson    Add datadelivery servers.
+* Jan 14, 2013 1469       bkowal      No longer includes the hdf5 data directory
+*                                     in the response.
 *
 * </pre>
 *

@@ -65,14 +67,10 @@ public class GetServersHandler extends GenericRegistry<String, String>
         logger.info("jms.server=" + jmsServer);
         logger.info("pypies.server=" + pypiesServer);
         logger.info("server locations=" + registry);

-        String hdf5DataDir = PropertiesFactory.getInstance().getEnvProperties()
-                .getEnvValue("HDF5DIR");
-
         response.setHttpServer(httpServer);
         response.setJmsServer(jmsServer);
         response.setPypiesServer(pypiesServer);
-        response.setServerDataDir(hdf5DataDir);
         response.setServerLocations(Collections.unmodifiableMap(this.registry));

         return response;

@@ -49,10 +49,7 @@ import com.raytheon.uf.common.gridcoverage.PolarStereoGridCoverage;
 import com.raytheon.uf.common.gridcoverage.exception.GridCoverageException;
 import com.raytheon.uf.common.gridcoverage.subgrid.SubGrid;
 import com.raytheon.uf.common.localization.IPathManager;
-import com.raytheon.uf.common.localization.msgs.GetServersRequest;
-import com.raytheon.uf.common.localization.msgs.GetServersResponse;
 import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
-import com.raytheon.uf.common.serialization.comm.RequestRouter;
 import com.vividsolutions.jts.geom.Coordinate;

 /**

@@ -66,6 +63,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 14, 2012            bsteffen    Initial creation
+* Jan 14, 2013 1469       bkowal      No longer needs to retrieve the location
+*                                     of the hdf5 data directory.
 *
 * </pre>
 *

@@ -75,8 +74,6 @@

 public class GridDataRetriever {

-    protected static String serverDataDir;
-
     protected GridRecord record;

     protected GridCoverage requestCoverage;

@@ -331,28 +328,7 @@ public class GridDataRetriever {
         String fileName = pathProvider.getHDFFileName(record.getPluginName(),
                 record);

-        return new File(getServerDataDir() + IPathManager.SEPARATOR
-                + record.getPluginName() + IPathManager.SEPARATOR + path
-                + IPathManager.SEPARATOR + fileName);
-    }
-
-    private static synchronized String getServerDataDir()
-            throws StorageException {
-        if (serverDataDir == null) {
-            // TODO cave already knows the server data dir in VizApp, and edex
-            // has it in system properties but we can't access either because
-            // this is common code, architecturally we need some way around
-            // this. For now this will send it's own request which is slightly
-            // wasteful but not terribly harmful.
-            try {
-                GetServersResponse response = (GetServersResponse) RequestRouter
-                        .route(new GetServersRequest());
-                serverDataDir = response.getServerDataDir();
-            } catch (Exception e) {
-                throw new StorageException("Error communicating with server.",
-                        null, e);
-            }
-        }
-        return serverDataDir;
+        return new File(record.getPluginName() + IPathManager.SEPARATOR + path
+                + IPathManager.SEPARATOR + fileName);
     }
 }

@@ -36,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 * ------------ ---------- ----------- --------------------------
 * Aug 6, 2009             mschenke    Initial creation
 * Sep 12, 2012 1167       djohnson    Add datadelivery servers.
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -54,9 +55,6 @@ public class GetServersResponse implements ISerializableObject {
     @DynamicSerializeElement
     private String pypiesServer;

-    @DynamicSerializeElement
-    private String serverDataDir;
-
     @DynamicSerializeElement
     private Map<String, String> serverLocations;

@@ -84,14 +82,6 @@ public class GetServersResponse implements ISerializableObject {
         this.pypiesServer = pypiesServer;
     }

-    public String getServerDataDir() {
-        return serverDataDir;
-    }
-
-    public void setServerDataDir(String serverDataDir) {
-        this.serverDataDir = serverDataDir;
-    }
-
     /**
      * @return
      */

@@ -103,6 +103,8 @@ import com.vividsolutions.jts.geom.Polygon;
 * 6/29/12      #828       dgilling    Force getPurgeRulesForPlugin()
 *                                     to search only COMMON_STATIC.
 * Oct 10, 2012 1261       djohnson    Add some generics wildcarding.
+* Jan 14, 2013 1469       bkowal      No longer retrieves the hdf5 data directory
+*                                     from the environment.
 * </pre>
 *
 * @author bphillip

@@ -119,10 +121,6 @@ public abstract class PluginDao extends CoreDao {
     /** The hdf5 file system suffix */
     public static final String HDF5_SUFFIX = ".h5";

-    /** The base path of the hdf5 data store */
-    public static final String HDF5_DIR = PropertiesFactory.getInstance()
-            .getEnvProperties().getEnvValue("HDF5DIR");
-
     /** The base path of the folder containing HDF5 data for the owning plugin */
     public final String PLUGIN_HDF5_DIR;

@@ -156,8 +154,7 @@ public abstract class PluginDao extends CoreDao {
         }

         this.pluginName = pluginName;
-        PLUGIN_HDF5_DIR = HDF5_DIR + File.separator + pluginName
-                + File.separator;
+        PLUGIN_HDF5_DIR = pluginName + File.separator;
         dupCheckSql = dupCheckSql.replace(":tableName", PluginFactory
                 .getInstance().getPrimaryTable(pluginName));
         pathProvider = PluginFactory.getInstance().getPathProvider(pluginName);

@@ -227,9 +224,7 @@ public abstract class PluginDao extends CoreDao {
             IPersistable persistable = (IPersistable) pdo;

             // get the directory
-            String directory = HDF5_DIR
-                    + File.separator
-                    + pdo.getPluginName()
+            String directory = pdo.getPluginName()
                     + File.separator
                     + pathProvider.getHDFPath(pdo.getPluginName(),
                             persistable);

@@ -66,7 +66,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.util.RunProcess;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
 import com.raytheon.uf.edex.database.cluster.ClusterTask;

@@ -84,6 +83,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * 09/19/2011   10955      rferrel     Use RunProcess
 * 04/18/2012   DR 14694   D. Friedman Fixes for static topography generation
 * 05/09/2012   DR 14939   D. Friedman Fix errors in DR 14694
+* 01/14/2013   1469       bkowal      Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -116,9 +116,7 @@ public class StaticTopoData {
     private static final String DAT_GZ_SUFFIX = ".dat.gz";

     /** The base directory in which the topo files reside */
-    private static final String FILE_PREFIX = PropertiesFactory.getInstance()
-            .getEnvProperties().getEnvValue("HDF5DIR")
-            + "/topo/";
+    private static final String FILE_PREFIX = "topo/";

     /** The file containing the complete static topo data sets */
     private static final File topoFile = new File(FILE_PREFIX + "staticTopo.h5");

@@ -538,8 +536,7 @@ public class StaticTopoData {

         for (TiledTopoSource source : topoSources) {
             statusHandler.handle(Priority.INFO, "Extracting topo data from "
-                    +
-                    source.getDataset());
+                    + source.getDataset());
             GridReprojection reprojection = new GridReprojection(
                     source.getGridGeometry(), inGeom);
             GridSampler sampler = new GridSampler(source, interp);

@@ -14,8 +14,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
-import com.raytheon.uf.edex.core.props.EnvProperties;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;

 /**

@@ -48,6 +46,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 1, 2011             njensen     Initial creation
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -62,14 +61,9 @@ public class DataStoreRepacker {

     private List<String> pluginsToRepack;

-    private String hdf5Dir;
-
     private Compression compression = Compression.NONE;

     public DataStoreRepacker(String compression) {
-        EnvProperties properties = PropertiesFactory.getInstance()
-                .getEnvProperties();
-        hdf5Dir = properties.getEnvValue("HDF5DIR");
         this.compression = Compression.valueOf(compression);
     }

@@ -81,8 +75,7 @@ public class DataStoreRepacker {
         // TODO change log statement if more than pointdata is hooked into this
         statusHandler.info("Starting repack of pointdata datastore");
         for (String plugin : pluginsToRepack) {
-            String dir = hdf5Dir + File.separator + plugin;
-            IDataStore ds = DataStoreFactory.getDataStore(new File(dir));
+            IDataStore ds = DataStoreFactory.getDataStore(new File(plugin));
             try {
                 ds.repack(compression);
             } catch (StorageException e) {

@@ -28,8 +28,6 @@ import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
-import com.raytheon.uf.edex.core.props.EnvProperties;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;

 /**

@@ -44,6 +42,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Dec 8, 2011             njensen     Initial creation
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -56,14 +55,9 @@ public class DataStoreArchiver {
     private static final transient IUFStatusHandler statusHandler = UFStatus
             .getHandler(DataStoreArchiver.class);

-    private String hdf5Dir;
-
     private Compression compression = Compression.NONE;

     public DataStoreArchiver(String compression) {
-        EnvProperties properties = PropertiesFactory.getInstance()
-                .getEnvProperties();
-        hdf5Dir = properties.getEnvValue("HDF5DIR");
         this.compression = Compression.valueOf(compression);
     }

@@ -73,6 +73,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 17, 2011            rjpeter     Initial creation
+* Jan 18, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -228,8 +229,7 @@ public class DatabaseArchiver implements IPluginArchiver {

         for (String dataStoreFile : datastoreFilesToArchive) {
             IDataStore ds = DataStoreFactory.getDataStore(new File(
-                    FileUtil.join(PluginDao.HDF5_DIR, pluginName,
-                            dataStoreFile)));
+                    FileUtil.join(pluginName, dataStoreFile)));
             int pathSep = dataStoreFile.lastIndexOf(File.separatorChar);
             String outputDir = (pathSep > 0 ? FileUtil.join(
                     archivePath, pluginName,

@@ -38,8 +38,6 @@ import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.hydro.spatial.HRAP;
 import com.raytheon.uf.edex.core.EdexException;
-import com.raytheon.uf.edex.core.props.EnvProperties;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.dao.CoreDao;
 import com.raytheon.uf.edex.database.dao.DaoConfig;
 import com.vividsolutions.jts.geom.Coordinate;

@@ -54,6 +52,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jan 06, 2011 5951       jnjanga     Initial creation
+* Jan 18, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -112,17 +111,12 @@ public class MpeLightningSrv {
         // set up a lightning record
         BinLightningRecord ltngRec = new BinLightningRecord(dataURI);

-        EnvProperties properties = PropertiesFactory.getInstance()
-                .getEnvProperties();
-        String hdf5Dir = properties.getEnvValue("HDF5DIR");
-
         // create custom path provider for binlightning repository
         BinLightningPathProvider pathProvider = BinLightningPathProvider
                 .getInstance();

         // obtain the hdf5 filename
-        String persistDir = hdf5Dir + pathProvider.getHDFPath(ltngRec)
+        String persistDir = pathProvider.getHDFPath(ltngRec)
                 + File.separator;
         String archive = pathProvider.getHDFFileName(
                 ltngRec.getPluginName(), ltngRec);

@@ -78,6 +78,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Apr 13, 2009            chammack    Initial creation
+* Jan 14, 2013 1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -446,8 +447,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends

     public File getFullFilePath(PluginDataObject p) {
         File file;
-        String directory = HDF5_DIR + File.separator + p.getPluginName()
-                + File.separator
+        String directory = p.getPluginName() + File.separator
                 + pathProvider.getHDFPath(p.getPluginName(), (IPersistable) p);
         file = new File(directory
                 + File.separator

@@ -708,9 +708,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
         }
         bm.putAll(obj);
         T bean = (T) bm.getBean();
-        return HDF5_DIR
-                + File.separator
-                + this.pluginName
+        return this.pluginName
                 + File.separator
                 + this.pathProvider.getHDFPath(this.pluginName,
                         (IPersistable) bean)

@@ -87,7 +87,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * 11/19/2007   #377       randerso    Initial creation
 * Jun 13, 2008 #1160      randerso    Moved to server side
 * 03/09/2012   DR 14581   D. Friedman Fix grid referencing and use custom
 *                                     nearest-neighbor resampling.
+* 01/14/2013   #1469      bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -117,19 +118,7 @@ public class TopoQuery implements ITopoQuery {
      * @return Initialized TopoQuery instance
      */
     public static synchronized TopoQuery getInstance(int topoLevel) {
-        String hdf5Dir = null;
-
-        EnvProperties properties = PropertiesFactory.getInstance()
-                .getEnvProperties();
-        if (properties != null) {
-            hdf5Dir = properties.getEnvValue("HDF5DIR");
-        }
-        return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel);
-    }
-
-    public static synchronized TopoQuery getInstance(String hdf5Dir,
-            int topoLevel) {
-        return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel);
+        return getInstance(new File(TOPO_FILE), topoLevel);
     }

     public static synchronized TopoQuery getInstance(File hdf5File,

@@ -69,6 +69,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
 * ------------ ---------- ----------- --------------------------
 * 4/7/09       1994       bphillip    Initial Creation
 * 12/16/10                mli         extend NcepDefaultPluginDao to enable purge
+* 01/14/13     1469       bkowal      Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -112,70 +113,70 @@ public class NcgribDao extends NcepDefaultPluginDao {
         this("ncgrib");
     }

     // public void purgeExpiredData() {
     // QueryResult models = null;
     // try {
     // models = (QueryResult) executeNativeSql(MODEL_QUERY);
     // } catch (DataAccessLayerException e) {
     // logger.error("Error purging ncgrib data. Unable to get models", e);
     // }
     //
     // String currentModel = null;
     // for (int i = 0; i < models.getResultCount(); i++) {
     // currentModel = (String) models.getRowColumnValue(i, 0);
     // QueryResult refTimes = null;
     // try {
     // refTimes = (QueryResult) executeNativeSql(REFTIME_QUERY
     // .replace("?", currentModel));
     // } catch (DataAccessLayerException e) {
     // logger
     // .error("Error purging ncgrib data. Unable to get reference times for model ["
     // + currentModel + "]");
     // continue;
     // }
     //
     // // FIXME: Add rules for purging here instead of just keeping 2
     // // runs
     // List<String> filesKept = new ArrayList<String>();
     // File modelDirectory = new File(PLUGIN_HDF5_DIR + File.separator
     // + currentModel);
     //
     // for (int j = 0; j < refTimes.getResultCount(); j++) {
     // Date time = (Date) refTimes.getRowColumnValue(j, 0);
     // File hdf5File = new File(modelDirectory.getAbsolutePath()
     // + File.separator
     // + ((NcgribPathProvider) pathProvider).formatTime(time)
     // + ".h5");
     //
     // if (j < MODELCOUNT) {
     // filesKept.add(hdf5File.getAbsolutePath());
     // continue;
     // }
     //
     // try {
     // purgeDb(time, currentModel);
     // } catch (DataAccessLayerException e) {
     // logger.error("Error purging database for ncgrib model ["
     // + currentModel + "]");
     // }
     // }
     //
     // List<File> files = FileUtil.listFiles(modelDirectory, fileFilter,
     // false);
     //
     // for (File file : files) {
     // if (!filesKept.contains(file.getAbsolutePath())) {
     // if (!file.delete()) {
     // logger
     // .error("Error purging HDF5 files for ncgrib model ["
     // + currentModel + "]");
     // }
     // }
     // }
     //
     // }
     //
     // }

     private int purgeDb(final Date date, String modelName)
             throws DataAccessLayerException {

@@ -198,8 +199,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
         AbstractStorageRecord hybridLevels = null;
         AbstractStorageRecord thinnedPts = null;

-        //System.out.println (" good data to be populated, rec datauri=" + gribRec.getDataURI());
-        //System.out.println (" good data to be populated, rec messagedata=" + gribRec.getMessageData());
+        // System.out.println (" good data to be populated, rec datauri=" +
+        // gribRec.getDataURI());
+        // System.out.println
+        // (" good data to be populated, rec messagedata=" +
+        // gribRec.getMessageData());

         /*
          * Stores the binary data to the HDF5 data store

@@ -210,9 +214,9 @@ public class NcgribDao extends NcepDefaultPluginDao {
                     long[] sizes = new long[] {
                             (gribRec.getSpatialObject()).getNx(),
                             (gribRec.getSpatialObject()).getNy() };
-                    storageRecord = new FloatDataRecord("Data", gribRec
-                            .getDataURI(), (float[]) gribRec.getMessageData(),
-                            2, sizes);
+                    storageRecord = new FloatDataRecord("Data",
+                            gribRec.getDataURI(),
+                            (float[]) gribRec.getMessageData(), 2, sizes);
                 } else
                     throw new Exception(
                             "Cannot create data record, spatialData = "

@@ -231,8 +235,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any data from the local section if present
              */
             if (gribRec.isLocalSectionUsed()) {
-                localSection = new IntegerDataRecord(LOCAL_SECTION, gribRec
-                        .getDataURI(), gribRec.getLocalSection());
+                localSection = new IntegerDataRecord(LOCAL_SECTION,
+                        gribRec.getDataURI(), gribRec.getLocalSection());
                 localSection.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(localSection);
             }

@@ -241,8 +245,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any hybrid coordinate data if present
              */
             if (gribRec.isHybridGrid()) {
-                hybridLevels = new FloatDataRecord(HYBRID_LEVELS, gribRec
-                        .getDataURI(), gribRec.getHybridCoordList());
+                hybridLevels = new FloatDataRecord(HYBRID_LEVELS,
+                        gribRec.getDataURI(), gribRec.getHybridCoordList());
                 hybridLevels.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(hybridLevels);
             }

@@ -251,8 +255,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any thinned point data for quasi-regular grids if present
              */
             if (gribRec.isThinnedGrid()) {
-                thinnedPts = new IntegerDataRecord(THINNED_PTS, gribRec
-                        .getDataURI(), gribRec.getThinnedPts());
+                thinnedPts = new IntegerDataRecord(THINNED_PTS,
+                        gribRec.getDataURI(), gribRec.getThinnedPts());
                 thinnedPts.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(thinnedPts);
             }

@@ -318,11 +322,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
             NcgribModel model = rec.getModelInfo();
             if (model.getParameterName() == null
                     || model.getParameterName().equals("Missing")) {
-                //System.out.println (" persist missing or null, rec datauri=" + rec.getDataURI());
+                // System.out.println (" persist missing or null, rec datauri="
+                // + rec.getDataURI());

-                logger
-                        .info("Discarding record due to missing or unknown parameter mapping: "
-                                + record);
+                logger.info("Discarding record due to missing or unknown parameter mapping: "
+                        + record);
             } else {
                 boolean validLevel = false;
                 Level level = model.getLevel();

@@ -339,9 +343,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
                 if (validLevel) {
                     toPersist.add(rec);
                 } else {
-                    logger
-                            .info("Discarding record due to missing or unknown level mapping: "
-                                    + record);
+                    logger.info("Discarding record due to missing or unknown level mapping: "
+                            + record);
                 }
             }
         }

@@ -367,11 +370,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
             NcgribModel model = rec.getModelInfo();
             if (model.getParameterName() == null
                     || model.getParameterName().equals("Missing")) {
-                //System.out.println (" verify missing or null, rec datauri=" + rec.getDataURI());
+                // System.out.println (" verify missing or null, rec datauri=" +
+                // rec.getDataURI());

-                logger
-                        .info("Discarding record due to missing or unknown parameter mapping: "
-                                + record);
+                logger.info("Discarding record due to missing or unknown parameter mapping: "
+                        + record);
             } else {
                 boolean validLevel = false;
                 Level level = model.getLevel();

@@ -388,9 +391,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
                 if (validLevel) {
                     toPersist.add(rec);
                 } else {
-                    logger
-                            .info("Discarding record due to missing or unknown level mapping: "
-                                    + record);
+                    logger.info("Discarding record due to missing or unknown level mapping: "
+                            + record);
                 }
             }
         }

@@ -404,8 +406,7 @@ public class NcgribDao extends NcepDefaultPluginDao {
             persistable.setHdfFileId(EDEXUtil.getServerId());

             // get the directory
-            String directory = HDF5_DIR + File.separator + pdo.getPluginName()
-                    + File.separator
+            String directory = pdo.getPluginName() + File.separator
                     + pathProvider.getHDFPath(this.pluginName, persistable);
             File dataStoreFile = new File(directory + File.separator
                     + pathProvider.getHDFFileName(pdo.getPluginName(), persistable));

@@ -1543,8 +1543,7 @@ public class Dgdriv {
             // file = new File(File.separator + dataURI.split("/")[1]
             // + File.separator + path + File.separator + sb.toString());
             //} else if (DataMode.getSystemMode() == DataMode.PYPIES) {
-            file = new File(VizApp.getServerDataDir() + File.separator
-                    + dataURI.split("/")[1] + File.separator + path
+            file = new File(dataURI.split("/")[1] + File.separator + path
                     + File.separator + sb.toString());
             //} else {
             // file = new File(VizApp.getDataDir() + File.separator

@@ -1058,8 +1058,7 @@ public class TestDgdriv {
             // file = new File(File.separator + dataURI.split("/")[1]
             // + File.separator + path + File.separator + sb.toString());
             //} else if (DataMode.getSystemMode() == DataMode.PYPIES) {
-            file = new File(VizApp.getServerDataDir() + File.separator
-                    + dataURI.split("/")[1] + File.separator + path
+            file = new File(dataURI.split("/")[1] + File.separator + path
                     + File.separator + sb.toString());
             //} else {
             // file = new File(VizApp.getDataDir() + File.separator

@@ -32,7 +32,8 @@
 #
 #

+[edex_data]
+hdf5dir=/awips2/edex/data/hdf5

 [loggers]
 keys=root,minutes,hours

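For reference, a minimal sketch of how an [edex_data] section like the one added above can be read with the stdlib parser this commit uses (ConfigParser under Python 2; the section and option names match the diff, the rest is illustrative):

import ConfigParser  # Python 2 stdlib, as used elsewhere in this commit

scp = ConfigParser.SafeConfigParser()
scp.read('/awips2/pypies/conf/pypies.cfg')
hdf5dir = scp.get('edex_data', 'hdf5dir')   # -> '/awips2/edex/data/hdf5'
print 'hdf5 root:', hdf5dir
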
pythonPackages/pypies/pypies/config/__init__.py (new file, 33 additions)
@@ -0,0 +1,33 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# __init__.py for hdf5 implementation
+#
+#
+# SOFTWARE HISTORY
+#
+# Date          Ticket#     Engineer    Description
+# ------------  ----------  ----------- --------------------------
+# 01/10/13                  bkowal      Initial Creation.
+#
+#
+#

pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py (new file, 68 additions)

@@ -0,0 +1,68 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# Configuration for pypies logging
+#
+#
+# SOFTWARE HISTORY
+#
+# Date          Ticket#     Engineer    Description
+# ------------  ----------  ----------- --------------------------
+# 01/10/13                  bkowal      Initial Creation.
+#
+#
+#
+
+import os, ConfigParser
+
+class PypiesConfigurationManager:
+
+    def __init__(self):
+        self.__configLoaded = False
+
+        self.__initConfigLocation()
+        if (not self.__configLoc):
+            raise RuntimeError("No pypies.cfg found")
+
+        self.__loadConfig()
+
+    def __initConfigLocation(self):
+        self.__configLoc = '/awips2/pypies/conf/pypies.cfg'
+        if not os.path.exists(self.__configLoc):
+            print "Unable to find pypies.cfg at ", self.__configLoc
+            self.__configLoc = None
+        else:
+            print "Found pypies.cfg at ", self.__configLoc
+
+    def __loadConfig(self):
+        self.__scp = ConfigParser.SafeConfigParser()
+        self.__scp.read(self.__configLoc)
+        self.__configLoaded = True
+
+    def getConfigurationLocation(self):
+        return self.__configLoc
+
+    def hasConfigurationBeenLoaded(self):
+        return self.__configLoaded
+
+    def getConfiguration(self):
+        return self.__scp

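A hedged usage sketch of the new configuration manager defined above: construction fails fast with a RuntimeError when /awips2/pypies/conf/pypies.cfg is missing, so callers only handle the happy path. The surrounding script is illustrative, not part of the commit.

from pypies.config.pypiesConfigurationManager import PypiesConfigurationManager

mgr = PypiesConfigurationManager()   # raises RuntimeError if pypies.cfg is absent
if mgr.hasConfigurationBeenLoaded():
    scp = mgr.getConfiguration()     # a ConfigParser.SafeConfigParser
    print 'config read from', mgr.getConfigurationLocation()
    print 'hdf5 root is', scp.get('edex_data', 'hdf5dir')
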
@@ -29,7 +29,7 @@
 # Date          Ticket#     Engineer    Description
 # ------------  ----------  ----------- --------------------------
 # 08/17/10                  njensen     Initial Creation.
-#
+# 01/11/13                  bkowal      Pypies will now read the hdf5 root from configuration
 #
 #

@@ -37,12 +37,14 @@ from werkzeug import Request, Response, ClosingIterator
 import time, logging, os
 import pypies
 from pypies import IDataStore
+import pypies.config.pypiesConfigurationManager
 import dynamicserialize
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import *
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *

 logger = pypies.logger
 timeMap = pypies.timeMap
+hdf5Dir = None

 from pypies.impl import H5pyDataStore
 datastore = H5pyDataStore.H5pyDataStore()

@@ -60,6 +62,26 @@ datastoreMap = {
     CopyRequest: (datastore.copy, "CopyRequest")
 }

+pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
+if (pypiesConfigurationManager.hasConfigurationBeenLoaded()):
+    configLocation = pypiesConfigurationManager.getConfigurationLocation()
+    infoMessage = 'using ' + configLocation + ' for pypies config'
+    logger.info(infoMessage)
+
+    # determine the edex hdf5 root
+    scp = pypiesConfigurationManager.getConfiguration()
+    hdf5Dir = scp.get('edex_data', 'hdf5dir')
+    # add a trailing directory separator (when necessary)
+    if (not hdf5Dir.endswith('/')):
+        hdf5Dir = hdf5Dir + '/'
+
+    if not os.path.exists(hdf5Dir):
+        os.makedirs(hdf5Dir)
+    infoMessage = 'using hdf5 directory: ' + hdf5Dir
+    logger.info(infoMessage)
+
+# TODO: error and halt when configuration cannot be loaded
+
 @Request.application
 def pypies_response(request):
     timeMap.clear()

@@ -74,6 +96,9 @@ def pypies_response(request):
             resp.setError(msg)
             return __prepareResponse(resp)
         timeMap['deserialize']=time.time()-startTime
+        # add the hdf5 directory path to the file name
+        filename = hdf5Dir + obj.getFilename()
+        obj.setFilename(filename)

         clz = obj.__class__
         if logger.isEnabledFor(logging.DEBUG):

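The prepend added to pypies_response above is plain string concatenation, which is why the loader earlier in this file guarantees a trailing '/' on hdf5Dir. A small sketch of that same normalization in isolation, with a hypothetical helper name:

def prepend_hdf5_root(hdf5Dir, relativeName):
    # mirror the handler logic: ensure exactly one separator between
    # the configured root and the client-supplied relative file name
    if not hdf5Dir.endswith('/'):
        hdf5Dir = hdf5Dir + '/'
    return hdf5Dir + relativeName

assert prepend_hdf5_root('/data/hdf5', 'grid/GFS.h5') == '/data/hdf5/grid/GFS.h5'
assert prepend_hdf5_root('/data/hdf5/', 'grid/GFS.h5') == '/data/hdf5/grid/GFS.h5'
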
@@ -35,13 +35,15 @@

 import logging, os, ConfigParser
 import logging.handlers, logging.config
+import pypies.config.pypiesConfigurationManager

 class LogConfig:

     def __init__(self):
-        cfgLoc = self.__getConfigLocation()
-        if cfgLoc:
-            scp = self.__loadConfig(cfgLoc)
+        pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
+
+        if pypiesConfigurationManager.hasConfigurationBeenLoaded():
+            self.__configure(pypiesConfigurationManager)
             self.pypiesLogger = logging.getLogger('root')
             self.minutesLogger = logging.getLogger('minute')
             self.hoursLogger = logging.getLogger('hourly')

@@ -50,26 +52,14 @@ class LogConfig:
             self.minutesLogger = self.pypiesLogger
             self.hoursLogger = self.pypiesLogger

-    def __getConfigLocation(self):
-        configLoc = '/awips2/pypies/conf/pypies.cfg'
-        if not os.path.exists(configLoc):
-            print "Unable to find pypies.cfg at ", configLoc
-            configLoc = None
-        else:
-            print "Found pypies.cfg at ", configLoc
-        return configLoc
+    def __configure(self, configurationManager):
+        scp = configurationManager.getConfiguration()
+        print "using", configurationManager.getConfigurationLocation(), "for logging config"

-    def __loadConfig(self, configLoc):
-        scp = ConfigParser.SafeConfigParser()
-        if not configLoc:
-            raise RuntimeError("No pypies.cfg found")
-        else:
-            print "using", configLoc, "for logging config"
-            scp.read(configLoc)
         logFileDir = scp.get('handler_pypiesHandler', 'logFileDir')
         if not os.path.exists(logFileDir):
             os.makedirs(logFileDir)
-        logging.config.fileConfig(configLoc)
+        logging.config.fileConfig(configurationManager.getConfigurationLocation())

     def __getDefaultLogger(self):
         import logging, logging.handlers
