Issue #1469 - pypies will now read the hdf5 root directory from configuration. Relative paths can now be passed to pypies. Rebased to fix merge conflict.
Change-Id: I38873e478729a3644db7ce2813e9bd6c15d9bdf3
Former-commit-id: 4db9c6c321 [formerly ed2e78738b391bcb9a40ddde7b7a6acf7ccd1b30]
Former-commit-id: e2a12845d3
parent a3c5aded12
commit fba28ea69b
33 changed files with 302 additions and 328 deletions
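The shape of the change, reduced to a minimal before/after sketch. The lines are taken from the HDF5Util hunk directly below; the surrounding class is elided:

    // Before: CAVE resolved an absolute path under the server data directory.
    File file = new File(VizApp.getServerDataDir() + IPathManager.SEPARATOR
            + object.getPluginName() + IPathManager.SEPARATOR + path
            + IPathManager.SEPARATOR + fileName);

    // After: the client passes a plugin-relative path; pypies prepends the
    // hdf5 root it reads from its own configuration.
    File file = new File(object.getPluginName() + IPathManager.SEPARATOR
            + path + IPathManager.SEPARATOR + fileName);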
@@ -48,9 +48,8 @@ public class HDF5Util {
 String fileName = pathProvider.getHDFFileName(
 object.getPluginName(), persistable);

- file = new File(VizApp.getServerDataDir() + IPathManager.SEPARATOR
- + object.getPluginName() + IPathManager.SEPARATOR + path
- + IPathManager.SEPARATOR + fileName);
+ file = new File(object.getPluginName() + IPathManager.SEPARATOR
+ + path + IPathManager.SEPARATOR + fileName);

 return file;

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.core.localization.LocalizationManager;
 * ------------ ---------- ----------- --------------------------
 * 7/1/06 chammack Initial Creation.
 * Sep 12, 2012 1167 djohnson Add datadelivery servers.
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -79,8 +80,6 @@ public final class VizApp {

 private static String dataDeliveryQueryServer;

- private static String serverDataDir;
-
 static {
 ManagementFactory.getRuntimeMXBean().getName();
 }

@@ -257,14 +256,6 @@ public final class VizApp {
 VizApp.pypiesServer = pypiesServer;
 }

- public static String getServerDataDir() {
- return VizApp.serverDataDir;
- }
-
- public static void setServerDataDir(String serverDataDir) {
- VizApp.serverDataDir = serverDataDir;
- }
-
 private static String host = null;

 /**

@@ -49,6 +49,8 @@ import com.raytheon.uf.viz.core.status.StatusConstants;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Jan 16, 2008 njensen Initial creation
+ * Jan 14, 2013 1469 bkowal The hdf5 root will no longer be appended to the
+ * beginning of the file name.
 *
 * </pre>
 *

@@ -77,7 +79,7 @@ public class CubeUtil {
 if (record != null) {
 File file = HDF5Util.findHDF5Location(record);
 if (file != null)
- filename = file.getAbsolutePath();
+ filename = file.getPath();
 }
 return filename;
 }

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
 * ------------ ---------- ----------- --------------------------
 * Nov 5, 2009 mschenke Initial creation
 * Sep 12, 2012 1167 djohnson Add datadelivery servers.
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -126,7 +127,6 @@ public class LocalizationInitializer {
 VizApp.setHttpServer(resp.getHttpServer());
 VizApp.setJmsServer(resp.getJmsServer());
 VizApp.setPypiesServer(resp.getPypiesServer());
- VizApp.setServerDataDir(resp.getServerDataDir());
 VizServers.getInstance().setServerLocations(resp.getServerLocations());
 }
 }

@@ -91,6 +91,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient;
 * Mar 26, 2008 njensen Added rename() and getFileContents().
 * May 19, 2007 #1127 randerso Implemented error handling
 * Sep 12, 2012 1167 djohnson Add datadelivery servers.
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -223,7 +224,6 @@ public class LocalizationManager implements IPropertyChangeListener {
 VizApp.setHttpServer(resp.getHttpServer());
 VizApp.setJmsServer(resp.getJmsServer());
 VizApp.setPypiesServer(resp.getPypiesServer());
- VizApp.setServerDataDir(resp.getServerDataDir());
 VizServers.getInstance().setServerLocations(
 resp.getServerLocations());
 } catch (VizException e) {

@@ -42,6 +42,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Oct 12, 2009 mschenke Initial creation
+ * Jan 14, 2013 1469 bkowal The hdf5 root directory is no longer passed
+ * as an argument to the common TopoQuery constructor.
 *
 * </pre>
 *

@@ -59,8 +61,7 @@ public class TopoQuery implements ITopoQuery {
 * @return Initialized TopoQuery instance
 */
 public static synchronized ITopoQuery getInstance() {
- return com.raytheon.uf.edex.topo.TopoQuery.getInstance(
- VizApp.getServerDataDir(), 0);
+ return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0);
 }

 /**

@@ -68,8 +69,7 @@ public class TopoQuery implements ITopoQuery {
 */
 public static synchronized ITopoQuery getInstance(int topoLevel,
 boolean useCaching) {
- return com.raytheon.uf.edex.topo.TopoQuery.getInstance(
- VizApp.getServerDataDir(), 0);
+ return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0);
 }

 private TopoQuery(int level, boolean useCaching) {

@@ -48,6 +48,7 @@ import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Nov 8, 2011 mschenke Initial creation
+ * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer a preference.
 *
 * </pre>
 *

@@ -62,8 +63,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {

 private StringFieldEditor servicesServer;

- private StringFieldEditor serverDataDir;
-
 private Button connectivityButton;

 /**

@@ -104,12 +103,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {
 pypiesServer.setErrorMessage("Cannot connect to Pypies server");
 addField(pypiesServer);

- serverDataDir = new StringFieldEditor(
- ThinClientPreferenceConstants.P_SERVER_DATA_DIR,
- "&Server Data Dir: ", getFieldEditorParent());
-
- addField(serverDataDir);
-
 addConnectivityButton();
 }

@@ -198,7 +191,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage {
 boolean useProxies = this.useProxies.getBooleanValue();
 servicesServer.setEnabled(useProxies, connectivityButton.getParent());
 pypiesServer.setEnabled(useProxies, connectivityButton.getParent());
- serverDataDir.setEnabled(useProxies, connectivityButton.getParent());
 connectivityButton.setEnabled(useProxies);
 }

@@ -49,6 +49,7 @@ import com.raytheon.uf.viz.thinclient.ui.ThinClientConnectivityDialog;
 * ------------ ---------- ----------- --------------------------
 * Nov 23, 2011 bsteffen Initial creation
 * Dec 06, 2012 1396 njensen Added setting VizServers
+ * Jan 14, 2013 1469 bkowal Removed setting the hdf5 data directory
 *
 * </pre>
 *

@@ -86,17 +87,10 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer {
 String servicesProxy = store
 .getString(ThinClientPreferenceConstants.P_SERVICES_PROXY);
 LocalizationManager.getInstance().setCurrentServer(servicesProxy);
- String dataDir = VizApp.getServerDataDir();
- if (dataDir == null || dataDir.isEmpty()) {
- dataDir = store
- .getString(ThinClientPreferenceConstants.P_SERVER_DATA_DIR);
- VizApp.setServerDataDir(dataDir);
- }
- if (!disableJMS || dataDir == null || dataDir.isEmpty()) {
+ if (!disableJMS) {
 GetServersRequest req = new GetServersRequest();
 GetServersResponse resp = (GetServersResponse) ThriftClient
 .sendLocalizationRequest(req);
- VizApp.setServerDataDir(resp.getServerDataDir());
 if (!disableJMS) {
 VizApp.setJmsServer(resp.getJmsServer());
 }

@@ -117,8 +111,5 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer {
 VizApp.setJmsServer(null);
 }
 }
- store.setValue(ThinClientPreferenceConstants.P_SERVER_DATA_DIR,
- VizApp.getServerDataDir());
-
 }
 }

@@ -29,6 +29,7 @@ package com.raytheon.uf.viz.thinclient.preferences;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Oct 20, 2011 mschenke Initial creation
+ * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer a preference constant.
 *
 * </pre>
 *

@@ -50,8 +51,6 @@ public class ThinClientPreferenceConstants {

 public static String P_PYPIES_PROXY = "pypiesProxyAddress";

- public static String P_SERVER_DATA_DIR = "serverDataDir";
-
 public static String P_MENU_TIME_UPDATE_INTERVALS = "menuTimeUpdateInterval";

 public static String P_DATA_UPDATE_INTERVALS = "dataUpdateInterval";

@@ -54,6 +54,8 @@ import com.raytheon.viz.core.rsc.hdf5.FileBasedTileSet;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Feb 15, 2007 chammack Initial Creation.
+ * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer included in the
+ * DATA_FILE
 *
 * </pre>
 *

@@ -65,7 +67,7 @@ public class TopoTileSet extends FileBasedTileSet {
 private static String DATA_FILE = "/topo/srtm30.hdf";

 static {
- DATA_FILE = new File(VizApp.getServerDataDir(), DATA_FILE)
+ DATA_FILE = new File(DATA_FILE)
 .getAbsolutePath();
 }

@@ -38,6 +38,12 @@
 <adapterServiceQueue>edex.AdapterSrv</adapterServiceQueue>
 <resFolder>../conf/res</resFolder>
 <pluginScriptFolder>${env:edex.home}/conf/db/commonScripts/</pluginScriptFolder>
+ <!--
+ hdf5Dir now refers only to the local hdf5 directory; pypies keeps track
+ of its own hdf5 directory. The local hdf5 directory will only be used
+ by plugins that do not store / retrieve their data through pypies
+ (ex: QC).
+ -->
 <hdf5Dir>${env:edex.home}/data/hdf5</hdf5Dir>
 <shareDir>${env:edex.home}/data/share</shareDir>
 <utilityDir>${env:edex.home}/data/utility</utilityDir>

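The comment added above pins down the new contract: edex's local hdf5Dir is only for plugins that bypass pypies, and pypies resolves its own root. This is why datastore callers in the hunks below (DataStoreRepacker, DatabaseArchiver) now open stores with plugin-relative files. A minimal sketch of the resulting call pattern, using names from those hunks:

    // The relative path is all the client supplies; pypies prepends the
    // hdf5 root it reads from its own configuration.
    IDataStore ds = DataStoreFactory.getDataStore(new File(plugin));
    ds.repack(compression);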
@@ -36,8 +36,6 @@ import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.datastorage.Request;
 import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.datastorage.records.IDataRecord;
- import com.raytheon.uf.edex.core.props.EnvProperties;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;

 /**
 * Data access object for saving and retrieving data from the HDF5 repository.

@@ -51,6 +49,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
 * 7/24/07 353 bphillip Initial Check in
 * 20070914 379 jkorman Changed to use IPersistable populateDataStore
 * and getPersistenceTime methods.
+ * 01/14/13 1469 bkowal No longer retrieves the hdf5 data directory from the
+ * environment.
 * </pre>
 *
 * @author bphillip

@@ -58,27 +58,18 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
 */
 public class HDF5Dao {

- public static final String HDF5DIR = "HDF5DIR";
-
 public static final IHDFFilePathProvider DEFAULT_PATH_PROVIDER = DefaultPathProvider
 .getInstance();

 /** The logger */
 protected Log logger = LogFactory.getLog(getClass());

- private String hdf5Dir = null;
-
 private IHDFFilePathProvider pathProvider = null;

 /**
 * Construct an instance of the HDFDao using system default values.
 */
 public HDF5Dao() {
- EnvProperties properties = PropertiesFactory.getInstance()
- .getEnvProperties();
- if (properties != null) {
- hdf5Dir = properties.getEnvValue(HDF5DIR);
- }
 pathProvider = DEFAULT_PATH_PROVIDER;
 }

@@ -86,32 +77,12 @@ public class HDF5Dao {
 * Construct an instance of the HDFDao using user supplied properties and
 * path provider.
 *
- * @param properties
- * An environment properties instance that must contain an entry
- * from the property HDF5DIR.
- */
- public HDF5Dao(EnvProperties properties) {
- if (properties != null) {
- hdf5Dir = properties.getEnvValue(HDF5DIR);
- }
- pathProvider = DEFAULT_PATH_PROVIDER;
- }
-
- /**
- * Construct an instance of the HDFDao using user supplied properties and
- * path provider.
- *
- * @param properties
- * An environment properties instance that must contain an entry
- * from the property HDF5DIR.
 * @param pathProvider
 * The path provider to use that creates a path to a specific HDF
 * repository. This path provider must not provide the name of
- * the repository, and the path must be relative to the base
- * directory given in the properties HDF5DIR property.
+ * the repository.
 */
- public HDF5Dao(EnvProperties properties, IHDFFilePathProvider pathProvider) {
- this(properties);
+ public HDF5Dao(IHDFFilePathProvider pathProvider) {
 this.pathProvider = pathProvider;
 }

@@ -130,9 +101,8 @@ public class HDF5Dao {
 if (obj instanceof IPersistable) {
 IPersistable persistable = (IPersistable) obj;

- String persistDir = hdf5Dir
- + pathProvider.getHDFPath(obj.getPluginName(), persistable)
- + File.separator;
+ String persistDir = pathProvider.getHDFPath(obj.getPluginName(),
+ persistable) + File.separator;
 String archive = pathProvider.getHDFFileName(obj.getPluginName(),
 persistable);

@@ -166,9 +136,8 @@ public class HDF5Dao {
 if (obj instanceof PersistablePluginDataObject) {
 IPersistable pRecord = (IPersistable) obj;

- String persistDir = hdf5Dir
- + pathProvider.getHDFPath(obj.getPluginName(), pRecord)
- + File.separator;
+ String persistDir = pathProvider.getHDFPath(obj.getPluginName(),
+ pRecord) + File.separator;
 String archive = pathProvider.getHDFFileName(obj.getPluginName(),
 pRecord);

@@ -199,19 +168,4 @@ public class HDF5Dao {
 public void setPathProvider(IHDFFilePathProvider pathProvider) {
 this.pathProvider = pathProvider;
 }
-
- /**
- * @return the hdf5Dir
- */
- public String getHdf5Dir() {
- return hdf5Dir;
- }
-
- /**
- * @param hdf5Dir
- * the hdf5Dir to set
- */
- public void setHdf5Dir(String hdf5Dir) {
- this.hdf5Dir = hdf5Dir;
- }
 }

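HDF5Dao drops both the HDF5DIR lookup and the EnvProperties-based constructors, so construction reduces to the path provider alone. A hedged usage sketch (the call site is illustrative; the signatures are taken from the hunks above):

    // Old: new HDF5Dao(PropertiesFactory.getInstance().getEnvProperties(), provider)
    // New: no environment lookup; persistDir comes straight from the path provider.
    HDF5Dao dao = new HDF5Dao(DefaultPathProvider.getInstance());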
@@ -49,8 +49,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.time.TimeRange;
- import com.raytheon.uf.edex.core.props.EnvProperties;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.plugin.PluginFactory;

 /**

@@ -73,6 +71,8 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory;
 * 06/17/08 #940 bphillip Implemented GFE Locking
 * 06/19/08 njensen Added retrieval of discrete
 * 05/04/12 #574 dgilling Update class to better match AWIPS1.
+ * 01/14/13 #1469 bkowal The hdf5 data directory is no longer included
+ * in the gfeBaseDataDir.
 *
 * </pre>
 *

@@ -95,9 +95,7 @@ public abstract class GridDatabase {
 protected boolean valid;

 static {
- EnvProperties env = PropertiesFactory.getInstance().getEnvProperties();
- gfeBaseDataDir = env.getEnvValue("HDF5DIR") + File.separator + "gfe"
- + File.separator;
+ gfeBaseDataDir = "gfe" + File.separator;
 }

 /**

@@ -81,6 +81,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
 * call updateCaches().
 * 11/05/12 #1310 dgilling Remove code from updateCatches()
 * that sent notification to D2DParmIdCache.
+ * 01/14/13 #1469 bkowal Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -417,8 +418,7 @@ public class GribDao extends PluginDao {
 persistable.setHdfFileId(EDEXUtil.getServerId());

 // get the directory
- String directory = HDF5_DIR + File.separator + pdo.getPluginName()
- + File.separator
+ String directory = pdo.getPluginName() + File.separator
 + pathProvider.getHDFPath(pdo.getPluginName(), pdo);
 File dataStoreFile = new File(directory + File.separator
 + pathProvider.getHDFFileName(pdo.getPluginName(), persistable));

@@ -41,6 +41,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory;
 * ------------ ---------- ----------- --------------------------
 * Aug 6, 2009 mschenke Initial creation
 * Sep 12, 2012 1167 djohnson Add datadelivery servers.
+ * Jan 14, 2013 1469 bkowal No longer includes the hdf5 data directory
+ * in the response.
 *
 * </pre>
 *

@@ -65,14 +67,10 @@ public class GetServersHandler extends GenericRegistry<String, String>
 logger.info("jms.server=" + jmsServer);
 logger.info("pypies.server=" + pypiesServer);
 logger.info("server locations=" + registry);
+ ;
- String hdf5DataDir = PropertiesFactory.getInstance().getEnvProperties()
- .getEnvValue("HDF5DIR");
-
 response.setHttpServer(httpServer);
 response.setJmsServer(jmsServer);
 response.setPypiesServer(pypiesServer);
- response.setServerDataDir(hdf5DataDir);
 response.setServerLocations(Collections.unmodifiableMap(this.registry));

 return response;

@@ -49,10 +49,7 @@ import com.raytheon.uf.common.gridcoverage.PolarStereoGridCoverage;
 import com.raytheon.uf.common.gridcoverage.exception.GridCoverageException;
 import com.raytheon.uf.common.gridcoverage.subgrid.SubGrid;
 import com.raytheon.uf.common.localization.IPathManager;
- import com.raytheon.uf.common.localization.msgs.GetServersRequest;
- import com.raytheon.uf.common.localization.msgs.GetServersResponse;
 import com.raytheon.uf.common.parameter.lookup.ParameterLookup;
- import com.raytheon.uf.common.serialization.comm.RequestRouter;
 import com.vividsolutions.jts.geom.Coordinate;

 /**

@@ -66,6 +63,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Nov 14, 2012 bsteffen Initial creation
+ * Jan 14, 2013 1469 bkowal No longer needs to retrieve the location
+ * of the hdf5 data directory.
 *
 * </pre>
 *

@@ -75,8 +74,6 @@ import com.vividsolutions.jts.geom.Coordinate;

 public class GridDataRetriever {

- protected static String serverDataDir;
-
 protected GridRecord record;

 protected GridCoverage requestCoverage;

@@ -331,28 +328,7 @@ public class GridDataRetriever {
 String fileName = pathProvider.getHDFFileName(record.getPluginName(),
 record);

- return new File(getServerDataDir() + IPathManager.SEPARATOR
- + record.getPluginName() + IPathManager.SEPARATOR + path
+ return new File(record.getPluginName() + IPathManager.SEPARATOR + path
 + IPathManager.SEPARATOR + fileName);
 }
-
- private static synchronized String getServerDataDir()
- throws StorageException {
- if (serverDataDir == null) {
- // TODO cave already knows the server data dir in VizApp, and edex
- // has it in system properties but we can't access either because
- // this is common code, architecturally we need some way around
- // this. For now this will send it's own request which is slightly
- // wasteful but not terribly harmful.
- try {
- GetServersResponse response = (GetServersResponse) RequestRouter
- .route(new GetServersRequest());
- serverDataDir = response.getServerDataDir();
- } catch (Exception e) {
- throw new StorageException("Error communicating with server.",
- null, e);
- }
- }
- return serverDataDir;
- }
 }

@@ -36,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 * ------------ ---------- ----------- --------------------------
 * Aug 6, 2009 mschenke Initial creation
 * Sep 12, 2012 1167 djohnson Add datadelivery servers.
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -54,9 +55,6 @@ public class GetServersResponse implements ISerializableObject {
 @DynamicSerializeElement
 private String pypiesServer;

- @DynamicSerializeElement
- private String serverDataDir;
-
 @DynamicSerializeElement
 private Map<String, String> serverLocations;

@@ -84,14 +82,6 @@ public class GetServersResponse implements ISerializableObject {
 this.pypiesServer = pypiesServer;
 }

- public String getServerDataDir() {
- return serverDataDir;
- }
-
- public void setServerDataDir(String serverDataDir) {
- this.serverDataDir = serverDataDir;
- }
-
 /**
 * @return
 */

@@ -103,6 +103,8 @@ import com.vividsolutions.jts.geom.Polygon;
 * 6/29/12 #828 dgilling Force getPurgeRulesForPlugin()
 * to search only COMMON_STATIC.
 * Oct 10, 2012 1261 djohnson Add some generics wildcarding.
+ * Jan 14, 2013 1469 bkowal No longer retrieves the hdf5 data directory
+ * from the environment.
 * </pre>
 *
 * @author bphillip

@@ -119,10 +121,6 @@ public abstract class PluginDao extends CoreDao {
 /** The hdf5 file system suffix */
 public static final String HDF5_SUFFIX = ".h5";

- /** The base path of the hdf5 data store */
- public static final String HDF5_DIR = PropertiesFactory.getInstance()
- .getEnvProperties().getEnvValue("HDF5DIR");
-
 /** The base path of the folder containing HDF5 data for the owning plugin */
 public final String PLUGIN_HDF5_DIR;

@@ -156,8 +154,7 @@ public abstract class PluginDao extends CoreDao {
 }

 this.pluginName = pluginName;
- PLUGIN_HDF5_DIR = HDF5_DIR + File.separator + pluginName
- + File.separator;
+ PLUGIN_HDF5_DIR = pluginName + File.separator;
 dupCheckSql = dupCheckSql.replace(":tableName", PluginFactory
 .getInstance().getPrimaryTable(pluginName));
 pathProvider = PluginFactory.getInstance().getPathProvider(pluginName);

@@ -227,9 +224,7 @@ public abstract class PluginDao extends CoreDao {
 IPersistable persistable = (IPersistable) pdo;

 // get the directory
- String directory = HDF5_DIR
- + File.separator
- + pdo.getPluginName()
+ String directory = pdo.getPluginName()
 + File.separator
 + pathProvider.getHDFPath(pdo.getPluginName(),
 persistable);

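With HDF5_DIR gone, every PluginDao-derived path is plugin-relative. A hedged illustration (the "grid" plugin name and the edex.home prefix are examples, not taken from a call site in this commit):

    // Before: PLUGIN_HDF5_DIR ~ "<edex.home>/data/hdf5/grid/" (HDF5_DIR + plugin)
    // After:  PLUGIN_HDF5_DIR ~ "grid/"; pypies supplies the root at access time.
    String pluginHdf5Dir = pluginName + File.separator;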
@@ -66,7 +66,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.util.RunProcess;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
 import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
 import com.raytheon.uf.edex.database.cluster.ClusterTask;

@@ -84,6 +83,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * 09/19/2011 10955 rferrel Use RunProcess
 * 04/18/2012 DR 14694 D. Friedman Fixes for static topography generation
 * 05/09/2012 DR 14939 D. Friedman Fix errors in DR 14694
+ * 01/14/2013 1469 bkowal Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -116,9 +116,7 @@ public class StaticTopoData {
 private static final String DAT_GZ_SUFFIX = ".dat.gz";

 /** The base directory in which the topo files reside */
- private static final String FILE_PREFIX = PropertiesFactory.getInstance()
- .getEnvProperties().getEnvValue("HDF5DIR")
- + "/topo/";
+ private static final String FILE_PREFIX = "topo/";

 /** The file containing the complete static topo data sets */
 private static final File topoFile = new File(FILE_PREFIX + "staticTopo.h5");

@@ -538,8 +536,7 @@ public class StaticTopoData {

 for (TiledTopoSource source : topoSources) {
 statusHandler.handle(Priority.INFO, "Extracting topo data from "
- +
- source.getDataset());
+ + source.getDataset());
 GridReprojection reprojection = new GridReprojection(
 source.getGridGeometry(), inGeom);
 GridSampler sampler = new GridSampler(source, interp);

@@ -14,8 +14,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
- import com.raytheon.uf.edex.core.props.EnvProperties;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;

 /**

@@ -48,6 +46,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Nov 1, 2011 njensen Initial creation
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory
 *
 * </pre>
 *

@@ -62,14 +61,9 @@ public class DataStoreRepacker {

 private List<String> pluginsToRepack;

- private String hdf5Dir;
-
 private Compression compression = Compression.NONE;

 public DataStoreRepacker(String compression) {
- EnvProperties properties = PropertiesFactory.getInstance()
- .getEnvProperties();
- hdf5Dir = properties.getEnvValue("HDF5DIR");
 this.compression = Compression.valueOf(compression);
 }

@@ -81,8 +75,7 @@ public class DataStoreRepacker {
 // TODO change log statement if more than pointdata is hooked into this
 statusHandler.info("Starting repack of pointdata datastore");
 for (String plugin : pluginsToRepack) {
- String dir = hdf5Dir + File.separator + plugin;
- IDataStore ds = DataStoreFactory.getDataStore(new File(dir));
+ IDataStore ds = DataStoreFactory.getDataStore(new File(plugin));
 try {
 ds.repack(compression);
 } catch (StorageException e) {

@@ -28,8 +28,6 @@ import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
- import com.raytheon.uf.edex.core.props.EnvProperties;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;

 /**

@@ -44,6 +42,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Dec 8, 2011 njensen Initial creation
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -56,14 +55,9 @@ public class DataStoreArchiver {
 private static final transient IUFStatusHandler statusHandler = UFStatus
 .getHandler(DataStoreArchiver.class);

- private String hdf5Dir;
-
 private Compression compression = Compression.NONE;

 public DataStoreArchiver(String compression) {
- EnvProperties properties = PropertiesFactory.getInstance()
- .getEnvProperties();
- hdf5Dir = properties.getEnvValue("HDF5DIR");
 this.compression = Compression.valueOf(compression);
 }

@@ -73,6 +73,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Nov 17, 2011 rjpeter Initial creation
+ * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -228,8 +229,7 @@ public class DatabaseArchiver implements IPluginArchiver {

 for (String dataStoreFile : datastoreFilesToArchive) {
 IDataStore ds = DataStoreFactory.getDataStore(new File(
- FileUtil.join(PluginDao.HDF5_DIR, pluginName,
- dataStoreFile)));
+ FileUtil.join(pluginName, dataStoreFile)));
 int pathSep = dataStoreFile.lastIndexOf(File.separatorChar);
 String outputDir = (pathSep > 0 ? FileUtil.join(
 archivePath, pluginName,

@@ -38,8 +38,6 @@ import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.datastorage.StorageException;
 import com.raytheon.uf.common.hydro.spatial.HRAP;
 import com.raytheon.uf.edex.core.EdexException;
- import com.raytheon.uf.edex.core.props.EnvProperties;
- import com.raytheon.uf.edex.core.props.PropertiesFactory;
 import com.raytheon.uf.edex.database.dao.CoreDao;
 import com.raytheon.uf.edex.database.dao.DaoConfig;
 import com.vividsolutions.jts.geom.Coordinate;

@@ -54,6 +52,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Jan 06, 2011 5951 jnjanga Initial creation
+ * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -112,17 +111,12 @@ public class MpeLightningSrv {
 // set up a lightning record
 BinLightningRecord ltngRec = new BinLightningRecord(dataURI);

- EnvProperties properties = PropertiesFactory.getInstance()
- .getEnvProperties();
- ;
- String hdf5Dir = properties.getEnvValue("HDF5DIR");
-
 // create custom path provider for binlightning repository
 BinLightningPathProvider pathProvider = BinLightningPathProvider
 .getInstance();

 // obtain the hdf5 filename
- String persistDir = hdf5Dir + pathProvider.getHDFPath(ltngRec)
+ String persistDir = pathProvider.getHDFPath(ltngRec)
 + File.separator;
 String archive = pathProvider.getHDFFileName(
 ltngRec.getPluginName(), ltngRec);

@@ -78,6 +78,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
 * Date Ticket# Engineer Description
 * ------------ ---------- ----------- --------------------------
 * Apr 13, 2009 chammack Initial creation
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -446,8 +447,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends

 public File getFullFilePath(PluginDataObject p) {
 File file;
- String directory = HDF5_DIR + File.separator + p.getPluginName()
- + File.separator
+ String directory = p.getPluginName() + File.separator
 + pathProvider.getHDFPath(p.getPluginName(), (IPersistable) p);
 file = new File(directory
 + File.separator

@@ -708,9 +708,7 @@ public abstract class PointDataPluginDao<T extends PluginDataObject> extends
 }
 bm.putAll(obj);
 T bean = (T) bm.getBean();
- return HDF5_DIR
- + File.separator
- + this.pluginName
+ return this.pluginName
 + File.separator
 + this.pathProvider.getHDFPath(this.pluginName,
 (IPersistable) bean)

@@ -87,7 +87,8 @@ import com.vividsolutions.jts.geom.Coordinate;
 * 11/19/2007 #377 randerso Initial creation
 * Jun 13, 2008 #1160 randerso Moved to server side
 * 03/09/2012 DR 14581 D. Friedman Fix grid referencing and use custom
- * nearest-neighbor resampling.
+ * nearest-neighbor resampling.i
+ * 01/14/2013 #1469 bkowal Removed the hdf5 data directory.
 *
 * </pre>
 *

@@ -117,19 +118,7 @@ public class TopoQuery implements ITopoQuery {
 * @return Initialized TopoQuery instance
 */
 public static synchronized TopoQuery getInstance(int topoLevel) {
- String hdf5Dir = null;
-
- EnvProperties properties = PropertiesFactory.getInstance()
- .getEnvProperties();
- if (properties != null) {
- hdf5Dir = properties.getEnvValue("HDF5DIR");
- }
- return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel);
- }
-
- public static synchronized TopoQuery getInstance(String hdf5Dir,
- int topoLevel) {
- return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel);
+ return getInstance(new File(TOPO_FILE), topoLevel);
 }

 public static synchronized TopoQuery getInstance(File hdf5File,

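Both TopoQuery entry points now build the topo path without an hdf5Dir prefix, and the String-based getInstance(String, int) overload is gone. A hedged caller's sketch (assuming the usual ITopoQuery import; 0 is the level both viz-side overloads pass in the hunks above):

    // The viz wrapper delegates with only a level; TOPO_FILE is now relative.
    ITopoQuery query = com.raytheon.uf.edex.topo.TopoQuery.getInstance(0);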
@ -69,6 +69,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException;
|
||||||
* ------------ ---------- ----------- --------------------------
|
* ------------ ---------- ----------- --------------------------
|
||||||
* 4/7/09 1994 bphillip Initial Creation
|
* 4/7/09 1994 bphillip Initial Creation
|
||||||
* 12/16/10 mli extend NcepDefaultPluginDao to enable purge
|
* 12/16/10 mli extend NcepDefaultPluginDao to enable purge
|
||||||
|
* 01/14/13 1469 bkowal Removed the hdf5 data directory.
|
||||||
*
|
*
|
||||||
* </pre>
|
* </pre>
|
||||||
*
|
*
|
||||||
|
@ -112,70 +113,70 @@ public class NcgribDao extends NcepDefaultPluginDao {
|
||||||
this("ncgrib");
|
this("ncgrib");
|
||||||
}
|
}
|
||||||
|
|
||||||
// public void purgeExpiredData() {
|
// public void purgeExpiredData() {
|
||||||
// QueryResult models = null;
|
// QueryResult models = null;
|
||||||
// try {
|
// try {
|
||||||
// models = (QueryResult) executeNativeSql(MODEL_QUERY);
|
// models = (QueryResult) executeNativeSql(MODEL_QUERY);
|
||||||
// } catch (DataAccessLayerException e) {
|
// } catch (DataAccessLayerException e) {
|
||||||
// logger.error("Error purging ncgrib data. Unable to get models", e);
|
// logger.error("Error purging ncgrib data. Unable to get models", e);
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// String currentModel = null;
|
// String currentModel = null;
|
||||||
// for (int i = 0; i < models.getResultCount(); i++) {
|
// for (int i = 0; i < models.getResultCount(); i++) {
|
||||||
// currentModel = (String) models.getRowColumnValue(i, 0);
|
// currentModel = (String) models.getRowColumnValue(i, 0);
|
||||||
// QueryResult refTimes = null;
|
// QueryResult refTimes = null;
|
||||||
// try {
|
// try {
|
||||||
// refTimes = (QueryResult) executeNativeSql(REFTIME_QUERY
|
// refTimes = (QueryResult) executeNativeSql(REFTIME_QUERY
|
||||||
// .replace("?", currentModel));
|
// .replace("?", currentModel));
|
||||||
// } catch (DataAccessLayerException e) {
|
// } catch (DataAccessLayerException e) {
|
||||||
// logger
|
// logger
|
||||||
// .error("Error purging ncgrib data. Unable to get reference times for model ["
|
// .error("Error purging ncgrib data. Unable to get reference times for model ["
|
||||||
// + currentModel + "]");
|
// + currentModel + "]");
|
||||||
// continue;
|
// continue;
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// // FIXME: Add rules for purging here instead of just keeping 2
|
// // FIXME: Add rules for purging here instead of just keeping 2
|
||||||
// // runs
|
// // runs
|
||||||
// List<String> filesKept = new ArrayList<String>();
|
// List<String> filesKept = new ArrayList<String>();
|
||||||
// File modelDirectory = new File(PLUGIN_HDF5_DIR + File.separator
|
// File modelDirectory = new File(PLUGIN_HDF5_DIR + File.separator
|
||||||
// + currentModel);
|
// + currentModel);
|
||||||
//
|
//
|
||||||
// for (int j = 0; j < refTimes.getResultCount(); j++) {
|
// for (int j = 0; j < refTimes.getResultCount(); j++) {
|
||||||
// Date time = (Date) refTimes.getRowColumnValue(j, 0);
|
// Date time = (Date) refTimes.getRowColumnValue(j, 0);
|
||||||
// File hdf5File = new File(modelDirectory.getAbsolutePath()
|
// File hdf5File = new File(modelDirectory.getAbsolutePath()
|
||||||
// + File.separator
|
// + File.separator
|
||||||
// + ((NcgribPathProvider) pathProvider).formatTime(time)
|
// + ((NcgribPathProvider) pathProvider).formatTime(time)
|
||||||
// + ".h5");
|
// + ".h5");
|
||||||
//
|
//
|
||||||
// if (j < MODELCOUNT) {
|
// if (j < MODELCOUNT) {
|
||||||
// filesKept.add(hdf5File.getAbsolutePath());
|
// filesKept.add(hdf5File.getAbsolutePath());
|
||||||
// continue;
|
// continue;
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// try {
|
// try {
|
||||||
// purgeDb(time, currentModel);
|
// purgeDb(time, currentModel);
|
||||||
// } catch (DataAccessLayerException e) {
|
// } catch (DataAccessLayerException e) {
|
||||||
// logger.error("Error purging database for ncgrib model ["
|
// logger.error("Error purging database for ncgrib model ["
|
||||||
// + currentModel + "]");
|
// + currentModel + "]");
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// List<File> files = FileUtil.listFiles(modelDirectory, fileFilter,
|
// List<File> files = FileUtil.listFiles(modelDirectory, fileFilter,
|
||||||
// false);
|
// false);
|
||||||
//
|
//
|
||||||
// for (File file : files) {
|
// for (File file : files) {
|
||||||
// if (!filesKept.contains(file.getAbsolutePath())) {
|
// if (!filesKept.contains(file.getAbsolutePath())) {
|
||||||
// if (!file.delete()) {
|
// if (!file.delete()) {
|
||||||
// logger
|
// logger
|
||||||
// .error("Error purging HDF5 files for ncgrib model ["
|
// .error("Error purging HDF5 files for ncgrib model ["
|
||||||
// + currentModel + "]");
|
// + currentModel + "]");
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// }
|
// }
|
||||||
|
|
||||||
private int purgeDb(final Date date, String modelName)
|
private int purgeDb(final Date date, String modelName)
|
||||||
throws DataAccessLayerException {
|
throws DataAccessLayerException {
|
||||||
|
@ -198,8 +199,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
|
||||||
AbstractStorageRecord hybridLevels = null;
|
AbstractStorageRecord hybridLevels = null;
|
||||||
AbstractStorageRecord thinnedPts = null;
|
AbstractStorageRecord thinnedPts = null;
|
||||||
|
|
||||||
//System.out.println (" good data to be populated, rec datauri=" + gribRec.getDataURI());
|
// System.out.println (" good data to be populated, rec datauri=" +
|
||||||
//System.out.println (" good data to be populated, rec messagedata=" + gribRec.getMessageData());
|
// gribRec.getDataURI());
|
||||||
|
// System.out.println
|
||||||
|
// (" good data to be populated, rec messagedata=" +
|
||||||
|
// gribRec.getMessageData());
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Stores the binary data to the HDF5 data store
|
* Stores the binary data to the HDF5 data store
|
||||||
|
@@ -210,9 +214,9 @@ public class NcgribDao extends NcepDefaultPluginDao {
                 long[] sizes = new long[] {
                         (gribRec.getSpatialObject()).getNx(),
                         (gribRec.getSpatialObject()).getNy() };
-                storageRecord = new FloatDataRecord("Data", gribRec
-                        .getDataURI(), (float[]) gribRec.getMessageData(),
-                        2, sizes);
+                storageRecord = new FloatDataRecord("Data",
+                        gribRec.getDataURI(),
+                        (float[]) gribRec.getMessageData(), 2, sizes);
             } else
                 throw new Exception(
                         "Cannot create data record, spatialData = "
@@ -231,8 +235,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any data from the local section if present
              */
             if (gribRec.isLocalSectionUsed()) {
-                localSection = new IntegerDataRecord(LOCAL_SECTION, gribRec
-                        .getDataURI(), gribRec.getLocalSection());
+                localSection = new IntegerDataRecord(LOCAL_SECTION,
+                        gribRec.getDataURI(), gribRec.getLocalSection());
                 localSection.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(localSection);
             }
@@ -241,8 +245,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any hybrid coordinate data if present
              */
             if (gribRec.isHybridGrid()) {
-                hybridLevels = new FloatDataRecord(HYBRID_LEVELS, gribRec
-                        .getDataURI(), gribRec.getHybridCoordList());
+                hybridLevels = new FloatDataRecord(HYBRID_LEVELS,
+                        gribRec.getDataURI(), gribRec.getHybridCoordList());
                 hybridLevels.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(hybridLevels);
             }
@@ -251,8 +255,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
              * Stores any thinned point data for quasi-regular grids if present
              */
             if (gribRec.isThinnedGrid()) {
-                thinnedPts = new IntegerDataRecord(THINNED_PTS, gribRec
-                        .getDataURI(), gribRec.getThinnedPts());
+                thinnedPts = new IntegerDataRecord(THINNED_PTS,
+                        gribRec.getDataURI(), gribRec.getThinnedPts());
                 thinnedPts.setCorrelationObject(gribRec);
                 dataStore.addDataRecord(thinnedPts);
             }
@@ -318,11 +322,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
             NcgribModel model = rec.getModelInfo();
             if (model.getParameterName() == null
                     || model.getParameterName().equals("Missing")) {
-                //System.out.println (" persist missing or null, rec datauri=" + rec.getDataURI());
+                // System.out.println (" persist missing or null, rec datauri="
+                // + rec.getDataURI());
 
-                logger
-                        .info("Discarding record due to missing or unknown parameter mapping: "
-                                + record);
+                logger.info("Discarding record due to missing or unknown parameter mapping: "
+                        + record);
             } else {
                 boolean validLevel = false;
                 Level level = model.getLevel();
@@ -339,9 +343,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
                 if (validLevel) {
                     toPersist.add(rec);
                 } else {
-                    logger
-                            .info("Discarding record due to missing or unknown level mapping: "
-                                    + record);
+                    logger.info("Discarding record due to missing or unknown level mapping: "
+                            + record);
                 }
             }
         }
@@ -367,11 +370,11 @@ public class NcgribDao extends NcepDefaultPluginDao {
             NcgribModel model = rec.getModelInfo();
             if (model.getParameterName() == null
                     || model.getParameterName().equals("Missing")) {
-                //System.out.println (" verify missing or null, rec datauri=" + rec.getDataURI());
+                // System.out.println (" verify missing or null, rec datauri=" +
+                // rec.getDataURI());
 
-                logger
-                        .info("Discarding record due to missing or unknown parameter mapping: "
-                                + record);
+                logger.info("Discarding record due to missing or unknown parameter mapping: "
+                        + record);
             } else {
                 boolean validLevel = false;
                 Level level = model.getLevel();
@@ -388,9 +391,8 @@ public class NcgribDao extends NcepDefaultPluginDao {
                 if (validLevel) {
                     toPersist.add(rec);
                 } else {
-                    logger
-                            .info("Discarding record due to missing or unknown level mapping: "
-                                    + record);
+                    logger.info("Discarding record due to missing or unknown level mapping: "
+                            + record);
                 }
             }
         }
@@ -404,8 +406,7 @@ public class NcgribDao extends NcepDefaultPluginDao {
         persistable.setHdfFileId(EDEXUtil.getServerId());
 
         // get the directory
-        String directory = HDF5_DIR + File.separator + pdo.getPluginName()
-                + File.separator
+        String directory = pdo.getPluginName() + File.separator
                 + pathProvider.getHDFPath(this.pluginName, persistable);
         File dataStoreFile = new File(directory + File.separator
                 + pathProvider.getHDFFileName(pdo.getPluginName(), persistable));
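
Note: with the HDF5_DIR prefix gone, the dao now builds a path relative to the pypies hdf5 root (plugin name first, then the path provider's subdirectory). A minimal Python sketch of the resulting layout, using hypothetical plugin/path/file names for illustration only:

    import os

    def build_relative_hdf5_path(plugin_name, hdf_path, hdf_file_name):
        # no hdf5 root prefix here -- pypies prepends its configured root later
        return os.path.join(plugin_name, hdf_path, hdf_file_name)

    print build_relative_hdf5_path('ncgrib', 'gfs', 'ncgrib-gfs.h5')
    # -> ncgrib/gfs/ncgrib-gfs.h5
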
@@ -1543,8 +1543,7 @@ public class Dgdriv {
             // file = new File(File.separator + dataURI.split("/")[1]
             //     + File.separator + path + File.separator + sb.toString());
             //} else if (DataMode.getSystemMode() == DataMode.PYPIES) {
-            file = new File(VizApp.getServerDataDir() + File.separator
-                    + dataURI.split("/")[1] + File.separator + path
+            file = new File(dataURI.split("/")[1] + File.separator + path
                     + File.separator + sb.toString());
             //} else {
             //    file = new File(VizApp.getDataDir() + File.separator
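
The same pattern applies here: the plugin name is recovered from the dataURI (the token after the first '/') instead of prefixing VizApp.getServerDataDir(). A small sketch of that split, with a made-up URI:

    # hypothetical dataURI; only the second '/'-separated token matters here
    dataURI = '/ncgrib/2013-01-14_00:00:00.0/GFS'
    plugin = dataURI.split('/')[1]
    print plugin   # -> ncgrib
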
@@ -1058,8 +1058,7 @@ public class TestDgdriv {
             // file = new File(File.separator + dataURI.split("/")[1]
             //     + File.separator + path + File.separator + sb.toString());
             //} else if (DataMode.getSystemMode() == DataMode.PYPIES) {
-            file = new File(VizApp.getServerDataDir() + File.separator
-                    + dataURI.split("/")[1] + File.separator + path
+            file = new File(dataURI.split("/")[1] + File.separator + path
                     + File.separator + sb.toString());
             //} else {
             //    file = new File(VizApp.getDataDir() + File.separator
@@ -32,7 +32,8 @@
 #
 #
 
+[edex_data]
+hdf5dir=/awips2/edex/data/hdf5
 
 [loggers]
 keys=root,minutes,hours
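
The new [edex_data] section is what pypies reads at startup. A minimal sketch of reading it the same way the server code below does (Python 2 ConfigParser, as used throughout pypies):

    import ConfigParser

    scp = ConfigParser.SafeConfigParser()
    scp.read('/awips2/pypies/conf/pypies.cfg')
    hdf5Dir = scp.get('edex_data', 'hdf5dir')
    # normalize with a trailing separator, as the server does
    if not hdf5Dir.endswith('/'):
        hdf5Dir = hdf5Dir + '/'
    print hdf5Dir   # -> /awips2/edex/data/hdf5/
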
pythonPackages/pypies/pypies/config/__init__.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+
+#
+# __init__.py for hdf5 implementation
+#
+#
+# SOFTWARE HISTORY
+#
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# 01/10/13                bkowal      Initial Creation.
+#
+#
+#
pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py (new file, 68 lines)

@@ -0,0 +1,68 @@
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#
+# U.S. EXPORT CONTROLLED TECHNICAL DATA
+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
+#
+# Contractor Name:        Raytheon Company
+# Contractor Address:     6825 Pine Street, Suite 340
+#                         Mail Stop B8
+#                         Omaha, NE 68106
+#                         402.291.0100
+#
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+
+#
+# Configuration for pypies logging
+#
+#
+# SOFTWARE HISTORY
+#
+# Date         Ticket#    Engineer    Description
+# ------------ ---------- ----------- --------------------------
+# 01/10/13                bkowal      Initial Creation.
+#
+#
+#
+
+import os, ConfigParser
+
+class PypiesConfigurationManager:
+
+    def __init__(self):
+        self.__configLoaded = False
+
+        self.__initConfigLocation()
+        if (not self.__configLoc):
+            raise RuntimeError("No pypies.cfg found")
+
+        self.__loadConfig()
+
+    def __initConfigLocation(self):
+        self.__configLoc = '/awips2/pypies/conf/pypies.cfg'
+        if not os.path.exists(self.__configLoc):
+            print "Unable to find pypies.cfg at ", self.__configLoc
+            self.__configLoc = None
+        else:
+            print "Found pypies.cfg at ", self.__configLoc
+
+    def __loadConfig(self):
+        self.__scp = ConfigParser.SafeConfigParser()
+        self.__scp.read(self.__configLoc)
+        self.__configLoaded = True
+
+    def getConfigurationLocation(self):
+        return self.__configLoc
+
+    def hasConfigurationBeenLoaded(self):
+        return self.__configLoaded
+
+    def getConfiguration(self):
+        return self.__scp
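
A usage sketch for the new manager, based only on the methods defined above; the surrounding prints are illustrative:

    import pypies.config.pypiesConfigurationManager

    mgr = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
    if mgr.hasConfigurationBeenLoaded():
        scp = mgr.getConfiguration()   # a SafeConfigParser instance
        print 'config file:', mgr.getConfigurationLocation()
        print 'hdf5 root:  ', scp.get('edex_data', 'hdf5dir')
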
@@ -29,7 +29,7 @@
 #
 # Date         Ticket#    Engineer    Description
 # ------------ ---------- ----------- --------------------------
 # 08/17/10                njensen     Initial Creation.
-#
+# 01/11/13                bkowal      Pypies will now read the hdf5 root from configuration
 #
 #
@@ -37,12 +37,14 @@ from werkzeug import Request, Response, ClosingIterator
 import time, logging, os
 import pypies
 from pypies import IDataStore
+import pypies.config.pypiesConfigurationManager
 import dynamicserialize
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import *
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *
 
 logger = pypies.logger
 timeMap = pypies.timeMap
+hdf5Dir = None
 
 from pypies.impl import H5pyDataStore
 datastore = H5pyDataStore.H5pyDataStore()
@@ -60,6 +62,26 @@ datastoreMap = {
     CopyRequest: (datastore.copy, "CopyRequest")
 }
 
+pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
+if (pypiesConfigurationManager.hasConfigurationBeenLoaded()):
+    configLocation = pypiesConfigurationManager.getConfigurationLocation()
+    infoMessage = 'using ' + configLocation + ' for pypies config'
+    logger.info(infoMessage)
+
+    # determine the edex hdf5 root
+    scp = pypiesConfigurationManager.getConfiguration()
+    hdf5Dir = scp.get('edex_data', 'hdf5dir')
+    # add a trailing directory separator (when necessary)
+    if (not hdf5Dir.endswith('/')):
+        hdf5Dir = hdf5Dir + '/'
+
+    if not os.path.exists(hdf5Dir):
+        os.makedirs(hdf5Dir)
+    infoMessage = 'using hdf5 directory: ' + hdf5Dir
+    logger.info(infoMessage)
+
+# TODO: error and halt when configuration cannot be loaded
+
 @Request.application
 def pypies_response(request):
     timeMap.clear()
@@ -74,6 +96,9 @@ def pypies_response(request):
             resp.setError(msg)
             return __prepareResponse(resp)
         timeMap['deserialize']=time.time()-startTime
+        # add the hdf5 directory path to the file name
+        filename = hdf5Dir + obj.getFilename()
+        obj.setFilename(filename)
 
         clz = obj.__class__
         if logger.isEnabledFor(logging.DEBUG):
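
Taken together, the two hunks above mean a client can now send a relative file name and pypies resolves it against the configured root. A self-contained sketch of that resolution; FakeRequest is a hypothetical stand-in for a deserialized pypies request:

    hdf5Dir = '/awips2/edex/data/hdf5/'   # value read from pypies.cfg

    class FakeRequest(object):
        def __init__(self, filename):
            self._filename = filename
        def getFilename(self):
            return self._filename
        def setFilename(self, filename):
            self._filename = filename

    obj = FakeRequest('ncgrib/gfs/ncgrib-gfs.h5')   # hypothetical relative path
    obj.setFilename(hdf5Dir + obj.getFilename())
    print obj.getFilename()
    # -> /awips2/edex/data/hdf5/ncgrib/gfs/ncgrib-gfs.h5
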
@@ -35,13 +35,15 @@
 
 import logging, os, ConfigParser
 import logging.handlers, logging.config
+import pypies.config.pypiesConfigurationManager
 
 class LogConfig:
 
     def __init__(self):
-        cfgLoc = self.__getConfigLocation()
-        if cfgLoc:
-            scp = self.__loadConfig(cfgLoc)
+        pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
+
+        if pypiesConfigurationManager.hasConfigurationBeenLoaded():
+            self.__configure(pypiesConfigurationManager)
             self.pypiesLogger = logging.getLogger('root')
             self.minutesLogger = logging.getLogger('minute')
             self.hoursLogger = logging.getLogger('hourly')
@@ -50,26 +52,14 @@ class LogConfig:
             self.minutesLogger = self.pypiesLogger
             self.hoursLogger = self.pypiesLogger
 
-    def __getConfigLocation(self):
-        configLoc = '/awips2/pypies/conf/pypies.cfg'
-        if not os.path.exists(configLoc):
-            print "Unable to find pypies.cfg at ", configLoc
-            configLoc = None
-        else:
-            print "Found pypies.cfg at ", configLoc
-        return configLoc
-
-    def __loadConfig(self, configLoc):
-        scp = ConfigParser.SafeConfigParser()
-        if not configLoc:
-            raise RuntimeError("No pypies.cfg found")
-        else:
-            print "using", configLoc, "for logging config"
-        scp.read(configLoc)
+    def __configure(self, configurationManager):
+        scp = configurationManager.getConfiguration()
+        print "using", configurationManager.getConfigurationLocation(), "for logging config"
         logFileDir = scp.get('handler_pypiesHandler', 'logFileDir')
         if not os.path.exists(logFileDir):
             os.makedirs(logFileDir)
-        logging.config.fileConfig(configLoc)
+        logging.config.fileConfig(configurationManager.getConfigurationLocation())
 
     def __getDefaultLogger(self):
         import logging, logging.handlers
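
For reference, a sketch of how LogConfig now bootstraps logging through the shared configuration; this mirrors the __configure() method above rather than adding new behavior:

    import os, logging, logging.config
    import pypies.config.pypiesConfigurationManager

    mgr = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager()
    if mgr.hasConfigurationBeenLoaded():
        scp = mgr.getConfiguration()
        logFileDir = scp.get('handler_pypiesHandler', 'logFileDir')
        if not os.path.exists(logFileDir):
            os.makedirs(logFileDir)
        logging.config.fileConfig(mgr.getConfigurationLocation())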