From fba28ea69b8003593ddb7b70644952178a7e58fa Mon Sep 17 00:00:00 2001 From: Bryan Kowal Date: Mon, 14 Jan 2013 11:50:00 -0600 Subject: [PATCH] Issue #1469 - pypies will now read the hdf5 root directory from configuration. Relative paths can now be passed to pypies. Rebased to fix merge conflict. Change-Id: I38873e478729a3644db7ce2813e9bd6c15d9bdf3 Former-commit-id: 4db9c6c321f26d9a198c6ef07e55b18f77a273a1 [formerly ed2e78738b391bcb9a40ddde7b7a6acf7ccd1b30] Former-commit-id: e2a12845d3e2dc4ff1bec07188010b982398cf78 --- .../com/raytheon/uf/viz/core/HDF5Util.java | 5 +- .../src/com/raytheon/uf/viz/core/VizApp.java | 11 +- .../uf/viz/core/datastructure/CubeUtil.java | 4 +- .../localization/LocalizationInitializer.java | 2 +- .../localization/LocalizationManager.java | 2 +- .../raytheon/uf/viz/core/topo/TopoQuery.java | 8 +- .../ThinClientServerPreferences.java | 10 +- .../ThinClientLocalizationInitializer.java | 13 +- .../ThinClientPreferenceConstants.java | 3 +- .../raytheon/viz/core/topo/TopoTileSet.java | 4 +- .../esb/conf/res/base/environment.xml | 6 + .../src/com/raytheon/edex/db/dao/HDF5Dao.java | 62 +----- .../gfe/server/database/GridDatabase.java | 8 +- .../edex/plugin/grib/dao/GribDao.java | 4 +- .../edex/services/GetServersHandler.java | 8 +- .../grid/datastorage/GridDataRetriever.java | 30 +-- .../localization/msgs/GetServersResponse.java | 12 +- .../uf/edex/database/plugin/PluginDao.java | 13 +- .../grid/staticdata/topo/StaticTopoData.java | 9 +- .../edex/maintenance/DataStoreRepacker.java | 11 +- .../archive/DataStoreArchiver.java | 8 +- .../maintenance/archive/DatabaseArchiver.java | 4 +- .../uf/edex/ohd/pproc/MpeLightningSrv.java | 10 +- .../uf/edex/pointdata/PointDataPluginDao.java | 8 +- .../com/raytheon/uf/edex/topo/TopoQuery.java | 17 +- .../edex/plugin/ncgrib/dao/NcgribDao.java | 189 +++++++++--------- .../ncep/viz/rsc/ncgrid/dgdriv/Dgdriv.java | 3 +- .../viz/rsc/ncgrid/dgdriv/TestDgdriv.java | 3 +- pythonPackages/pypies/pypies.cfg | 3 +- .../pypies/pypies/config/__init__.py | 33 +++ .../config/pypiesConfigurationManager.py | 68 +++++++ pythonPackages/pypies/pypies/handlers.py | 29 ++- .../pypies/pypies/logging/logConfig.py | 30 +-- 33 files changed, 302 insertions(+), 328 deletions(-) create mode 100644 pythonPackages/pypies/pypies/config/__init__.py create mode 100644 pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/HDF5Util.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/HDF5Util.java index 6fddc7cc50..1eedaffc78 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/HDF5Util.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/HDF5Util.java @@ -48,9 +48,8 @@ public class HDF5Util { String fileName = pathProvider.getHDFFileName( object.getPluginName(), persistable); - file = new File(VizApp.getServerDataDir() + IPathManager.SEPARATOR - + object.getPluginName() + IPathManager.SEPARATOR + path - + IPathManager.SEPARATOR + fileName); + file = new File(object.getPluginName() + IPathManager.SEPARATOR + + path + IPathManager.SEPARATOR + fileName); } return file; diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/VizApp.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/VizApp.java index 51f946ad63..642e0ea3bf 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/VizApp.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/VizApp.java @@ -49,6 +49,7 @@ import 
com.raytheon.uf.viz.core.localization.LocalizationManager; * ------------ ---------- ----------- -------------------------- * 7/1/06 chammack Initial Creation. * Sep 12, 2012 1167 djohnson Add datadelivery servers. + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -79,8 +80,6 @@ public final class VizApp { private static String dataDeliveryQueryServer; - private static String serverDataDir; - static { ManagementFactory.getRuntimeMXBean().getName(); } @@ -257,14 +256,6 @@ public final class VizApp { VizApp.pypiesServer = pypiesServer; } - public static String getServerDataDir() { - return VizApp.serverDataDir; - } - - public static void setServerDataDir(String serverDataDir) { - VizApp.serverDataDir = serverDataDir; - } - private static String host = null; /** diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/datastructure/CubeUtil.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/datastructure/CubeUtil.java index 12ea483717..7a8369caa4 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/datastructure/CubeUtil.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/datastructure/CubeUtil.java @@ -49,6 +49,8 @@ import com.raytheon.uf.viz.core.status.StatusConstants; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jan 16, 2008 njensen Initial creation + * Jan 14, 2013 1469 bkowal The hdf5 root will no longer be appended to the + * beginning of the file name. * * * @@ -77,7 +79,7 @@ public class CubeUtil { if (record != null) { File file = HDF5Util.findHDF5Location(record); if (file != null) - filename = file.getAbsolutePath(); + filename = file.getPath(); } return filename; } diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationInitializer.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationInitializer.java index 8dabc2bc5e..0cd5778453 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationInitializer.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationInitializer.java @@ -49,6 +49,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient; * ------------ ---------- ----------- -------------------------- * Nov 5, 2009 mschenke Initial creation * Sep 12, 2012 1167 djohnson Add datadelivery servers. + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -126,7 +127,6 @@ public class LocalizationInitializer { VizApp.setHttpServer(resp.getHttpServer()); VizApp.setJmsServer(resp.getJmsServer()); VizApp.setPypiesServer(resp.getPypiesServer()); - VizApp.setServerDataDir(resp.getServerDataDir()); VizServers.getInstance().setServerLocations(resp.getServerLocations()); } } diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationManager.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationManager.java index 43ca8f3a26..9d946e81af 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationManager.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/localization/LocalizationManager.java @@ -91,6 +91,7 @@ import com.raytheon.uf.viz.core.requests.ThriftClient; * Mar 26, 2008 njensen Added rename() and getFileContents(). * May 19, 2007 #1127 randerso Implemented error handling * Sep 12, 2012 1167 djohnson Add datadelivery servers. 
+ * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -223,7 +224,6 @@ public class LocalizationManager implements IPropertyChangeListener { VizApp.setHttpServer(resp.getHttpServer()); VizApp.setJmsServer(resp.getJmsServer()); VizApp.setPypiesServer(resp.getPypiesServer()); - VizApp.setServerDataDir(resp.getServerDataDir()); VizServers.getInstance().setServerLocations( resp.getServerLocations()); } catch (VizException e) { diff --git a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/topo/TopoQuery.java b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/topo/TopoQuery.java index f1d10d32aa..856e279517 100644 --- a/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/topo/TopoQuery.java +++ b/cave/com.raytheon.uf.viz.core/src/com/raytheon/uf/viz/core/topo/TopoQuery.java @@ -42,6 +42,8 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 12, 2009 mschenke Initial creation + * Jan 14, 2013 1469 bkowal The hdf5 root directory is no longer passed + * as an argument to the common TopoQuery constructor. * * * @@ -59,8 +61,7 @@ public class TopoQuery implements ITopoQuery { * @return Initialized TopoQuery instance */ public static synchronized ITopoQuery getInstance() { - return com.raytheon.uf.edex.topo.TopoQuery.getInstance( - VizApp.getServerDataDir(), 0); + return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0); } /** @@ -68,8 +69,7 @@ public class TopoQuery implements ITopoQuery { */ public static synchronized ITopoQuery getInstance(int topoLevel, boolean useCaching) { - return com.raytheon.uf.edex.topo.TopoQuery.getInstance( - VizApp.getServerDataDir(), 0); + return com.raytheon.uf.edex.topo.TopoQuery.getInstance(0); } private TopoQuery(int level, boolean useCaching) { diff --git a/cave/com.raytheon.uf.viz.thinclient.cave/src/com/raytheon/uf/viz/thinclient/cave/preferences/ThinClientServerPreferences.java b/cave/com.raytheon.uf.viz.thinclient.cave/src/com/raytheon/uf/viz/thinclient/cave/preferences/ThinClientServerPreferences.java index 956bceeb48..5aec4e3dba 100644 --- a/cave/com.raytheon.uf.viz.thinclient.cave/src/com/raytheon/uf/viz/thinclient/cave/preferences/ThinClientServerPreferences.java +++ b/cave/com.raytheon.uf.viz.thinclient.cave/src/com/raytheon/uf/viz/thinclient/cave/preferences/ThinClientServerPreferences.java @@ -48,6 +48,7 @@ import com.raytheon.uf.viz.thinclient.preferences.ThinClientPreferenceConstants; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 8, 2011 mschenke Initial creation + * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer a preference. 
* * * @@ -62,8 +63,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage { private StringFieldEditor servicesServer; - private StringFieldEditor serverDataDir; - private Button connectivityButton; /** @@ -104,12 +103,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage { pypiesServer.setErrorMessage("Cannot connect to Pypies server"); addField(pypiesServer); - serverDataDir = new StringFieldEditor( - ThinClientPreferenceConstants.P_SERVER_DATA_DIR, - "&Server Data Dir: ", getFieldEditorParent()); - - addField(serverDataDir); - addConnectivityButton(); } @@ -198,7 +191,6 @@ public class ThinClientServerPreferences extends FieldEditorPreferencePage { boolean useProxies = this.useProxies.getBooleanValue(); servicesServer.setEnabled(useProxies, connectivityButton.getParent()); pypiesServer.setEnabled(useProxies, connectivityButton.getParent()); - serverDataDir.setEnabled(useProxies, connectivityButton.getParent()); connectivityButton.setEnabled(useProxies); } diff --git a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/localization/ThinClientLocalizationInitializer.java b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/localization/ThinClientLocalizationInitializer.java index a7a11b6be9..e7c4116e77 100644 --- a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/localization/ThinClientLocalizationInitializer.java +++ b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/localization/ThinClientLocalizationInitializer.java @@ -49,6 +49,7 @@ import com.raytheon.uf.viz.thinclient.ui.ThinClientConnectivityDialog; * ------------ ---------- ----------- -------------------------- * Nov 23, 2011 bsteffen Initial creation * Dec 06, 2012 1396 njensen Added setting VizServers + * Jan 14, 2013 1469 bkowal Removed setting the hdf5 data directory * * * @@ -86,17 +87,10 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer { String servicesProxy = store .getString(ThinClientPreferenceConstants.P_SERVICES_PROXY); LocalizationManager.getInstance().setCurrentServer(servicesProxy); - String dataDir = VizApp.getServerDataDir(); - if (dataDir == null || dataDir.isEmpty()) { - dataDir = store - .getString(ThinClientPreferenceConstants.P_SERVER_DATA_DIR); - VizApp.setServerDataDir(dataDir); - } - if (!disableJMS || dataDir == null || dataDir.isEmpty()) { + if (!disableJMS) { GetServersRequest req = new GetServersRequest(); GetServersResponse resp = (GetServersResponse) ThriftClient .sendLocalizationRequest(req); - VizApp.setServerDataDir(resp.getServerDataDir()); if (!disableJMS) { VizApp.setJmsServer(resp.getJmsServer()); } @@ -117,8 +111,5 @@ public class ThinClientLocalizationInitializer extends LocalizationInitializer { VizApp.setJmsServer(null); } } - store.setValue(ThinClientPreferenceConstants.P_SERVER_DATA_DIR, - VizApp.getServerDataDir()); - } } diff --git a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/preferences/ThinClientPreferenceConstants.java b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/preferences/ThinClientPreferenceConstants.java index bb9e2af67f..cd79aefbcb 100644 --- a/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/preferences/ThinClientPreferenceConstants.java +++ b/cave/com.raytheon.uf.viz.thinclient/src/com/raytheon/uf/viz/thinclient/preferences/ThinClientPreferenceConstants.java @@ -29,6 +29,7 @@ package com.raytheon.uf.viz.thinclient.preferences; * Date 
Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Oct 20, 2011 mschenke Initial creation + * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer a preference constant. * * * @@ -50,8 +51,6 @@ public class ThinClientPreferenceConstants { public static String P_PYPIES_PROXY = "pypiesProxyAddress"; - public static String P_SERVER_DATA_DIR = "serverDataDir"; - public static String P_MENU_TIME_UPDATE_INTERVALS = "menuTimeUpdateInterval"; public static String P_DATA_UPDATE_INTERVALS = "dataUpdateInterval"; diff --git a/cave/com.raytheon.viz.core/src/com/raytheon/viz/core/topo/TopoTileSet.java b/cave/com.raytheon.viz.core/src/com/raytheon/viz/core/topo/TopoTileSet.java index 9b067b418b..3bdb6af835 100644 --- a/cave/com.raytheon.viz.core/src/com/raytheon/viz/core/topo/TopoTileSet.java +++ b/cave/com.raytheon.viz.core/src/com/raytheon/viz/core/topo/TopoTileSet.java @@ -54,6 +54,8 @@ import com.raytheon.viz.core.rsc.hdf5.FileBasedTileSet; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Feb 15, 2007 chammack Initial Creation. + * Jan 14, 2013 1469 bkowal The hdf5 data directory is no longer included in the + * DATA_FILE * * * @@ -65,7 +67,7 @@ public class TopoTileSet extends FileBasedTileSet { private static String DATA_FILE = "/topo/srtm30.hdf"; static { - DATA_FILE = new File(VizApp.getServerDataDir(), DATA_FILE) + DATA_FILE = new File(DATA_FILE) .getAbsolutePath(); } diff --git a/edexOsgi/build.edex/esb/conf/res/base/environment.xml b/edexOsgi/build.edex/esb/conf/res/base/environment.xml index 38058b4d1e..db72e6e88b 100644 --- a/edexOsgi/build.edex/esb/conf/res/base/environment.xml +++ b/edexOsgi/build.edex/esb/conf/res/base/environment.xml @@ -38,6 +38,12 @@ edex.AdapterSrv ../conf/res ${env:edex.home}/conf/db/commonScripts/ + ${env:edex.home}/data/hdf5 ${env:edex.home}/data/share ${env:edex.home}/data/utility diff --git a/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/dao/HDF5Dao.java b/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/dao/HDF5Dao.java index 42c1bf31ba..fcc6337efc 100644 --- a/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/dao/HDF5Dao.java +++ b/edexOsgi/com.raytheon.edex.common/src/com/raytheon/edex/db/dao/HDF5Dao.java @@ -36,8 +36,6 @@ import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.datastorage.Request; import com.raytheon.uf.common.datastorage.StorageException; import com.raytheon.uf.common.datastorage.records.IDataRecord; -import com.raytheon.uf.edex.core.props.EnvProperties; -import com.raytheon.uf.edex.core.props.PropertiesFactory; /** * Data access object for saving and retrieving data from the HDF5 repository. @@ -51,6 +49,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory; * 7/24/07 353 bphillip Initial Check in * 20070914 379 jkorman Changed to use IPersistable populateDataStore * and getPersistenceTime methods. + * 01/14/13 1469 bkowal No longer retrieves the hdf5 data directory from the + * environment. 
* * * @author bphillip @@ -58,27 +58,18 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory; */ public class HDF5Dao { - public static final String HDF5DIR = "HDF5DIR"; - public static final IHDFFilePathProvider DEFAULT_PATH_PROVIDER = DefaultPathProvider .getInstance(); /** The logger */ protected Log logger = LogFactory.getLog(getClass()); - private String hdf5Dir = null; - private IHDFFilePathProvider pathProvider = null; /** * Construct an instance of the HDFDao using system default values. */ public HDF5Dao() { - EnvProperties properties = PropertiesFactory.getInstance() - .getEnvProperties(); - if (properties != null) { - hdf5Dir = properties.getEnvValue(HDF5DIR); - } pathProvider = DEFAULT_PATH_PROVIDER; } @@ -86,32 +77,12 @@ public class HDF5Dao { * Construct an instance of the HDFDao using user supplied properties and * path provider. * - * @param properties - * An environment properties instance that must contain an entry - * from the property HDF5DIR. - */ - public HDF5Dao(EnvProperties properties) { - if (properties != null) { - hdf5Dir = properties.getEnvValue(HDF5DIR); - } - pathProvider = DEFAULT_PATH_PROVIDER; - } - - /** - * Construct an instance of the HDFDao using user supplied properties and - * path provider. - * - * @param properties - * An environment properties instance that must contain an entry - * from the property HDF5DIR. * @param pathProvider * The path provider to use that creates a path to a specific HDF * repository. This path provider must not provide the name of - * the repository, and the path must be relative to the base - * directory given in the properties HDF5DIR property. + * the repository. */ - public HDF5Dao(EnvProperties properties, IHDFFilePathProvider pathProvider) { - this(properties); + public HDF5Dao(IHDFFilePathProvider pathProvider) { this.pathProvider = pathProvider; } @@ -130,9 +101,8 @@ public class HDF5Dao { if (obj instanceof IPersistable) { IPersistable persistable = (IPersistable) obj; - String persistDir = hdf5Dir - + pathProvider.getHDFPath(obj.getPluginName(), persistable) - + File.separator; + String persistDir = pathProvider.getHDFPath(obj.getPluginName(), + persistable) + File.separator; String archive = pathProvider.getHDFFileName(obj.getPluginName(), persistable); @@ -166,9 +136,8 @@ public class HDF5Dao { if (obj instanceof PersistablePluginDataObject) { IPersistable pRecord = (IPersistable) obj; - String persistDir = hdf5Dir - + pathProvider.getHDFPath(obj.getPluginName(), pRecord) - + File.separator; + String persistDir = pathProvider.getHDFPath(obj.getPluginName(), + pRecord) + File.separator; String archive = pathProvider.getHDFFileName(obj.getPluginName(), pRecord); @@ -199,19 +168,4 @@ public class HDF5Dao { public void setPathProvider(IHDFFilePathProvider pathProvider) { this.pathProvider = pathProvider; } - - /** - * @return the hdf5Dir - */ - public String getHdf5Dir() { - return hdf5Dir; - } - - /** - * @param hdf5Dir - * the hdf5Dir to set - */ - public void setHdf5Dir(String hdf5Dir) { - this.hdf5Dir = hdf5Dir; - } } diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java index 947f01e1bf..269851fe8f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/src/com/raytheon/edex/plugin/gfe/server/database/GridDatabase.java @@ -49,8 
+49,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.time.TimeRange; -import com.raytheon.uf.edex.core.props.EnvProperties; -import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.database.plugin.PluginFactory; /** @@ -73,6 +71,8 @@ import com.raytheon.uf.edex.database.plugin.PluginFactory; * 06/17/08 #940 bphillip Implemented GFE Locking * 06/19/08 njensen Added retrieval of discrete * 05/04/12 #574 dgilling Update class to better match AWIPS1. + * 01/14/13 #1469 bkowal The hdf5 data directory is no longer included + * in the gfeBaseDataDir. * * * @@ -95,9 +95,7 @@ public abstract class GridDatabase { protected boolean valid; static { - EnvProperties env = PropertiesFactory.getInstance().getEnvProperties(); - gfeBaseDataDir = env.getEnvValue("HDF5DIR") + File.separator + "gfe" - + File.separator; + gfeBaseDataDir = "gfe" + File.separator; } /** diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java b/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java index ca47b43192..fbebd4b33f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java +++ b/edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/dao/GribDao.java @@ -81,6 +81,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery; * call updateCaches(). * 11/05/12 #1310 dgilling Remove code from updateCatches() * that sent notification to D2DParmIdCache. + * 01/14/13 #1469 bkowal Removed the hdf5 data directory * * * @@ -417,8 +418,7 @@ public class GribDao extends PluginDao { persistable.setHdfFileId(EDEXUtil.getServerId()); // get the directory - String directory = HDF5_DIR + File.separator + pdo.getPluginName() - + File.separator + String directory = pdo.getPluginName() + File.separator + pathProvider.getHDFPath(pdo.getPluginName(), pdo); File dataStoreFile = new File(directory + File.separator + pathProvider.getHDFFileName(pdo.getPluginName(), persistable)); diff --git a/edexOsgi/com.raytheon.edex.utilitysrv/src/com/raytheon/edex/services/GetServersHandler.java b/edexOsgi/com.raytheon.edex.utilitysrv/src/com/raytheon/edex/services/GetServersHandler.java index fe95dbbd70..7d9be2b7d2 100644 --- a/edexOsgi/com.raytheon.edex.utilitysrv/src/com/raytheon/edex/services/GetServersHandler.java +++ b/edexOsgi/com.raytheon.edex.utilitysrv/src/com/raytheon/edex/services/GetServersHandler.java @@ -41,6 +41,8 @@ import com.raytheon.uf.edex.core.props.PropertiesFactory; * ------------ ---------- ----------- -------------------------- * Aug 6, 2009 mschenke Initial creation * Sep 12, 2012 1167 djohnson Add datadelivery servers. + * Jan 14, 2013 1469 bkowal No longer includes the hdf5 data directory + * in the response. 
* * * @@ -65,14 +67,10 @@ public class GetServersHandler extends GenericRegistry logger.info("jms.server=" + jmsServer); logger.info("pypies.server=" + pypiesServer); logger.info("server locations=" + registry); - - String hdf5DataDir = PropertiesFactory.getInstance().getEnvProperties() - .getEnvValue("HDF5DIR"); - + ; response.setHttpServer(httpServer); response.setJmsServer(jmsServer); response.setPypiesServer(pypiesServer); - response.setServerDataDir(hdf5DataDir); response.setServerLocations(Collections.unmodifiableMap(this.registry)); return response; diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/datastorage/GridDataRetriever.java b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/datastorage/GridDataRetriever.java index cd99edc7ef..b403dcfab3 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/datastorage/GridDataRetriever.java +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/src/com/raytheon/uf/common/dataplugin/grid/datastorage/GridDataRetriever.java @@ -49,10 +49,7 @@ import com.raytheon.uf.common.gridcoverage.PolarStereoGridCoverage; import com.raytheon.uf.common.gridcoverage.exception.GridCoverageException; import com.raytheon.uf.common.gridcoverage.subgrid.SubGrid; import com.raytheon.uf.common.localization.IPathManager; -import com.raytheon.uf.common.localization.msgs.GetServersRequest; -import com.raytheon.uf.common.localization.msgs.GetServersResponse; import com.raytheon.uf.common.parameter.lookup.ParameterLookup; -import com.raytheon.uf.common.serialization.comm.RequestRouter; import com.vividsolutions.jts.geom.Coordinate; /** @@ -66,6 +63,8 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 14, 2012 bsteffen Initial creation + * Jan 14, 2013 1469 bkowal No longer needs to retrieve the location + * of the hdf5 data directory. * * * @@ -75,8 +74,6 @@ import com.vividsolutions.jts.geom.Coordinate; public class GridDataRetriever { - protected static String serverDataDir; - protected GridRecord record; protected GridCoverage requestCoverage; @@ -331,28 +328,7 @@ public class GridDataRetriever { String fileName = pathProvider.getHDFFileName(record.getPluginName(), record); - return new File(getServerDataDir() + IPathManager.SEPARATOR - + record.getPluginName() + IPathManager.SEPARATOR + path + return new File(record.getPluginName() + IPathManager.SEPARATOR + path + IPathManager.SEPARATOR + fileName); } - - private static synchronized String getServerDataDir() - throws StorageException { - if (serverDataDir == null) { - // TODO cave already knows the server data dir in VizApp, and edex - // has it in system properties but we can't access either because - // this is common code, architecturally we need some way around - // this. For now this will send it's own request which is slightly - // wasteful but not terribly harmful. 
- try { - GetServersResponse response = (GetServersResponse) RequestRouter - .route(new GetServersRequest()); - serverDataDir = response.getServerDataDir(); - } catch (Exception e) { - throw new StorageException("Error communicating with server.", - null, e); - } - } - return serverDataDir; - } } diff --git a/edexOsgi/com.raytheon.uf.common.localization/src/com/raytheon/uf/common/localization/msgs/GetServersResponse.java b/edexOsgi/com.raytheon.uf.common.localization/src/com/raytheon/uf/common/localization/msgs/GetServersResponse.java index 2c0a0569ba..e78f3542cd 100644 --- a/edexOsgi/com.raytheon.uf.common.localization/src/com/raytheon/uf/common/localization/msgs/GetServersResponse.java +++ b/edexOsgi/com.raytheon.uf.common.localization/src/com/raytheon/uf/common/localization/msgs/GetServersResponse.java @@ -36,6 +36,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * ------------ ---------- ----------- -------------------------- * Aug 6, 2009 mschenke Initial creation * Sep 12, 2012 1167 djohnson Add datadelivery servers. + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory * * * @@ -54,9 +55,6 @@ public class GetServersResponse implements ISerializableObject { @DynamicSerializeElement private String pypiesServer; - @DynamicSerializeElement - private String serverDataDir; - @DynamicSerializeElement private Map serverLocations; @@ -84,14 +82,6 @@ public class GetServersResponse implements ISerializableObject { this.pypiesServer = pypiesServer; } - public String getServerDataDir() { - return serverDataDir; - } - - public void setServerDataDir(String serverDataDir) { - this.serverDataDir = serverDataDir; - } - /** * @return */ diff --git a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java index f85d98a7e5..586a425c23 100644 --- a/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java +++ b/edexOsgi/com.raytheon.uf.edex.database/src/com/raytheon/uf/edex/database/plugin/PluginDao.java @@ -103,6 +103,8 @@ import com.vividsolutions.jts.geom.Polygon; * 6/29/12 #828 dgilling Force getPurgeRulesForPlugin() * to search only COMMON_STATIC. * Oct 10, 2012 1261 djohnson Add some generics wildcarding. + * Jan 14, 2013 1469 bkowal No longer retrieves the hdf5 data directory + * from the environment. 
* * * @author bphillip @@ -119,10 +121,6 @@ public abstract class PluginDao extends CoreDao { /** The hdf5 file system suffix */ public static final String HDF5_SUFFIX = ".h5"; - /** The base path of the hdf5 data store */ - public static final String HDF5_DIR = PropertiesFactory.getInstance() - .getEnvProperties().getEnvValue("HDF5DIR"); - /** The base path of the folder containing HDF5 data for the owning plugin */ public final String PLUGIN_HDF5_DIR; @@ -156,8 +154,7 @@ public abstract class PluginDao extends CoreDao { } this.pluginName = pluginName; - PLUGIN_HDF5_DIR = HDF5_DIR + File.separator + pluginName - + File.separator; + PLUGIN_HDF5_DIR = pluginName + File.separator; dupCheckSql = dupCheckSql.replace(":tableName", PluginFactory .getInstance().getPrimaryTable(pluginName)); pathProvider = PluginFactory.getInstance().getPathProvider(pluginName); @@ -227,9 +224,7 @@ public abstract class PluginDao extends CoreDao { IPersistable persistable = (IPersistable) pdo; // get the directory - String directory = HDF5_DIR - + File.separator - + pdo.getPluginName() + String directory = pdo.getPluginName() + File.separator + pathProvider.getHDFPath(pdo.getPluginName(), persistable); diff --git a/edexOsgi/com.raytheon.uf.edex.grid.staticdata/src/com/raytheon/uf/edex/grid/staticdata/topo/StaticTopoData.java b/edexOsgi/com.raytheon.uf.edex.grid.staticdata/src/com/raytheon/uf/edex/grid/staticdata/topo/StaticTopoData.java index 33e3a3bd66..0ca361e803 100644 --- a/edexOsgi/com.raytheon.uf.edex.grid.staticdata/src/com/raytheon/uf/edex/grid/staticdata/topo/StaticTopoData.java +++ b/edexOsgi/com.raytheon.uf.edex.grid.staticdata/src/com/raytheon/uf/edex/grid/staticdata/topo/StaticTopoData.java @@ -66,7 +66,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.common.util.RunProcess; -import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.database.cluster.ClusterLockUtils; import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState; import com.raytheon.uf.edex.database.cluster.ClusterTask; @@ -84,6 +83,7 @@ import com.vividsolutions.jts.geom.Coordinate; * 09/19/2011 10955 rferrel Use RunProcess * 04/18/2012 DR 14694 D. Friedman Fixes for static topography generation * 05/09/2012 DR 14939 D. 
Friedman Fix errors in DR 14694 + * 01/14/2013 1469 bkowal Removed the hdf5 data directory * * * @@ -116,9 +116,7 @@ public class StaticTopoData { private static final String DAT_GZ_SUFFIX = ".dat.gz"; /** The base directory in which the topo files reside */ - private static final String FILE_PREFIX = PropertiesFactory.getInstance() - .getEnvProperties().getEnvValue("HDF5DIR") - + "/topo/"; + private static final String FILE_PREFIX = "topo/"; /** The file containing the complete static topo data sets */ private static final File topoFile = new File(FILE_PREFIX + "staticTopo.h5"); @@ -538,8 +536,7 @@ public class StaticTopoData { for (TiledTopoSource source : topoSources) { statusHandler.handle(Priority.INFO, "Extracting topo data from " - + - source.getDataset()); + + source.getDataset()); GridReprojection reprojection = new GridReprojection( source.getGridGeometry(), inGeom); GridSampler sampler = new GridSampler(source, interp); diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/DataStoreRepacker.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/DataStoreRepacker.java index d3b1f9a83d..d6d20b8648 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/DataStoreRepacker.java +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/DataStoreRepacker.java @@ -14,8 +14,6 @@ import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; import com.raytheon.uf.edex.core.dataplugin.PluginRegistry; -import com.raytheon.uf.edex.core.props.EnvProperties; -import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; /** @@ -48,6 +46,7 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 1, 2011 njensen Initial creation + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory * * * @@ -62,14 +61,9 @@ public class DataStoreRepacker { private List pluginsToRepack; - private String hdf5Dir; - private Compression compression = Compression.NONE; public DataStoreRepacker(String compression) { - EnvProperties properties = PropertiesFactory.getInstance() - .getEnvProperties(); - hdf5Dir = properties.getEnvValue("HDF5DIR"); this.compression = Compression.valueOf(compression); } @@ -81,8 +75,7 @@ public class DataStoreRepacker { // TODO change log statement if more than pointdata is hooked into this statusHandler.info("Starting repack of pointdata datastore"); for (String plugin : pluginsToRepack) { - String dir = hdf5Dir + File.separator + plugin; - IDataStore ds = DataStoreFactory.getDataStore(new File(dir)); + IDataStore ds = DataStoreFactory.getDataStore(new File(plugin)); try { ds.repack(compression); } catch (StorageException e) { diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java index b55f2a9deb..d386da9309 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DataStoreArchiver.java @@ -28,8 +28,6 @@ import 
com.raytheon.uf.common.datastorage.StorageProperties.Compression; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.status.UFStatus.Priority; -import com.raytheon.uf.edex.core.props.EnvProperties; -import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; /** @@ -44,6 +42,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Dec 8, 2011 njensen Initial creation + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -56,14 +55,9 @@ public class DataStoreArchiver { private static final transient IUFStatusHandler statusHandler = UFStatus .getHandler(DataStoreArchiver.class); - private String hdf5Dir; - private Compression compression = Compression.NONE; public DataStoreArchiver(String compression) { - EnvProperties properties = PropertiesFactory.getInstance() - .getEnvProperties(); - hdf5Dir = properties.getEnvValue("HDF5DIR"); this.compression = Compression.valueOf(compression); } diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java index eab3c16fff..ce0adc23f0 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/src/com/raytheon/uf/edex/maintenance/archive/DatabaseArchiver.java @@ -73,6 +73,7 @@ import com.raytheon.uf.edex.maintenance.archive.config.DataArchiveConfig; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Nov 17, 2011 rjpeter Initial creation + * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -228,8 +229,7 @@ public class DatabaseArchiver implements IPluginArchiver { for (String dataStoreFile : datastoreFilesToArchive) { IDataStore ds = DataStoreFactory.getDataStore(new File( - FileUtil.join(PluginDao.HDF5_DIR, pluginName, - dataStoreFile))); + FileUtil.join(pluginName, dataStoreFile))); int pathSep = dataStoreFile.lastIndexOf(File.separatorChar); String outputDir = (pathSep > 0 ? 
FileUtil.join( archivePath, pluginName, diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java index 98653ac91e..5ee4cd0bbf 100644 --- a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java +++ b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java @@ -38,8 +38,6 @@ import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.datastorage.StorageException; import com.raytheon.uf.common.hydro.spatial.HRAP; import com.raytheon.uf.edex.core.EdexException; -import com.raytheon.uf.edex.core.props.EnvProperties; -import com.raytheon.uf.edex.core.props.PropertiesFactory; import com.raytheon.uf.edex.database.dao.CoreDao; import com.raytheon.uf.edex.database.dao.DaoConfig; import com.vividsolutions.jts.geom.Coordinate; @@ -54,6 +52,7 @@ import com.vividsolutions.jts.geom.Coordinate; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jan 06, 2011 5951 jnjanga Initial creation + * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory. * * * @@ -112,17 +111,12 @@ public class MpeLightningSrv { // set up a lightning record BinLightningRecord ltngRec = new BinLightningRecord(dataURI); - EnvProperties properties = PropertiesFactory.getInstance() - .getEnvProperties(); - ; - String hdf5Dir = properties.getEnvValue("HDF5DIR"); - // create custom path provider for binlightning repository BinLightningPathProvider pathProvider = BinLightningPathProvider .getInstance(); // obtain the hdf5 filename - String persistDir = hdf5Dir + pathProvider.getHDFPath(ltngRec) + String persistDir = pathProvider.getHDFPath(ltngRec) + File.separator; String archive = pathProvider.getHDFFileName( ltngRec.getPluginName(), ltngRec); diff --git a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java index bea9b272b3..97cbf327f2 100644 --- a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java +++ b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java @@ -78,6 +78,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao; * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Apr 13, 2009 chammack Initial creation + * Jan 14, 2013 1469 bkowal Removed the hdf5 data directory. 
* * * @@ -446,8 +447,7 @@ public abstract class PointDataPluginDao extends public File getFullFilePath(PluginDataObject p) { File file; - String directory = HDF5_DIR + File.separator + p.getPluginName() - + File.separator + String directory = p.getPluginName() + File.separator + pathProvider.getHDFPath(p.getPluginName(), (IPersistable) p); file = new File(directory + File.separator @@ -708,9 +708,7 @@ public abstract class PointDataPluginDao extends } bm.putAll(obj); T bean = (T) bm.getBean(); - return HDF5_DIR - + File.separator - + this.pluginName + return this.pluginName + File.separator + this.pathProvider.getHDFPath(this.pluginName, (IPersistable) bean) diff --git a/edexOsgi/com.raytheon.uf.edex.topo/src/com/raytheon/uf/edex/topo/TopoQuery.java b/edexOsgi/com.raytheon.uf.edex.topo/src/com/raytheon/uf/edex/topo/TopoQuery.java index 1e891747a3..d74ec59337 100644 --- a/edexOsgi/com.raytheon.uf.edex.topo/src/com/raytheon/uf/edex/topo/TopoQuery.java +++ b/edexOsgi/com.raytheon.uf.edex.topo/src/com/raytheon/uf/edex/topo/TopoQuery.java @@ -87,7 +87,8 @@ import com.vividsolutions.jts.geom.Coordinate; * 11/19/2007 #377 randerso Initial creation * Jun 13, 2008 #1160 randerso Moved to server side * 03/09/2012 DR 14581 D. Friedman Fix grid referencing and use custom - * nearest-neighbor resampling. + * nearest-neighbor resampling.i + * 01/14/2013 #1469 bkowal Removed the hdf5 data directory. * * * @@ -117,19 +118,7 @@ public class TopoQuery implements ITopoQuery { * @return Initialized TopoQuery instance */ public static synchronized TopoQuery getInstance(int topoLevel) { - String hdf5Dir = null; - - EnvProperties properties = PropertiesFactory.getInstance() - .getEnvProperties(); - if (properties != null) { - hdf5Dir = properties.getEnvValue("HDF5DIR"); - } - return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel); - } - - public static synchronized TopoQuery getInstance(String hdf5Dir, - int topoLevel) { - return getInstance(new File(hdf5Dir + TOPO_FILE), topoLevel); + return getInstance(new File(TOPO_FILE), topoLevel); } public static synchronized TopoQuery getInstance(File hdf5File, diff --git a/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/src/gov/noaa/nws/ncep/edex/plugin/ncgrib/dao/NcgribDao.java b/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/src/gov/noaa/nws/ncep/edex/plugin/ncgrib/dao/NcgribDao.java index adedb0bf82..c0eb7fb7cc 100644 --- a/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/src/gov/noaa/nws/ncep/edex/plugin/ncgrib/dao/NcgribDao.java +++ b/ncep/gov.noaa.nws.ncep.edex.plugin.ncgrib/src/gov/noaa/nws/ncep/edex/plugin/ncgrib/dao/NcgribDao.java @@ -69,6 +69,7 @@ import com.raytheon.uf.edex.database.DataAccessLayerException; * ------------ ---------- ----------- -------------------------- * 4/7/09 1994 bphillip Initial Creation * 12/16/10 mli extend NcepDefaultPluginDao to enable purge + * 01/14/13 1469 bkowal Removed the hdf5 data directory. * * * @@ -112,70 +113,70 @@ public class NcgribDao extends NcepDefaultPluginDao { this("ncgrib"); } -// public void purgeExpiredData() { -// QueryResult models = null; -// try { -// models = (QueryResult) executeNativeSql(MODEL_QUERY); -// } catch (DataAccessLayerException e) { -// logger.error("Error purging ncgrib data. 
Unable to get models", e); -// } -// -// String currentModel = null; -// for (int i = 0; i < models.getResultCount(); i++) { -// currentModel = (String) models.getRowColumnValue(i, 0); -// QueryResult refTimes = null; -// try { -// refTimes = (QueryResult) executeNativeSql(REFTIME_QUERY -// .replace("?", currentModel)); -// } catch (DataAccessLayerException e) { -// logger -// .error("Error purging ncgrib data. Unable to get reference times for model [" -// + currentModel + "]"); -// continue; -// } -// -// // FIXME: Add rules for purging here instead of just keeping 2 -// // runs -// List filesKept = new ArrayList(); -// File modelDirectory = new File(PLUGIN_HDF5_DIR + File.separator -// + currentModel); -// -// for (int j = 0; j < refTimes.getResultCount(); j++) { -// Date time = (Date) refTimes.getRowColumnValue(j, 0); -// File hdf5File = new File(modelDirectory.getAbsolutePath() -// + File.separator -// + ((NcgribPathProvider) pathProvider).formatTime(time) -// + ".h5"); -// -// if (j < MODELCOUNT) { -// filesKept.add(hdf5File.getAbsolutePath()); -// continue; -// } -// -// try { -// purgeDb(time, currentModel); -// } catch (DataAccessLayerException e) { -// logger.error("Error purging database for ncgrib model [" -// + currentModel + "]"); -// } -// } -// -// List files = FileUtil.listFiles(modelDirectory, fileFilter, -// false); -// -// for (File file : files) { -// if (!filesKept.contains(file.getAbsolutePath())) { -// if (!file.delete()) { -// logger -// .error("Error purging HDF5 files for ncgrib model [" -// + currentModel + "]"); -// } -// } -// } -// -// } -// -// } + // public void purgeExpiredData() { + // QueryResult models = null; + // try { + // models = (QueryResult) executeNativeSql(MODEL_QUERY); + // } catch (DataAccessLayerException e) { + // logger.error("Error purging ncgrib data. Unable to get models", e); + // } + // + // String currentModel = null; + // for (int i = 0; i < models.getResultCount(); i++) { + // currentModel = (String) models.getRowColumnValue(i, 0); + // QueryResult refTimes = null; + // try { + // refTimes = (QueryResult) executeNativeSql(REFTIME_QUERY + // .replace("?", currentModel)); + // } catch (DataAccessLayerException e) { + // logger + // .error("Error purging ncgrib data. 
Unable to get reference times for model [" + // + currentModel + "]"); + // continue; + // } + // + // // FIXME: Add rules for purging here instead of just keeping 2 + // // runs + // List filesKept = new ArrayList(); + // File modelDirectory = new File(PLUGIN_HDF5_DIR + File.separator + // + currentModel); + // + // for (int j = 0; j < refTimes.getResultCount(); j++) { + // Date time = (Date) refTimes.getRowColumnValue(j, 0); + // File hdf5File = new File(modelDirectory.getAbsolutePath() + // + File.separator + // + ((NcgribPathProvider) pathProvider).formatTime(time) + // + ".h5"); + // + // if (j < MODELCOUNT) { + // filesKept.add(hdf5File.getAbsolutePath()); + // continue; + // } + // + // try { + // purgeDb(time, currentModel); + // } catch (DataAccessLayerException e) { + // logger.error("Error purging database for ncgrib model [" + // + currentModel + "]"); + // } + // } + // + // List files = FileUtil.listFiles(modelDirectory, fileFilter, + // false); + // + // for (File file : files) { + // if (!filesKept.contains(file.getAbsolutePath())) { + // if (!file.delete()) { + // logger + // .error("Error purging HDF5 files for ncgrib model [" + // + currentModel + "]"); + // } + // } + // } + // + // } + // + // } private int purgeDb(final Date date, String modelName) throws DataAccessLayerException { @@ -198,8 +199,11 @@ public class NcgribDao extends NcepDefaultPluginDao { AbstractStorageRecord hybridLevels = null; AbstractStorageRecord thinnedPts = null; - //System.out.println (" good data to be populated, rec datauri=" + gribRec.getDataURI()); - //System.out.println (" good data to be populated, rec messagedata=" + gribRec.getMessageData()); + // System.out.println (" good data to be populated, rec datauri=" + + // gribRec.getDataURI()); + // System.out.println + // (" good data to be populated, rec messagedata=" + + // gribRec.getMessageData()); /* * Stores the binary data to the HDF5 data store @@ -210,9 +214,9 @@ public class NcgribDao extends NcepDefaultPluginDao { long[] sizes = new long[] { (gribRec.getSpatialObject()).getNx(), (gribRec.getSpatialObject()).getNy() }; - storageRecord = new FloatDataRecord("Data", gribRec - .getDataURI(), (float[]) gribRec.getMessageData(), - 2, sizes); + storageRecord = new FloatDataRecord("Data", + gribRec.getDataURI(), + (float[]) gribRec.getMessageData(), 2, sizes); } else throw new Exception( "Cannot create data record, spatialData = " @@ -231,8 +235,8 @@ public class NcgribDao extends NcepDefaultPluginDao { * Stores any data from the local section if present */ if (gribRec.isLocalSectionUsed()) { - localSection = new IntegerDataRecord(LOCAL_SECTION, gribRec - .getDataURI(), gribRec.getLocalSection()); + localSection = new IntegerDataRecord(LOCAL_SECTION, + gribRec.getDataURI(), gribRec.getLocalSection()); localSection.setCorrelationObject(gribRec); dataStore.addDataRecord(localSection); } @@ -241,8 +245,8 @@ public class NcgribDao extends NcepDefaultPluginDao { * Stores any hybrid coordinate data if present */ if (gribRec.isHybridGrid()) { - hybridLevels = new FloatDataRecord(HYBRID_LEVELS, gribRec - .getDataURI(), gribRec.getHybridCoordList()); + hybridLevels = new FloatDataRecord(HYBRID_LEVELS, + gribRec.getDataURI(), gribRec.getHybridCoordList()); hybridLevels.setCorrelationObject(gribRec); dataStore.addDataRecord(hybridLevels); } @@ -251,8 +255,8 @@ public class NcgribDao extends NcepDefaultPluginDao { * Stores any thinned point data for quasi-regular grids if present */ if (gribRec.isThinnedGrid()) { - thinnedPts = new 
IntegerDataRecord(THINNED_PTS, gribRec - .getDataURI(), gribRec.getThinnedPts()); + thinnedPts = new IntegerDataRecord(THINNED_PTS, + gribRec.getDataURI(), gribRec.getThinnedPts()); thinnedPts.setCorrelationObject(gribRec); dataStore.addDataRecord(thinnedPts); } @@ -318,11 +322,11 @@ public class NcgribDao extends NcepDefaultPluginDao { NcgribModel model = rec.getModelInfo(); if (model.getParameterName() == null || model.getParameterName().equals("Missing")) { - //System.out.println (" persist missing or null, rec datauri=" + rec.getDataURI()); - - logger - .info("Discarding record due to missing or unknown parameter mapping: " - + record); + // System.out.println (" persist missing or null, rec datauri=" + // + rec.getDataURI()); + + logger.info("Discarding record due to missing or unknown parameter mapping: " + + record); } else { boolean validLevel = false; Level level = model.getLevel(); @@ -339,9 +343,8 @@ public class NcgribDao extends NcepDefaultPluginDao { if (validLevel) { toPersist.add(rec); } else { - logger - .info("Discarding record due to missing or unknown level mapping: " - + record); + logger.info("Discarding record due to missing or unknown level mapping: " + + record); } } } @@ -367,11 +370,11 @@ public class NcgribDao extends NcepDefaultPluginDao { NcgribModel model = rec.getModelInfo(); if (model.getParameterName() == null || model.getParameterName().equals("Missing")) { - //System.out.println (" verify missing or null, rec datauri=" + rec.getDataURI()); + // System.out.println (" verify missing or null, rec datauri=" + + // rec.getDataURI()); - logger - .info("Discarding record due to missing or unknown parameter mapping: " - + record); + logger.info("Discarding record due to missing or unknown parameter mapping: " + + record); } else { boolean validLevel = false; Level level = model.getLevel(); @@ -388,15 +391,14 @@ public class NcgribDao extends NcepDefaultPluginDao { if (validLevel) { toPersist.add(rec); } else { - logger - .info("Discarding record due to missing or unknown level mapping: " - + record); + logger.info("Discarding record due to missing or unknown level mapping: " + + record); } } } return toPersist.toArray(new NcgribRecord[0]); } - + public List replaceRecord(NcgribRecord pdo) throws PluginException { List exceptions = new ArrayList(); @@ -404,8 +406,7 @@ public class NcgribDao extends NcepDefaultPluginDao { persistable.setHdfFileId(EDEXUtil.getServerId()); // get the directory - String directory = HDF5_DIR + File.separator + pdo.getPluginName() - + File.separator + String directory = pdo.getPluginName() + File.separator + pathProvider.getHDFPath(this.pluginName, persistable); File dataStoreFile = new File(directory + File.separator + pathProvider.getHDFFileName(pdo.getPluginName(), persistable)); @@ -429,5 +430,5 @@ public class NcgribDao extends NcepDefaultPluginDao { } return exceptions; } - + } diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/Dgdriv.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/Dgdriv.java index 4f49c93c98..253a5d27ab 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/Dgdriv.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/Dgdriv.java @@ -1543,8 +1543,7 @@ public class Dgdriv { // file = new File(File.separator + dataURI.split("/")[1] // + File.separator + path + File.separator + sb.toString()); //} else if (DataMode.getSystemMode() == DataMode.PYPIES) { - 
file = new File(VizApp.getServerDataDir() + File.separator - + dataURI.split("/")[1] + File.separator + path + file = new File(dataURI.split("/")[1] + File.separator + path + File.separator + sb.toString()); //} else { // file = new File(VizApp.getDataDir() + File.separator diff --git a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/TestDgdriv.java b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/TestDgdriv.java index 652df71349..324078cd6d 100644 --- a/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/TestDgdriv.java +++ b/ncep/gov.noaa.nws.ncep.viz.rsc.ncgrid/src/gov/noaa/nws/ncep/viz/rsc/ncgrid/dgdriv/TestDgdriv.java @@ -1058,8 +1058,7 @@ public class TestDgdriv { // file = new File(File.separator + dataURI.split("/")[1] // + File.separator + path + File.separator + sb.toString()); //} else if (DataMode.getSystemMode() == DataMode.PYPIES) { - file = new File(VizApp.getServerDataDir() + File.separator - + dataURI.split("/")[1] + File.separator + path + file = new File(dataURI.split("/")[1] + File.separator + path + File.separator + sb.toString()); //} else { // file = new File(VizApp.getDataDir() + File.separator diff --git a/pythonPackages/pypies/pypies.cfg b/pythonPackages/pypies/pypies.cfg index 4c181a1df5..1642434851 100644 --- a/pythonPackages/pypies/pypies.cfg +++ b/pythonPackages/pypies/pypies.cfg @@ -32,7 +32,8 @@ # # - +[edex_data] +hdf5dir=/awips2/edex/data/hdf5 [loggers] keys=root,minutes,hours diff --git a/pythonPackages/pypies/pypies/config/__init__.py b/pythonPackages/pypies/pypies/config/__init__.py new file mode 100644 index 0000000000..7260b51ddf --- /dev/null +++ b/pythonPackages/pypies/pypies/config/__init__.py @@ -0,0 +1,33 @@ +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + + +# +# __init__.py for hdf5 implementation +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/10/13 bkowal Initial Creation. +# +# +# \ No newline at end of file diff --git a/pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py b/pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py new file mode 100644 index 0000000000..6f6e79b71e --- /dev/null +++ b/pythonPackages/pypies/pypies/config/pypiesConfigurationManager.py @@ -0,0 +1,68 @@ +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + + +# +# Configuration for pypies logging +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/10/13 bkowal Initial Creation. +# +# +# + +import os, ConfigParser + +class PypiesConfigurationManager: + + def __init__(self): + self.__configLoaded = False + + self.__initConfigLocation() + if (not self.__configLoc): + raise RuntimeError("No pypies.cfg found") + + self.__loadConfig() + + def __initConfigLocation(self): + self.__configLoc = '/awips2/pypies/conf/pypies.cfg' + if not os.path.exists(self.__configLoc): + print "Unable to find pypies.cfg at ", self.__configLoc + self.__configLoc = None + else: + print "Found pypies.cfg at ", self.__configLoc + + def __loadConfig(self): + self.__scp = ConfigParser.SafeConfigParser() + self.__scp.read(self.__configLoc) + self.__configLoaded = True + + def getConfigurationLocation(self): + return self.__configLoc + + def hasConfigurationBeenLoaded(self): + return self.__configLoaded + + def getConfiguration(self): + return self.__scp diff --git a/pythonPackages/pypies/pypies/handlers.py b/pythonPackages/pypies/pypies/handlers.py index 6acba0b470..c74b28b7d5 100644 --- a/pythonPackages/pypies/pypies/handlers.py +++ b/pythonPackages/pypies/pypies/handlers.py @@ -29,7 +29,7 @@ # Date Ticket# Engineer Description # ------------ ---------- ----------- -------------------------- # 08/17/10 njensen Initial Creation. -# +# 01/11/13 bkowal Pypies will now read the hdf5 root from configuration # # @@ -37,12 +37,14 @@ from werkzeug import Request, Response, ClosingIterator import time, logging, os import pypies from pypies import IDataStore +import pypies.config.pypiesConfigurationManager import dynamicserialize from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import * from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import * logger = pypies.logger timeMap = pypies.timeMap +hdf5Dir = None from pypies.impl import H5pyDataStore datastore = H5pyDataStore.H5pyDataStore() @@ -58,7 +60,27 @@ datastoreMap = { CreateDatasetRequest: (datastore.createDataset, "CreateDatasetRequest"), RepackRequest: (datastore.repack, "RepackRequest"), CopyRequest: (datastore.copy, "CopyRequest") -} +} + +pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager() +if (pypiesConfigurationManager.hasConfigurationBeenLoaded()): + configLocation = pypiesConfigurationManager.getConfigurationLocation() + infoMessage = 'using ' + configLocation + ' for pypies config' + logger.info(infoMessage) + + # determine the edex hdf5 root + scp = pypiesConfigurationManager.getConfiguration() + hdf5Dir = scp.get('edex_data', 'hdf5dir') + # add a trailing directory separator (when necessary) + if (not hdf5Dir.endswith('/')): + hdf5Dir = hdf5Dir + '/' + + if not os.path.exists(hdf5Dir): + os.makedirs(hdf5Dir) + infoMessage = 'using hdf5 directory: ' + hdf5Dir + logger.info(infoMessage) + +# TODO: error and halt when configuration cannot be loaded @Request.application def pypies_response(request): @@ -74,6 +96,9 @@ def pypies_response(request): resp.setError(msg) return __prepareResponse(resp) timeMap['deserialize']=time.time()-startTime + # add the hdf5 directory path to the file name + filename = 
hdf5Dir + obj.getFilename() + obj.setFilename(filename) clz = obj.__class__ if logger.isEnabledFor(logging.DEBUG): diff --git a/pythonPackages/pypies/pypies/logging/logConfig.py b/pythonPackages/pypies/pypies/logging/logConfig.py index 95cbd801bd..1c65f15a18 100644 --- a/pythonPackages/pypies/pypies/logging/logConfig.py +++ b/pythonPackages/pypies/pypies/logging/logConfig.py @@ -35,13 +35,15 @@ import logging, os, ConfigParser import logging.handlers, logging.config +import pypies.config.pypiesConfigurationManager class LogConfig: def __init__(self): - cfgLoc = self.__getConfigLocation() - if cfgLoc: - scp = self.__loadConfig(cfgLoc) + pypiesConfigurationManager = pypies.config.pypiesConfigurationManager.PypiesConfigurationManager() + + if pypiesConfigurationManager.hasConfigurationBeenLoaded(): + self.__configure(pypiesConfigurationManager) self.pypiesLogger = logging.getLogger('root') self.minutesLogger = logging.getLogger('minute') self.hoursLogger = logging.getLogger('hourly') @@ -49,27 +51,15 @@ class LogConfig: self.pypiesLogger = self.__getDefaultLogger() self.minutesLogger = self.pypiesLogger self.hoursLogger = self.pypiesLogger - - def __getConfigLocation(self): - configLoc = '/awips2/pypies/conf/pypies.cfg' - if not os.path.exists(configLoc): - print "Unable to find pypies.cfg at ", configLoc - configLoc = None - else: - print "Found pypies.cfg at ", configLoc - return configLoc - def __loadConfig(self, configLoc): - scp = ConfigParser.SafeConfigParser() - if not configLoc: - raise RuntimeError("No pypies.cfg found") - else: - print "using", configLoc, "for logging config" - scp.read(configLoc) + def __configure(self, configurationManager): + scp = configurationManager.getConfiguration() + print "using", configurationManager.getConfigurationLocation(), "for logging config" + logFileDir = scp.get('handler_pypiesHandler', 'logFileDir') if not os.path.exists(logFileDir): os.makedirs(logFileDir) - logging.config.fileConfig(configLoc) + logging.config.fileConfig(configurationManager.getConfigurationLocation()) def __getDefaultLogger(self): import logging, logging.handlers
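
Illustrative sketch only, not part of the patch above: it shows how a pypies component could resolve a client-supplied relative path against the configured HDF5 root, mirroring the handlers.py change and the [edex_data]/hdf5dir entry added to pypies.cfg in this commit. The config location, section, and option names are taken from the patch; the helper function name is hypothetical.

    # Standalone sketch (Python 2, matching the pypies codebase) of resolving a
    # relative HDF5 path against the root configured in pypies.cfg.
    import os
    import ConfigParser

    def resolveHdf5Path(relativePath, configLoc='/awips2/pypies/conf/pypies.cfg'):
        # hypothetical helper; follows the same lookup the patched handlers.py performs
        scp = ConfigParser.SafeConfigParser()
        if not scp.read(configLoc):
            raise RuntimeError("No pypies.cfg found at " + configLoc)
        hdf5Dir = scp.get('edex_data', 'hdf5dir')
        # ensure a trailing separator, as handlers.py does, then join the relative path
        if not hdf5Dir.endswith('/'):
            hdf5Dir = hdf5Dir + '/'
        return hdf5Dir + relativePath

    # example usage (path is illustrative):
    # resolveHdf5Path('grid/GFS212/GFS212-2013-01-14-12-FH-000.h5')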