Issue #1608: Fixed group deletes in H5pyDataStore.py

Change-Id: I9cd6e186646f9db40b0e498740fb4c63bc99b79a
Former-commit-id: a2a89fb4e7 [formerly dc9ac54903107224633b4e18d5caa345649d8266]
Former-commit-id: 7c0a5d74bf
Parent: 0bab06709b
Commit: e952a4f179

67 changed files with 172 additions and 4942 deletions
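The API change that runs through this diff splits the old catch-all
IDataStore.delete(String...) into two explicit methods, deleteDatasets() and
deleteGroups(), so a group node in an HDF5 file can be unlinked explicitly
instead of being inferred from the path. A minimal sketch of the resulting
interface shape, inferred from the call sites in the hunks below (method
names and exception types are taken from the diff; the real interface
presumably declares other members not shown here):

    import java.io.FileNotFoundException;

    public interface IDataStore {
        // Delete individual datasets addressed by full path,
        // e.g. "/GridParmInfo/T_SFC" (path is illustrative).
        void deleteDatasets(String... datasets) throws StorageException,
                FileNotFoundException;

        // Delete whole HDF5 groups, including everything stored
        // beneath them.
        void deleteGroups(String... groups) throws StorageException,
                FileNotFoundException;
    }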
@@ -56,13 +56,6 @@
          version="0.0.0"
          unpack="false"/>
-
-   <plugin
-         id="com.raytheon.uf.common.datastorage.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
 
    <plugin
          id="com.raytheon.uf.common.derivparam"
          download-size="0"
@@ -120,30 +120,6 @@
          version="0.0.0"
          fragment="true"/>
-
-   <plugin
-         id="ncsa.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"/>
-
-   <plugin
-         id="ncsa.hdf5.linux32"
-         os="linux"
-         arch="x86"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         fragment="true"/>
-
-   <plugin
-         id="ncsa.hdf5.win32"
-         os="win32"
-         arch="x86"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         fragment="true"/>
 
    <plugin
          id="net.sf.cglib"
          download-size="0"
@@ -251,12 +251,6 @@
          install-size="0"
          version="0.0.0"/>
-
-   <plugin
-         id="ncsa.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"/>
 
    <plugin
          id="com.raytheon.uf.common.message"
          download-size="0"
@@ -271,13 +265,6 @@
          version="0.0.0"
          unpack="false"/>
-
-   <plugin
-         id="com.raytheon.uf.common.datastorage.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
 
    <plugin
          id="com.raytheon.uf.common.geospatial"
          download-size="0"
@@ -14,7 +14,6 @@ Require-Bundle: org.eclipse.ui,
  com.raytheon.uf.common.colormap;bundle-version="1.12.1174",
  org.geotools;bundle-version="2.6.4",
  com.raytheon.uf.common.datastorage;bundle-version="1.12.1174",
- com.raytheon.uf.common.datastorage.hdf5;bundle-version="1.12.1174",
  com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174",
  com.raytheon.uf.common.dataplugin.npp.viirs;bundle-version="1.0.0",
  javax.measure;bundle-version="1.0.0",
@@ -47,7 +47,8 @@ import com.raytheon.uf.common.util.cache.LRUCacheFS;
  *
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * Nov 8, 2011             mschenke    Initial creation
+ * Feb 12, 2013 #1608      randerso    Added explicit deletes for groups and datasets
  *
  * </pre>
  *
@@ -55,6 +56,22 @@ import com.raytheon.uf.common.util.cache.LRUCacheFS;
  * @version 1.0
  */
+/**
+ * TODO Add Description
+ *
+ * <pre>
+ *
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * Feb 12, 2013            randerso    Initial creation
+ *
+ * </pre>
+ *
+ * @author randerso
+ * @version 1.0
+ */
 public class CachingDataStore implements IDataStore {
 
     // quick byte string to hex conversion
@@ -348,12 +365,26 @@ public class CachingDataStore implements IDataStore {
      * (non-Javadoc)
      *
      * @see
-     * com.raytheon.uf.common.datastorage.IDataStore#delete(java.lang.String[])
+     * com.raytheon.uf.common.datastorage.IDataStore#deleteDatasets(java.lang
+     * .String[])
      */
     @Override
-    public void delete(String... location) throws StorageException,
+    public void deleteDatasets(String... datasets) throws StorageException,
             FileNotFoundException {
-        delegate.delete(location);
+        delegate.deleteDatasets(datasets);
     }
 
+    /*
+     * (non-Javadoc)
+     *
+     * @see
+     * com.raytheon.uf.common.datastorage.IDataStore#deleteGroups(java.lang.
+     * String[])
+     */
+    @Override
+    public void deleteGroups(String... groups) throws StorageException,
+            FileNotFoundException {
+        delegate.deleteGroups(groups);
+    }
+
     /*
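The CachingDataStore wrapper above simply forwards both new methods to its
delegate. A short before/after sketch of the caller-side migration that the
GFEDao, IFPGridDatabase, and TopoDatabaseManager hunks below perform (the
paths here are illustrative, not taken from the diff):

    // Before: one method regardless of what the path names
    dataStore.delete("/GridParmInfo/T_SFC", "/Topo");

    // After: state explicitly whether each path is a dataset or a group
    dataStore.deleteDatasets("/GridParmInfo/T_SFC");
    dataStore.deleteGroups("/Topo");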
@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>ncsa.hdf5.linux32</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.ManifestBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.SchemaBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.pde.PluginNature</nature>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
@@ -1,7 +0,0 @@
#Thu Mar 26 11:22:29 CDT 2009
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6
@@ -1,9 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: NCSA HDF5 Linux32 Fragment
Bundle-SymbolicName: ncsa.hdf5.linux32
Bundle-Version: 2.4.0
Bundle-Vendor: Raytheon-bundled OSS
Fragment-Host: ncsa.hdf5;bundle-version="2.4.0"
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Eclipse-PlatformFilter: (& (osgi.os=linux) (osgi.arch=x86))
@@ -1,4 +0,0 @@
bin.includes = META-INF/,\
               .,\
               libjhdf5.so
src.includes = libjhdf5.so

Binary file not shown.
@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>ncsa.hdf5.win32</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.ManifestBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.SchemaBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.pde.PluginNature</nature>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
@@ -1,7 +0,0 @@
#Thu Mar 26 11:23:11 CDT 2009
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6
@@ -1,9 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: NCSA HDF5 Win32 Fragment
Bundle-SymbolicName: ncsa.hdf5.win32
Bundle-Version: 2.4.0
Bundle-Vendor: Raytheon-bundled OSS
Fragment-Host: ncsa.hdf5;bundle-version="2.4.0"
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Eclipse-PlatformFilter: (& (osgi.os=win32) (osgi.arch=x86))
@@ -1,4 +0,0 @@
bin.includes = META-INF/,\
               .,\
               jhdf5.dll
src.includes = jhdf5.dll

Binary file not shown.
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
    <classpathentry exported="true" kind="lib" path="jhdf5.jar" sourcepath="/home/randerso/hdf-java.zip"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>ncsa.hdf5</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.ManifestBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.SchemaBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.pde.PluginNature</nature>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
@@ -1,7 +0,0 @@
#Thu Mar 26 11:34:03 CDT 2009
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6
@@ -1,11 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: NCSA HDF5 Plug-in
Bundle-SymbolicName: ncsa.hdf5
Bundle-Version: 2.4.0
Bundle-ClassPath: jhdf5.jar
Export-Package: ncsa.hdf.hdf5lib,
 ncsa.hdf.hdf5lib.exceptions
Bundle-Vendor: Raytheon-bundled OSS
Edex-Deploy: jhdf5.jar
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
@@ -1,2 +0,0 @@
bin.includes = META-INF/,\
               jhdf5.jar

Binary file not shown.
Binary file not shown.
@@ -27,8 +27,7 @@ Export-Package: com.raytheon.edex.colormap,
  com.raytheon.edex.util,
  com.raytheon.edex.utility,
  com.raytheon.edex.common.shapefiles
-Require-Bundle: ncsa.hdf5,
- net.sf.ehcache,
+Require-Bundle: net.sf.ehcache,
  org.apache.commons.configuration,
  org.apache.velocity;bundle-version="1.6.0",
  org.geotools,
@@ -57,7 +56,6 @@ Require-Bundle: ncsa.hdf5,
  com.raytheon.uf.common.geospatial;bundle-version="1.0.0";visibility:=reexport,
  com.raytheon.uf.common.message;bundle-version="1.11.11",
  com.raytheon.uf.common.serialization.comm;bundle-version="1.11.17",
- com.raytheon.uf.common.datastorage.hdf5;bundle-version="1.11.17",
  org.hibernate;visibility:=reexport,
  com.raytheon.uf.common.pointdata,
  com.raytheon.uf.common.util;visibility:=reexport,
@@ -16,7 +16,6 @@ Require-Bundle: com.raytheon.uf.common.dataplugin.gfe;bundle-version="1.12.1174"
  com.raytheon.uf.common.serialization,
  org.junit;bundle-version="1.0.0",
  com.raytheon.uf.edex.plugin.grid,
- com.raytheon.uf.common.datastorage.hdf5;bundle-version="1.11.24",
  com.raytheon.uf.edex.topo,
  com.raytheon.edex.common,
  com.raytheon.uf.common.dataplugin.satellite;bundle-version="1.0.0",
@@ -109,6 +109,8 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
  *                                     adding it to the inventory
  * 12/06/12     #1394      rjpeter     Optimized D2D grid access.
  * 01/21/12     #1504      randerso    Back ported change to use ParameterMapper into 13.1.2
+ * 02/10/13     #1603      randerso    Eliminated unnecessary conversion from lists to arrays
+ * 02/12/13     #1608      randerso    Changed to use explicit deletes for groups and datasets
  *
  * </pre>
  *
@@ -459,7 +461,7 @@ public class GFEDao extends DefaultPluginDao {
             String[] groupsToDelete = entry.getValue().getSecond();
 
             try {
-                dataStore.delete(groupsToDelete);
+                dataStore.deleteGroups(groupsToDelete);
 
                 if (statusHandler.isPriorityEnabled(Priority.DEBUG)) {
                     statusHandler.handle(Priority.DEBUG,
@@ -1074,7 +1076,8 @@ public class GFEDao extends DefaultPluginDao {
         try {
             IDataStore ds = DataStoreFactory.getDataStore(GfeUtil
                     .getGridParmHdf5File(GridDatabase.gfeBaseDataDir, dbId));
-            ds.delete("/GridParmInfo/" + parmAndLevel);
+            ds.deleteDatasets("/GridParmInfo/" + parmAndLevel,
+                    "/GridParmStorageInfo/" + parmAndLevel);
         } catch (Exception e1) {
             throw new DataAccessLayerException("Error deleting data from HDF5",
                     e1);
@@ -103,6 +103,11 @@ import com.vividsolutions.jts.io.WKTReader;
  * 06/18/08                njensen     Added discrete/wx to getGridData()
  * 05/04/12     #574       dgilling    Restructure class to better match AWIPS1.
  * 07/11/12     15162      ryu         No raising exception in c'tor
+ * 02/10/12     #1603      randerso    Implemented deleteDb, moved methods down from
+ *                                     GridDatabase that belonged here.
+ *                                     Removed unncecssary conversion from Lists to/from arrays
+ *                                     Added performance logging
+ * 02/12/13     #1608      randerso    Changed to explicitly call deleteGroups
  *
  * </pre>
  *
@@ -2449,7 +2454,7 @@ public class IFPGridDatabase extends GridDatabase {
         IDataStore dataStore = DataStoreFactory.getDataStore(hdf5File);
 
         try {
-            dataStore.delete(groupName);
+            dataStore.deleteGroups(groupName);
             statusHandler.handle(Priority.DEBUG, "Deleted: " + groupName
                     + " from " + hdf5File.getName());
 
@@ -72,6 +72,7 @@ import com.raytheon.uf.edex.topo.TopoQuery;
  * ------------ ---------- ----------- --------------------------
  * Jul 10, 2009            njensen     Initial creation
  * May 04, 2012 #574       dgilling    Re-port to better match AWIPS1.
+ * Feb 12, 2013 #1608      randerso    Changed to use explicit deleteGroups
  *
  * </pre>
  *
@@ -310,7 +311,7 @@ public class TopoDatabaseManager {
     public void revertTopoData(final GridLocation gloc) {
         String name = calcGroupName(gloc);
         try {
-            dataStore.delete(name);
+            dataStore.deleteGroups(name);
         } catch (Exception e) {
             statusHandler.error("Error attempting to remove: " + name, e);
         }
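For context on why the callers above must now choose a method: in HDF5 a
group is an interior node, so unlinking it removes the group and every
dataset beneath it, while deleting a dataset removes a single leaf. A hedged
sketch of the combined pattern, using only calls that appear in this diff
(the parm name is illustrative, not taken from the diff):

    IDataStore ds = DataStoreFactory.getDataStore(hdf5File);
    // Remove two individual records for one parm...
    ds.deleteDatasets("/GridParmInfo/T_SFC", "/GridParmStorageInfo/T_SFC");
    // ...or remove an entire group subtree in one call.
    ds.deleteGroups("/T_SFC");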
@@ -12,7 +12,6 @@ Require-Bundle: com.raytheon.edex.common;bundle-version="1.11.13",
  org.apache.commons.logging;bundle-version="1.0.4",
  com.raytheon.uf.common.localization;bundle-version="1.11.13",
  com.raytheon.edex.plugin.ldad;bundle-version="1.0.0",
- ncsa.hdf5;bundle-version="2.4.0",
  com.raytheon.uf.common.pointdata;bundle-version="1.11.17",
  com.raytheon.uf.edex.pointdata
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
    <classpathentry kind="src" path="src"/>
    <classpathentry kind="src" output="bin" path="unit-test"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>com.raytheon.uf.common.datastorage.hdf5</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.ManifestBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.pde.SchemaBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.pde.PluginNature</nature>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
@@ -1,12 +0,0 @@
#Wed Jun 17 09:51:15 CDT 2009
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.6
@@ -1,18 +0,0 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Hdf5 Plug-in
Bundle-SymbolicName: com.raytheon.uf.common.datastorage.hdf5
Bundle-Version: 1.12.1174.qualifier
Bundle-Vendor: RAYTHEON
Eclipse-BuddyPolicy: registered, ext, global
Eclipse-RegisterBuddy: com.raytheon.uf.common.datastorage
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Export-Package: com.raytheon.uf.common.datastorage.hdf5
Import-Package: com.raytheon.uf.common.datastorage,
 com.raytheon.uf.common.datastorage.locking,
 com.raytheon.uf.common.datastorage.records,
 com.raytheon.uf.common.util,
 junit.framework,
 org.junit
Require-Bundle: ncsa.hdf5;bundle-version="2.4.0",
 com.raytheon.uf.common.serialization
@@ -1,4 +0,0 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
               .
@@ -1,8 +0,0 @@
<project basedir="." default="deploy" name="com.raytheon.uf.common.datastorage.hdf5">

    <available file="../build.edex" property="build.dir.location" value="../build.edex"/>
    <available file="../../../../../build.edex" property="build.dir.location" value="../../../../../build.edex"/>

    <import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />

</project>
@@ -1,446 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import java.awt.Point;
import java.lang.reflect.Method;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.HDFNativeData;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.Request.Type;
import com.raytheon.uf.common.datastorage.records.AbstractStorageRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;

/**
 * Defines the base implementation of an HDF5 Read
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 27, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public abstract class AbstractHDFRead {

    private static Method select;

    static {
        try {
            Method[] methods = H5.class.getDeclaredMethods();
            for (Method method : methods) {
                if (method.getName().equals("H5Sselect_elements")) {
                    Class<?>[] clazz = method.getParameterTypes();
                    if (clazz.length != 4) {
                        continue;
                    }

                    if (clazz[0] == Integer.TYPE && clazz[1] == Integer.TYPE
                            && clazz[2] == Integer.TYPE
                            && clazz[3] == byte[].class) {
                        select = method;
                        select.setAccessible(true);
                        break;
                    }
                }

            }
        } catch (Exception e) {
            System.out
                    .println("Method signature for H5Sselect_elements not found.");
            e.printStackTrace();
        }
    }

    public IDataRecord read(Request request, int file_id, String group,
            String dataset, float scaleFactor) throws StorageException {
        int dataset_id = -1;
        int typeid = -1;
        int memspace = -1;
        int dataspace = -1;
        int plist = -1;
        Number fillValueResult = null;

        try {
            // Open an existing dataset.

            dataset_id = H5.H5Dopen(file_id, group + "/" + dataset);

            plist = H5.H5Dget_create_plist(dataset_id);

            typeid = H5.H5Dget_type(dataset_id);

            int classid = H5.H5Tget_class(typeid);

            int size = H5.H5Tget_size(typeid);
            boolean isVL = H5.H5Tis_variable_str(typeid);

            dataspace = H5.H5Dget_space(dataset_id);

            int sz = H5.H5Sget_simple_extent_ndims(dataspace);

            long[] dims = new long[sz];
            long[] originalDims = new long[sz];

            int totalSize = 1;
            H5.H5Sget_simple_extent_dims(dataspace, dims, (long[]) null);
            // perform a deep copy
            for (int i = 0; i < originalDims.length; i++) {
                originalDims[i] = dims[i];
            }

            // optimization:
            // if a line query was used, but it's only 1d, this is really
            // a point query. We _could_ use hyperslabs to do this, but
            // it would be a lot slower than a regular point query, so
            // just rewrite the request as a point query.
            AbstractHDFRead readObj = this;
            if (dims.length == 1
                    && (request.getType() == Type.XLINE || request.getType() == Type.YLINE)) {
                int[] idx = request.getIndices();
                int[] pts = new int[idx.length];
                Set<Point> p = new LinkedHashSet<Point>(idx.length);
                int k = 0;
                for (int i = 0; i < idx.length; i++) {
                    if (request.getType() == Type.XLINE) {
                        if (idx[i] >= originalDims[1])
                            continue;
                    } else if (request.getType() == Type.YLINE) {
                        if (idx[i] >= originalDims[0])
                            continue;
                    }
                    p.add(new Point(idx[i], 0));
                    pts[k] = idx[i];
                    k++;
                }

                if (k != idx.length) {
                    // Prune the caller's copy (optimization)
                    idx = new int[k];
                    System.arraycopy(pts, 0, idx, 0, k);
                    request.setIndices(idx);
                }

                request = (Request.buildPointRequest(p.toArray(new Point[p
                        .size()])));
                readObj = HDF5OpManager.readTypes.get(request.getType());
            }

            totalSize = readObj.calculateSize(dataspace, sz, request, dims,
                    originalDims, totalSize);

            LocalHDFDataType dataType = null;
            Object data = null;
            Object fillValue = null;
            int readType = 0;
            if (classid == HDF5Constants.H5T_FLOAT) {
                dataType = LocalHDFDataType.FLOAT;
                data = new float[totalSize];
                fillValue = new float[1];
                readType = HDF5Constants.H5T_NATIVE_FLOAT;
            } else if (classid == HDF5Constants.H5T_INTEGER) {
                if (size == 1) {
                    dataType = LocalHDFDataType.BYTE;
                    data = new byte[totalSize];
                    fillValue = new byte[1];
                    readType = HDF5Constants.H5T_NATIVE_INT8;
                } else if (size == 2) {
                    dataType = LocalHDFDataType.SHORT;
                    data = new short[totalSize];
                    fillValue = new short[1];
                    readType = HDF5Constants.H5T_NATIVE_SHORT;
                } else if (size == 4) {
                    dataType = LocalHDFDataType.INT;
                    data = new int[totalSize];
                    fillValue = new int[1];
                    readType = HDF5Constants.H5T_NATIVE_INT;
                } else if (size == 8) {
                    dataType = LocalHDFDataType.LONG;
                    data = new long[totalSize];
                    fillValue = new long[1];
                    readType = HDF5Constants.H5T_NATIVE_INT64;
                }
            } else if (classid == HDF5Constants.H5T_STRING) {
                data = new String[totalSize];
                dataType = LocalHDFDataType.STRING;
                readType = typeid;
            }

            memspace = H5.H5Screate_simple(sz, dims, null);

            readObj.selectSet(memspace, dataspace, request, dims, originalDims);

            if (dataType == LocalHDFDataType.STRING && isVL) {
                H5.H5DreadVL(dataset_id, readType, memspace, dataspace,
                        HDF5Constants.H5P_DEFAULT, (Object[]) data);
            } else {
                H5.H5Dread(dataset_id, readType, memspace, dataspace,
                        HDF5Constants.H5P_DATASET_XFER_DEFAULT, data);
            }

            if (fillValue != null) {
                H5.H5Pget_fill_value(plist, readType, fillValue);
                if (fillValue instanceof double[]) {
                    fillValueResult = new Double(((double[]) fillValue)[0]);
                } else if (fillValue instanceof int[]) {
                    fillValueResult = new Integer(((int[]) fillValue)[0]);
                } else if (fillValue instanceof byte[]) {
                    fillValueResult = new Byte(((byte[]) fillValue)[0]);
                } else if (fillValue instanceof long[]) {
                    fillValueResult = new Long(((long[]) fillValue)[0]);
                } else if (fillValue instanceof short[]) {
                    fillValueResult = new Short(((short[]) fillValue)[0]);
                } else if (fillValue instanceof float[]) {
                    fillValueResult = new Float(((float[]) fillValue)[0]);
                }

            }

            // Swizzle and scale the dims to match java nomenclature
            long[] dims2 = new long[dims.length];
            int k = 0;
            for (int i = dims2.length - 1; i >= 0; i--) {
                dims2[k] = (int) (dims[i] * scaleFactor);
                k++;
            }

            AbstractStorageRecord rec = DataStoreFactory.createStorageRecord(
                    dataset, group, data, sz, dims2);
            if (fillValueResult != null) {
                rec.setFillValue(fillValueResult);
            }
            readProperties(rec, dataset_id);

            return rec;
        } catch (Exception e) {
            try {
                H5.H5Eclear();
            } catch (HDF5LibraryException e1) {
                // ignore
            }
            throw new StorageException("Error occurred during retrieve ", null,
                    e);
        } finally {
            try {
                if (memspace >= 0) {
                    H5.H5Sclose(memspace);
                }
            } catch (HDF5LibraryException e) {
                // ignore
            }
            try {
                if (dataspace >= 0) {
                    H5.H5Sclose(dataspace);
                }
            } catch (HDF5LibraryException e) {
                // ignore
            }

            try {
                if (plist >= 0) {
                    H5.H5Pclose(plist);
                }
            } catch (HDF5LibraryException e) {
                // ignore
            }

            try {
                if (dataset_id >= 0) {
                    H5.H5Dclose(dataset_id);
                }
            } catch (HDF5LibraryException e) {
                // ignore
            }

            try {
                if (typeid >= 0) {
                    H5.H5Tclose(typeid);
                }
            } catch (HDF5LibraryException e) {
                // ignore
            }
        }
    }

    private void readProperties(IDataRecord dr, int dataSet) {
        try {

            Map<String, Object> attribMap = new LinkedHashMap<String, Object>();
            int attribs = H5.H5Aget_num_attrs(dataSet);
            if (attribs > 0) {
                dr.setDataAttributes(attribMap);
            }

            for (int i = 0; i < attribs; i++) {

                int attribId = 0;
                int type = 0;
                int nativeType = 0;
                try {
                    attribId = H5.H5Aopen_idx(dataSet, i);
                    String[] str = new String[1];
                    H5.H5Aget_name(attribId, 256, str);
                    type = H5.H5Aget_type(attribId);
                    int spc = H5.H5Tget_size(type);
                    nativeType = H5.H5Tget_native_type(type);
                    int cls = H5.H5Tget_class(type);

                    if (cls == HDF5Constants.H5T_INTEGER) {
                        Object d = null;
                        switch (spc) {
                        case 1:
                            // byte
                            d = new byte[1];
                            H5.H5Aread(attribId, HDF5Constants.H5T_NATIVE_INT,
                                    d);
                            attribMap.put(str[0], ((byte[]) d)[0]);
                            break;
                        case 2:
                            // short
                            d = new short[1];
                            H5.H5Aread(attribId, HDF5Constants.H5T_NATIVE_INT,
                                    d);
                            attribMap.put(str[0], ((short[]) d)[0]);
                            break;
                        case 4:
                            // regular int
                            d = new int[1];
                            H5.H5Aread(attribId, HDF5Constants.H5T_NATIVE_INT,
                                    d);
                            attribMap.put(str[0], ((int[]) d)[0]);
                            break;
                        case 8:
                            // long
                            d = new long[1];
                            H5.H5Aread(attribId, HDF5Constants.H5T_NATIVE_INT,
                                    d);
                            attribMap.put(str[0], ((long[]) d)[0]);
                            break;
                        }

                    } else if (cls == HDF5Constants.H5T_FLOAT) {
                        if (spc == 4) {
                            float[] d = new float[1];
                            H5.H5Aread(attribId,
                                    HDF5Constants.H5T_NATIVE_FLOAT, d);
                            attribMap.put(str[0], d[0]);
                        } else if (spc == 8) {
                            double[] d = new double[1];
                            H5.H5Aread(attribId,
                                    HDF5Constants.H5T_NATIVE_DOUBLE, d);
                            attribMap.put(str[0], d[0]);
                        }
                    } else if (cls == HDF5Constants.H5T_STRING) {
                        byte[] b = new byte[spc];
                        H5.H5Aread(attribId, nativeType, b);
                        String outStr = new String(b, 0, spc - 1);
                        attribMap.put(str[0], outStr);
                    } else {
                        throw new IllegalArgumentException(
                                "Unable to handle type" + cls);
                    }
                } catch (HDF5Exception e) {
                    H5.H5Eclear();
                    e.printStackTrace();
                } finally {
                    if (type > 0) {
                        try {
                            H5.H5Tclose(type);
                        } catch (Exception e) {
                        }
                    }

                    if (attribId > 0) {
                        try {
                            H5.H5Aclose(attribId);
                        } catch (Exception e) {
                        }
                    }

                }
            }
        } catch (HDF5LibraryException e) {
            try {
                H5.H5Eclear();
            } catch (HDF5LibraryException e1) {
            }
            e.printStackTrace();
            return;
        }

    }

    protected abstract void selectSet(int memspace, int dataspace,
            Request request, long[] dims, long[] originalDatasetDims)
            throws HDF5Exception, HDF5LibraryException, StorageException;

    protected abstract int calculateSize(int dataspace, int sz,
            Request request, long[] dims, long[] originalDims, int totalSize)
            throws HDF5LibraryException;

    // For some reason, the hdf5 folks implemented this using reflection which
    // makes it very slow. This implementation is at least 5x faster
    protected static final byte[] longToBytes(long[][] indices) {
        long[] inn = new long[indices.length * indices[0].length];
        for (int j = 0; j < indices.length; j++) {
            System.arraycopy(indices[j], 0, inn, j * indices[0].length,
                    indices[0].length);
        }

        return HDFNativeData.longToByte(0, inn.length, inn);
    }

    protected static int invokeH5Sselect_elements(int space, int op,
            int num_elements, byte[] coords) throws StorageException {
        // synchronization not needed since caller methods are synchronized
        if (select == null) {
            throw new StorageException(
                    "Method did not initialize properly? Incompatible version of hdf5?",
                    null);
        }

        try {
            Integer i = (Integer) select.invoke(null, space, op, num_elements,
                    coords);
            return i;
        } catch (Exception e) {
            throw new StorageException("Error selecting elements", null, e);
        }
    }

}

File diff suppressed because it is too large
@@ -1,69 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import java.util.HashMap;
import java.util.Map;

import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.Request.Type;
import com.raytheon.uf.common.datastorage.records.IDataRecord;

/**
 * Provides a proxy for HDF5 to be extended for various types of requests
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 27, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public class HDF5OpManager {

    protected static final Map<Type, AbstractHDFRead> readTypes;
    static {
        readTypes = new HashMap<Type, AbstractHDFRead>();
        readTypes.put(Type.POINT, new PointSelectionRead());
        readTypes.put(Type.ALL, new WholeDatasetSelectionRead());
        readTypes.put(Type.SLAB, new SlabSelectionRead());
        readTypes.put(Type.XLINE, new LineSelectionRead.XLineSelectionRead());
        readTypes.put(Type.YLINE, new LineSelectionRead.YLineSelectionRead());
    }

    public static IDataRecord read(Request request, int file_id, String group,
            String dataset, float scaleFactor) throws StorageException {
        AbstractHDFRead readImpl = readTypes.get(request.getType());
        if (readImpl == null) {
            throw new IllegalArgumentException("Unsupported request type: "
                    + request.getType());
        }

        return readImpl.read(request, file_id, group, dataset, scaleFactor);
    }

}
@@ -1,231 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
import com.raytheon.uf.common.datastorage.records.IDataRecord;

/**
 * Implements a set of line (one dimensional) reads
 *
 * Initially, this supports reading along either the X or Y axis in a two
 * dimensional dataset. It could be extended to accomodate selection from
 * n-dimensional datasets or selection along a diagonal.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 28, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public abstract class LineSelectionRead extends AbstractHDFRead {

    @Override
    protected int calculateSize(int dataspace, int sz, Request request,
            long[] dims, long[] originalDims, int totalSize)
            throws HDF5LibraryException {

        int[] points = request.getIndices();

        return points.length;
    }

    protected abstract void pruneIndices(Request request, long[] originalDims);

    /*
     * (non-Javadoc)
     *
     * @see
     * com.raytheon.uf.common.datastorage.hdf5.AbstractHDFRead#read(com.raytheon
     * .uf.common.datastorage.Request, int, java.lang.String, java.lang.String,
     * float)
     */
    @Override
    public IDataRecord read(Request request, int file_id, String group,
            String dataset, float scaleFactor) throws StorageException {
        return super.read(request, file_id, group, dataset, scaleFactor);
    }

    public static class YLineSelectionRead extends LineSelectionRead {

        /*
         * (non-Javadoc)
         *
         * @see
         * com.raytheon.uf.common.datastorage.hdf5.LineSelectionRead#pruneIndices
         * (com.raytheon.uf.common.datastorage.Request, long[])
         */
        @Override
        protected void pruneIndices(Request request, long[] originalDims) {
            int[] indices = request.getIndices();
            int[] prunedIndices = new int[indices.length];
            int k = 0;
            for (int i = 0; i < indices.length; i++) {
                if (indices[i] < originalDims[0] && indices[i] >= 0) {
                    prunedIndices[k] = indices[i];
                    k++;
                }
            }

            indices = new int[k];
            System.arraycopy(prunedIndices, 0, indices, 0, k);
            request.setIndices(indices);

        }

        @Override
        protected void selectSet(int memspace, int dataspace, Request request,
                long[] dims, long[] originalDatasetDims) throws HDF5Exception,
                HDF5LibraryException, StorageException {
            int[] points = request.getIndices();

            long[] memOffset = new long[] { 0, 0 };
            for (int i = 0; i < points.length; i++) {
                long[] start = new long[] { points[i], 0 };
                long[] count = new long[] { 1, originalDatasetDims[1] };

                if (i == 0) {
                    H5.H5Sselect_hyperslab(dataspace,
                            HDF5Constants.H5S_SELECT_SET, start, null, count,
                            null);
                    H5.H5Sselect_hyperslab(memspace,
                            HDF5Constants.H5S_SELECT_SET, memOffset, null,
                            count, null);
                } else {
                    memOffset[0] += 1;
                    H5.H5Sselect_hyperslab(dataspace,
                            HDF5Constants.H5S_SELECT_OR, start, null, count,
                            null);
                    H5.H5Sselect_hyperslab(memspace,
                            HDF5Constants.H5S_SELECT_OR, memOffset, null,
                            count, null);
                }
            }
        }

        /*
         * (non-Javadoc)
         *
         * @see
         * com.raytheon.uf.common.datastorage.hdf5.LineSelectionRead#calculateSize
         * (int, int, com.raytheon.uf.common.datastorage.Request, long[], int)
         */
        @Override
        protected int calculateSize(int dataspace, int sz, Request request,
                long[] dims, long[] originalDims, int totalSize)
                throws HDF5LibraryException {
            pruneIndices(request, originalDims);
            dims[0] = request.getIndices().length;
            return (int) (super.calculateSize(dataspace, sz, request, dims,
                    originalDims, totalSize) * dims[1]);
        }

    }

    public static class XLineSelectionRead extends LineSelectionRead {

        /*
         * (non-Javadoc)
         *
         * @see
         * com.raytheon.uf.common.datastorage.hdf5.LineSelectionRead#pruneIndices
         * (com.raytheon.uf.common.datastorage.Request, long[])
         */
        @Override
        protected void pruneIndices(Request request, long[] originalDims) {
            int[] indices = request.getIndices();
            int[] prunedIndices = new int[indices.length];
            int k = 0;
            for (int i = 0; i < indices.length; i++) {
                if (indices[i] < originalDims[1] && indices[i] >= 0) {
                    prunedIndices[k] = indices[i];
                    k++;
                }
            }

            indices = new int[k];
            System.arraycopy(prunedIndices, 0, indices, 0, k);
            request.setIndices(indices);

        }

        @Override
        protected void selectSet(int memspace, int dataspace, Request request,
                long[] dims, long[] originalDatasetDims) throws HDF5Exception,
                HDF5LibraryException, StorageException {
            int[] points = request.getIndices();

            long[] memOffset = new long[] { 0, 0 };
            for (int i = 0; i < points.length; i++) {
                long[] start = new long[] { 0, points[i] };
                long[] count = new long[] { originalDatasetDims[0], 1 };

                if (i == 0) {
                    H5.H5Sselect_hyperslab(dataspace,
                            HDF5Constants.H5S_SELECT_SET, start, null, count,
                            null);
                    H5.H5Sselect_hyperslab(memspace,
                            HDF5Constants.H5S_SELECT_SET, memOffset, null,
                            count, null);
                } else {
                    memOffset[1] += 1;
                    H5.H5Sselect_hyperslab(dataspace,
                            HDF5Constants.H5S_SELECT_OR, start, null, count,
                            null);
                    H5.H5Sselect_hyperslab(memspace,
                            HDF5Constants.H5S_SELECT_OR, memOffset, null,
                            count, null);
                }
            }
        }

        /*
         * (non-Javadoc)
         *
         * @see
         * com.raytheon.uf.common.datastorage.hdf5.LineSelectionRead#calculateSize
         * (int, int, com.raytheon.uf.common.datastorage.Request, long[], int)
         */
        @Override
        protected int calculateSize(int dataspace, int sz, Request request,
                long[] dims, long[] originalDims, int totalSize)
                throws HDF5LibraryException {
            pruneIndices(request, originalDims);
            dims[1] = request.getIndices().length;
            return (int) (super.calculateSize(dataspace, sz, request, dims,
                    originalDims, totalSize) * dims[0]);
        }

    }

}
@@ -1,158 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import ncsa.hdf.hdf5lib.HDF5Constants;

import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.IntegerDataRecord;
import com.raytheon.uf.common.datastorage.records.LongDataRecord;
import com.raytheon.uf.common.datastorage.records.ShortDataRecord;
import com.raytheon.uf.common.datastorage.records.StringDataRecord;

/**
 * Enum to manage HDF5 data types.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Feb 12, 2007            chammack    Initial Creation. (HDF5DataStore)
 * 20070914     379        jkorman     Refactored from HDFDataStore.
 * </pre>
 *
 * @author chammack
 * @version 1
 */
public enum LocalHDFDataType {

    BYTE(HDF5Constants.H5T_NATIVE_INT8, "BYTE"), SHORT(
            HDF5Constants.H5T_NATIVE_SHORT, "SHORT"), INT(
            HDF5Constants.H5T_NATIVE_INT, "INT"), LONG(
            HDF5Constants.H5T_NATIVE_INT64, "LONG"), FLOAT(
            HDF5Constants.H5T_NATIVE_FLOAT, "FLOAT"), DOUBLE(
            HDF5Constants.H5T_NATIVE_DOUBLE, "DOUBLE"), STRING(
            HDF5Constants.H5T_STR_NULLTERM, "STRING");

    private final int hdfNativeDataType;

    private final String localName;

    private static final Map<Integer, LocalHDFDataType> typeMap = new HashMap<Integer, LocalHDFDataType>();
    static {
        typeMap.put(BYTE.getHDFNativeType(), BYTE);
        typeMap.put(SHORT.getHDFNativeType(), SHORT);
        typeMap.put(INT.getHDFNativeType(), INT);
        typeMap.put(LONG.getHDFNativeType(), LONG);
        typeMap.put(FLOAT.getHDFNativeType(), FLOAT);
        typeMap.put(DOUBLE.getHDFNativeType(), DOUBLE);
        typeMap.put(STRING.getHDFNativeType(), STRING);
    }

    private static final Map<Class<? extends IDataRecord>, LocalHDFDataType> dataRecordMap = new HashMap<Class<? extends IDataRecord>, LocalHDFDataType>();
    static {
        dataRecordMap.put(FloatDataRecord.class, FLOAT);
        dataRecordMap.put(IntegerDataRecord.class, INT);
        dataRecordMap.put(ShortDataRecord.class, SHORT);
        dataRecordMap.put(LongDataRecord.class, LONG);
        dataRecordMap.put(StringDataRecord.class, STRING);
        dataRecordMap.put(ByteDataRecord.class, BYTE);
    }

    /**
     * Construct an instance of this class.
     *
     * @param hdfDataType
     *            The HDF5 data type.
     * @param name
     *            The name of this instance.
     */
    private LocalHDFDataType(int hdfDataType, String name) {
        this.hdfNativeDataType = hdfDataType;
        localName = name;
    }

    /**
     * Get the HDF5 data type associated with this instance.
     *
     * @return The HDF5 data type.
     */
    public int getHDFNativeType() {
        return hdfNativeDataType;
    }

    /**
     * Get the string representation of this instance.
     *
     * @return The string representation.
     */
    @Override
    public String toString() {
        return localName;
    }

    /**
     * Get the LocalHDFDataType enum corresponding to a specified HDF5 data
     * type. If the data type is not defined, a null is returned.
     *
     * @param hdfDataType
     *            HDF5 data type to find.
     * @return The LocalHDFDataType if defined, null otherwise.
     */
    public static LocalHDFDataType getCorrespondingEnum(int hdfDataType) {
        return typeMap.get(hdfDataType);
    }

    /**
     * Get the LocalHDFDataType enum corresponding to a specified data type
     * name. If the data type is not defined, a null is returned.
     *
     * @param typeName
     *            hdfDataType HDF5 data type name to find.
     * @return The LocalHDFDataType if defined, null otherwise.
     */
    public static LocalHDFDataType getCorrespondingEnum(String typeName) {
        LocalHDFDataType dataType = null;
        Collection<LocalHDFDataType> dataTypes = typeMap.values();
        for (LocalHDFDataType type : dataTypes) {
            if (type.localName.equals(typeName)) {
                dataType = type;
                break;
            }
        }
        return dataType;
    }

    public static LocalHDFDataType getCorrespondingEnum(IDataRecord rec) {
        if (rec == null) {
            return null;
        }

        return dataRecordMap.get(rec.getClass());
    }
}
@@ -1,99 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import java.awt.Point;

import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;

/**
 * Selects data from an hdf dataset along a discrete set of points
 *
 * Currently this supports either 1D or 2D selection (the java.awt.Point class
 * is used, which is only at max two dimensional). In one dimensional cases, the
 * y value is ignored.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 27, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public class PointSelectionRead extends AbstractHDFRead {

    @Override
    protected int calculateSize(int dataspace, int sz, Request request,
            long[] dims, long[] originalDims, int totalSize)
            throws HDF5LibraryException {
        Point[] points = request.getPoints();

        dims[0] = points.length;
        if (sz > 1) {
            dims[1] = 1;
        }
        for (int i = 0; i < dims.length; i++) {
            totalSize *= dims[i];
        }
        return totalSize;
    }

    @Override
    protected void selectSet(int memspace, int dataspace, Request request,
            long[] dims, long[] originalDatasetDims) throws HDF5Exception,
            HDF5LibraryException, StorageException {
        long[][] coords;
        Point[] points = request.getPoints();

        if (dims.length == 2) {
            coords = new long[points.length * (int) dims[1]][dims.length];
        } else {
            coords = new long[points.length][dims.length];
        }

        for (int i = 0; i < points.length; i++) {

            if (dims.length == 2) {
                coords[i][0] = points[i].y;
                coords[i][1] = points[i].x;
            } else {
                coords[i][0] = points[i].x;
            }
        }

        // Below is a performance optimization due to
        // limitations in hdf5
        byte[] coordsAsBytes = longToBytes(coords);
        invokeH5Sselect_elements(dataspace, HDF5Constants.H5S_SELECT_SET,
                coords.length, coordsAsBytes);
    }

}
@@ -1,92 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;

/**
 * Read a "stripe" of data. This is much more efficient than reading by points
 * as it tells the HDF5 library to do a continuous read.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 27, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public class SlabSelectionRead extends AbstractHDFRead {

    @Override
    protected int calculateSize(int dataspace, int sz, Request request,
            long[] dims, long[] originalDims, int totalSize)
            throws HDF5LibraryException {
        int[] minIndex = request.getMinIndexForSlab();
        int[] maxIndex = request.getMaxIndexForSlab();

        if (minIndex != null && maxIndex != null) {
            dims[0] = maxIndex[1] - minIndex[1];
            dims[1] = maxIndex[0] - minIndex[0];
            totalSize = (int) ((dims[0]) * dims[1]);
        } else {
            H5.H5Sget_simple_extent_dims(dataspace, dims, (long[]) null);
            for (long dim : dims) {
                totalSize *= (int) dim;
            }
        }
        return totalSize;
    }

    @Override
    protected void selectSet(int memspace, int dataspace, Request request,
            long[] dims, long[] originalDatasetDims) throws HDF5Exception,
            HDF5LibraryException, StorageException {
        long[] offset = new long[2];
        long[] count = new long[2];

        int[] minIndex = request.getMinIndexForSlab();
        int[] maxIndex = request.getMaxIndexForSlab();

        if ((minIndex[0] > maxIndex[0]) || (minIndex[1] > maxIndex[1])) {
            throw new StorageException(
                    "Minimum slab dimension exceeds Maximum dimension, request failed",
                    null);
        }

        offset[0] = minIndex[1];
        offset[1] = minIndex[0];
        count[0] = maxIndex[1] - minIndex[1];
        count[1] = maxIndex[0] - minIndex[0];

        H5.H5Sselect_hyperslab(dataspace, HDF5Constants.H5S_SELECT_SET, offset,
                null, count, null);
    }

}
@@ -1,65 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/
package com.raytheon.uf.common.datastorage.hdf5;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;

import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;

/**
 * Implements the whole dataset read.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Jul 27, 2009            chammack    Initial creation
 *
 * </pre>
 *
 * @author chammack
 * @version 1.0
 */

public class WholeDatasetSelectionRead extends AbstractHDFRead {

    @Override
    protected int calculateSize(int dataspace, int sz, Request request,
            long[] dims, long[] originalDims, int totalSize)
            throws HDF5LibraryException {
        H5.H5Sget_simple_extent_dims(dataspace, dims, (long[]) null);
        for (long dim : dims) {
            totalSize *= (int) dim;
        }
        return totalSize;
    }

    @Override
    protected void selectSet(int memspace, int dataspace, Request request,
            long[] dims, long[] originalDatasetDims) throws HDF5Exception,
            HDF5LibraryException, StorageException {
        // no op
    }

}
File diff suppressed because it is too large
@@ -1,121 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/

package com.raytheon.uf.common.datastorage.hdf5;

import java.io.File;
import java.lang.Thread.State;
import java.util.Random;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;

/**
 * Test locking mechanism on HDF5.
 *
 * Start two instances of this program and check for errors.
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Feb 20, 2007            chammack    Initial Creation.
 *
 * </pre>
 *
 * @author chammack
 * @version 1
 */
public class TestHDF5Locking {

    private static File TEST_FILE = new File("/tmp/test.hdf");

    public static void main(String[] args) {
        Random rand = new Random(System.currentTimeMillis());
        int uniqueId = rand.nextInt();

        Thread thread1 = new Thread(new HDF5Writer(uniqueId, 1));
        Thread thread2 = new Thread(new HDF5Writer(uniqueId, 2));

        try {
            Thread.sleep(3000);
        } catch (InterruptedException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

        thread1.start();
        thread2.start();
        while (thread1.getState() != State.TERMINATED
                || thread2.getState() != State.TERMINATED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }

        System.out.println("Complete");
        System.exit(0);

    }

    public static class HDF5Writer implements Runnable {

        private int progId;

        private int instanceId;

        public HDF5Writer(int progId, int instanceId) {
            this.progId = progId;
            this.instanceId = instanceId;
        }

        /*
         * (non-Javadoc)
         *
         * @see java.lang.Runnable#run()
         */
        public void run() {
            IDataStore dataStore = DataStoreFactory.getDataStore(TEST_FILE);

            float[] dummyFloatData = new float[1024];

            for (int i = 0; i < 5000; i++) {
                FloatDataRecord fdr = new FloatDataRecord("" + i, progId + "/"
                        + instanceId, dummyFloatData);
                try {
                    dataStore.addDataRecord(fdr);
                    dataStore.store();
                    // dataStore.store("/");
                } catch (Exception e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }

    }
}
@@ -1,142 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/

package com.raytheon.uf.common.datastorage.hdf5;

import java.io.File;
import java.lang.Thread.State;

import junit.framework.Assert;

import com.raytheon.uf.common.datastorage.locking.ClusteredLockManager;

/**
 * A Lock Manager test:
 *
 * Tests lock manager running inside the same JVM
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Feb 20, 2007            chammack    Initial Creation.
 *
 * </pre>
 *
 * @author chammack
 * @version 1
 */
public class TestLockManager {

    public static void main(String[] args) {

        try {
            // Test #1: Single Thread

            ClusteredLockManager lockMgr = ClusteredLockManager.getInstance();

            boolean gotLock = lockMgr.getLock(new File("/tmp/foo"), true);
            if (gotLock == false) {
                System.out.println("ERROR: Lock was not granted initially");
            }

            gotLock = lockMgr.getLock(new File("/tmp/foo"), true);
            if (gotLock == true) {
                System.out.println("ERROR: Lock should not have been granted");
            }

            lockMgr.releaseLock(new File("/tmp/foo"));

            // Release and retry
            gotLock = lockMgr.getLock(new File("/tmp/foo"), true);
            if (gotLock == false) {
                System.out.println("ERROR: Lock was not granted after unlock");
            }

            lockMgr.releaseLock(new File("/tmp/foo"));
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }

        // Test #2 Multi-thread
        System.out
                .println("Threaded test: Threads should both return approximately 0.5 load");

        LockTester lt1 = new LockTester(1);
        LockTester lt2 = new LockTester(2);

        Thread thread1 = new Thread(lt1);
        Thread thread2 = new Thread(lt2);

        thread1.start();
        thread2.start();

        System.out.println("Running test: This will take a few seconds...");
        while (thread1.getState() != State.TERMINATED
                || thread2.getState() != State.TERMINATED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }

        System.exit(0);
    }

    public static class LockTester implements Runnable {

        private int id;

        public LockTester(int id) {
            this.id = id;
        }

        public void run() {
            try {

                ClusteredLockManager lockMgr = ClusteredLockManager
                        .getInstance();

                int hits = 0;
                for (int i = 0; i < 1000; i++) {
                    boolean gotLock = lockMgr.getLock(new File("/tmp/foo"),
                            true);
                    Thread.sleep(10);
                    if (gotLock) {
                        lockMgr.releaseLock(new File("/tmp/foo"));
                        hits++;
                    }
                    Thread.sleep(10);

                }

                System.out.println("Thread #" + id + ":: " + (hits) / 1000.0);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

    }
}
@@ -1,123 +0,0 @@
/**
 * This software was developed and / or modified by Raytheon Company,
 * pursuant to Contract DG133W-05-CQ-1067 with the US Government.
 *
 * U.S. EXPORT CONTROLLED TECHNICAL DATA
 * This software product contains export-restricted data whose
 * export/transfer/disclosure is restricted by U.S. law. Dissemination
 * to non-U.S. persons whether in the United States or abroad requires
 * an export license or other authorization.
 *
 * Contractor Name:        Raytheon Company
 * Contractor Address:     6825 Pine Street, Suite 340
 *                         Mail Stop B8
 *                         Omaha, NE 68106
 *                         402.291.0100
 *
 * See the AWIPS II Master Rights File ("Master Rights File.pdf") for
 * further licensing information.
 **/

package com.raytheon.uf.common.datastorage.hdf5;

import java.io.File;
import java.lang.Thread.State;

import com.raytheon.uf.common.datastorage.locking.ClusteredLockManager;
import com.raytheon.uf.common.datastorage.locking.LockException;

/**
 * Clustered version of TestLockManager. This requires multicast.
 *
 * Run two copies of this program at the same time.
 *
 * Should yield approximately 0.5 on each program.
 *
 * <pre>
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Feb 20, 2007            chammack    Initial Creation.
 *
 * </pre>
 *
 * @author chammack
 * @version 1
 */
public class TestLockManagerClustered {

    public static void main(String[] args) {

        try {
            // ClusteredLockManager lockMgr =
            // ClusteredLockManager.getInstance();
            ClusteredLockManager.getInstance();
            Thread.sleep(2000);
        } catch (LockException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (InterruptedException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

        // Test #1 Clustered
        System.out
                .println("Clustered test: Programs should both return approximately 0.5 load");

        LockTester lt1 = new LockTester(1);

        Thread thread1 = new Thread(lt1);

        thread1.start();

        System.out.println("Running test: This will take a few seconds...");
        while (thread1.getState() != State.TERMINATED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }

        System.exit(0);
    }

    public static class LockTester implements Runnable {

        private int id;

        public LockTester(int id) {
            this.id = id;
        }

        public void run() {
            try {

                ClusteredLockManager lockMgr = ClusteredLockManager
                        .getInstance();

                int hits = 0;
                for (int i = 0; i < 1000; i++) {
                    boolean gotLock = lockMgr.getLock(new File("/tmp/foo"),
                            true);
                    Thread.sleep(10);
                    if (gotLock) {
                        lockMgr.releaseLock(new File("/tmp/foo"));
                        hits++;
                    }
                    Thread.sleep(10);

                }

                System.out.println("Thread #" + id + ":: " + (hits) / 1000.0);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

    }
}
@@ -37,7 +37,8 @@ import com.raytheon.uf.common.serialization.ISerializableObject;
 * Feb 9, 2007              chammack    Initial Creation.
 * Apr 1, 2008              chammack    Added delete API
 * Aug 3, 2009              chammack    Modified to support Request
 * Sep 27, 2010  5091       njensen     Added deleteFiles(String)
+* Feb 12, 2013  #1608      randerso    Added explicit methods for deleting groups and datasets
 *
 * </pre>
 *
@@ -88,18 +89,30 @@ public interface IDataStore extends ISerializableObject {
    public abstract StorageStatus store() throws StorageException;

    /**
-     * Delete a (set of) location(s), where a location is either a group or a
-     * dataset. If all datasets have been deleted from a file, the file will be
-     * deleted also.
+     * Delete one or more datasets. If all datasets have been deleted from a
+     * file, the file will be deleted also.
     *
-     * @param location
-     *            the full path to the group or dataset
+     * @param datasets
+     *            the full path to the dataset(s) to be deleted
     * @throws StorageException
     *             if deletion fails
     * @throws FileNotFoundException
     */
-    public abstract void delete(String... location) throws StorageException,
-            FileNotFoundException;
+    public abstract void deleteDatasets(String... datasets)
+            throws StorageException, FileNotFoundException;
+
+    /**
+     * Delete one or more groups and all subgroups/datasets they contain. If all
+     * datasets have been deleted from a file, the file will be deleted also.
+     *
+     * @param groups
+     *            the full path to the group(s) to be deleted
+     * @throws StorageException
+     *             if deletion fails
+     * @throws FileNotFoundException
+     */
+    public abstract void deleteGroups(String... groups)
+            throws StorageException, FileNotFoundException;

    /**
     * Store all data records to a given data group, or replace it the group
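The old delete(String...) accepted a "location" that could name either a group or a dataset, which left the delete semantics ambiguous; the split above makes the caller state which kind of node it means. In h5py terms the distinction looks like this (a minimal sketch; the file name and paths are illustrative, not from the commit):

import h5py

# Illustrative layout: one group containing one dataset.
with h5py.File('/tmp/delete_demo.h5', 'w') as f:
    f.create_dataset('/topo/full', data=[1.0, 2.0, 3.0])

    print(isinstance(f['/topo/full'], h5py.Dataset))  # True: deleteDatasets territory
    print(isinstance(f['/topo'], h5py.Group))         # True: deleteGroups territory

    del f['/topo/full']   # unlink just the dataset; the /topo group remains
    del f['/topo']        # unlink the group; everything beneath it goes too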
@@ -72,6 +72,8 @@ import com.raytheon.uf.common.util.FileUtil;
 * Oct 01, 2010             rjpeter     Added logging of requests over 300ms
 * Mon 07, 2013 DR 15294    D. Friedman Stream large requests
+* Feb 11, 2013  1526       njensen     use HttpClient.postDynamicSerialize() for memory efficiency
+* Feb 12, 2013  #1608      randerso    Added explicit deletes for groups and datasets
 *
 * </pre>
 *
 * @author njensen
@@ -149,13 +151,29 @@ public class PyPiesDataStore implements IDataStore {
     * (non-Javadoc)
     *
     * @see
-     * com.raytheon.uf.common.datastorage.IDataStore#delete(java.lang.String[])
+     * com.raytheon.uf.common.datastorage.IDataStore#deleteDatasets(java.lang
+     * .String[])
     */
    @Override
-    public void delete(final String... location) throws StorageException,
-            FileNotFoundException {
+    public void deleteDatasets(final String... datasets)
+            throws StorageException, FileNotFoundException {
        DeleteRequest delete = new DeleteRequest();
-        delete.setLocations(location);
+        delete.setDatasets(datasets);
        sendRequest(delete);
    }

+    /*
+     * (non-Javadoc)
+     *
+     * @see
+     * com.raytheon.uf.common.datastorage.IDataStore#deleteGroups(java.lang.
+     * String[])
+     */
+    @Override
+    public void deleteGroups(final String... groups) throws StorageException,
+            FileNotFoundException {
+        DeleteRequest delete = new DeleteRequest();
+        delete.setGroups(groups);
+        sendRequest(delete);
+    }

@@ -31,6 +31,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Aug 10, 2010            njensen     Initial creation
+* Feb 12, 2013  #1608      randerso    Added support for explicitly deleting groups and datasets
 *
 * </pre>
 *
@@ -42,14 +43,25 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
public class DeleteRequest extends AbstractRequest {

    @DynamicSerializeElement
-    private String[] locations;
+    private String[] datasets;

-    public String[] getLocations() {
-        return locations;
+    @DynamicSerializeElement
+    private String[] groups;
+
+    public String[] getDatasets() {
+        return datasets;
    }

-    public void setLocations(String[] locations) {
-        this.locations = locations;
+    public void setDatasets(String[] datasets) {
+        this.datasets = datasets;
    }

+    public String[] getGroups() {
+        return groups;
+    }
+
+    public void setGroups(String[] groups) {
+        this.groups = groups;
+    }

}
@@ -7,7 +7,6 @@ Bundle-Vendor: RAYTHEON
Eclipse-BuddyPolicy: registered, ext, global
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Require-Bundle: com.raytheon.uf.common.datastorage;bundle-version="1.11.17",
- com.raytheon.uf.common.datastorage.hdf5;bundle-version="1.11.17",
 com.raytheon.uf.common.serialization;bundle-version="1.11.17",
 com.raytheon.uf.common.geospatial;bundle-version="1.11.17",
 org.geotools;bundle-version="2.5.8"
@@ -35,9 +35,9 @@ import java.util.List;
import java.util.Map;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
+import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageProperties;
import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
-import com.raytheon.uf.common.datastorage.hdf5.HDF5DataStore;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
import com.raytheon.uf.common.datastorage.records.IntegerDataRecord;
@@ -253,8 +253,7 @@ public class TopoImporter {
            System.exit(-1);
        }

-        HDF5DataStore store = (HDF5DataStore) DataStoreFactory
-                .getDataStore(hdf);
+        IDataStore store = DataStoreFactory.getDataStore(hdf);

        String dataset = "full";
        long[] sizes = new long[] { maxCols, maxRows };
@@ -41,10 +41,10 @@ import javax.media.jai.RasterFactory;
import javax.media.jai.TiledImage;

import com.raytheon.uf.common.datastorage.DataStoreFactory;
+import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageProperties;
import com.raytheon.uf.common.datastorage.StorageProperties.Compression;
-import com.raytheon.uf.common.datastorage.hdf5.HDF5DataStore;
import com.raytheon.uf.common.datastorage.records.ByteDataRecord;
import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
import com.raytheon.uf.common.datastorage.records.IDataRecord;
@@ -59,7 +59,9 @@ import com.raytheon.uf.common.datastorage.records.ShortDataRecord;
 * SOFTWARE HISTORY
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Oct 26, 2009            randerso    Initial creation
+* Feb 12, 2013  #1608      randerso    Removed explicit references to HDF5DataStore.
+*                                      Added explicit calls to deleteGroups
 *
 * </pre>
 *
@@ -72,7 +74,7 @@ public class TopoInterpolator {

    private static final String DEFAULT_TOPO_FILE = "/topo/srtm30.hdf";

-    private HDF5DataStore dataStore;
+    private IDataStore dataStore;

    public TopoInterpolator() {
        this(new File(DEFAULT_TOPO_FILE));
@@ -84,7 +86,7 @@ public class TopoInterpolator {
     * @param file
     */
    public TopoInterpolator(File hdf) {
-        dataStore = (HDF5DataStore) DataStoreFactory.getDataStore(hdf);
+        dataStore = DataStoreFactory.getDataStore(hdf);

    }

@@ -107,7 +109,7 @@ public class TopoInterpolator {
        // remove existing interpolated datasets
        if (Arrays.asList(dataStore.getDatasets(srcGroup)).contains(
                "interpolated")) {
-            dataStore.delete(dstGroup);
+            dataStore.deleteGroups(dstGroup);
        }

        StorageProperties properties = new StorageProperties();
@@ -59,13 +59,6 @@
         version="0.0.0"
         unpack="false"/>

-   <plugin
-         id="com.raytheon.uf.common.datastorage.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"
-         unpack="false"/>
-
   <plugin
         id="com.raytheon.uf.common.datastorage"
         download-size="0"
@@ -65,12 +65,6 @@
         install-size="0"
         version="0.0.0"/>

-   <plugin
-         id="ncsa.hdf5"
-         download-size="0"
-         install-size="0"
-         version="0.0.0"/>
-
   <plugin
         id="net.sf.ehcache"
         download-size="0"
@@ -75,7 +75,6 @@ import com.raytheon.uf.common.spatial.reprojection.ReferencedDataRecord;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.common.util.FileUtil;
import com.raytheon.uf.edex.core.EdexException;
-import com.raytheon.uf.edex.core.props.PropertiesFactory;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
@@ -105,6 +104,8 @@ import com.vividsolutions.jts.geom.Polygon;
 * Oct 10, 2012 1261       djohnson    Add some generics wildcarding.
 * Jan 14, 2013 1469       bkowal      No longer retrieves the hdf5 data directory
 *                                     from the environment.
+* Feb 12, 2013 #1608      randerso    Changed to call deleteDatasets
 *
 * </pre>
 *
 * @author bphillip
@@ -884,7 +885,8 @@ public abstract class PluginDao extends CoreDao {
                        if (uris == null) {
                            ds.deleteFiles(null);
                        } else {
-                            ds.delete(uris.toArray(new String[uris.size()]));
+                            ds.deleteDatasets(uris.toArray(new String[uris
+                                    .size()]));
                        }
                    } catch (Exception e) {
                        PurgeLogger.logError("Error occurred purging file: "
@@ -906,7 +908,7 @@ public abstract class PluginDao extends CoreDao {
                if (uris == null) {
                    ds.deleteFiles(null);
                } else {
-                    ds.delete(uris.toArray(new String[uris.size()]));
+                    ds.deleteDatasets(uris.toArray(new String[uris.size()]));
                }
            } catch (Exception e) {
                PurgeLogger.logError("Error occurred purging file: "
@@ -84,6 +84,7 @@ import com.vividsolutions.jts.geom.Coordinate;
 * 04/18/2012   DR 14694   D. Friedman Fixes for static topography generation
 * 05/09/2012   DR 14939   D. Friedman Fix errors in DR 14694
 * 01/14/2013   1469       bkowal      Removed the hdf5 data directory
+* 02/12/2013   #1608      randerso    Changed to call deleteDatasets
 *
 * </pre>
 *
@@ -177,8 +178,10 @@ public class StaticTopoData {
        try {
            if (!topoFileExists()) {
                // TODO: This will fail in a clustered server environment
-                // since
-                // static topo isn't installed to dx3/4
+                // since static topo isn't installed to dx3/4
+                // UPDATE: this doesn't even work in a standalone
+                // environment now because it can't find the gzipped source
+                // files since FILE_PREFIX was changed
                statusHandler.handle(Priority.INFO,
                        "Static Topo file not found. Creating it...");

@@ -344,9 +347,7 @@ public class StaticTopoData {
        sTopoDataStore.addDataRecord(attributeSet, sp);
        sTopoDataStore.addDataRecord(westRecord, sp);
        sTopoDataStore.store();
-        sTopoDataStore.delete("pac");
-        sTopoDataStore.delete("attrpac");
-
+        sTopoDataStore.deleteDatasets("pac", "attrpac");
    }

    /**
@@ -573,12 +574,13 @@ public class StaticTopoData {

        for (int i = 0; i < finalData.length; i++) {
            float v = finalData[i];
-            if (Float.isNaN(v))
+            if (Float.isNaN(v)) {
                finalData[i] = TOPO_FILL;
-            else if (v == DATA_FILL || (v > -0.5 && v < 0.5))
+            } else if (v == DATA_FILL || (v > -0.5 && v < 0.5)) {
                finalData[i] = 0.0f;
-            else
+            } else {
                finalData[i] = v;
+            }
        }
        return finalData;

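The loop above replaces NaN cells with the topo fill value and flattens sentinel or near-sea-level cells to zero. A vectorized NumPy sketch of the same logic; TOPO_FILL and DATA_FILL below are hypothetical stand-ins, since the actual constants are not shown in this diff:

import numpy as np

TOPO_FILL = -9999.0    # stand-in for StaticTopoData's TOPO_FILL
DATA_FILL = -32767.0   # stand-in for StaticTopoData's DATA_FILL

def fill_topo(final_data):
    out = np.asarray(final_data, dtype=np.float32).copy()
    out[np.isnan(out)] = TOPO_FILL                        # NaN -> topo fill value
    out[(out == DATA_FILL) | (np.abs(out) < 0.5)] = 0.0   # sentinel/near-zero -> 0
    return out

Replacing NaN first keeps the second mask well defined, and np.abs(out) < 0.5 is the same test as (v > -0.5 && v < 0.5) in the Java loop.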
@@ -14,7 +14,6 @@ Require-Bundle: com.raytheon.edex.uengine,
 com.raytheon.uf.edex.plugin.grid,
 com.raytheon.uf.common.dataplugin.grid,
 com.raytheon.uf.common.parameter,
- com.raytheon.uf.common.datastorage.hdf5;bundle-version="1.11.22",
 gov.noaa.nws.ncep.edex.common;bundle-version="1.0.0",
 gov.noaa.nws.ncep.common.dataplugin.mcidas;bundle-version="1.0.0",
 gov.noaa.nws.ncep.common.dataplugin.ncgrib;bundle-version="1.0.0",
@@ -51,8 +51,6 @@ Import-Package: com.raytheon.edex.meteoLib,
 gov.noaa.nws.ncep.viz.ui.display,
 javax.measure.converter,
 javax.measure.unit,
- ncsa.hdf.hdf5lib,
- ncsa.hdf.hdf5lib.exceptions,
 org.eclipse.swt.events,
 org.eclipse.swt.graphics,
 org.eclipse.swt.widgets,
@@ -21,8 +21,7 @@ Require-Bundle: org.eclipse.ui,
 gov.noaa.nws.ncep.edex.common;bundle-version="1.0.0"
Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
-Import-Package: com.raytheon.uf.common.datastorage.hdf5,
- com.raytheon.uf.common.pypies,
+Import-Package: com.raytheon.uf.common.pypies,
 com.raytheon.viz.core.gl.dataformat,
 com.raytheon.viz.satellite.rsc,
 gov.noaa.nws.ncep.common.dataplugin.mcidas,
@@ -23,14 +23,21 @@
class DeleteRequest(object):

    def __init__(self):
-        self.locations = None
+        self.datasets = None
+        self.groups = None
        self.filename = None

-    def getLocations(self):
-        return self.locations
+    def getDatasets(self):
+        return self.datasets

-    def setLocations(self, locations):
-        self.locations = locations
+    def setDatasets(self, datasets):
+        self.datasets = datasets

+    def getGroups(self):
+        return self.groups
+
+    def setGroups(self, groups):
+        self.groups = groups

    def getFilename(self):
        return self.filename
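A hypothetical construction of the request, assuming the DeleteRequest class above is importable; the file path matches DEFAULT_TOPO_FILE elsewhere in this commit, but the dataset and group names are purely illustrative:

req = DeleteRequest()
req.setFilename('/topo/srtm30.hdf')
req.setDatasets(['/full/interpolated'])   # unlink individual datasets
req.setGroups(['/obsolete'])              # unlink whole subtrees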
@@ -31,7 +31,7 @@
#    05/03/11        9134          njensen       Optimized for pointdata
#    10/09/12                      rjpeter       Optimized __getGroup for retrievals
#    01/17/13        DR 15294      D. Friedman   Clear out data in response
-#
+#    02/12/13        #1608         randerso      Added support for explicitly deleting groups and datasets
#
#
@@ -324,18 +324,32 @@ class H5pyDataStore(IDataStore.IDataStore):
        deleteFile = False

        try:
-            locs = request.getLocations()
-            for dataset in locs:
-                ds = None
-
-                try :
-                    ds = self.__getNode(f, None, dataset)
-                except Exception, e:
-                    logger.warn('Unable to find uri [' + str(dataset) + '] in file [' + str(fn) + '] to delete: ' + IDataStore._exc())
-
-                if ds:
-                    grp = ds.parent
-                    grp.id.unlink(ds.name)
+            rootNode=f['/']
+            datasets = request.getDatasets()
+            if datasets is not None:
+                for dataset in datasets:
+                    ds = None
+                    try :
+                        ds = self.__getNode(f, None, dataset)
+                    except Exception, e:
+                        logger.warn('Unable to find uri [' + str(dataset) + '] in file [' + str(fn) + '] to delete: ' + IDataStore._exc())
+
+                    if ds:
+                        parent = ds.parent
+                        parent.id.unlink(ds.name)
+
+            groups = request.getGroups()
+            if groups is not None:
+                for group in groups:
+                    gp = None
+                    try :
+                        gp = self.__getNode(f, group)
+                    except Exception, e:
+                        logger.warn('Unable to find uri [' + str(group) + '] in file [' + str(fn) + '] to delete: ' + IDataStore._exc())
+
+                    if gp:
+                        parent = gp.parent
+                        parent.id.unlink(gp.name)

        finally:
            # check if file has any remaining data sets
@@ -353,6 +367,7 @@ class H5pyDataStore(IDataStore.IDataStore):
            timeMap['closeFile']=t1-t0

        if deleteFile:
+            logger.info('Removing empty file ['+ str(fn) + ']')
            try:
                os.remove(fn)
            except Exception, e:
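Condensed, the new delete flow amounts to: unlink each requested dataset, unlink each requested group (its children go with it), then remove the file if nothing is left. A minimal h5py sketch under those assumptions; the real method above resolves nodes via self.__getNode and logs lookup failures instead of raising, and the function name here is hypothetical:

import os
import h5py

def delete_nodes(fn, datasets=(), groups=()):
    with h5py.File(fn, 'a') as f:
        for path in list(datasets) + list(groups):
            node = f.get(path)          # None when the path does not exist
            if node is not None:
                del f[path]             # unlink; equivalent to parent.id.unlink(...) above
        delete_file = len(f['/']) == 0  # any top-level nodes left?
    if delete_file:
        os.remove(fn)                   # mirrors the empty-file cleanup above

Unlinking is why a group delete needs no recursion: once the group node is unlinked from its parent, every dataset and subgroup beneath it becomes unreachable, and HDF5 reclaims the space subject to its usual file-space accounting.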