From 25b219c49f1f5415a8624c244ebae42115334baf Mon Sep 17 00:00:00 2001
From: Nate Jensen
Date: Wed, 9 Oct 2013 18:03:11 -0500
Subject: [PATCH] Issue #2361 remove ISerializableObject for some pointdata and user roles classes

Change-Id: I8a4a53dd1139a376aaa001bb7fef38444d8db005

Former-commit-id: 0cff122d9c5a7a1f5b25c5a5b2b26f31628992db [formerly 3fa7b3de6733842c240959030023e84c844ef760]
Former-commit-id: d635901828d30edbde22181a3a23ddcba4cc9ac8
---
 .../plugin/bufrmos/MOSPointDataState.java | 7 +-
 .../goessounding/dao/GOESSoundingDAO.java | 9 +-
 .../LdadprofilerPointDataTransform.java | 159 +-
 .../com/raytheon/edex/plugin/obs/ObsDao.java | 3 +-
 .../poessounding/dao/POESSoundingDAO.java | 5 +-
 .../sfcobs/SfcObsPointDataTransform.java | 3 +-
 .../dataplugin/qc/internal/QCPaths.java | 5 +-
 ...f.common.serialization.ISerializableObject | 5 -
 .../plugin/nwsauth/xml/NwsRoleData.java | 55 +-
 .../plugin/nwsauth/xml/PermissionXML.java | 3 +-
 .../uf/common/plugin/nwsauth/xml/RoleXML.java | 3 +-
 .../uf/common/plugin/nwsauth/xml/UserXML.java | 3 +-
 ...f.common.serialization.ISerializableObject | 8 -
 .../uf/common/pointdata/Dimension.java | 4 +-
 .../pointdata/ParameterDescription.java | 3 +-
 .../common/pointdata/PointDataContainer.java | 70 +-
 .../pointdata/PointDataDescription.java | 42 +-
 .../pointdata/PointDataSerializable.java | 60 -
 .../elements/AbstractPointDataObject.java | 11 +-
 .../elements/FloatPointDataObject.java | 9 +-
 .../elements/IntPointDataObject.java | 10 +-
 .../elements/LongPointDataObject.java | 10 +-
 .../elements/StringPointDataObject.java | 10 +-
 ...f.common.serialization.ISerializableObject | 2 -
 .../edex/plugin/bufrsigwx/SigWxDataDao.java | 5 +-
 .../uf/edex/plugin/fssobs/FSSObsDAO.java | 38 +-
 .../uf/edex/plugin/madis/MadisDao.java | 28 +-
 .../edex/plugin/nwsauth/NwsRoleStorage.java | 38 +-
 .../uf/edex/pointdata/PointDataPluginDao.java | 21 +-
 .../common/dataplugin/airep/dao/AirepDao.java | 57 +-
 .../gpd/dao/GenericPointDataDao.java | 2359 +++++++++--------
 .../dataplugin/ncpafm/dao/NcPafmDao.java | 48 +-
 .../common/dataplugin/ncscd/dao/NcScdDao.java | 17 +-
 .../common/dataplugin/nctaf/dao/NcTafDao.java | 51 +-
 .../dataplugin/ncuair/dao/NcUairDao.java | 209 +-
 .../common/dataplugin/pirep/dao/PirepDao.java | 68 +-
 .../common/dataplugin/sgwh/dao/SgwhDao.java | 34 +-
 .../common/dataplugin/sgwhv/dao/SgwhvDao.java | 30 +-
 .../common/dataplugin/ssha/dao/SshaDao.java | 36 +-
 39 files changed, 1711 insertions(+), 1827 deletions(-)
 delete mode 100644 edexOsgi/com.raytheon.uf.common.plugin.nwsauth/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
 delete mode 100644 edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataSerializable.java
 delete mode 100644 edexOsgi/com.raytheon.uf.common.site/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject

diff --git a/edexOsgi/com.raytheon.edex.plugin.bufrmos/src/com/raytheon/edex/plugin/bufrmos/MOSPointDataState.java b/edexOsgi/com.raytheon.edex.plugin.bufrmos/src/com/raytheon/edex/plugin/bufrmos/MOSPointDataState.java
index 436f82afa0..75bece537e 100644
--- a/edexOsgi/com.raytheon.edex.plugin.bufrmos/src/com/raytheon/edex/plugin/bufrmos/MOSPointDataState.java
+++ b/edexOsgi/com.raytheon.edex.plugin.bufrmos/src/com/raytheon/edex/plugin/bufrmos/MOSPointDataState.java
@@ -22,13 +22,12 @@ package com.raytheon.edex.plugin.bufrmos;
 import java.util.HashMap;
 import java.util.Map;
-import javax.xml.bind.JAXBException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import com.raytheon.uf.common.pointdata.PointDataContainer;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 /**
  * TODO Add Description
@@ -68,7 +67,7 @@ public class MOSPointDataState {
             if (container != null) {
                 pointData.put(type, container);
             }
-        } catch (JAXBException e) {
+        } catch (SerializationException e) {
             logger.error("Could not create PointDataContainer for " + type
                     + " model soundings", e);
         }
@@ -78,7 +77,7 @@
     }
     public static synchronized PointDataDescription getDescription(String type)
-            throws JAXBException {
+            throws SerializationException {
         PointDataDescription pdd = descriptions.get(type);
         if (pdd == null) {
             String strmPath = "/res/pointdata/bufrmos" + type + ".xml";
diff --git a/edexOsgi/com.raytheon.edex.plugin.goessounding/src/com/raytheon/edex/plugin/goessounding/dao/GOESSoundingDAO.java b/edexOsgi/com.raytheon.edex.plugin.goessounding/src/com/raytheon/edex/plugin/goessounding/dao/GOESSoundingDAO.java
index 6b84e02fb5..ae2b1538a7 100644
--- a/edexOsgi/com.raytheon.edex.plugin.goessounding/src/com/raytheon/edex/plugin/goessounding/dao/GOESSoundingDAO.java
+++ b/edexOsgi/com.raytheon.edex.plugin.goessounding/src/com/raytheon/edex/plugin/goessounding/dao/GOESSoundingDAO.java
@@ -22,11 +22,10 @@ package com.raytheon.edex.plugin.goessounding.dao;
 import java.util.List;
 import java.util.Map;
-import javax.xml.bind.JAXBException;
-
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.goessounding.GOESSounding;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 import com.raytheon.uf.edex.wmo.message.WMOHeader;
@@ -134,7 +133,7 @@ public class GOESSoundingDAO extends PointDataPluginDao {
     public GOESSounding newObject() {
         return new GOESSounding();
     }
-
+
     @Override
     public PointDataDescription getPointDataDescription(Map obj) {
         if (hdf5DataDescription == null) {
@@ -142,12 +141,12 @@ public class GOESSoundingDAO extends PointDataPluginDao {
                 hdf5DataDescription = PointDataDescription.fromStream(this
                         .getClass().getResourceAsStream(
                                 "/res/pointdata/goes.xml"));
-            } catch (JAXBException e) {
+            } catch (SerializationException e) {
                 logger.error("Unable to load " + pluginName
                         + " Point Data Description", e);
             }
         }
         return hdf5DataDescription;
     }
-
+
 }
diff --git a/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/src/com/raytheon/edex/plugin/ldadprofiler/common/LdadprofilerPointDataTransform.java b/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/src/com/raytheon/edex/plugin/ldadprofiler/common/LdadprofilerPointDataTransform.java
index 3097037d63..e7eb44c1a3 100644
--- a/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/src/com/raytheon/edex/plugin/ldadprofiler/common/LdadprofilerPointDataTransform.java
+++ b/edexOsgi/com.raytheon.edex.plugin.ldadprofiler/src/com/raytheon/edex/plugin/ldadprofiler/common/LdadprofilerPointDataTransform.java
@@ -24,20 +24,20 @@ import java.io.InputStream;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import javax.xml.bind.JAXBException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import com.raytheon.edex.plugin.ldadprofiler.dao.LdadProfilerDao;
 import com.raytheon.uf.common.dataplugin.PluginDataObject;
-import com.raytheon.uf.common.pointdata.ParameterDescription;
 import com.raytheon.uf.common.pointdata.PointDataContainer;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
 import com.raytheon.uf.common.pointdata.PointDataView;
+import com.raytheon.uf.common.serialization.SerializationException;
 /**
- * Provides a transform from Ldadprofiler Records to PointDataContainer and vice versa.
+ * Provides a transform from Ldadprofiler Records to PointDataContainer and vice
+ * versa.
 *
 *
  * 
@@ -53,56 +53,55 @@ import com.raytheon.uf.common.pointdata.PointDataView;
  */
 
 public class LdadprofilerPointDataTransform {
-    
-//	private static final String BASE_TIME = "base_time";
-//	
-//	private static final String START_TIME_OFFSET = "start_time_offset";
-//    
-//    private static final String END_TIME_OFFSET = "end_time_offset";
-    
-    private static final String HEIGHTS_NUM = "nhts"; 
-	
+
+    // private static final String BASE_TIME = "base_time";
+    //
+    // private static final String START_TIME_OFFSET = "start_time_offset";
+    //
+    // private static final String END_TIME_OFFSET = "end_time_offset";
+
+    private static final String HEIGHTS_NUM = "nhts";
+
     private static final String LEVEL_HEIGHT = "levelHeight";
-    
+
     private static final String WIND_SPEED = "windSpeed";
-    
+
     private static final String WIND_DIR = "windDir";
-    
+
     private static final String UC_WIND = "ucWind";
-    
+
     private static final String VC_WIND = "vcWind";
-    
+
     private static final String WC_WIND = "wcWind";
-    
+
     private static final String U_CONF = "uconf";
-    
+
     private static final String V_CONF = "vconf";
-    
-    private static final String W_CONF ="wconf";
 
-//    private static final String REPORT_TYPE = "reportType";
-//
-//    private static final String TIME_OBS = "timeObs";
-//
-//    private static final String LONGITUDE = "longitude";
-//
-//    private static final String LATITUDE = "latitude";
-//
-//    private static final String STATION_NAME = "stationName";
-//
-//    private static final String DATAURI = "dataURI";
-//
-//    private static final String ELEVATION = "elevation";
+    private static final String W_CONF = "wconf";
+
+    // private static final String REPORT_TYPE = "reportType";
+    //
+    // private static final String TIME_OBS = "timeObs";
+    //
+    // private static final String LONGITUDE = "longitude";
+    //
+    // private static final String LATITUDE = "latitude";
+    //
+    // private static final String STATION_NAME = "stationName";
+    //
+    // private static final String DATAURI = "dataURI";
+    //
+    // private static final String ELEVATION = "elevation";
 
-	
     private LdadProfilerDao dao;
 
     private PointDataDescription pdd;
-    
+
     private Log logger = LogFactory.getLog(getClass());
-	
+
     public LdadprofilerPointDataTransform() {
-    	
+
         try {
             this.pdd = getDescription("ldadprofiler");
             this.dao = new LdadProfilerDao("ldadprofiler");
@@ -112,8 +111,8 @@ public class LdadprofilerPointDataTransform {
             e.printStackTrace();
         }
     }
-    
-	public PluginDataObject[] toPointData(PluginDataObject[] pdo) {
+
+    public PluginDataObject[] toPointData(PluginDataObject[] pdo) {
         if (pdo.length > 0) {
             Map pointMap = new HashMap();
 
@@ -126,7 +125,7 @@ public class LdadprofilerPointDataTransform {
                     pdc = PointDataContainer.build(this.pdd);
                     pointMap.put(f, pdc);
                 }
-                
+
                 ProfilerLdadObs ldadr = (ProfilerLdadObs) p;
                 PointDataView pdv = buildView(pdc, ldadr);
                 ldadr.setPointDataView(pdv);
@@ -134,60 +133,60 @@ public class LdadprofilerPointDataTransform {
         }
         return pdo;
     }
-	
+
     @SuppressWarnings("unchecked")
-	private PointDataView buildView(PointDataContainer container,
-			ProfilerLdadObs record) {
+    private PointDataView buildView(PointDataContainer container,
+            ProfilerLdadObs record) {
         PointDataView pdv = container.append();
-        
+
         pdv.setInt(HEIGHTS_NUM, record.getNhts());
         if (record.getLevels() != null) {
             Iterator lvlIterator = record.getLevels().iterator();
             int i = 0;
             while (lvlIterator.hasNext()) {
-            	ProfilerLdadLevel lvl = (ProfilerLdadLevel) lvlIterator.next();
-				if (lvl.getLevelHeight() != null) {
-					pdv.setInt(LEVEL_HEIGHT, lvl.getLevelHeight(), i);
-				}
-                if (lvl.getWindSpeed()!= null) {
-					pdv.setFloat(WIND_SPEED, lvl.getWindSpeed().floatValue(), i);
-				}
-				if (lvl.getWindDir() != null) {
-					pdv.setFloat(WIND_DIR, lvl.getWindDir().floatValue(), i);
-				}
-				if (lvl.getUcWind() != null) {
-					pdv.setFloat(UC_WIND, lvl.getUcWind().floatValue(), i);
-				}
-				if (lvl.getVcWind() != null) {
-					pdv.setFloat(VC_WIND, lvl.getVcWind().floatValue(), i);
-				}
-				if (lvl.getWcWind() != null) {
-					pdv.setFloat(WC_WIND, lvl.getWcWind().floatValue(), i);
-				}
-				if (lvl.getUconf() != null) {
-					pdv.setFloat(U_CONF, lvl.getUconf().floatValue(), i);
-				}
-				if (lvl.getVconf() != null) {
-					pdv.setFloat(V_CONF, lvl.getVconf().floatValue(), i);
-				}
-				if (lvl.getWconf() != null) {
-					pdv.setFloat(W_CONF, lvl.getWconf().floatValue(), i);
-				}
-				i++;
+                ProfilerLdadLevel lvl = (ProfilerLdadLevel) lvlIterator.next();
+                if (lvl.getLevelHeight() != null) {
+                    pdv.setInt(LEVEL_HEIGHT, lvl.getLevelHeight(), i);
+                }
+                if (lvl.getWindSpeed() != null) {
+                    pdv.setFloat(WIND_SPEED, lvl.getWindSpeed().floatValue(), i);
+                }
+                if (lvl.getWindDir() != null) {
+                    pdv.setFloat(WIND_DIR, lvl.getWindDir().floatValue(), i);
+                }
+                if (lvl.getUcWind() != null) {
+                    pdv.setFloat(UC_WIND, lvl.getUcWind().floatValue(), i);
+                }
+                if (lvl.getVcWind() != null) {
+                    pdv.setFloat(VC_WIND, lvl.getVcWind().floatValue(), i);
+                }
+                if (lvl.getWcWind() != null) {
+                    pdv.setFloat(WC_WIND, lvl.getWcWind().floatValue(), i);
+                }
+                if (lvl.getUconf() != null) {
+                    pdv.setFloat(U_CONF, lvl.getUconf().floatValue(), i);
+                }
+                if (lvl.getVconf() != null) {
+                    pdv.setFloat(V_CONF, lvl.getVconf().floatValue(), i);
+                }
+                if (lvl.getWconf() != null) {
+                    pdv.setFloat(W_CONF, lvl.getWconf().floatValue(), i);
+                }
+                i++;
             }
         }
-        
+
         return pdv;
-	}
+    }
 
-
-	private PointDataDescription getDescription(String type) throws JAXBException{
+    private PointDataDescription getDescription(String type)
+            throws SerializationException {
         InputStream is = this.getClass().getResourceAsStream(
                 "/res/pointdata/" + type + ".xml");
         if (is == null) {
             throw new RuntimeException("Cannot find descriptor for: " + type);
         }
         return PointDataDescription.fromStream(is);
-	}
-	
+    }
+
 }
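
The hunks above show the calling pattern this commit standardizes on: PointDataDescription.fromStream(...) now declares SerializationException rather than JAXBException, so every caller catches the serialization type. A minimal sketch of a caller under that contract follows; the class name, the "sampleplugin" resource name, and the null fallback are illustrative assumptions, not code from this commit.

import java.io.InputStream;

import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.serialization.SerializationException;

public class DescriptionLoaderSketch {

    /**
     * Loads a point data description from a classpath XML resource.
     * "sampleplugin" is a placeholder resource name.
     */
    public PointDataDescription loadDescription() {
        InputStream is = getClass().getResourceAsStream(
                "/res/pointdata/sampleplugin.xml");
        if (is == null) {
            throw new RuntimeException(
                    "Cannot find descriptor for: sampleplugin");
        }
        try {
            // Catch SerializationException, not javax.xml.bind.JAXBException.
            return PointDataDescription.fromStream(is);
        } catch (SerializationException e) {
            // The DAOs in this patch log the failure and leave the
            // description null; do whatever fits the calling context.
            return null;
        }
    }
}
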
diff --git a/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/ObsDao.java b/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/ObsDao.java
index 0341ab2df3..a03318e32b 100644
--- a/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/ObsDao.java
+++ b/edexOsgi/com.raytheon.edex.plugin.obs/src/com/raytheon/edex/plugin/obs/ObsDao.java
@@ -33,6 +33,7 @@ import com.raytheon.uf.common.dataquery.db.QueryParam;
 import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
 import com.raytheon.uf.common.pointdata.spatial.ObStation;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.database.query.DatabaseQuery;
 import com.raytheon.uf.edex.pointdata.PointDataDbDescription;
@@ -159,7 +160,7 @@ public class ObsDao extends PointDataPluginDao {
                 hdf5DataDescription = PointDataDescription.fromStream(this
                         .getClass().getResourceAsStream(
                                 "/res/pointdata/metar.xml"));
-            } catch (JAXBException e) {
+            } catch (SerializationException e) {
                 logger.error("Unable to load metar Point Data Description", e);
             }
         }
diff --git a/edexOsgi/com.raytheon.edex.plugin.poessounding/src/com/raytheon/edex/plugin/poessounding/dao/POESSoundingDAO.java b/edexOsgi/com.raytheon.edex.plugin.poessounding/src/com/raytheon/edex/plugin/poessounding/dao/POESSoundingDAO.java
index c5f0ae8c99..807bba823f 100644
--- a/edexOsgi/com.raytheon.edex.plugin.poessounding/src/com/raytheon/edex/plugin/poessounding/dao/POESSoundingDAO.java
+++ b/edexOsgi/com.raytheon.edex.plugin.poessounding/src/com/raytheon/edex/plugin/poessounding/dao/POESSoundingDAO.java
@@ -22,11 +22,10 @@ package com.raytheon.edex.plugin.poessounding.dao;
 import java.util.List;
 import java.util.Map;
 
-import javax.xml.bind.JAXBException;
-
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.poessounding.POESSounding;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 import com.raytheon.uf.edex.wmo.message.WMOHeader;
@@ -149,7 +148,7 @@ public class POESSoundingDAO extends PointDataPluginDao {
                 hdf5DataDescription = PointDataDescription.fromStream(this
                         .getClass().getResourceAsStream(
                                 "/res/pointdata/poes.xml"));
-            } catch (JAXBException e) {
+            } catch (SerializationException e) {
                 logger.error("Unable to load " + pluginName
                         + " Point Data Description", e);
             }
diff --git a/edexOsgi/com.raytheon.edex.plugin.sfcobs/src/com/raytheon/edex/plugin/sfcobs/SfcObsPointDataTransform.java b/edexOsgi/com.raytheon.edex.plugin.sfcobs/src/com/raytheon/edex/plugin/sfcobs/SfcObsPointDataTransform.java
index c49eaf0f7b..1dad2a934f 100644
--- a/edexOsgi/com.raytheon.edex.plugin.sfcobs/src/com/raytheon/edex/plugin/sfcobs/SfcObsPointDataTransform.java
+++ b/edexOsgi/com.raytheon.edex.plugin.sfcobs/src/com/raytheon/edex/plugin/sfcobs/SfcObsPointDataTransform.java
@@ -39,6 +39,7 @@ import com.raytheon.uf.common.pointdata.PointDataContainer;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
 import com.raytheon.uf.common.pointdata.PointDataView;
 import com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.decodertools.time.TimeTools;
 
 /**
@@ -494,7 +495,7 @@ public class SfcObsPointDataTransform {
-     * @throws JAXBException
+     * @throws SerializationException
      */
     private PointDataDescription getDescription(String type)
-            throws JAXBException {
+            throws SerializationException {
         InputStream is = this.getClass().getResourceAsStream(
                 "/res/pointdata/" + type + ".xml");
         if (is == null) {
diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/internal/QCPaths.java b/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/internal/QCPaths.java
index fb216dde44..91d3d85774 100644
--- a/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/internal/QCPaths.java
+++ b/edexOsgi/com.raytheon.uf.common.dataplugin.qc/src/com/raytheon/uf/common/dataplugin/qc/internal/QCPaths.java
@@ -33,14 +33,13 @@ import java.util.Map;
 import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
 
-import javax.xml.bind.JAXBException;
-
 import com.raytheon.uf.common.localization.IPathManager;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
 import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
 import com.raytheon.uf.common.python.PyUtil;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
@@ -140,7 +139,7 @@ public class QCPaths {
                 }
             }
             return pdds;
-        } catch (JAXBException e) {
+        } catch (SerializationException e) {
             throw new RuntimeException(
                     "Failed to initialize QcNetCDF PointDataDescriptions.  QC Python netCDF calls will Fail!");
         } catch (UnsupportedEncodingException e) {
diff --git a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
deleted file mode 100644
index dcdb98d253..0000000000
--- a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
+++ /dev/null
@@ -1,5 +0,0 @@
-com.raytheon.uf.common.plugin.nwsauth.user.UserId
-com.raytheon.uf.common.plugin.nwsauth.xml.NwsRoleData
-com.raytheon.uf.common.plugin.nwsauth.xml.PermissionXML
-com.raytheon.uf.common.plugin.nwsauth.xml.RoleXML
-com.raytheon.uf.common.plugin.nwsauth.xml.UserXML
diff --git a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/NwsRoleData.java b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/NwsRoleData.java
index 8194b50017..777a4b3763 100644
--- a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/NwsRoleData.java
+++ b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/NwsRoleData.java
@@ -14,14 +14,13 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElements;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
 @XmlRootElement(name = "nwsRoleData")
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public class NwsRoleData implements ISerializableObject {
+public class NwsRoleData {
     @DynamicSerializeElement
     @XmlElement(name = "application")
     private String application;
@@ -107,7 +106,7 @@ public class NwsRoleData implements ISerializableObject {
             perms.add(p.getId());
         }
         Collections.sort(perms);
-        
+
         return perms.toArray(new String[perms.size()]);
     }
 
@@ -117,7 +116,7 @@ public class NwsRoleData implements ISerializableObject {
             roles.add(r.getRoleId());
         }
         Collections.sort(roles);
-        
+
         return roles.toArray(new String[roles.size()]);
     }
 
@@ -127,7 +126,7 @@ public class NwsRoleData implements ISerializableObject {
             users.add(r.getUserId());
         }
         Collections.sort(users);
-        
+
         return users.toArray(new String[users.size()]);
     }
 
@@ -140,18 +139,18 @@ public class NwsRoleData implements ISerializableObject {
     }
 
     public void addRole(String role, String description) {
-        if (role != null && description != null && role.length() > 0 && description.length() > 0) {
+        if (role != null && description != null && role.length() > 0
+                && description.length() > 0) {
             RoleXML roleXml = new RoleXML();
             roleXml.setRoleDescription(description);
             roleXml.setRoleId(role);
             this.roleList.add(roleXml);
         }
     }
-    
+
     /**
-     * Add a permission.  This should only be used for 
-     * Localization permissions, which are directory access
-     * permissions.
+     * Add a permission. This should only be used for Localization permissions,
+     * which are directory access permissions.
      * 
      * @param permission
      */
@@ -159,14 +158,15 @@ public class NwsRoleData implements ISerializableObject {
         if (permission != null && permission.length() > 0) {
             PermissionXML pXml = new PermissionXML();
             pXml.setId(permission);
-            this.permissionList.add(pXml);            
+            this.permissionList.add(pXml);
         }
     }
 
     /**
      * Get the user's permissions
      * 
-     * @param userId id of the user
+     * @param userId
+     *            id of the user
      * @return String[] of permissions
      */
     public String[] getUserPermissions(String userId) {
@@ -181,10 +181,10 @@ public class NwsRoleData implements ISerializableObject {
             }
         }
         Collections.sort(userPermissions);
-        
+
         return userPermissions.toArray(new String[userPermissions.size()]);
     }
-    
+
     /**
      * Get an array of all defined permissions
      * 
@@ -192,13 +192,13 @@ public class NwsRoleData implements ISerializableObject {
      */
     public String[] getAllDefinedPermissions() {
         ArrayList permissions = new ArrayList();
-        for (PermissionXML p: this.permissionList) {
+        for (PermissionXML p : this.permissionList) {
             permissions.add(p.getId());
         }
-        
+
         return permissions.toArray(new String[permissions.size()]);
     }
-    
+
     public String[] getRolePermissions(String roleId) {
         ArrayList rolePermissions = new ArrayList();
 
@@ -211,7 +211,7 @@ public class NwsRoleData implements ISerializableObject {
             }
         }
         Collections.sort(rolePermissions);
-        
+
         return rolePermissions.toArray(new String[rolePermissions.size()]);
     }
 
@@ -227,7 +227,7 @@ public class NwsRoleData implements ISerializableObject {
             }
         }
         Collections.sort(userRoles);
-        
+
         return userRoles.toArray(new String[userRoles.size()]);
     }
 
@@ -237,7 +237,7 @@ public class NwsRoleData implements ISerializableObject {
             roleIdList.add(rx.getRoleId());
         }
         Collections.sort(roleIdList);
-        
+
         return roleIdList.toArray(new String[roleIdList.size()]);
     }
 
@@ -262,7 +262,7 @@ public class NwsRoleData implements ISerializableObject {
             for (RoleXML roleXml : roleList) {
                 for (String role : roles) {
                     if (roleXml.getRoleId().equals(role)) {
-                        for (String p: roleXml.getPermissionList()) {
+                        for (String p : roleXml.getPermissionList()) {
                             permSet.add(p);
                         }
                     }
@@ -286,20 +286,21 @@ public class NwsRoleData implements ISerializableObject {
      */
     public boolean isAuthorized(String permission, String user) {
         Set authorizedPermissions = this.getAuthorizedPermissions(user);
-        Set allAuthorizedPermissions = this.getAuthorizedPermissions("ALL");
-        
-        for (String perm: authorizedPermissions) {
+        Set allAuthorizedPermissions = this
+                .getAuthorizedPermissions("ALL");
+
+        for (String perm : authorizedPermissions) {
             if (perm.equalsIgnoreCase(permission)) {
                 return true;
             }
         }
-        
-        for (String perm: allAuthorizedPermissions) {
+
+        for (String perm : allAuthorizedPermissions) {
             if (perm.equalsIgnoreCase(permission)) {
                 return true;
             }
         }
-        
+
         return false;
     }
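
NwsRoleData keeps its role and permission lookup API after dropping ISerializableObject; only the serialization registration changes. The sketch below exercises the read side of that API, assuming an instance already populated elsewhere (for example, unmarshalled from the application's role XML); the class name, user id, and permission id are placeholders, not values from this commit.

import com.raytheon.uf.common.plugin.nwsauth.xml.NwsRoleData;

public final class RoleCheckSketch {

    /**
     * Returns true if the user holds the given permission, either directly or
     * via the special "ALL" user that isAuthorized() also consults.
     */
    public static boolean hasPermission(NwsRoleData roleData, String userId,
            String permissionId) {
        return roleData.isAuthorized(permissionId, userId);
    }

    /** Prints the permissions reported for the user. */
    public static void dumpPermissions(NwsRoleData roleData, String userId) {
        for (String perm : roleData.getUserPermissions(userId)) {
            System.out.println(userId + " -> " + perm);
        }
    }
}
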
 
diff --git a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/PermissionXML.java b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/PermissionXML.java
index 1966f492ea..d17eebe104 100644
--- a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/PermissionXML.java
+++ b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/PermissionXML.java
@@ -24,7 +24,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlElement;
 
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
@@ -47,7 +46,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  */
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public class PermissionXML implements ISerializableObject {
+public class PermissionXML {
 
     @DynamicSerializeElement
     @XmlAttribute(name = "id")
diff --git a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/RoleXML.java b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/RoleXML.java
index 5802aa8118..1f469ed718 100644
--- a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/RoleXML.java
+++ b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/RoleXML.java
@@ -28,7 +28,6 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElements;
 
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
@@ -50,7 +49,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  */
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public class RoleXML implements ISerializableObject {
+public class RoleXML {
     @DynamicSerializeElement
     @XmlAttribute(name = "roleId")
     private String roleId;
diff --git a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/UserXML.java b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/UserXML.java
index 61f9a5d19e..15827c47e0 100644
--- a/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/UserXML.java
+++ b/edexOsgi/com.raytheon.uf.common.plugin.nwsauth/src/com/raytheon/uf/common/plugin/nwsauth/xml/UserXML.java
@@ -34,7 +34,6 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
 import com.raytheon.uf.common.auth.user.IAuthenticationData;
 import com.raytheon.uf.common.auth.user.IUser;
 import com.raytheon.uf.common.auth.user.IUserId;
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
@@ -56,7 +55,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  */
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public class UserXML implements IUser, ISerializableObject {
+public class UserXML implements IUser {
     @DynamicSerializeElement
     @XmlAttribute(name = "userId")
     private String userId;
diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject b/edexOsgi/com.raytheon.uf.common.pointdata/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
index 9dad017dfe..02051a688a 100644
--- a/edexOsgi/com.raytheon.uf.common.pointdata/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
+++ b/edexOsgi/com.raytheon.uf.common.pointdata/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject
@@ -1,11 +1,3 @@
-com.raytheon.uf.common.pointdata.PointDataContainer
-com.raytheon.uf.common.pointdata.PointDataSerializable
-com.raytheon.uf.common.pointdata.ParameterDescription
-com.raytheon.uf.common.pointdata.Dimension
-com.raytheon.uf.common.pointdata.elements.FloatPointDataObject
-com.raytheon.uf.common.pointdata.elements.IntPointDataObject
-com.raytheon.uf.common.pointdata.elements.LongPointDataObject
-com.raytheon.uf.common.pointdata.elements.StringPointDataObject
 com.raytheon.uf.common.pointdata.spatial.ObStation
 com.raytheon.uf.common.pointdata.spatial.AircraftObsLocation
 com.raytheon.uf.common.pointdata.spatial.SurfaceObsLocation
\ No newline at end of file
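
This services file shrinks because the pointdata container and element classes no longer implement ISerializableObject; their wire format is now driven entirely by the DynamicSerialize annotations that remain on them. A hedged sketch of the resulting class shape follows; ExamplePointValues is a hypothetical class used only to illustrate the pattern the diffs above apply to NwsRoleData, ParameterDescription, and the pointdata element classes.

import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;

// Hypothetical class showing the post-cleanup shape: no
// "implements ISerializableObject", no META-INF/services registration, and
// no JAXB annotations when XML marshalling is not needed.
@DynamicSerialize
public class ExamplePointValues {

    @DynamicSerializeElement
    private float[] values;

    public float[] getValues() {
        return values;
    }

    public void setValues(float[] values) {
        this.values = values;
    }
}
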
diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/Dimension.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/Dimension.java
index a30246b22d..88d69cfa41 100644
--- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/Dimension.java
+++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/Dimension.java
@@ -23,8 +23,6 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlAttribute;
 
-import com.raytheon.uf.common.serialization.ISerializableObject;
-
 /**
  * Provides metadata (primarily the size) of a named dimension
  * 
@@ -41,7 +39,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject;
  * @version 1.0
  */
 @XmlAccessorType(XmlAccessType.NONE)
-public class Dimension implements ISerializableObject {
+public class Dimension {
 
     @XmlAttribute(name = "name")
     private String dimensionName;
diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/ParameterDescription.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/ParameterDescription.java
index 6be79c020f..431ef1ef32 100644
--- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/ParameterDescription.java
+++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/ParameterDescription.java
@@ -28,7 +28,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlAttribute;
 
 import com.raytheon.uf.common.pointdata.PointDataDescription.Type;
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
@@ -51,7 +50,7 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  */
 @XmlAccessorType(XmlAccessType.NONE)
 @DynamicSerialize
-public class ParameterDescription implements ISerializableObject {
+public class ParameterDescription {
 
     @XmlAttribute(name = "name", required = true)
     @DynamicSerializeElement
diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataContainer.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataContainer.java
index 179b0aa393..e32a5deb0f 100644
--- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataContainer.java
+++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataContainer.java
@@ -23,17 +23,8 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map.Entry;
 import java.util.Set;
 
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.adapters.XmlAdapter;
-import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
-
 import com.raytheon.uf.common.datastorage.records.FloatDataRecord;
 import com.raytheon.uf.common.datastorage.records.IDataRecord;
 import com.raytheon.uf.common.datastorage.records.IntegerDataRecord;
@@ -46,7 +37,6 @@ import com.raytheon.uf.common.pointdata.elements.FloatPointDataObject;
 import com.raytheon.uf.common.pointdata.elements.IntPointDataObject;
 import com.raytheon.uf.common.pointdata.elements.LongPointDataObject;
 import com.raytheon.uf.common.pointdata.elements.StringPointDataObject;
-import com.raytheon.uf.common.serialization.ISerializableObject;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerialize;
 import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
 
@@ -59,7 +49,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement;
  * SOFTWARE HISTORY
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
- * Apr 8, 2009            chammack     Initial creation
+ * Apr 8, 2009             chammack    Initial creation
+ * Oct 9, 2013  2361       njensen     Removed XML annotations
  * 
  * 
* @@ -67,23 +58,17 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * @version 1.0 */ @DynamicSerialize -@XmlRootElement -@XmlAccessorType(XmlAccessType.FIELD) -public class PointDataContainer implements ISerializableObject { +public class PointDataContainer { protected static final int DEFAULT_SZ = 2048; @DynamicSerializeElement - @XmlAttribute protected int currentSz; @DynamicSerializeElement - @XmlAttribute protected int allocatedSz; @DynamicSerializeElement - @XmlElement - @XmlJavaTypeAdapter(value = PointDataMarshaller.class) protected HashMap> pointDataTypes; public PointDataContainer() { @@ -426,55 +411,6 @@ public class PointDataContainer implements ISerializableObject { this.currentSz = currentSz; } - public static class PointDataMarshaller - extends - XmlAdapter>> { - - /* - * (non-Javadoc) - * - * @see - * javax.xml.bind.annotation.adapters.XmlAdapter#marshal(java.lang.Object - * ) - */ - @Override - public PointDataSerializable marshal( - HashMap> v) throws Exception { - PointDataSerializable serializable = new PointDataSerializable(); - PointDataSerializable.PointDataItemSerializable[] items = new PointDataSerializable.PointDataItemSerializable[v - .size()]; - int i = 0; - for (Entry> entry : v.entrySet()) { - items[i] = new PointDataSerializable.PointDataItemSerializable(); - items[i].key = entry.getKey(); - items[i].value = entry.getValue(); - i++; - } - serializable.items = items; - return serializable; - } - - /* - * (non-Javadoc) - * - * @see - * javax.xml.bind.annotation.adapters.XmlAdapter#unmarshal(java.lang - * .Object) - */ - @Override - public HashMap> unmarshal( - PointDataSerializable v) throws Exception { - HashMap> map = new HashMap>( - v.items.length); - for (PointDataSerializable.PointDataItemSerializable item : v.items) { - map.put(item.key, item.value); - } - - return map; - } - - } - protected AbstractPointDataObject getParamSafe(String parameter) { AbstractPointDataObject p = pointDataTypes.get(parameter); if (p == null) { diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataDescription.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataDescription.java index fee0f70b71..a43b1d45b1 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataDescription.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataDescription.java @@ -24,16 +24,15 @@ import java.io.InputStream; import java.util.HashMap; import java.util.Map; -import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import javax.xml.bind.Unmarshaller; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; -import com.raytheon.uf.common.serialization.ISerializableObject; +import com.raytheon.uf.common.serialization.SerializationException; +import com.raytheon.uf.common.serialization.SingleTypeJAXBManager; /** * A generic description for a type of point data @@ -46,7 +45,8 @@ import com.raytheon.uf.common.serialization.ISerializableObject; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Use JAXBManager for XML * * * @@ 
-56,7 +56,7 @@ import com.raytheon.uf.common.serialization.ISerializableObject; @XmlAccessorType(XmlAccessType.NONE) @XmlRootElement(name = "pointDataDescription") -public class PointDataDescription implements ISerializableObject { +public class PointDataDescription { private static final int DEFAULT_LEVELSIZE = 64; @@ -67,6 +67,9 @@ public class PointDataDescription implements ISerializableObject { public static final int FILL_VALUE_INT = -9999; + private static final SingleTypeJAXBManager jaxb = SingleTypeJAXBManager + .createWithoutException(PointDataDescription.class); + @XmlElement(name = "dimension") public Dimension[] dimensions; @@ -84,19 +87,10 @@ public class PointDataDescription implements ISerializableObject { * @return * @throws JAXBException */ - public static PointDataDescription fromFile(File file) throws JAXBException { - PointDataDescription pdd = null; - - JAXBContext ctx = JAXBContext.newInstance(PointDataDescription.class); - if (ctx != null) { - Unmarshaller um = ctx.createUnmarshaller(); - if (um != null) { - pdd = (PointDataDescription) um.unmarshal(file); - } - } - + public static PointDataDescription fromFile(File file) + throws SerializationException { + PointDataDescription pdd = jaxb.unmarshalFromXmlFile(file); pdd.resolveDimensions(); - return pdd; } @@ -107,17 +101,9 @@ public class PointDataDescription implements ISerializableObject { * @throws JAXBException */ public static PointDataDescription fromStream(InputStream is) - throws JAXBException { - PointDataDescription pdd = null; - - JAXBContext ctx = JAXBContext.newInstance(PointDataDescription.class); - if (ctx != null) { - Unmarshaller um = ctx.createUnmarshaller(); - if (um != null) { - pdd = (PointDataDescription) um.unmarshal(is); - } - } - + throws SerializationException { + PointDataDescription pdd = (PointDataDescription) jaxb + .unmarshalFromInputStream(is); pdd.resolveDimensions(); return pdd; diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataSerializable.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataSerializable.java deleted file mode 100644 index f5bfebc698..0000000000 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/PointDataSerializable.java +++ /dev/null @@ -1,60 +0,0 @@ -/** - * This software was developed and / or modified by Raytheon Company, - * pursuant to Contract DG133W-05-CQ-1067 with the US Government. - * - * U.S. EXPORT CONTROLLED TECHNICAL DATA - * This software product contains export-restricted data whose - * export/transfer/disclosure is restricted by U.S. law. Dissemination - * to non-U.S. persons whether in the United States or abroad requires - * an export license or other authorization. - * - * Contractor Name: Raytheon Company - * Contractor Address: 6825 Pine Street, Suite 340 - * Mail Stop B8 - * Omaha, NE 68106 - * 402.291.0100 - * - * See the AWIPS II Master Rights File ("Master Rights File.pdf") for - * further licensing information. - **/ -package com.raytheon.uf.common.pointdata; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlAttribute; -import javax.xml.bind.annotation.XmlElement; - -import com.raytheon.uf.common.pointdata.elements.AbstractPointDataObject; -import com.raytheon.uf.common.serialization.ISerializableObject; - -/** - * A serialization helper class for serializing pointdata maps - * - *
- * 
- * SOFTWARE HISTORY
- * Date         Ticket#    Engineer    Description
- * ------------ ---------- ----------- --------------------------
- * Apr 16, 2009            chammack     Initial creation
- * 
- * 
- * - * @author chammack - * @version 1.0 - */ -@XmlAccessorType(XmlAccessType.NONE) -public class PointDataSerializable implements ISerializableObject { - - @XmlElement(name = "item") - public PointDataItemSerializable[] items; - - public static class PointDataItemSerializable implements - ISerializableObject { - - @XmlAttribute - public String key; - - @XmlElement - public AbstractPointDataObject value; - } -} diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/AbstractPointDataObject.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/AbstractPointDataObject.java index a7bc160516..8809021aa6 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/AbstractPointDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/AbstractPointDataObject.java @@ -22,13 +22,9 @@ package com.raytheon.uf.common.pointdata.elements; import java.util.HashMap; import java.util.Map; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; - import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.pointdata.ParameterDescription; import com.raytheon.uf.common.pointdata.PointDataContainer; -import com.raytheon.uf.common.serialization.ISerializableObject; import com.raytheon.uf.common.serialization.annotations.DynamicSerialize; import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; @@ -40,16 +36,17 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Removed XML annotations * * * * @author chammack * @version 1.0 */ -@XmlAccessorType(XmlAccessType.NONE) + @DynamicSerialize -public abstract class AbstractPointDataObject implements ISerializableObject { +public abstract class AbstractPointDataObject { static final int STORAGE_CHUNK_SIZE = 1024; diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/FloatPointDataObject.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/FloatPointDataObject.java index 02f8742dfb..bffec6ea88 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/FloatPointDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/FloatPointDataObject.java @@ -19,10 +19,6 @@ **/ package com.raytheon.uf.common.pointdata.elements; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; - import com.raytheon.uf.common.datastorage.records.FloatDataRecord; import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.pointdata.ParameterDescription; @@ -38,7 +34,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Removed XML annotations * * * @@ -46,11 +43,9 @@ import 
com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * @version 1.0 */ @DynamicSerialize -@XmlAccessorType(XmlAccessType.NONE) public class FloatPointDataObject extends AbstractPointDataObject { @DynamicSerializeElement - @XmlElement protected float[] floatData; public FloatPointDataObject() { diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/IntPointDataObject.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/IntPointDataObject.java index a41601df68..28c933b923 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/IntPointDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/IntPointDataObject.java @@ -19,10 +19,6 @@ **/ package com.raytheon.uf.common.pointdata.elements; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; - import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.datastorage.records.IntegerDataRecord; import com.raytheon.uf.common.pointdata.ParameterDescription; @@ -38,7 +34,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Removed XML annotations * * * @@ -46,10 +43,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * @version 1.0 */ @DynamicSerialize -@XmlAccessorType(XmlAccessType.NONE) public class IntPointDataObject extends AbstractPointDataObject { + @DynamicSerializeElement - @XmlElement protected int[] intData; public IntPointDataObject() { diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/LongPointDataObject.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/LongPointDataObject.java index 7f95c05964..12d3871bc7 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/LongPointDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/LongPointDataObject.java @@ -19,10 +19,6 @@ **/ package com.raytheon.uf.common.pointdata.elements; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; - import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.datastorage.records.LongDataRecord; import com.raytheon.uf.common.pointdata.ParameterDescription; @@ -38,7 +34,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Removed XML annotations * * * @@ -46,10 +43,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * @version 1.0 */ @DynamicSerialize -@XmlAccessorType(XmlAccessType.NONE) public class LongPointDataObject extends AbstractPointDataObject { + @DynamicSerializeElement - @XmlElement protected long[] longData; public 
LongPointDataObject() { diff --git a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/StringPointDataObject.java b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/StringPointDataObject.java index a57cbdc427..8d504c97a3 100644 --- a/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/StringPointDataObject.java +++ b/edexOsgi/com.raytheon.uf.common.pointdata/src/com/raytheon/uf/common/pointdata/elements/StringPointDataObject.java @@ -19,10 +19,6 @@ **/ package com.raytheon.uf.common.pointdata.elements; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; - import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.datastorage.records.StringDataRecord; import com.raytheon.uf.common.pointdata.ParameterDescription; @@ -38,7 +34,8 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- - * Apr 8, 2009 chammack Initial creation + * Apr 8, 2009 chammack Initial creation + * Oct 9, 2013 2361 njensen Removed XML annotations * * * @@ -46,10 +43,9 @@ import com.raytheon.uf.common.serialization.annotations.DynamicSerializeElement; * @version 1.0 */ @DynamicSerialize -@XmlAccessorType(XmlAccessType.NONE) public class StringPointDataObject extends AbstractPointDataObject { + @DynamicSerializeElement - @XmlElement protected String[] stringData; public StringPointDataObject() { diff --git a/edexOsgi/com.raytheon.uf.common.site/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject b/edexOsgi/com.raytheon.uf.common.site/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject deleted file mode 100644 index ca55e31b8f..0000000000 --- a/edexOsgi/com.raytheon.uf.common.site/META-INF/services/com.raytheon.uf.common.serialization.ISerializableObject +++ /dev/null @@ -1,2 +0,0 @@ -com.raytheon.uf.common.site.xml.AdjacentWfoXML -com.raytheon.uf.common.site.xml.CwaXML \ No newline at end of file diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/src/com/raytheon/uf/edex/plugin/bufrsigwx/SigWxDataDao.java b/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/src/com/raytheon/uf/edex/plugin/bufrsigwx/SigWxDataDao.java index 16037672a8..badaeac074 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/src/com/raytheon/uf/edex/plugin/bufrsigwx/SigWxDataDao.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrsigwx/src/com/raytheon/uf/edex/plugin/bufrsigwx/SigWxDataDao.java @@ -22,13 +22,12 @@ package com.raytheon.uf.edex.plugin.bufrsigwx; import java.util.List; import java.util.Map; -import javax.xml.bind.JAXBException; - import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.dataplugin.bufrsigwx.SigWxData; import com.raytheon.uf.common.dataplugin.bufrsigwx.common.SigWxLayer; import com.raytheon.uf.common.dataplugin.bufrsigwx.common.SigWxType; import com.raytheon.uf.common.pointdata.PointDataDescription; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; @@ -167,7 +166,7 @@ public class SigWxDataDao extends PointDataPluginDao { try { return PointDataDescription.fromStream(this.getClass() .getResourceAsStream(pddFile)); - } 
catch (JAXBException e) { + } catch (SerializationException e) { logger.error("Unable to load " + pluginName + " Point Data Description for " + type + "," + layer, e); diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/src/com/raytheon/uf/edex/plugin/fssobs/FSSObsDAO.java b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/src/com/raytheon/uf/edex/plugin/fssobs/FSSObsDAO.java index 38a349eca9..5af406ab29 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/src/com/raytheon/uf/edex/plugin/fssobs/FSSObsDAO.java +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/src/com/raytheon/uf/edex/plugin/fssobs/FSSObsDAO.java @@ -1,6 +1,5 @@ package com.raytheon.uf.edex.plugin.fssobs; - import java.io.InputStream; import java.sql.SQLException; import java.util.ArrayList; @@ -16,19 +15,17 @@ import com.raytheon.uf.common.dataplugin.persist.IPersistable; import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.datastorage.records.IDataRecord; import com.raytheon.uf.common.pointdata.PointDataDescription; - +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.pointdata.PointDataDbDescription; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; - public class FSSObsDAO extends PointDataPluginDao { - - public FSSObsDAO(String pluginName) throws PluginException { - super(pluginName); - } - + public FSSObsDAO(String pluginName) throws PluginException { + super(pluginName); + } + public FSSObsDAO() throws PluginException, SQLException { this("fssobs"); } @@ -46,7 +43,7 @@ public class FSSObsDAO extends PointDataPluginDao { } return report; } - + public Object[] queryDataUriColumn(final String dataUri) { String sql = "select datauri from awips.fssobs where datauri='" @@ -56,7 +53,7 @@ public class FSSObsDAO extends PointDataPluginDao { return results; } - + @Override protected IDataStore populateDataStore(IDataStore dataStore, IPersistable obj) throws Exception { @@ -77,8 +74,8 @@ public class FSSObsDAO extends PointDataPluginDao { record = getDataStore((IPersistable) obj).retrieve( obj.getDataURI()); } catch (Exception e) { - throw new PluginException("Error retrieving FSSObs HDF5 data", - e); + throw new PluginException( + "Error retrieving FSSObs HDF5 data", e); } retVal.add(record); } @@ -96,12 +93,11 @@ public class FSSObsDAO extends PointDataPluginDao { return new FSSObsRecord(); } - @Override public String getPointDataFileName(FSSObsRecord p) { return "fssobs.h5"; } - + @Override public PointDataDescription getPointDataDescription(Map obj) { if (hdf5DataDescription == null) { @@ -109,15 +105,19 @@ public class FSSObsDAO extends PointDataPluginDao { hdf5DataDescription = PointDataDescription.fromStream(this .getClass().getResourceAsStream( "/res/pointdata/fssobs.xml")); - } catch (JAXBException e) { + } catch (SerializationException e) { logger.error("Unable to load fssobs Point Data Description", e); } } return hdf5DataDescription; } - - /* (non-Javadoc) - * @see com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataDbDescription() + + /* + * (non-Javadoc) + * + * @see + * com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataDbDescription + * () */ @Override public PointDataDbDescription getPointDataDbDescription() { @@ -136,5 +136,5 @@ public class FSSObsDAO extends PointDataPluginDao { } return dbDataDescription; } - + } diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.madis/src/com/raytheon/uf/edex/plugin/madis/MadisDao.java 
b/edexOsgi/com.raytheon.uf.edex.plugin.madis/src/com/raytheon/uf/edex/plugin/madis/MadisDao.java
index 64fa9338ae..8d50f6380b 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.madis/src/com/raytheon/uf/edex/plugin/madis/MadisDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.madis/src/com/raytheon/uf/edex/plugin/madis/MadisDao.java
@@ -35,18 +35,20 @@ import com.raytheon.uf.common.dataplugin.madis.MadisRecord;
 import com.raytheon.uf.common.dataquery.db.QueryParam;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
 import com.raytheon.uf.common.pointdata.spatial.ObStation;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.common.status.IUFStatusHandler;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.common.util.CollectionUtil;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
+import com.raytheon.uf.edex.database.plugin.PluginDao;
 import com.raytheon.uf.edex.database.query.DatabaseQuery;
 import com.raytheon.uf.edex.pointdata.PointDataDbDescription;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 import com.raytheon.uf.edex.pointdata.spatial.ObStationDao;
 
 /**
- * MadisDao MADIS data DAO 
+ * MadisDao MADIS data DAO
  * 
  * 
  * 
@@ -63,22 +65,22 @@ import com.raytheon.uf.edex.pointdata.spatial.ObStationDao;
  */
 
 public class MadisDao extends PointDataPluginDao {
-    
+
     /** The station dao */
     private ObStationDao obDao = new ObStationDao();
-    
+
     private static final IUFStatusHandler statusHandler = UFStatus
-    .getHandler(MadisDao.class);
-   
+            .getHandler(MadisDao.class);
+
     public List queryBySpatialBox(double upperLeftLat, double upperLeftLon,
             double lowerRightLat, double lowerRightLon)
             throws DataAccessLayerException {
 
         List stationList = obDao.queryBySpatialBox(upperLeftLat,
                 upperLeftLon, lowerRightLat, lowerRightLon);
-       
+
         List stationNames = new ArrayList();
-        for (ObStation ob: stationList) {
+        for (ObStation ob : stationList) {
             stationNames.add(ob.getIcao());
         }
 
@@ -108,8 +110,8 @@ public class MadisDao extends PointDataPluginDao {
      */
     public Object[] queryDataUriColumn(final String dataUri) {
 
-        String sql = "select datauri from awips.madis where datauri='" + dataUri
-                + "';";
+        String sql = "select datauri from awips.madis where datauri='"
+                + dataUri + "';";
 
         Object[] results = executeSQLQuery(sql);
 
@@ -129,10 +131,11 @@ public class MadisDao extends PointDataPluginDao {
             hdf5DataDescription = PointDataDescription.fromStream(this
                     .getClass().getResourceAsStream(
                             "/res/pointdata/" + pluginName + ".xml"));
-        } catch (JAXBException e) {
+        } catch (SerializationException e) {
             statusHandler.error("Unable to load madis Point Data Description",
                     e);
-            throw new PluginException("Unable to load madis Point Data Description!", e);
+            throw new PluginException(
+                    "Unable to load madis Point Data Description!", e);
         }
 
     }
@@ -142,7 +145,6 @@ public class MadisDao extends PointDataPluginDao {
         return hdf5DataDescription;
     }
 
-
     public ObStationDao getObDao() {
         return obDao;
     }
@@ -165,7 +167,7 @@ public class MadisDao extends PointDataPluginDao {
     public MadisRecord newObject() {
         return new MadisRecord();
     }
-    
+
     /*
      * (non-Javadoc)
      * 
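[Editor's note] The change repeated across the DAO classes above and below (SigWxDataDao, FSSObsDAO, MadisDao, AirepDao, GenericPointDataDao) is the same one: PointDataDescription.fromStream() now reports failures as SerializationException rather than JAXBException, so each DAO drops its javax.xml.bind.JAXBException import and adjusts the catch block around its lazily loaded description. A minimal sketch of that shared pattern follows, as a fragment of a PointDataPluginDao subclass rather than a complete class; the resource path, log message, and the Map<String, Object> signature are assumed for illustration and are not part of this commit.

    // Fragment: lazily load the plugin's point data description from a bundled
    // XML resource, catching SerializationException (previously JAXBException).
    private PointDataDescription hdf5DataDescription;

    @Override
    public PointDataDescription getPointDataDescription(Map<String, Object> obj) {
        if (hdf5DataDescription == null) {
            try {
                // fromStream() is now declared to throw SerializationException
                hdf5DataDescription = PointDataDescription.fromStream(this
                        .getClass().getResourceAsStream(
                                "/res/pointdata/example.xml"));
            } catch (SerializationException e) {
                // "logger" is the handler already available in the DAO
                logger.error("Unable to load example Point Data Description", e);
            }
        }
        return hdf5DataDescription;
    }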
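[Editor's note] The next file, NwsRoleStorage, stops calling SerializationUtil.jaxbUnmarshalFromXmlFile() directly and instead keeps a static SingleTypeJAXBManager bound to NwsRoleData, created once with createWithoutException() and reused for every roles file it reads. A hedged sketch of that usage follows; the SingleTypeJAXBManager and UFStatus calls are taken from the patch, while the file path is an illustrative placeholder.

    // One JAXB manager per bound type, built once and shared.
    private static final SingleTypeJAXBManager<NwsRoleData> jaxb = SingleTypeJAXBManager
            .createWithoutException(NwsRoleData.class);

    // Reading a roles file (placeholder path); failures are handled the same
    // way NwsRoleStorage does, by catching and logging through UFStatus.
    try {
        NwsRoleData roleData = jaxb.unmarshalFromXmlFile(
                "/path/to/roles/exampleRoles.xml");
    } catch (Exception e) {
        UFStatus.getHandler().handle(Priority.PROBLEM,
                "Error loading roles file", e);
    }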
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.nwsauth/src/com/raytheon/uf/edex/plugin/nwsauth/NwsRoleStorage.java b/edexOsgi/com.raytheon.uf.edex.plugin.nwsauth/src/com/raytheon/uf/edex/plugin/nwsauth/NwsRoleStorage.java
index 08988dd73e..567b498d41 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.nwsauth/src/com/raytheon/uf/edex/plugin/nwsauth/NwsRoleStorage.java
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.nwsauth/src/com/raytheon/uf/edex/plugin/nwsauth/NwsRoleStorage.java
@@ -13,7 +13,7 @@ import com.raytheon.uf.common.localization.LocalizationFile;
 import com.raytheon.uf.common.localization.PathManagerFactory;
 import com.raytheon.uf.common.plugin.nwsauth.exception.RoleApplicationNotFoundException;
 import com.raytheon.uf.common.plugin.nwsauth.xml.NwsRoleData;
-import com.raytheon.uf.common.serialization.SerializationUtil;
+import com.raytheon.uf.common.serialization.SingleTypeJAXBManager;
 import com.raytheon.uf.common.status.UFStatus;
 import com.raytheon.uf.common.status.UFStatus.Priority;
 import com.raytheon.uf.edex.auth.roles.IRoleStorage;
@@ -27,6 +27,7 @@ import com.raytheon.uf.edex.auth.roles.IRoleStorage;
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * May 25, 2010            rgeorge     Initial creation
+ * Oct 09, 2013 2361       njensen     Use JAXBManager for XML
  * 
  * 
* @@ -34,6 +35,10 @@ import com.raytheon.uf.edex.auth.roles.IRoleStorage; * @version 1.0 */ public class NwsRoleStorage implements IRoleStorage { + + private static final SingleTypeJAXBManager jaxb = SingleTypeJAXBManager + .createWithoutException(NwsRoleData.class); + private static NwsRoleStorage instance = null; private final Map lastUsedFileMap = new HashMap(); @@ -65,12 +70,15 @@ public class NwsRoleStorage implements IRoleStorage { // Check COMMON_STATIC base and site levels LocalizationContext[] contexts = new LocalizationContext[2]; - contexts[0] = pm.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.BASE); - contexts[1] = pm.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.SITE); + contexts[0] = pm.getContext(LocalizationType.COMMON_STATIC, + LocalizationLevel.BASE); + contexts[1] = pm.getContext(LocalizationType.COMMON_STATIC, + LocalizationLevel.SITE); String[] extensions = new String[] { ".xml" }; - LocalizationFile[] localizationFiles = - PathManagerFactory.getPathManager().listFiles(contexts, "roles", extensions, true, true); + LocalizationFile[] localizationFiles = PathManagerFactory + .getPathManager().listFiles(contexts, "roles", extensions, + true, true); File file = null; for (LocalizationFile locFile : localizationFiles) { @@ -79,20 +87,26 @@ public class NwsRoleStorage implements IRoleStorage { file = locFile.getFile(); if (lastUsedFileMap.get(locFile.getName()) == null - || (file != null && (file.equals(lastUsedFileMap.get(locFile.getName())) == false || file - .lastModified() > lastModificationTimeMap.get(locFile.getName())))) { + || (file != null && (file.equals(lastUsedFileMap + .get(locFile.getName())) == false || file + .lastModified() > lastModificationTimeMap + .get(locFile.getName())))) { // First time we found a role file, or we have a different // file to // use or we were modified since our last check lastUsedFileMap.put(locFile.getName(), file); try { - roleData = (NwsRoleData) SerializationUtil.jaxbUnmarshalFromXmlFile(file.getAbsolutePath()); - applicationRoleMap.put(roleData.getApplication(), roleData); + roleData = jaxb.unmarshalFromXmlFile(file + .getAbsolutePath()); + applicationRoleMap.put(roleData.getApplication(), + roleData); } catch (Exception e) { - UFStatus.getHandler().handle(Priority.PROBLEM, "Error loading file: " + file.getName(), e); + UFStatus.getHandler().handle(Priority.PROBLEM, + "Error loading file: " + file.getName(), e); } - lastModificationTimeMap.put(locFile.getName(), file.lastModified()); + lastModificationTimeMap.put(locFile.getName(), + file.lastModified()); } } } @@ -131,7 +145,7 @@ public class NwsRoleStorage implements IRoleStorage { NwsRoleData roleData = getRoleData(application); return roleData.isAuthorized(permission, user); } - + @Override public String[] getAllDefinedPermissions(String application) throws AuthorizationException { diff --git a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java index 08dc86ab31..a6986c3c30 100644 --- a/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java +++ b/edexOsgi/com.raytheon.uf.edex.pointdata/src/com/raytheon/uf/edex/pointdata/PointDataPluginDao.java @@ -53,6 +53,7 @@ import com.raytheon.uf.common.pointdata.IPointData; import com.raytheon.uf.common.pointdata.PointDataContainer; import com.raytheon.uf.common.pointdata.PointDataDescription; import 
com.raytheon.uf.common.pointdata.PointDataView; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.common.status.IUFStatusHandler; import com.raytheon.uf.common.status.UFStatus; import com.raytheon.uf.common.time.DataTime; @@ -269,7 +270,7 @@ public abstract class PointDataPluginDao extends hdf5DataDescription = PointDataDescription.fromStream(this .getClass().getResourceAsStream( "/res/pointdata/" + pluginName + ".xml")); - } catch (JAXBException e) { + } catch (SerializationException e) { statusHandler.error("Unable to load " + pluginName + " Point Data Description", e); } @@ -496,17 +497,17 @@ public abstract class PointDataPluginDao extends this.beanMapCache.offer(bm); } } - + protected String generatePointDataFileName(T bean) { return this.pluginName - + File.separator - + this.pathProvider.getHDFPath(this.pluginName, - (IPersistable) bean) - + File.separator - + getPointDataFileName(bean).replace(".h5", "") - + DefaultPathProvider.fileNameFormat.get().format( - ((PluginDataObject) bean).getDataTime() - .getRefTime()) + ".h5"; + + File.separator + + this.pathProvider.getHDFPath(this.pluginName, + (IPersistable) bean) + + File.separator + + getPointDataFileName(bean).replace(".h5", "") + + DefaultPathProvider.fileNameFormat.get().format( + ((PluginDataObject) bean).getDataTime().getRefTime()) + + ".h5"; } public abstract T newObject(); diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.airep/src/gov/noaa/nws/ncep/common/dataplugin/airep/dao/AirepDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.airep/src/gov/noaa/nws/ncep/common/dataplugin/airep/dao/AirepDao.java index 5067a5a60f..6d4cfefc1a 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.airep/src/gov/noaa/nws/ncep/common/dataplugin/airep/dao/AirepDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.airep/src/gov/noaa/nws/ncep/common/dataplugin/airep/dao/AirepDao.java @@ -10,12 +10,11 @@ import gov.noaa.nws.ncep.common.dataplugin.airep.AirepRecord; import java.util.List; -import javax.xml.bind.JAXBException; - import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.dataplugin.persist.IPersistable; import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.pointdata.PointDataDescription; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; @@ -95,43 +94,37 @@ public class AirepDao extends PointDataPluginDao { return results; } - @Override - public String[] getKeysRequiredForFileName() { - return new String[] { "dataTime.refTime" }; - } - - @Override - public AirepRecord newObject() { - return new AirepRecord(); - } - - @Override - public String getPointDataFileName(AirepRecord p) { - return "airep.h5"; - } - - /* @Override - public String[] getParameters(File file) throws StorageException, - FileNotFoundException { - - try { - // This should be faster than hitting the datastore. 
- return getPointDataDescription().getParameterNames(); - } catch (Exception e) { - // let super handle it - return super.getParameters(file); - } + public String[] getKeysRequiredForFileName() { + return new String[] { "dataTime.refTime" }; } - */ - public PointDataDescription getPointDataDescription() throws JAXBException { + @Override + public AirepRecord newObject() { + return new AirepRecord(); + } + + @Override + public String getPointDataFileName(AirepRecord p) { + return "airep.h5"; + } + + /* + * @Override public String[] getParameters(File file) throws + * StorageException, FileNotFoundException { + * + * try { // This should be faster than hitting the datastore. return + * getPointDataDescription().getParameterNames(); } catch (Exception e) { // + * let super handle it return super.getParameters(file); } } + */ + + public PointDataDescription getPointDataDescription() + throws SerializationException { if (pdd == null) { - + pdd = PointDataDescription.fromStream(this.getClass() .getResourceAsStream("/res/pointdata/airep.xml")); } return pdd; } } - diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.gpd/src/gov/noaa/nws/ncep/common/dataplugin/gpd/dao/GenericPointDataDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.gpd/src/gov/noaa/nws/ncep/common/dataplugin/gpd/dao/GenericPointDataDao.java index db62fc8130..27e0d010e2 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.gpd/src/gov/noaa/nws/ncep/common/dataplugin/gpd/dao/GenericPointDataDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.gpd/src/gov/noaa/nws/ncep/common/dataplugin/gpd/dao/GenericPointDataDao.java @@ -36,8 +36,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import javax.xml.bind.JAXBException; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Criteria; @@ -54,481 +52,179 @@ import com.raytheon.uf.common.pointdata.PointDataContainer; import com.raytheon.uf.common.pointdata.PointDataDescription; import com.raytheon.uf.common.pointdata.PointDataView; import com.raytheon.uf.common.pointdata.spatial.ObStation; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.purge.PurgeLogger; import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; +public class GenericPointDataDao extends + PointDataPluginDao { -public class GenericPointDataDao extends PointDataPluginDao { + private Log logger = LogFactory.getLog(getClass()); - private Log logger = LogFactory.getLog(getClass()); - private PointDataDescription pdd; - private SimpleDateFormat hdfFileDateFormat, dbRefTimeFormat; + private PointDataDescription pdd; - public GenericPointDataDao(String pluginName) throws PluginException { - super(pluginName); - hdfFileDateFormat = new SimpleDateFormat("-yyyy-MM-dd-HH-mm"); - dbRefTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - } + private SimpleDateFormat hdfFileDateFormat, dbRefTimeFormat; - @Override - public String[] getKeysRequiredForFileName() { - return new String[] { GenericPointDataConstants.DB_REFTIME_NAME , GenericPointDataConstants.DB_PROD_NAME, GenericPointDataConstants.DB_MASTER_LEVEL_NAME}; - } + public GenericPointDataDao(String pluginName) throws PluginException { + super(pluginName); + hdfFileDateFormat = new SimpleDateFormat("-yyyy-MM-dd-HH-mm"); + dbRefTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + } + @Override + public String[] 
getKeysRequiredForFileName() { + return new String[] { GenericPointDataConstants.DB_REFTIME_NAME, + GenericPointDataConstants.DB_PROD_NAME, + GenericPointDataConstants.DB_MASTER_LEVEL_NAME }; + } + @Override + public GenericPointDataRecord newObject() { + return new GenericPointDataRecord(); + } - @Override - public GenericPointDataRecord newObject() { - return new GenericPointDataRecord(); - } + /* + * (non-Javadoc) + * + * @see + * com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataFileName + * (com.raytheon.uf.common.dataplugin.PluginDataObject) Chin: in GPD + * implementation this is only called from Purge + */ + @Override + public String getPointDataFileName(GenericPointDataRecord p) { + Date refTime = (p.getDataTime().getRefTime()); + String dateStr = hdfFileDateFormat.format(refTime); + // System.out.println("gpd getPointDataFileName1 called and return: "+"gpd-"+p.getProductInfo().getName()+dateStr+/*"-"+p.getReportType().getMasterLevel().getName()+*/".h5"); + return "gpd-" + p.getProductInfo().getName() + dateStr + ".h5"; + } + /* + * (non-Javadoc) + * + * @see + * com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataFileName + * (java.util.Map) return a full HDF5 point data file name (including path) + * Chin:::This is called when requesting data Currently one report is + * defined with only one master level, therefore file path will be like + * this. + * /awips2/edex/data/hdf5/gpd/productName/gpd-productName-yyyy-mm-dd-HH + * -MM.h5 E.g. /awips2/edex/data/hdf5/gpd/sib1/gpd-sib1-2013-05-08-19-00.h5 + * Note:::future ??? If, we want to defined more than one master level for + * one report type, then file path will like this, + * /awips2/edex/data/hdf5/gpd + * /reportName/masterLevelName/gpd-reportName-masterLevelName + * -yyyy-mm-dd-HH-MM.h5 E.g. + * /awips2/edex/data/hdf5/gpd/sib1/MB/gpd-sib1-MB-2013-05-08-19-00.h5 + */ + @Override + public String getPointDataFileName(Map dbResults) { + String reportname = (String) dbResults + .get(GenericPointDataConstants.DB_PROD_NAME); + // String lmName= + // (String)dbResults.get(GenericPointDataConstants.DB_MASTER_LEVEL_NAME); + String dateStr = hdfFileDateFormat.format(dbResults + .get(GenericPointDataConstants.DB_REFTIME_NAME)); + String filename = PLUGIN_HDF5_DIR + reportname + File.separator + // + lmName + File.separator + + this.pluginName + "-" + reportname + /* "-"+lmName+ */dateStr + + ".h5"; + // System.out.println("GenericPointDataDao getPointDataFileName2 called and return: "+ + // filename); - /* - * (non-Javadoc) - * @see com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataFileName(com.raytheon.uf.common.dataplugin.PluginDataObject) - * Chin: in GPD implementation this is only called from Purge - * - */ - @Override - public String getPointDataFileName(GenericPointDataRecord p) { - Date refTime = (p.getDataTime().getRefTime()); - String dateStr = hdfFileDateFormat.format(refTime); - //System.out.println("gpd getPointDataFileName1 called and return: "+"gpd-"+p.getProductInfo().getName()+dateStr+/*"-"+p.getReportType().getMasterLevel().getName()+*/".h5"); - return "gpd-"+p.getProductInfo().getName()+dateStr+".h5"; - } + return filename; + } - /* - * (non-Javadoc) - * @see com.raytheon.uf.edex.pointdata.PointDataPluginDao#getPointDataFileName(java.util.Map) - * return a full HDF5 point data file name (including path) - * Chin:::This is called when requesting data - * Currently one report is defined with only one master level, therefore file path will be like this. 
- * /awips2/edex/data/hdf5/gpd/productName/gpd-productName-yyyy-mm-dd-HH-MM.h5 - * E.g. /awips2/edex/data/hdf5/gpd/sib1/gpd-sib1-2013-05-08-19-00.h5 - * Note:::future ??? - * If, we want to defined more than one master level for one report type, then file path will like this, - * /awips2/edex/data/hdf5/gpd/reportName/masterLevelName/gpd-reportName-masterLevelName-yyyy-mm-dd-HH-MM.h5 - * E.g. /awips2/edex/data/hdf5/gpd/sib1/MB/gpd-sib1-MB-2013-05-08-19-00.h5 - * - */ - @Override - public String getPointDataFileName(Map dbResults) { - String reportname = (String)dbResults.get(GenericPointDataConstants.DB_PROD_NAME); - //String lmName= (String)dbResults.get(GenericPointDataConstants.DB_MASTER_LEVEL_NAME); - String dateStr = hdfFileDateFormat.format(dbResults.get(GenericPointDataConstants.DB_REFTIME_NAME)); - String filename = - PLUGIN_HDF5_DIR - + reportname + File.separator - //+ lmName + File.separator - +this.pluginName+"-"+reportname+/*"-"+lmName+*/dateStr+".h5"; - //System.out.println("GenericPointDataDao getPointDataFileName2 called and return: "+ filename); + /* + * (non-Javadoc) + * + * @see + * com.raytheon.uf.edex.pointdata.PointDataPluginDao#getFullFilePath(com + * .raytheon.uf.common.dataplugin.PluginDataObject) return a full HDF5 point + * data file name (including path) This is called when decoding data + */ + @Override + public File getFullFilePath(PluginDataObject persistable) { + File file; + GenericPointDataRecord rec = (GenericPointDataRecord) persistable; + String directory = PLUGIN_HDF5_DIR /* + * + File.separator + + * persistable.getPluginName() + + * File.separator + */ + + rec.getProductInfo().getName(); - return filename; - } - /* - * (non-Javadoc) - * @see com.raytheon.uf.edex.pointdata.PointDataPluginDao#getFullFilePath(com.raytheon.uf.common.dataplugin.PluginDataObject) - * return a full HDF5 point data file name (including path) - * This is called when decoding data - */ - @Override - public File getFullFilePath(PluginDataObject persistable) { - File file; - GenericPointDataRecord rec = (GenericPointDataRecord)persistable; - String directory = PLUGIN_HDF5_DIR /*+ File.separator + persistable.getPluginName() - + File.separator*/ - + rec.getProductInfo().getName(); + Date refTime = ((PluginDataObject) persistable).getDataTime() + .getRefTime(); + String dateStr = hdfFileDateFormat.format(refTime); + String fileName = persistable.getPluginName() + "-" + + rec.getProductInfo().getName() + /* + * "-"+rec.getReportType(). + * getMasterLevel + * ().getName()+ + */dateStr + ".h5"; + file = new File(directory + File.separator + fileName); + // System.out.println("GenericPointDataDao getFullFilePath return "+ + // file.getPath()+" "+ file.getName()); + return file; + } - Date refTime = ((PluginDataObject) persistable).getDataTime().getRefTime(); - String dateStr = hdfFileDateFormat.format(refTime); - String fileName = persistable.getPluginName()+ "-"+rec.getProductInfo().getName()+/*"-"+rec.getReportType().getMasterLevel().getName()+*/dateStr+".h5"; - file = new File(directory - + File.separator - + fileName); - //System.out.println("GenericPointDataDao getFullFilePath return "+ file.getPath()+" "+ file.getName()); - return file; - } - /* - * This function is for development testing.. 
not used in production code - */ - public PointDataDescription getPointDataDescription() throws JAXBException { - if (pdd == null) { - pdd = PointDataDescription.fromStream(this.getClass() - .getResourceAsStream("/res/pointdata/gpd.xml")); - } - return pdd; - } + /* + * This function is for development testing.. not used in production code + */ + public PointDataDescription getPointDataDescription() + throws SerializationException { + if (pdd == null) { + pdd = PointDataDescription.fromStream(this.getClass() + .getResourceAsStream("/res/pointdata/gpd.xml")); + } + return pdd; + } - //look up target location in gpd_location table. If not present and if createLocation = TRUE, insert it to table. - public boolean lookupGpdLocation(ObStation location, boolean createLocation) { - boolean status= true; - Session sess = null; - Transaction trans = null; - try { - sess = getSessionFactory().openSession(); - trans = sess.beginTransaction(); - - Criteria crit = sess.createCriteria(ObStation.class); - - Criterion nameCrit = Restrictions.eq("stationId", location.getStationId()); - crit.add(nameCrit); - Criterion nameCrit1 = Restrictions.eq("catalogType", location.getCatalogType()); - crit.add(nameCrit1); - //querying... - List vals = crit.list(); - - if (vals.size() <= 0) { - //not in database - if (createLocation) { - sess.saveOrUpdate(location); - trans.commit(); - } else { - status = false; - } - } - - } catch (Exception e) { - logger.error( - "lookupGpdLocation:Error occurred looking up [" + location.getStationId() - + "]", e); - status = false; - if (trans != null) { - try { - trans.rollback(); - } catch (Exception e1) { - logger.error("lookupGpdLocation: Error occurred rolling back transaction", e); - } - } - } finally { - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - logger.error("lookupGpdLocation: Error occurred closing session", e); - } - } - } - - return status; - } - //look up target parm in parameter table. If not present and if createParm = TRUE, insert it to table. - public boolean lookupParameter(Parameter parm, boolean createParm) { - boolean status= true; - Session sess = null; - Transaction trans = null; - try { - sess = getSessionFactory().openSession(); - trans = sess.beginTransaction(); - - Criteria crit = sess.createCriteria(Parameter.class); - - Criterion nameCrit = Restrictions.eq("abbreviation", parm.getAbbreviation()); - crit.add(nameCrit); - //querying... - List vals = crit.list(); - if (vals.size() <= 0 ){ - if(createParm) { - sess.saveOrUpdate(parm); - trans.commit(); - } - else { - status = false; - } - } - - } catch (Exception e) { - logger.error( - "lookupParameter: Error occurred looking up parm[" + parm.getAbbreviation() - + "]", e); - status = false; - if (trans != null) { - try { - trans.rollback(); - } catch (Exception e1) { - logger.error("lookupParameter: Error occurred rolling back transaction", e); - } - } - } finally { - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - logger.error("lookupParameter: Error occurred closing session", e); - } - } - } - - return status; - } - //look up target masterLvl in level_master table. If not present and if createMl = TRUE, insert it to table. 
- public boolean lookupMasterlevel(MasterLevel masterLvl, boolean createMl) { - boolean status= true; - Session sess = null; - Transaction trans = null; - try { - sess = getSessionFactory().openSession(); - trans = sess.beginTransaction(); - - Criteria crit = sess.createCriteria(Parameter.class); - - Criterion nameCrit = Restrictions.eq("name", masterLvl.getName()); - crit.add(nameCrit); - //querying... - List vals = crit.list(); - if (vals.size() <= 0 ){ - if(createMl) { - sess.saveOrUpdate(masterLvl); - trans.commit(); - } - else { - status = false; - } - } - - } catch (Exception e) { - logger.error( - "lookupMasterlevel: Error occurred looking up parm[" + masterLvl.getName() - + "]", e); - status = false; - if (trans != null) { - try { - trans.rollback(); - } catch (Exception e1) { - logger.error("lookupMasterlevel: Error occurred rolling back transaction", e); - } - } - } finally { - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - logger.error("lookupMasterlevel: Error occurred closing session", e); - } - } - } - - return status; - } - - - private boolean mergeProductInfoParameterList(GenericPointDataProductInfo targetProdInfo, GenericPointDataProductInfo sourceProdInfo){ - List srcParmList = sourceProdInfo.getParameterLst(); - List tarParmList = targetProdInfo.getParameterLst(); - boolean merged = false; - /*for(Parameter p: srcParmList){ - System.out.println("srcA p="+p.getAbbreviation()); - } - for(Parameter p: tarParmList){ - System.out.println("tarA p="+p.getAbbreviation()); - }*/ - for (int index = srcParmList.size()-1; index >=0 ; index--){ - Parameter pm = srcParmList.get(index); - boolean found = false; - for(Parameter p: tarParmList){ - if(p.getAbbreviation().equals(pm.getAbbreviation())){ - found = true; - break; - } - } - if(found == false){ - Parameter newPm = srcParmList.remove(index); - tarParmList.add(newPm); - merged = true; - //System.out.println("not found add newPM="+ newPm.getAbbreviation()); - } - /*else { - System.out.println("found srcP="+pm.getAbbreviation()); - }*/ - } - //System.out.println("merged = "+ merged); - /*for(Parameter p: srcParmList){ - System.out.println("srcB p="+p.getAbbreviation()); - } - for(Parameter p: tarParmList){ - System.out.println("tarB p="+p.getAbbreviation()); - }*/ - return merged; - } - // 1. look up target product in gpd_productinfo table. If not present and if createReport = TRUE, insert it to table. - // 2. A complete product is returned using the contents found in DB, as user may just use product name in input XML file for - // decoding request, when a same product information had been already saved in DB earlier. - // 3. If prodInfo contains new parameters, if version number is 0, than update DB, otherwise, reject the update - // 4. 
copy input prod's "number of level" to return prodInfo - public GenericPointDataProductInfo lookupUpdateGpdProdInfo(GenericPointDataProductInfo prod, boolean createProd, int prodVersion) { - GenericPointDataProductInfo returnProdInfo = null; - boolean status= true; - Session sess = null; - Transaction trans = null; - int maxnumLvl = prod.getMaxNumberOfLevel(); - try { - sess = getSessionFactory().openSession(); - trans = sess.beginTransaction(); - - Criteria crit = sess.createCriteria(GenericPointDataProductInfo.class); - - Criterion nameCrit = Restrictions.eq("name", prod.getName()); - crit.add(nameCrit); - List vals = crit.list(); - if (vals.size() > 0) { - // the product is already in DB - GenericPointDataProductInfo dbProdInfo = (GenericPointDataProductInfo) vals.get(0); - //check to see if there are new parameters, and merge then to parameter list in dbProdInfo - boolean merged = mergeProductInfoParameterList(dbProdInfo,prod); - if(merged == true ){ - //if there are new parameters and product version number is 0, then update product to DB - if( prodVersion == 0){ - for (Parameter pm: dbProdInfo.getParameterLst()){ - if(lookupParameter( pm, true) == false){ - break; - } - } - sess.saveOrUpdate(dbProdInfo); - trans.commit(); - //TBD...do we need clone it? - returnProdInfo = dbProdInfo;//.clone(); - } - //prod version > 0, disallow update prod info - } - else{ - //TBD...do we need clone it? - returnProdInfo = dbProdInfo;//.clone(); - } - - } else if (createProd) { - for (Parameter pm: prod.getParameterLst()){ - if(lookupParameter( pm, true) == false){ - status = false; - break; - } - } - if(status== true && lookupMasterlevel(prod.getMasterLevel(), true) == false){ - status = false; - } - if(status){ - sess.saveOrUpdate(prod); - returnProdInfo = prod; - trans.commit(); - } - } else - status = false; - } catch (Exception e) { - logger.error( - "lookupGpdReportType: Error occurred looking up GenericPointDataReporttype[" + prod.getName() - + "]", e); - status = false; - if (trans != null) { - try { - trans.rollback(); - } catch (Exception e1) { - logger.error("lookupGpdReportType: Error occurred rolling back transaction", e); - } - } - } finally { - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - logger.error("lookupGpdReportType: Error occurred closing session", e); - } - } - } - returnProdInfo.setMaxNumberOfLevel(maxnumLvl); - return returnProdInfo; - } - /* - * To create or update product meta data information - */ - public GenericPointDataProductInfo updateProductInfo(GenericPointDataProductInfo prod){ - Session sess = null; - boolean status=true; - Transaction trans = null; - GenericPointDataProductInfo rval=null; - try { - sess = getSessionFactory().openSession(); - trans = sess.beginTransaction(); - - - for (Parameter pm: prod.getParameterLst()){ - if(lookupParameter( pm, true) == false){ - status = false; - break; - } - } - if(status== true && lookupMasterlevel(prod.getMasterLevel(), true) == false){ - status = false; - } - if(status){ - sess.saveOrUpdate(prod); - - Criteria crit = sess.createCriteria(GenericPointDataProductInfo.class); - - Criterion nameCrit = Restrictions.eq("name", prod.getName()); - crit.add(nameCrit); - List vals = crit.list(); - if (vals.size() > 0) { - rval = ((GenericPointDataProductInfo) vals.get(0)).clone(); - System.out.println("updateProductInfo: new parameter array size="+rval.getParameterLst().size()); - } - - trans.commit(); - } - - } catch (Exception e) { - logger.error( - "updateProductInfo: Error occurred looking up product 
[" + prod.getName() - + "]", e); - - if (trans != null) { - try { - trans.rollback(); - } catch (Exception e1) { - logger.error("updateProductInfo: Error occurred rolling back transaction", e); - } - } - } finally { - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - logger.error("updateProductInfo: Error occurred closing session", e); - } - } - } - return rval; - - } - - /* TBM...Chin delet this leter...not used - public boolean lookupGpdDataUri(String dataUri) { - boolean status= true; + // look up target location in gpd_location table. If not present and if + // createLocation = TRUE, insert it to table. + public boolean lookupGpdLocation(ObStation location, boolean createLocation) { + boolean status = true; Session sess = null; Transaction trans = null; try { sess = getSessionFactory().openSession(); trans = sess.beginTransaction(); - Criteria crit = sess.createCriteria(GenericPointDataRecord.class); + Criteria crit = sess.createCriteria(ObStation.class); - Criterion nameCrit = Restrictions.eq("dataURI", dataUri); + Criterion nameCrit = Restrictions.eq("stationId", + location.getStationId()); crit.add(nameCrit); - //querying... + Criterion nameCrit1 = Restrictions.eq("catalogType", + location.getCatalogType()); + crit.add(nameCrit1); + // querying... List vals = crit.list(); if (vals.size() <= 0) { - status = false; - } + // not in database + if (createLocation) { + sess.saveOrUpdate(location); + trans.commit(); + } else { + status = false; + } + } + } catch (Exception e) { - logger.error( - "lookupGpdLocation:Error occurred looking up lookupGpdDataUri[" + dataUri - + "]", e); + logger.error("lookupGpdLocation:Error occurred looking up [" + + location.getStationId() + "]", e); status = false; if (trans != null) { try { trans.rollback(); } catch (Exception e1) { - logger.error("lookupGpdDataUri: Error occurred rolling back transaction", e); + logger.error( + "lookupGpdLocation: Error occurred rolling back transaction", + e); } } } finally { @@ -536,760 +232,1115 @@ public class GenericPointDataDao extends PointDataPluginDao vals = crit.list(); - if (vals.size() > 0) { - try { - //to avoid LazyInitializationException, we have to take care of Collection before - // closing session. Therefore, clone() it. - rval = ((GenericPointDataProductInfo) vals.get(0)).clone(); - } catch (CloneNotSupportedException e) { - e.printStackTrace(); - } - } - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - return rval; - } - /* - * Chin:::: - * Get point data container for one or multiple stations. - * Based on queryKey. - * If BY_STN_ID, then stnId required, and is querying one station - * If BY_SLAT_SLON, then slat/slon required, and is querying one station - * If BY_REPORT_NAME, then both not required, and is querying all stations that meet prodInfo and refTime query constraints - */ - private PointDataContainer getPointDataContainer(Date refTime, GenericPointDataQueryKey quertKey, String stnId, float slat, float slon, GenericPointDataProductInfo prodInfo, int productVersion) throws Exception{ - String prodName = prodInfo.getName(); + Criteria crit = sess.createCriteria(Parameter.class); - PointDataContainer pdc=null; + Criterion nameCrit = Restrictions.eq("abbreviation", + parm.getAbbreviation()); + crit.add(nameCrit); + // querying... 
+ List vals = crit.list(); + if (vals.size() <= 0) { + if (createParm) { + sess.saveOrUpdate(parm); + trans.commit(); + } else { + status = false; + } + } - GenericPointDataQuery pdq = new GenericPointDataQuery("gpd"); - StringBuilder returnParametersString = new StringBuilder(); - /* - * add return fields for both DB and HDF5 - */ - //1st:: add return fields from HDF5. They are the parameter list defined in a GPD report - for ( Parameter parm: prodInfo.getParameterLst()) { - String parameter= parm.getAbbreviation(); - if (returnParametersString.length() > 0) { - returnParametersString.append(","); - } - returnParametersString.append(parameter); - } - //also add the 3 HDF5 mandatory datasets - returnParametersString.append(","+GenericPointDataConstants.HDF5_LEVEL_VALUE); - returnParametersString.append(","+GenericPointDataConstants.HDF5_NUM_LEVEL); - returnParametersString.append(","+GenericPointDataConstants.HDF5_STN_ID); - System.out.println("gpd dao hdf5 parameterlist="+returnParametersString.toString()); + } catch (Exception e) { + logger.error("lookupParameter: Error occurred looking up parm[" + + parm.getAbbreviation() + "]", e); + status = false; + if (trans != null) { + try { + trans.rollback(); + } catch (Exception e1) { + logger.error( + "lookupParameter: Error occurred rolling back transaction", + e); + } + } + } finally { + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + logger.error( + "lookupParameter: Error occurred closing session", + e); + } + } + } - //2nd:: add return fields form DB. the parameter name need to be defined in - //gov.noaa.nws.ncep.edex.plugin.gpd/res/pointdata/gpddb.xmlquertKey - // for example:: - returnParametersString.append(","+GenericPointDataConstants.DB_STN_CATALOGTYPE); - returnParametersString.append(","+GenericPointDataConstants.DB_SLAT); - returnParametersString.append(","+GenericPointDataConstants.DB_SLON); + return status; + } - // parameters defined in /gov.noaa.nws.ncep.edex.plugin.gpd/utility/common_static/base/path/gpdPathKeys.xml - // AND those returned by dao.getKeysRequiredForFileName() - // will be added automatically when calling PointDataQuery.execute() + // look up target masterLvl in level_master table. If not present and if + // createMl = TRUE, insert it to table. + public boolean lookupMasterlevel(MasterLevel masterLvl, boolean createMl) { + boolean status = true; + Session sess = null; + Transaction trans = null; + try { + sess = getSessionFactory().openSession(); + trans = sess.beginTransaction(); - //PointDataQuery.setParameters() is to set return fields from both DB and HDF5 - pdq.setParameters(returnParametersString.toString()); + Criteria crit = sess.createCriteria(Parameter.class); - //PointDataQuery.addParameter() is to add DB query constraints - pdq.addParameter("productInfo.name",prodName,"="); - if(quertKey == GenericPointDataQueryKey.BY_STN_ID) - pdq.addParameter("location.stationId",stnId,"="); - else if(quertKey == GenericPointDataQueryKey.BY_SLAT_SLON){ - pdq.addParameter("slat",Float.toString(slat+0.001f),"<"); - pdq.addParameter("slon",Float.toString(slon+0.001f),"<"); - pdq.addParameter("slat",Float.toString(slat-0.001f),">"); - pdq.addParameter("slon",Float.toString(slon-0.001f),">"); - } + Criterion nameCrit = Restrictions.eq("name", masterLvl.getName()); + crit.add(nameCrit); + // querying... 
+ List vals = crit.list(); + if (vals.size() <= 0) { + if (createMl) { + sess.saveOrUpdate(masterLvl); + trans.commit(); + } else { + status = false; + } + } - String dateStr = dbRefTimeFormat.format(refTime); - pdq.addParameter("dataTime.refTime",dateStr,"="); - pdq.addParameter("productVersion", Integer.toString(productVersion), "="); - System.out.println("requestig refTime = "+ dateStr); - pdq.requestAllLevels(); - pdc = pdq.execute(); + } catch (Exception e) { + logger.error("lookupMasterlevel: Error occurred looking up parm[" + + masterLvl.getName() + "]", e); + status = false; + if (trans != null) { + try { + trans.rollback(); + } catch (Exception e1) { + logger.error( + "lookupMasterlevel: Error occurred rolling back transaction", + e); + } + } + } finally { + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + logger.error( + "lookupMasterlevel: Error occurred closing session", + e); + } + } + } - return pdc; - } + return status; + } - /* TBM...Chin delet this leter...not used - public GenericPointDataProductContainer getGpdProduct(Date refTime, String prodName,boolean useSpecifiedProductVersion, int productVersion)throws Exception{ - GenericPointDataProductInfo prodInfo = getGpdProdInfo( prodName); - if (prodInfo == null) { - System.out.println("report is not in DB"); - return null; - } - if(useSpecifiedProductVersion== false){ - //find the latest version if there is one. Otherwise, use user specified version number - productVersion = getGpdProductLatestVersion( refTime, prodName); - } - PointDataContainer pdc = getPointDataContainer( refTime,GenericPointDataQueryKey.BY_REPORT_NAME, null,0,0, prodInfo, productVersion); - if (pdc == null) { - System.out.println("pdc is null"); - return null; - } - System.out.println("pdc CurrentSz()="+pdc.getCurrentSz()); - GenericPointDataProductContainer prodCon = new GenericPointDataProductContainer(); - prodCon.setProductInfo(prodInfo); - prodCon.setRefTime(refTime); - prodCon.setProductCorrectionVersion(productVersion); - for (int i = 0; i < pdc.getCurrentSz(); i++) { - PointDataView pdv =pdc.readRandom(i); - System.out.println("pdv#"+i+" *********************************************"); - Set parameters = new HashSet(pdv.getContainer().getParameters()); - for(String parm: parameters){ - System.out.println("parm ="+parm); - } - String stnId= null; - if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ - stnId = pdv.getString(GenericPointDataConstants.HDF5_STN_ID); - System.out.println("stnid= "+ stnId); - //stnId value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_STN_ID); - } - else - continue; //stnId not available, no need to continue on this PDV. - int numLevel=0; - if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ - numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); - System.out.println("numLevel= "+ numLevel); - //numLevel value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); - } - else - continue; //level number is 0, no need to continue on this PDV. 
+ private boolean mergeProductInfoParameterList( + GenericPointDataProductInfo targetProdInfo, + GenericPointDataProductInfo sourceProdInfo) { + List srcParmList = sourceProdInfo.getParameterLst(); + List tarParmList = targetProdInfo.getParameterLst(); + boolean merged = false; + /* + * for(Parameter p: srcParmList){ + * System.out.println("srcA p="+p.getAbbreviation()); } for(Parameter p: + * tarParmList){ System.out.println("tarA p="+p.getAbbreviation()); } + */ + for (int index = srcParmList.size() - 1; index >= 0; index--) { + Parameter pm = srcParmList.get(index); + boolean found = false; + for (Parameter p : tarParmList) { + if (p.getAbbreviation().equals(pm.getAbbreviation())) { + found = true; + break; + } + } + if (found == false) { + Parameter newPm = srcParmList.remove(index); + tarParmList.add(newPm); + merged = true; + // System.out.println("not found add newPM="+ + // newPm.getAbbreviation()); + } + /* + * else { System.out.println("found srcP="+pm.getAbbreviation()); } + */ + } + // System.out.println("merged = "+ merged); + /* + * for(Parameter p: srcParmList){ + * System.out.println("srcB p="+p.getAbbreviation()); } for(Parameter p: + * tarParmList){ System.out.println("tarB p="+p.getAbbreviation()); } + */ + return merged; + } - List levelList; - if (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ - levelList = new ArrayList(numLevel); - Number[] num = pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv.getNumberAllLevels(parm,numLevel); - for(Number n: num){ - System.out.println("level value="+n.floatValue()); - if(n.floatValue() == GenericPointDataConstants.GPD_INVALID_FLOAT_VALUE) - //Not a valid level, skip it - continue; - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(n.floatValue()); - levelList.add(gpdLevel); - } - //level value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); - } - else - continue; //no level value, no need to continue on this PDV. + // 1. look up target product in gpd_productinfo table. If not present and if + // createReport = TRUE, insert it to table. + // 2. A complete product is returned using the contents found in DB, as user + // may just use product name in input XML file for + // decoding request, when a same product information had been already saved + // in DB earlier. + // 3. If prodInfo contains new parameters, if version number is 0, than + // update DB, otherwise, reject the update + // 4. 
copy input prod's "number of level" to return prodInfo + public GenericPointDataProductInfo lookupUpdateGpdProdInfo( + GenericPointDataProductInfo prod, boolean createProd, + int prodVersion) { + GenericPointDataProductInfo returnProdInfo = null; + boolean status = true; + Session sess = null; + Transaction trans = null; + int maxnumLvl = prod.getMaxNumberOfLevel(); + try { + sess = getSessionFactory().openSession(); + trans = sess.beginTransaction(); - int stnCatalogType=ObStation.CAT_TYPE_MESONET; - if (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ - stnCatalogType = pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); - System.out.println("stnCatalogType= "+ stnCatalogType); - //DB_STN_CATALOGTYPE value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); - } - float slat = -9999; - if (parameters.contains(GenericPointDataConstants.DB_SLAT)){ - slat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); - System.out.println("slat= "+ slat); - //slat value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLAT); - } - float slon = -9999; - if (parameters.contains(GenericPointDataConstants.DB_SLON)){ - slon = pdv.getFloat(GenericPointDataConstants.DB_SLON); - System.out.println("slon= "+ slon); - //slon value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLON); - } - //PDV id is not returned back to user, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + Criteria crit = sess + .createCriteria(GenericPointDataProductInfo.class); - GenericPointDataStationProduct stnPd= new GenericPointDataStationProduct(); - stnPd.setProductName(prodName); - stnPd.setRefTime(refTime); - stnPd.setLevelLst(levelList); - stnPd.setProductVersion(productVersion); - stnPd.getLocation().setStationId(stnId); - stnPd.getLocation().setCatalogType(stnCatalogType); - stnPd.setNumLevel(numLevel); - stnPd.setSlat(slat); - stnPd.setSlon(slon); - for(String parm: parameters){ - //these parameters are data parameters and should be 2 dimensional float value per design - //If a new "meta" data is queried, then we should take care of that data specifically before here. - Number[] num = pdv.getNumberAllLevels(parm);//,numLevel); - System.out.println("parm ="+parm); - for(int j=0; j< numLevel ; j++){ - Number n = num[j]; - System.out.println(" value="+n.floatValue()); - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,n.floatValue()); - levelList.get(j).getGpdParameters().add(gpdParm); - } - } - prodCon.getStnProdLst().add(stnPd); - } - return prodCon; - } - */ - public GenericPointDataProductContainer getGpdProduct(Date refTime, GenericPointDataQueryKey key, String stnId, float slat, float slon, String prodName,boolean useSpecifiedProductVersion, int productVersion)throws Exception{ - GenericPointDataProductInfo prodInfo = getGpdProdInfo( prodName); - if (prodInfo == null) { - System.out.println("report is not in DB"); - return null; - } - if(useSpecifiedProductVersion== false){ - //find the latest version if there is one. 
Otherwise, use user specified version number - productVersion = getGpdProductLatestVersion( refTime, prodName); - if(productVersion <0) - return null; - } - PointDataContainer pdc = getPointDataContainer( refTime, key, stnId, slat, slon, prodInfo, productVersion); - if (pdc == null) { - System.out.println("pdc is null"); - return null; - } - System.out.println("pdc CurrentSz()="+pdc.getCurrentSz()); - GenericPointDataProductContainer prodCon = new GenericPointDataProductContainer(); - prodCon.setProductInfo(prodInfo); - prodCon.setRefTime(refTime); - prodCon.setProductCorrectionVersion(productVersion); - for (int i = 0; i < pdc.getCurrentSz(); i++) { - PointDataView pdv =pdc.readRandom(i); - //System.out.println("pdv#"+i+" *********************************************"); - Set parameters = new HashSet(pdv.getContainer().getParameters()); - /*for(String parm: parameters){ - System.out.println("parm ="+parm); - }*/ - int numLevel=0; - if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ - numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); - //System.out.println("numLevel= "+ numLevel); - //numLevel value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); - } - else - continue; //level number is 0, no need to continue on this PDV. + Criterion nameCrit = Restrictions.eq("name", prod.getName()); + crit.add(nameCrit); + List vals = crit.list(); + if (vals.size() > 0) { + // the product is already in DB + GenericPointDataProductInfo dbProdInfo = (GenericPointDataProductInfo) vals + .get(0); + // check to see if there are new parameters, and merge then to + // parameter list in dbProdInfo + boolean merged = mergeProductInfoParameterList(dbProdInfo, prod); + if (merged == true) { + // if there are new parameters and product version number is + // 0, then update product to DB + if (prodVersion == 0) { + for (Parameter pm : dbProdInfo.getParameterLst()) { + if (lookupParameter(pm, true) == false) { + break; + } + } + sess.saveOrUpdate(dbProdInfo); + trans.commit(); + // TBD...do we need clone it? + returnProdInfo = dbProdInfo;// .clone(); + } + // prod version > 0, disallow update prod info + } else { + // TBD...do we need clone it? + returnProdInfo = dbProdInfo;// .clone(); + } - List levelList; - if (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ - levelList = new ArrayList(numLevel); - if(numLevel >1){ - Number[] num = pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv.getNumberAllLevels(parm,numLevel); - int count=0; - for(Number n: num){ - count++; - if(count > numLevel) - break; - //System.out.println("Level " +count+ " value="+n.floatValue()); - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(n.floatValue()); - levelList.add(gpdLevel); - } - } - else{ - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(pdv.getFloat(GenericPointDataConstants.HDF5_LEVEL_VALUE)); - levelList.add(gpdLevel); - } - //level value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); - } - else - continue; //no level value, no need to continue on this PDV. 
+ } else if (createProd) { + for (Parameter pm : prod.getParameterLst()) { + if (lookupParameter(pm, true) == false) { + status = false; + break; + } + } + if (status == true + && lookupMasterlevel(prod.getMasterLevel(), true) == false) { + status = false; + } + if (status) { + sess.saveOrUpdate(prod); + returnProdInfo = prod; + trans.commit(); + } + } else + status = false; + } catch (Exception e) { + logger.error( + "lookupGpdReportType: Error occurred looking up GenericPointDataReporttype[" + + prod.getName() + "]", e); + status = false; + if (trans != null) { + try { + trans.rollback(); + } catch (Exception e1) { + logger.error( + "lookupGpdReportType: Error occurred rolling back transaction", + e); + } + } + } finally { + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + logger.error( + "lookupGpdReportType: Error occurred closing session", + e); + } + } + } + returnProdInfo.setMaxNumberOfLevel(maxnumLvl); + return returnProdInfo; + } - int stnCatalogType=ObStation.CAT_TYPE_MESONET; - if (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ - stnCatalogType = pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); - //System.out.println("stnCatalogType= "+ stnCatalogType); - //DB_STN_CATALOGTYPE value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); - } - String rtnstnId = stnId; - if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ - rtnstnId = pdv.getString(GenericPointDataConstants.HDF5_STN_ID); - //System.out.println("stnId= "+ rtnstnId); - //stnId is input parameter, can drop it here. - parameters.remove(GenericPointDataConstants.HDF5_STN_ID); - } - float rtnslat = slat; - if (parameters.contains(GenericPointDataConstants.DB_SLAT)){ - rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); - //System.out.println("slat= "+ rtnslat); - //slat value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLAT); - } - float rtnslon = slon; - if (parameters.contains(GenericPointDataConstants.DB_SLON)){ - rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON); - //System.out.println("slon= "+ rtnslon); - //slon value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLON); - } - //PDV id is not returned back to user, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + /* + * To create or update product meta data information + */ + public GenericPointDataProductInfo updateProductInfo( + GenericPointDataProductInfo prod) { + Session sess = null; + boolean status = true; + Transaction trans = null; + GenericPointDataProductInfo rval = null; + try { + sess = getSessionFactory().openSession(); + trans = sess.beginTransaction(); - GenericPointDataStationProduct stnPd= new GenericPointDataStationProduct(); - stnPd.setProductName(prodName); - stnPd.setRefTime(refTime); - stnPd.setLevelLst(levelList); - stnPd.setProductVersion(productVersion); - stnPd.getLocation().setStationId(rtnstnId); - stnPd.getLocation().setCatalogType(stnCatalogType); - stnPd.setNumLevel(numLevel); - stnPd.setSlat(rtnslat); - stnPd.setSlon(rtnslon); - for(String parm: parameters){ - if(numLevel>1){ - //these parameters are data parameters and should be 2 dimensional float value per design - //If a new "meta" data is queried, then we should take care of that data specifically before here. 
- Number[] num = pdv.getNumberAllLevels(parm);//,numLevel); - //System.out.println("parm ="+parm); - for(int j=0; j< numLevel ; j++){ - Number n = num[j]; - //System.out.println(" value="+n.floatValue()); - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,n.floatValue()); - levelList.get(j).getGpdParameters().add(gpdParm); - } - } - else { - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,pdv.getFloat(parm)); - levelList.get(0).getGpdParameters().add(gpdParm); - } - } - prodCon.getStnProdLst().add(stnPd); - } - return prodCon; - } - /* + for (Parameter pm : prod.getParameterLst()) { + if (lookupParameter(pm, true) == false) { + status = false; + break; + } + } + if (status == true + && lookupMasterlevel(prod.getMasterLevel(), true) == false) { + status = false; + } + if (status) { + sess.saveOrUpdate(prod); + + Criteria crit = sess + .createCriteria(GenericPointDataProductInfo.class); + + Criterion nameCrit = Restrictions.eq("name", prod.getName()); + crit.add(nameCrit); + List vals = crit.list(); + if (vals.size() > 0) { + rval = ((GenericPointDataProductInfo) vals.get(0)).clone(); + System.out + .println("updateProductInfo: new parameter array size=" + + rval.getParameterLst().size()); + } + + trans.commit(); + } + + } catch (Exception e) { + logger.error( + "updateProductInfo: Error occurred looking up product [" + + prod.getName() + "]", e); + + if (trans != null) { + try { + trans.rollback(); + } catch (Exception e1) { + logger.error( + "updateProductInfo: Error occurred rolling back transaction", + e); + } + } + } finally { + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + logger.error( + "updateProductInfo: Error occurred closing session", + e); + } + } + } + return rval; + + } + + /* + * TBM...Chin delet this leter...not used public boolean + * lookupGpdDataUri(String dataUri) { boolean status= true; Session sess = + * null; Transaction trans = null; try { sess = + * getSessionFactory().openSession(); trans = sess.beginTransaction(); + * + * Criteria crit = sess.createCriteria(GenericPointDataRecord.class); + * + * Criterion nameCrit = Restrictions.eq("dataURI", dataUri); + * crit.add(nameCrit); //querying... List vals = crit.list(); + * + * if (vals.size() <= 0) { status = false; } } catch (Exception e) { + * logger.error( + * "lookupGpdLocation:Error occurred looking up lookupGpdDataUri[" + dataUri + * + "]", e); status = false; if (trans != null) { try { trans.rollback(); } + * catch (Exception e1) { + * logger.error("lookupGpdDataUri: Error occurred rolling back transaction", + * e); } } } finally { if (sess != null) { try { sess.close(); } catch + * (Exception e) { + * logger.error("lookupGpdDataUri: Error occurred closing session", e); } } + * } + * + * return status; } + */ + public GenericPointDataProductInfo getGpdProdInfo(String prodName) { + GenericPointDataProductInfo rval = null; + if (prodName != null) { + Session sess = null; + sess = getSessionFactory().openSession(); + sess.beginTransaction(); + + Criteria crit = sess + .createCriteria(GenericPointDataProductInfo.class); + + Criterion nameCrit = Restrictions.eq("name", prodName); + crit.add(nameCrit); + List vals = crit.list(); + if (vals.size() > 0) { + try { + // to avoid LazyInitializationException, we have to take + // care of Collection before + // closing session. Therefore, clone() it. 
+ rval = ((GenericPointDataProductInfo) vals.get(0)).clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); + } + } + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + return rval; + } + + /* + * Chin:::: Get point data container for one or multiple stations. Based on + * queryKey. If BY_STN_ID, then stnId required, and is querying one station + * If BY_SLAT_SLON, then slat/slon required, and is querying one station If + * BY_REPORT_NAME, then both not required, and is querying all stations that + * meet prodInfo and refTime query constraints + */ + private PointDataContainer getPointDataContainer(Date refTime, + GenericPointDataQueryKey quertKey, String stnId, float slat, + float slon, GenericPointDataProductInfo prodInfo, int productVersion) + throws Exception { + String prodName = prodInfo.getName(); + + PointDataContainer pdc = null; + + GenericPointDataQuery pdq = new GenericPointDataQuery("gpd"); + StringBuilder returnParametersString = new StringBuilder(); + /* + * add return fields for both DB and HDF5 + */ + // 1st:: add return fields from HDF5. They are the parameter list + // defined in a GPD report + for (Parameter parm : prodInfo.getParameterLst()) { + String parameter = parm.getAbbreviation(); + if (returnParametersString.length() > 0) { + returnParametersString.append(","); + } + returnParametersString.append(parameter); + } + // also add the 3 HDF5 mandatory datasets + returnParametersString.append("," + + GenericPointDataConstants.HDF5_LEVEL_VALUE); + returnParametersString.append("," + + GenericPointDataConstants.HDF5_NUM_LEVEL); + returnParametersString.append("," + + GenericPointDataConstants.HDF5_STN_ID); + System.out.println("gpd dao hdf5 parameterlist=" + + returnParametersString.toString()); + + // 2nd:: add return fields form DB. 
the parameter name need to be + // defined in + // gov.noaa.nws.ncep.edex.plugin.gpd/res/pointdata/gpddb.xmlquertKey + // for example:: + returnParametersString.append("," + + GenericPointDataConstants.DB_STN_CATALOGTYPE); + returnParametersString.append("," + GenericPointDataConstants.DB_SLAT); + returnParametersString.append("," + GenericPointDataConstants.DB_SLON); + + // parameters defined in + // /gov.noaa.nws.ncep.edex.plugin.gpd/utility/common_static/base/path/gpdPathKeys.xml + // AND those returned by dao.getKeysRequiredForFileName() + // will be added automatically when calling PointDataQuery.execute() + + // PointDataQuery.setParameters() is to set return fields from both DB + // and HDF5 + pdq.setParameters(returnParametersString.toString()); + + // PointDataQuery.addParameter() is to add DB query constraints + pdq.addParameter("productInfo.name", prodName, "="); + if (quertKey == GenericPointDataQueryKey.BY_STN_ID) + pdq.addParameter("location.stationId", stnId, "="); + else if (quertKey == GenericPointDataQueryKey.BY_SLAT_SLON) { + pdq.addParameter("slat", Float.toString(slat + 0.001f), "<"); + pdq.addParameter("slon", Float.toString(slon + 0.001f), "<"); + pdq.addParameter("slat", Float.toString(slat - 0.001f), ">"); + pdq.addParameter("slon", Float.toString(slon - 0.001f), ">"); + } + + String dateStr = dbRefTimeFormat.format(refTime); + pdq.addParameter("dataTime.refTime", dateStr, "="); + pdq.addParameter("productVersion", Integer.toString(productVersion), + "="); + System.out.println("requestig refTime = " + dateStr); + pdq.requestAllLevels(); + pdc = pdq.execute(); + + return pdc; + } + + /* + * TBM...Chin delet this leter...not used public + * GenericPointDataProductContainer getGpdProduct(Date refTime, String + * prodName,boolean useSpecifiedProductVersion, int productVersion)throws + * Exception{ GenericPointDataProductInfo prodInfo = getGpdProdInfo( + * prodName); if (prodInfo == null) { + * System.out.println("report is not in DB"); return null; } + * if(useSpecifiedProductVersion== false){ //find the latest version if + * there is one. Otherwise, use user specified version number productVersion + * = getGpdProductLatestVersion( refTime, prodName); } PointDataContainer + * pdc = getPointDataContainer( + * refTime,GenericPointDataQueryKey.BY_REPORT_NAME, null,0,0, prodInfo, + * productVersion); if (pdc == null) { System.out.println("pdc is null"); + * return null; } System.out.println("pdc CurrentSz()="+pdc.getCurrentSz()); + * GenericPointDataProductContainer prodCon = new + * GenericPointDataProductContainer(); prodCon.setProductInfo(prodInfo); + * prodCon.setRefTime(refTime); + * prodCon.setProductCorrectionVersion(productVersion); for (int i = 0; i < + * pdc.getCurrentSz(); i++) { PointDataView pdv =pdc.readRandom(i); + * System.out + * .println("pdv#"+i+" *********************************************"); + * Set parameters = new + * HashSet(pdv.getContainer().getParameters()); for(String parm: + * parameters){ System.out.println("parm ="+parm); } String stnId= null; if + * (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ stnId = + * pdv.getString(GenericPointDataConstants.HDF5_STN_ID); + * System.out.println("stnid= "+ stnId); //stnId value is retrieved already, + * so drop it here parameters.remove(GenericPointDataConstants.HDF5_STN_ID); + * } else continue; //stnId not available, no need to continue on this PDV. 
+ * int numLevel=0; if + * (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ numLevel + * = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); + * System.out.println("numLevel= "+ numLevel); //numLevel value is retrieved + * already, so drop it here + * parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); } else + * continue; //level number is 0, no need to continue on this PDV. + * + * List levelList; if + * (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ + * levelList = new ArrayList(numLevel); Number[] num + * = + * pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv + * .getNumberAllLevels(parm,numLevel); for(Number n: num){ + * System.out.println("level value="+n.floatValue()); if(n.floatValue() == + * GenericPointDataConstants.GPD_INVALID_FLOAT_VALUE) //Not a valid level, + * skip it continue; GenericPointDataLevel gpdLevel = new + * GenericPointDataLevel(); gpdLevel.setLevelValue(n.floatValue()); + * levelList.add(gpdLevel); } //level value is retrieved already, so drop it + * here parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); } + * else continue; //no level value, no need to continue on this PDV. + * + * int stnCatalogType=ObStation.CAT_TYPE_MESONET; if + * (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ + * stnCatalogType = + * pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); + * System.out.println("stnCatalogType= "+ stnCatalogType); + * //DB_STN_CATALOGTYPE value is retrieved already, so drop it here + * parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); } float + * slat = -9999; if + * (parameters.contains(GenericPointDataConstants.DB_SLAT)){ slat = + * pdv.getFloat(GenericPointDataConstants.DB_SLAT); + * System.out.println("slat= "+ slat); //slat value is retrieved already, so + * drop it here parameters.remove(GenericPointDataConstants.DB_SLAT); } + * float slon = -9999; if + * (parameters.contains(GenericPointDataConstants.DB_SLON)){ slon = + * pdv.getFloat(GenericPointDataConstants.DB_SLON); + * System.out.println("slon= "+ slon); //slon value is retrieved already, so + * drop it here parameters.remove(GenericPointDataConstants.DB_SLON); } + * //PDV id is not returned back to user, so drop it here + * parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + * + * GenericPointDataStationProduct stnPd= new + * GenericPointDataStationProduct(); stnPd.setProductName(prodName); + * stnPd.setRefTime(refTime); stnPd.setLevelLst(levelList); + * stnPd.setProductVersion(productVersion); + * stnPd.getLocation().setStationId(stnId); + * stnPd.getLocation().setCatalogType(stnCatalogType); + * stnPd.setNumLevel(numLevel); stnPd.setSlat(slat); stnPd.setSlon(slon); + * for(String parm: parameters){ //these parameters are data parameters and + * should be 2 dimensional float value per design //If a new "meta" data is + * queried, then we should take care of that data specifically before here. 
+ * Number[] num = pdv.getNumberAllLevels(parm);//,numLevel); + * System.out.println("parm ="+parm); for(int j=0; j< numLevel ; j++){ + * Number n = num[j]; System.out.println(" value="+n.floatValue()); + * GenericPointDataParameter gpdParm = new + * GenericPointDataParameter(parm,n.floatValue()); + * levelList.get(j).getGpdParameters().add(gpdParm); } } + * prodCon.getStnProdLst().add(stnPd); } return prodCon; } + */ + public GenericPointDataProductContainer getGpdProduct(Date refTime, + GenericPointDataQueryKey key, String stnId, float slat, float slon, + String prodName, boolean useSpecifiedProductVersion, + int productVersion) throws Exception { + GenericPointDataProductInfo prodInfo = getGpdProdInfo(prodName); + if (prodInfo == null) { + System.out.println("report is not in DB"); + return null; + } + if (useSpecifiedProductVersion == false) { + // find the latest version if there is one. Otherwise, use user + // specified version number + productVersion = getGpdProductLatestVersion(refTime, prodName); + if (productVersion < 0) + return null; + } + PointDataContainer pdc = getPointDataContainer(refTime, key, stnId, + slat, slon, prodInfo, productVersion); + if (pdc == null) { + System.out.println("pdc is null"); + return null; + } + System.out.println("pdc CurrentSz()=" + pdc.getCurrentSz()); + GenericPointDataProductContainer prodCon = new GenericPointDataProductContainer(); + prodCon.setProductInfo(prodInfo); + prodCon.setRefTime(refTime); + prodCon.setProductCorrectionVersion(productVersion); + for (int i = 0; i < pdc.getCurrentSz(); i++) { + PointDataView pdv = pdc.readRandom(i); + // System.out.println("pdv#"+i+" *********************************************"); + Set parameters = new HashSet(pdv.getContainer() + .getParameters()); + /* + * for(String parm: parameters){ System.out.println("parm ="+parm); + * } + */ + int numLevel = 0; + if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)) { + numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); + // System.out.println("numLevel= "+ numLevel); + // numLevel value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); + } else + continue; // level number is 0, no need to continue on this PDV. + + List levelList; + if (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)) { + levelList = new ArrayList(numLevel); + if (numLevel > 1) { + Number[] num = pdv + .getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);// pdv.getNumberAllLevels(parm,numLevel); + int count = 0; + for (Number n : num) { + count++; + if (count > numLevel) + break; + // System.out.println("Level " +count+ + // " value="+n.floatValue()); + GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); + gpdLevel.setLevelValue(n.floatValue()); + levelList.add(gpdLevel); + } + } else { + GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); + gpdLevel.setLevelValue(pdv + .getFloat(GenericPointDataConstants.HDF5_LEVEL_VALUE)); + levelList.add(gpdLevel); + } + // level value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); + } else + continue; // no level value, no need to continue on this PDV. 
+ + int stnCatalogType = ObStation.CAT_TYPE_MESONET; + if (parameters + .contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)) { + stnCatalogType = pdv + .getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); + // System.out.println("stnCatalogType= "+ stnCatalogType); + // DB_STN_CATALOGTYPE value is retrieved already, so drop it + // here + parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); + } + String rtnstnId = stnId; + if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)) { + rtnstnId = pdv.getString(GenericPointDataConstants.HDF5_STN_ID); + // System.out.println("stnId= "+ rtnstnId); + // stnId is input parameter, can drop it here. + parameters.remove(GenericPointDataConstants.HDF5_STN_ID); + } + float rtnslat = slat; + if (parameters.contains(GenericPointDataConstants.DB_SLAT)) { + rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); + // System.out.println("slat= "+ rtnslat); + // slat value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.DB_SLAT); + } + float rtnslon = slon; + if (parameters.contains(GenericPointDataConstants.DB_SLON)) { + rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON); + // System.out.println("slon= "+ rtnslon); + // slon value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.DB_SLON); + } + // PDV id is not returned back to user, so drop it here + parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + + GenericPointDataStationProduct stnPd = new GenericPointDataStationProduct(); + stnPd.setProductName(prodName); + stnPd.setRefTime(refTime); + stnPd.setLevelLst(levelList); + stnPd.setProductVersion(productVersion); + stnPd.getLocation().setStationId(rtnstnId); + stnPd.getLocation().setCatalogType(stnCatalogType); + stnPd.setNumLevel(numLevel); + stnPd.setSlat(rtnslat); + stnPd.setSlon(rtnslon); + for (String parm : parameters) { + if (numLevel > 1) { + // these parameters are data parameters and should be 2 + // dimensional float value per design + // If a new "meta" data is queried, then we should take care + // of that data specifically before here. 
+ Number[] num = pdv.getNumberAllLevels(parm);// ,numLevel); + // System.out.println("parm ="+parm); + for (int j = 0; j < numLevel; j++) { + Number n = num[j]; + // System.out.println(" value="+n.floatValue()); + GenericPointDataParameter gpdParm = new GenericPointDataParameter( + parm, n.floatValue()); + levelList.get(j).getGpdParameters().add(gpdParm); + } + } else { + GenericPointDataParameter gpdParm = new GenericPointDataParameter( + parm, pdv.getFloat(parm)); + levelList.get(0).getGpdParameters().add(gpdParm); + } + } + prodCon.getStnProdLst().add(stnPd); + } + return prodCon; + } + + /* * */ - public List getGpdStationProduct(List refTimeList, GenericPointDataQueryKey key, String stnId, float slat, float slon, String prodName)throws Exception{ - GenericPointDataProductInfo prodInfo = getGpdProdInfo( prodName); - if (prodInfo == null) { - System.out.println("product is not in DB"); - return null; - } - List stnProdList = new ArrayList(); - for(Date refTime: refTimeList){ - int productVersion = getGpdProductLatestVersion( refTime, prodName); - if(productVersion <0) - continue; - PointDataContainer pdc = getPointDataContainer( refTime, key, stnId, slat, slon, prodInfo, productVersion); - if (pdc == null) { - System.out.println("pdc is null"); - continue; - } - System.out.println(refTime.toString() +" pdc CurrentSz()="+pdc.getCurrentSz()); + public List getGpdStationProduct( + List refTimeList, GenericPointDataQueryKey key, String stnId, + float slat, float slon, String prodName) throws Exception { + GenericPointDataProductInfo prodInfo = getGpdProdInfo(prodName); + if (prodInfo == null) { + System.out.println("product is not in DB"); + return null; + } + List stnProdList = new ArrayList(); + for (Date refTime : refTimeList) { + int productVersion = getGpdProductLatestVersion(refTime, prodName); + if (productVersion < 0) + continue; + PointDataContainer pdc = getPointDataContainer(refTime, key, stnId, + slat, slon, prodInfo, productVersion); + if (pdc == null) { + System.out.println("pdc is null"); + continue; + } + System.out.println(refTime.toString() + " pdc CurrentSz()=" + + pdc.getCurrentSz()); - for (int i = 0; i < pdc.getCurrentSz(); i++) { - PointDataView pdv =pdc.readRandom(i); - //System.out.println("pdv#"+i+" *********************************************"); - Set parameters = new HashSet(pdv.getContainer().getParameters()); - int numLevel=0; - if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ - numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); - //System.out.println("numLevel= "+ numLevel); - //numLevel value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); - } - else - continue; //level number is 0, no need to continue on this PDV. + for (int i = 0; i < pdc.getCurrentSz(); i++) { + PointDataView pdv = pdc.readRandom(i); + // System.out.println("pdv#"+i+" *********************************************"); + Set parameters = new HashSet(pdv.getContainer() + .getParameters()); + int numLevel = 0; + if (parameters + .contains(GenericPointDataConstants.HDF5_NUM_LEVEL)) { + numLevel = pdv + .getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); + // System.out.println("numLevel= "+ numLevel); + // numLevel value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); + } else + continue; // level number is 0, no need to continue on this + // PDV. 
- List levelList; - if (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ - levelList = new ArrayList(numLevel); - if(numLevel >1){ - Number[] num = pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv.getNumberAllLevels(parm,numLevel); - int count=0; - for(Number n: num){ - count++; - if(count > numLevel) - break; - //System.out.println("Level " +count+ " value="+n.floatValue()); - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(n.floatValue()); - levelList.add(gpdLevel); - } - } - else{ - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(pdv.getFloat(GenericPointDataConstants.HDF5_LEVEL_VALUE)); - levelList.add(gpdLevel); - } - //level value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); - } - else - continue; //no level value, no need to continue on this PDV. + List levelList; + if (parameters + .contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)) { + levelList = new ArrayList(numLevel); + if (numLevel > 1) { + Number[] num = pdv + .getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);// pdv.getNumberAllLevels(parm,numLevel); + int count = 0; + for (Number n : num) { + count++; + if (count > numLevel) + break; + // System.out.println("Level " +count+ + // " value="+n.floatValue()); + GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); + gpdLevel.setLevelValue(n.floatValue()); + levelList.add(gpdLevel); + } + } else { + GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); + gpdLevel.setLevelValue(pdv + .getFloat(GenericPointDataConstants.HDF5_LEVEL_VALUE)); + levelList.add(gpdLevel); + } + // level value is retrieved already, so drop it here + parameters + .remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); + } else + continue; // no level value, no need to continue on this + // PDV. - int stnCatalogType=ObStation.CAT_TYPE_MESONET; - if (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ - stnCatalogType = pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); - //System.out.println("stnCatalogType= "+ stnCatalogType); - //DB_STN_CATALOGTYPE value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); - } - String rtnstnId = stnId; - if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ - rtnstnId = pdv.getString(GenericPointDataConstants.HDF5_STN_ID); - //System.out.println("stnId= "+ rtnstnId); - //stnId is input parameter, can drop it here. 
- parameters.remove(GenericPointDataConstants.HDF5_STN_ID); - } - float rtnslat = slat; - if (parameters.contains(GenericPointDataConstants.DB_SLAT)){ - rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); - //System.out.println("slat= "+ rtnslat); - //slat value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLAT); - } - float rtnslon = slon; - if (parameters.contains(GenericPointDataConstants.DB_SLON)){ - rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON); - //System.out.println("slon= "+ rtnslon); - //slon value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLON); - } - //PDV id is not returned back to user, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + int stnCatalogType = ObStation.CAT_TYPE_MESONET; + if (parameters + .contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)) { + stnCatalogType = pdv + .getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); + // System.out.println("stnCatalogType= "+ stnCatalogType); + // DB_STN_CATALOGTYPE value is retrieved already, so drop it + // here + parameters + .remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); + } + String rtnstnId = stnId; + if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)) { + rtnstnId = pdv + .getString(GenericPointDataConstants.HDF5_STN_ID); + // System.out.println("stnId= "+ rtnstnId); + // stnId is input parameter, can drop it here. + parameters.remove(GenericPointDataConstants.HDF5_STN_ID); + } + float rtnslat = slat; + if (parameters.contains(GenericPointDataConstants.DB_SLAT)) { + rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); + // System.out.println("slat= "+ rtnslat); + // slat value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.DB_SLAT); + } + float rtnslon = slon; + if (parameters.contains(GenericPointDataConstants.DB_SLON)) { + rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON); + // System.out.println("slon= "+ rtnslon); + // slon value is retrieved already, so drop it here + parameters.remove(GenericPointDataConstants.DB_SLON); + } + // PDV id is not returned back to user, so drop it here + parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); - GenericPointDataStationProduct stnPd= new GenericPointDataStationProduct(); - stnPd.setProductName(prodName); - stnPd.setRefTime(refTime); - stnPd.setLevelLst(levelList); - stnPd.setProductVersion(productVersion); - stnPd.getLocation().setStationId(rtnstnId); - stnPd.getLocation().setCatalogType(stnCatalogType); - stnPd.setNumLevel(numLevel); - stnPd.setSlat(rtnslat); - stnPd.setSlon(rtnslon); - for(String parm: parameters){ - if(numLevel>1){ - //these parameters are data parameters and should be 2 dimensional float value per design - //If a new "meta" data is queried, then we should take care of that data specifically before here. 
- Number[] num = pdv.getNumberAllLevels(parm);//,numLevel); - //System.out.println("parm ="+parm); - for(int j=0; j< numLevel ; j++){ - Number n = num[j]; - //System.out.println(" value="+n.floatValue()); - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,n.floatValue()); - levelList.get(j).getGpdParameters().add(gpdParm); - } - } - else { - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,pdv.getFloat(parm)); - levelList.get(0).getGpdParameters().add(gpdParm); - } - } - stnProdList.add(stnPd); - } - } - return stnProdList; - } + GenericPointDataStationProduct stnPd = new GenericPointDataStationProduct(); + stnPd.setProductName(prodName); + stnPd.setRefTime(refTime); + stnPd.setLevelLst(levelList); + stnPd.setProductVersion(productVersion); + stnPd.getLocation().setStationId(rtnstnId); + stnPd.getLocation().setCatalogType(stnCatalogType); + stnPd.setNumLevel(numLevel); + stnPd.setSlat(rtnslat); + stnPd.setSlon(rtnslon); + for (String parm : parameters) { + if (numLevel > 1) { + // these parameters are data parameters and should be 2 + // dimensional float value per design + // If a new "meta" data is queried, then we should take + // care of that data specifically before here. + Number[] num = pdv.getNumberAllLevels(parm);// ,numLevel); + // System.out.println("parm ="+parm); + for (int j = 0; j < numLevel; j++) { + Number n = num[j]; + // System.out.println(" value="+n.floatValue()); + GenericPointDataParameter gpdParm = new GenericPointDataParameter( + parm, n.floatValue()); + levelList.get(j).getGpdParameters().add(gpdParm); + } + } else { + GenericPointDataParameter gpdParm = new GenericPointDataParameter( + parm, pdv.getFloat(parm)); + levelList.get(0).getGpdParameters().add(gpdParm); + } + } + stnProdList.add(stnPd); + } + } + return stnProdList; + } - /* TBM...Chin delet this leter...not used - public GenericPointDataStationProduct getGpdStationProduct(Date refTime, GenericPointDataQueryKey key, String stnId, double slat, double slon, String reportName,boolean useSpecifiedProductVersion, int productVersion)throws Exception{ - GenericPointDataProductInfo report = getGpdProdInfo( reportName); - if (report == null) { - System.out.println("report is not in DB"); - return null; - } - if(useSpecifiedProductVersion== false){ - //find the latest version if there is one. Otherwise, use user specified version number - productVersion = getGpdProductLatestVersion( refTime, reportName); - } - PointDataContainer pdc; - pdc = getPointDataContainer( refTime, key, stnId, slat, slon, report, productVersion); + /* + * TBM...Chin delet this leter...not used public + * GenericPointDataStationProduct getGpdStationProduct(Date refTime, + * GenericPointDataQueryKey key, String stnId, double slat, double slon, + * String reportName,boolean useSpecifiedProductVersion, int + * productVersion)throws Exception{ GenericPointDataProductInfo report = + * getGpdProdInfo( reportName); if (report == null) { + * System.out.println("report is not in DB"); return null; } + * if(useSpecifiedProductVersion== false){ //find the latest version if + * there is one. 
Otherwise, use user specified version number productVersion + * = getGpdProductLatestVersion( refTime, reportName); } PointDataContainer + * pdc; pdc = getPointDataContainer( refTime, key, stnId, slat, slon, + * report, productVersion); + * + * if (pdc == null) { System.out.println("pdc is null"); return null; } + * //for a single station product query, the pdc.getCurrentSz() should + * always be 1 System.out.println("pdc CurrentSz()="+pdc.getCurrentSz()); + * GenericPointDataStationProduct stnPd= null; for (int i = 0; i < + * pdc.getCurrentSz(); i++) { PointDataView pdv =pdc.readRandom(i); + * Set parameters = pdv.getContainer().getParameters(); for(String + * parm: parameters){ System.out.println("parm ="+parm); } int numLevel=0; + * if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ + * numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); + * System.out.println("numLevel= "+ numLevel); //numLevel value is retrieved + * already, so drop it here + * parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); } else + * continue; //level number is 0, no need to continue on this PDV. + * + * List levelList; if + * (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ + * levelList = new ArrayList(numLevel); Number[] num + * = + * pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv + * .getNumberAllLevels(parm,numLevel); for(Number n: num){ + * System.out.println("level value="+n.floatValue()); if(n.floatValue() == + * GenericPointDataConstants.GPD_INVALID_FLOAT_VALUE) //Not a valid level, + * skip it continue; GenericPointDataLevel gpdLevel = new + * GenericPointDataLevel(); gpdLevel.setLevelValue(n.floatValue()); + * levelList.add(gpdLevel); } //level value is retrieved already, so drop it + * here parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); } + * else continue; //no level value, no need to continue on this PDV. + * + * int stnCatalogType=ObStation.CAT_TYPE_MESONET; if + * (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ + * stnCatalogType = + * pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); + * System.out.println("stnCatalogType= "+ stnCatalogType); + * //DB_STN_CATALOGTYPE value is retrieved already, so drop it here + * parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); } String + * rtnstnId = stnId; if + * (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ rtnstnId = + * pdv.getString(GenericPointDataConstants.HDF5_STN_ID); + * System.out.println("stnId= "+ rtnstnId); //stnId is input parameter, can + * drop it here. 
parameters.remove(GenericPointDataConstants.HDF5_STN_ID); } + * double rtnslat = slat; if + * (parameters.contains(GenericPointDataConstants.DB_SLAT)){ rtnslat = + * pdv.getFloat(GenericPointDataConstants.DB_SLAT); + * System.out.println("slat= "+ rtnslat); //slat value is retrieved already, + * so drop it here parameters.remove(GenericPointDataConstants.DB_SLAT); } + * double rtnslon = slon; if + * (parameters.contains(GenericPointDataConstants.DB_SLON)){ rtnslon = + * pdv.getFloat(GenericPointDataConstants.DB_SLON); + * System.out.println("slon= "+ rtnslon); //slon value is retrieved already, + * so drop it here parameters.remove(GenericPointDataConstants.DB_SLON); } + * //PDV_id is not returned back to user, so drop it here + * parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + * + * + * stnPd= new GenericPointDataStationProduct(); + * stnPd.setProductName(reportName); stnPd.setRefTime(refTime); + * stnPd.setLevelLst(levelList); stnPd.setProductVersion(productVersion); + * stnPd.setSlat(rtnslat); stnPd.setSlon(rtnslon); + * stnPd.getLocation().setStationId(rtnstnId); + * stnPd.getLocation().setCatalogType(stnCatalogType); + * stnPd.setNumLevel(numLevel); for(String parm: parameters){ //these + * parameters are data parameters and should be 2 dimensional float value + * per design //If a new "meta" data is queried, then we should take care of + * that data specifically before here. Number[] num = + * pdv.getNumberAllLevels(parm);//,numLevel); + * System.out.println("parm ="+parm); for(int j=0; j< numLevel ; j++){ + * Number n = num[j]; System.out.println(" value="+n.floatValue()); + * GenericPointDataParameter gpdParm = new + * GenericPointDataParameter(parm,n.floatValue()); + * levelList.get(j).getGpdParameters().add(gpdParm); } } } return stnPd; } + */ + public int getGpdProductLatestVersion(Date refTime, String prodName) { + int latestProdVer = -1; + Session sess = null; + sess = getSessionFactory().openSession(); + sess.beginTransaction(); - if (pdc == null) { - System.out.println("pdc is null"); - return null; - } - //for a single station product query, the pdc.getCurrentSz() should always be 1 - System.out.println("pdc CurrentSz()="+pdc.getCurrentSz()); - GenericPointDataStationProduct stnPd= null; - for (int i = 0; i < pdc.getCurrentSz(); i++) { - PointDataView pdv =pdc.readRandom(i); - Set parameters = pdv.getContainer().getParameters(); - for(String parm: parameters){ - System.out.println("parm ="+parm); - } - int numLevel=0; - if (parameters.contains(GenericPointDataConstants.HDF5_NUM_LEVEL)){ - numLevel = pdv.getInt(GenericPointDataConstants.HDF5_NUM_LEVEL); - System.out.println("numLevel= "+ numLevel); - //numLevel value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_NUM_LEVEL); - } - else - continue; //level number is 0, no need to continue on this PDV. + Criteria crit = sess.createCriteria(GenericPointDataRecord.class); + // System.out.println("getGpdProductLatestVersion input reportName="+ + // prodName); + Criterion rptnameCrit = Restrictions.eq("productInfo.name", prodName); + crit.add(rptnameCrit); + Criterion reftimeCrit = Restrictions.eq("dataTime.refTime", refTime); + crit.add(reftimeCrit); + List vals = crit.list(); + if (vals.size() > 0) { + for (int i = 0; i < vals.size(); i++) { + // to avoid LazyInitializationException, we have to take care of + // Collection before + // closing session. 
+ GenericPointDataRecord rec = (GenericPointDataRecord) vals + .get(i); + int version = rec.getProductVersion(); + if (version > latestProdVer) + latestProdVer = version; + // System.out.println("latestProdVer= "+ latestProdVer); - List levelList; - if (parameters.contains(GenericPointDataConstants.HDF5_LEVEL_VALUE)){ - levelList = new ArrayList(numLevel); - Number[] num = pdv.getNumberAllLevels(GenericPointDataConstants.HDF5_LEVEL_VALUE);//pdv.getNumberAllLevels(parm,numLevel); - for(Number n: num){ - System.out.println("level value="+n.floatValue()); - if(n.floatValue() == GenericPointDataConstants.GPD_INVALID_FLOAT_VALUE) - //Not a valid level, skip it - continue; - GenericPointDataLevel gpdLevel = new GenericPointDataLevel(); - gpdLevel.setLevelValue(n.floatValue()); - levelList.add(gpdLevel); - } - //level value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_LEVEL_VALUE); - } - else - continue; //no level value, no need to continue on this PDV. + } + } + if (sess != null) { + try { + sess.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + return latestProdVer; + } - int stnCatalogType=ObStation.CAT_TYPE_MESONET; - if (parameters.contains(GenericPointDataConstants.DB_STN_CATALOGTYPE)){ - stnCatalogType = pdv.getInt(GenericPointDataConstants.DB_STN_CATALOGTYPE); - System.out.println("stnCatalogType= "+ stnCatalogType); - //DB_STN_CATALOGTYPE value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_STN_CATALOGTYPE); - } - String rtnstnId = stnId; - if (parameters.contains(GenericPointDataConstants.HDF5_STN_ID)){ - rtnstnId = pdv.getString(GenericPointDataConstants.HDF5_STN_ID); - System.out.println("stnId= "+ rtnstnId); - //stnId is input parameter, can drop it here. - parameters.remove(GenericPointDataConstants.HDF5_STN_ID); - } - double rtnslat = slat; - if (parameters.contains(GenericPointDataConstants.DB_SLAT)){ - rtnslat = pdv.getFloat(GenericPointDataConstants.DB_SLAT); - System.out.println("slat= "+ rtnslat); - //slat value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLAT); - } - double rtnslon = slon; - if (parameters.contains(GenericPointDataConstants.DB_SLON)){ - rtnslon = pdv.getFloat(GenericPointDataConstants.DB_SLON); - System.out.println("slon= "+ rtnslon); - //slon value is retrieved already, so drop it here - parameters.remove(GenericPointDataConstants.DB_SLON); - } - //PDV_id is not returned back to user, so drop it here - parameters.remove(GenericPointDataConstants.HDF5_PDV_ID); + /** + * Chin note: copy from PluginDao, modified code to get GPD HDF5 file path + * correctly. + * + * Purges data from the database for this plugin with the given reference + * time matching the given productKeys. If refTime is null, will purge all + * data associated with the productKeys. Hdf5 must be purged separately as + * most hdf5 files can't be purged with a single reference time. Use the + * passed map to track what needs to be done with hdf5. + * + * @param refTime + * The reftime to delete data for. A null will purge all data for + * the productKeys. + * @param productKeys + * The product key/values to use as a constraint for deletions. + * Should be in key value pairs. + * @param trackHdf5 + * If true will use trackToUri to populate hdf5FileToUriPurged + * map. + * @param trackToUri + * If true will track each URI that needs to be deleted from + * HDF5, if false will only track the hdf5 files that need to be + * deleted. 
+ * @param hdf5FileToUriPurged + * Map to be populated by purgeDataByRefTime of all the hdf5 + * files that need to be updated. If trackToUri is true, each + * file will have the exact data URI's to be removed from each + * file. If trackToUri is false, the map will have a null entry + * for the list and only track the files. + * @return Number of rows deleted from database. + * @throws DataAccessLayerException + */ + @Override + @SuppressWarnings("unchecked") + public int purgeDataByRefTime(Date refTime, + Map productKeys, boolean trackHdf5, + boolean trackToUri, Map> hdf5FileToUriPurged) + throws DataAccessLayerException { + int results = 0; + PurgeLogger.logInfo("Enter purgeDataByRefTime refTime=" + refTime + + " trackHdf5=" + trackHdf5 + " trackToUri=" + trackToUri, + pluginName); + DatabaseQuery dataQuery = new DatabaseQuery(this.daoClass); + if ((hdf5FileToUriPurged != null)) { + for (String key : hdf5FileToUriPurged.keySet()) { + List pairLst = hdf5FileToUriPurged.get(key); + PurgeLogger.logInfo( + "starting purgeDataByRefTime hdf5FileToUriPurged map key=" + + key, pluginName); + if (pairLst != null) { + for (String val : pairLst) { + PurgeLogger.logInfo( + "starting purgeDataByRefTime hdf5FileToUriPurged map val=" + + val, pluginName); + } + } + } + } + if (refTime != null) { + dataQuery.addQueryParam(PURGE_VERSION_FIELD, refTime); + } - stnPd= new GenericPointDataStationProduct(); - stnPd.setProductName(reportName); - stnPd.setRefTime(refTime); - stnPd.setLevelLst(levelList); - stnPd.setProductVersion(productVersion); - stnPd.setSlat(rtnslat); - stnPd.setSlon(rtnslon); - stnPd.getLocation().setStationId(rtnstnId); - stnPd.getLocation().setCatalogType(stnCatalogType); - stnPd.setNumLevel(numLevel); - for(String parm: parameters){ - //these parameters are data parameters and should be 2 dimensional float value per design - //If a new "meta" data is queried, then we should take care of that data specifically before here. - Number[] num = pdv.getNumberAllLevels(parm);//,numLevel); - System.out.println("parm ="+parm); - for(int j=0; j< numLevel ; j++){ - Number n = num[j]; - System.out.println(" value="+n.floatValue()); - GenericPointDataParameter gpdParm = new GenericPointDataParameter(parm,n.floatValue()); - levelList.get(j).getGpdParameters().add(gpdParm); - } - } - } - return stnPd; - } - */ - public int getGpdProductLatestVersion( Date refTime, String prodName){ - int latestProdVer=-1; - Session sess = null; - sess = getSessionFactory().openSession(); - sess.beginTransaction(); + if ((productKeys != null) && (productKeys.size() > 0)) { + for (Map.Entry pair : productKeys.entrySet()) { + dataQuery.addQueryParam(pair.getKey(), pair.getValue()); + PurgeLogger.logInfo(" purgeDataByRefTime product map key=" + + pair.getKey() + " value=" + pair.getValue(), + pluginName); + } + } - Criteria crit = sess.createCriteria(GenericPointDataRecord.class); - //System.out.println("getGpdProductLatestVersion input reportName="+ prodName); - Criterion rptnameCrit = Restrictions.eq("productInfo.name", prodName); - crit.add(rptnameCrit); - Criterion reftimeCrit = Restrictions.eq("dataTime.refTime", refTime); - crit.add(reftimeCrit); - List vals = crit.list(); - if(vals.size() > 0) { - for(int i=0; i < vals.size() ; i++){ - //to avoid LazyInitializationException, we have to take care of Collection before - // closing session. 
- GenericPointDataRecord rec = (GenericPointDataRecord) vals.get(i); - int version = rec.getProductVersion(); - if(version > latestProdVer) - latestProdVer = version; - //System.out.println("latestProdVer= "+ latestProdVer); + List pdos = null; - } - } - if (sess != null) { - try { - sess.close(); - } catch (Exception e) { - e.printStackTrace(); - } - } - return latestProdVer; - } + dataQuery.setMaxResults(500); - /** - * Chin note: copy from PluginDao, modified code to get GPD HDF5 file path correctly. - * - * Purges data from the database for this plugin with the given reference - * time matching the given productKeys. If refTime is null, will purge all - * data associated with the productKeys. Hdf5 must be purged separately as - * most hdf5 files can't be purged with a single reference time. Use the - * passed map to track what needs to be done with hdf5. - * - * @param refTime - * The reftime to delete data for. A null will purge all data for - * the productKeys. - * @param productKeys - * The product key/values to use as a constraint for deletions. - * Should be in key value pairs. - * @param trackHdf5 - * If true will use trackToUri to populate hdf5FileToUriPurged - * map. - * @param trackToUri - * If true will track each URI that needs to be deleted from - * HDF5, if false will only track the hdf5 files that need to be - * deleted. - * @param hdf5FileToUriPurged - * Map to be populated by purgeDataByRefTime of all the hdf5 - * files that need to be updated. If trackToUri is true, each - * file will have the exact data URI's to be removed from each - * file. If trackToUri is false, the map will have a null entry - * for the list and only track the files. - * @return Number of rows deleted from database. - * @throws DataAccessLayerException - */ - @Override - @SuppressWarnings("unchecked") - public int purgeDataByRefTime(Date refTime, - Map productKeys, boolean trackHdf5, - boolean trackToUri, Map> hdf5FileToUriPurged) - throws DataAccessLayerException { + // fields for hdf5 purge + String previousFile = null; + StringBuilder pathBuilder = new StringBuilder(); - int results = 0; - PurgeLogger.logInfo("Enter purgeDataByRefTime refTime=" + refTime +" trackHdf5="+trackHdf5+ " trackToUri="+trackToUri, pluginName); - DatabaseQuery dataQuery = new DatabaseQuery(this.daoClass); - if((hdf5FileToUriPurged != null)){ - for (String key: hdf5FileToUriPurged.keySet()) - { - List pairLst = hdf5FileToUriPurged.get(key); - PurgeLogger.logInfo("starting purgeDataByRefTime hdf5FileToUriPurged map key=" + key, pluginName); - if(pairLst!=null){ - for(String val: pairLst){ - PurgeLogger.logInfo("starting purgeDataByRefTime hdf5FileToUriPurged map val=" + val, pluginName); - } - } - } - } - if (refTime != null) { - dataQuery.addQueryParam(PURGE_VERSION_FIELD, refTime); - } + int loopCount = 0; + do { + pdos = (List) this.queryByCriteria(dataQuery); + if ((pdos != null) && !pdos.isEmpty()) { + this.delete(pdos); - if ((productKeys != null) && (productKeys.size() > 0)) { - for (Map.Entry pair : productKeys.entrySet()) { - dataQuery.addQueryParam(pair.getKey(), pair.getValue()); - PurgeLogger.logInfo(" purgeDataByRefTime product map key=" + pair.getKey()+" value="+ pair.getValue(), pluginName); - } - } + if (trackHdf5 && (hdf5FileToUriPurged != null)) { + for (PluginDataObject pdo : pdos) { + pathBuilder.setLength(0); + GenericPointDataRecord rec = (GenericPointDataRecord) pdo; + String directory = PLUGIN_HDF5_DIR + + rec.getProductInfo().getName(); - List pdos = null; + String dateStr = 
hdfFileDateFormat.format(refTime); + String fileName = this.pluginName + "-" + + rec.getProductInfo().getName() + dateStr + + ".h5"; + String file = directory + File.separator + fileName; + PurgeLogger.logInfo(++loopCount + + " purgeDataByRefTime file=" + file, + pluginName); + if (trackToUri) { + List uriList = hdf5FileToUriPurged + .get(file); + if (uriList == null) { + // sizing to 50 as most data types have numerous + // entries in a file + uriList = new ArrayList(50); + hdf5FileToUriPurged.put(file, uriList); + } + uriList.add(file); + } else { + // only need to track file, tracking last file + // instead of constantly indexing hashMap + if (!file.equals(previousFile)) { + hdf5FileToUriPurged.put(file, null); + previousFile = file; + } + } + } + } - dataQuery.setMaxResults(500); + results += pdos.size(); + } - // fields for hdf5 purge - String previousFile = null; - StringBuilder pathBuilder = new StringBuilder(); - - int loopCount=0; - do { - pdos = (List) this.queryByCriteria(dataQuery); - if ((pdos != null) && !pdos.isEmpty()) { - this.delete(pdos); - - if (trackHdf5 && (hdf5FileToUriPurged != null)) { - for (PluginDataObject pdo : pdos) { - pathBuilder.setLength(0); - GenericPointDataRecord rec = (GenericPointDataRecord)pdo; - String directory = PLUGIN_HDF5_DIR + rec.getProductInfo().getName(); - - String dateStr = hdfFileDateFormat.format(refTime); - String fileName = this.pluginName+ "-"+rec.getProductInfo().getName()+dateStr+".h5"; - String file = directory+ File.separator+ fileName; - PurgeLogger.logInfo(++loopCount+" purgeDataByRefTime file=" + file, pluginName); - if (trackToUri) { - List uriList = hdf5FileToUriPurged - .get(file); - if (uriList == null) { - // sizing to 50 as most data types have numerous - // entries in a file - uriList = new ArrayList(50); - hdf5FileToUriPurged.put(file, uriList); - } - uriList.add(file); - } else { - // only need to track file, tracking last file - // instead of constantly indexing hashMap - if (!file.equals(previousFile)) { - hdf5FileToUriPurged.put(file, null); - previousFile = file; - } - } - } - } - - results += pdos.size(); - } - - } while ((pdos != null) && !pdos.isEmpty()); - if((hdf5FileToUriPurged != null)){ - for (String key: hdf5FileToUriPurged.keySet()) - { - List pairLst = hdf5FileToUriPurged.get(key); - PurgeLogger.logInfo("leaving purgeDataByRefTime hdf5FileToUriPurged map key=" + key, pluginName); - if(pairLst!=null){ - for(String val: pairLst){ - PurgeLogger.logInfo("leaving purgeDataByRefTime hdf5FileToUriPurged map val=" + val, pluginName); - } - } - } - } - return results; - } + } while ((pdos != null) && !pdos.isEmpty()); + if ((hdf5FileToUriPurged != null)) { + for (String key : hdf5FileToUriPurged.keySet()) { + List pairLst = hdf5FileToUriPurged.get(key); + PurgeLogger.logInfo( + "leaving purgeDataByRefTime hdf5FileToUriPurged map key=" + + key, pluginName); + if (pairLst != null) { + for (String val : pairLst) { + PurgeLogger.logInfo( + "leaving purgeDataByRefTime hdf5FileToUriPurged map val=" + + val, pluginName); + } + } + } + } + return results; + } } diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncpafm/src/gov/noaa/nws/ncep/common/dataplugin/ncpafm/dao/NcPafmDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncpafm/src/gov/noaa/nws/ncep/common/dataplugin/ncpafm/dao/NcPafmDao.java index bc9555cf91..e4031284c5 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncpafm/src/gov/noaa/nws/ncep/common/dataplugin/ncpafm/dao/NcPafmDao.java +++ 
b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncpafm/src/gov/noaa/nws/ncep/common/dataplugin/ncpafm/dao/NcPafmDao.java @@ -15,26 +15,24 @@ **/ package gov.noaa.nws.ncep.common.dataplugin.ncpafm.dao; -import java.util.List; - -import javax.xml.bind.JAXBException; - import gov.noaa.nws.ncep.common.dataplugin.ncpafm.NcPafmRecord; -import gov.noaa.nws.ncep.edex.common.dao.NcepDefaultPluginDao; + +import java.util.List; import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.pointdata.PointDataDescription; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; public class NcPafmDao extends PointDataPluginDao { - private PointDataDescription pdd; + private PointDataDescription pdd; - /** * Creates a new ReccoDao - * @throws PluginException + * + * @throws PluginException */ public NcPafmDao(String pluginName) throws PluginException { super(pluginName); @@ -55,19 +53,19 @@ public class NcPafmDao extends PointDataPluginDao { } catch (DataAccessLayerException e) { e.printStackTrace(); } - if((obs != null)&&(obs.size() > 0)) { + if ((obs != null) && (obs.size() > 0)) { report = (NcPafmRecord) obs.get(0); } return report; } - + /** * Queries for to determine if a given data uri exists on the PAFM table. * * @param dataUri * The DataURI to find. * @return An array of objects. If not null, there should only be a single - * element. + * element. */ public Object[] queryDataUriColumn(final String dataUri) { @@ -78,15 +76,16 @@ public class NcPafmDao extends PointDataPluginDao { return results; } - + @Override - public String[] getKeysRequiredForFileName() { //TODO: See if this is correct/complete + public String[] getKeysRequiredForFileName() { // TODO: See if this is + // correct/complete return new String[] { "dataTime.refTime" }; } @Override public String getPointDataFileName(NcPafmRecord p) { - return "ncpafm.h5"; //TODO: "s"? or no "s"? + return "ncpafm.h5"; // TODO: "s"? or no "s"? } @Override @@ -95,20 +94,15 @@ public class NcPafmDao extends PointDataPluginDao { } /* - @Override - public String[] getParameters(File file) throws StorageException, - FileNotFoundException { - try { - // This should be faster than hitting the datastore. - return getPointDataDescription().getParameterNames(); - } catch (Exception e) { - // let super handle it - return super.getParameters(file); - } - } - */ + * @Override public String[] getParameters(File file) throws + * StorageException, FileNotFoundException { try { // This should be faster + * than hitting the datastore. 
return + * getPointDataDescription().getParameterNames(); } catch (Exception e) { // + * let super handle it return super.getParameters(file); } } + */ - public PointDataDescription getPointDataDescription() throws JAXBException { + public PointDataDescription getPointDataDescription() + throws SerializationException { if (pdd == null) { pdd = PointDataDescription.fromStream(this.getClass() .getResourceAsStream("/res/pointdata/ncpafm.xml")); diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncscd/src/gov/noaa/nws/ncep/common/dataplugin/ncscd/dao/NcScdDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncscd/src/gov/noaa/nws/ncep/common/dataplugin/ncscd/dao/NcScdDao.java index faa0084486..8711745fff 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncscd/src/gov/noaa/nws/ncep/common/dataplugin/ncscd/dao/NcScdDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncscd/src/gov/noaa/nws/ncep/common/dataplugin/ncscd/dao/NcScdDao.java @@ -20,6 +20,8 @@ package gov.noaa.nws.ncep.common.dataplugin.ncscd.dao; +import gov.noaa.nws.ncep.common.dataplugin.ncscd.NcScdRecord; + import java.io.InputStream; import java.util.ArrayList; import java.util.List; @@ -33,19 +35,18 @@ import com.raytheon.uf.common.dataquery.db.QueryParam; import com.raytheon.uf.common.datastorage.IDataStore; import com.raytheon.uf.common.pointdata.PointDataDescription; import com.raytheon.uf.common.pointdata.spatial.ObStation; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.pointdata.PointDataDbDescription; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; import com.raytheon.uf.edex.pointdata.spatial.ObStationDao; -import gov.noaa.nws.ncep.common.dataplugin.ncscd.NcScdRecord; - public class NcScdDao extends PointDataPluginDao { /** The station dao */ private ObStationDao obDao = new ObStationDao(); - + /** * Creates a new NcScdDao * @@ -61,6 +62,7 @@ public class NcScdDao extends PointDataPluginDao { // TODO Auto-generated method stub return null; } + public List queryBySpatialBox(double upperLeftLat, double upperLeftLon, double lowerRightLat, double lowerRightLon) throws DataAccessLayerException { @@ -94,7 +96,8 @@ public class NcScdDao extends PointDataPluginDao { query.addQueryParam("location.stationId", icaos, QueryParam.QueryOperand.IN); return queryByCriteria(query); - } + } + /** * Retrieves an ncscd report using the datauri . 
* @@ -141,7 +144,7 @@ public class NcScdDao extends PointDataPluginDao { public void setObDao(ObStationDao obDao) { this.obDao = obDao; } - + @Override public String[] getKeysRequiredForFileName() { return new String[] { "dataTime.refTime" }; @@ -164,7 +167,7 @@ public class NcScdDao extends PointDataPluginDao { hdf5DataDescription = PointDataDescription.fromStream(this .getClass().getResourceAsStream( "/res/pointdata/ncscd.xml")); - } catch (JAXBException e) { + } catch (SerializationException e) { logger.error("Unable to load ncscd Point Data Description", e); } } @@ -195,5 +198,5 @@ public class NcScdDao extends PointDataPluginDao { } return dbDataDescription; } - + } diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.nctaf/src/gov/noaa/nws/ncep/common/dataplugin/nctaf/dao/NcTafDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.nctaf/src/gov/noaa/nws/ncep/common/dataplugin/nctaf/dao/NcTafDao.java index f953b9d000..9e0f2b61c8 100644 --- a/ncep/gov.noaa.nws.ncep.common.dataplugin.nctaf/src/gov/noaa/nws/ncep/common/dataplugin/nctaf/dao/NcTafDao.java +++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.nctaf/src/gov/noaa/nws/ncep/common/dataplugin/nctaf/dao/NcTafDao.java @@ -4,33 +4,34 @@ **/ package gov.noaa.nws.ncep.common.dataplugin.nctaf.dao; +import gov.noaa.nws.ncep.common.dataplugin.nctaf.NcTafRecord; + import java.io.InputStream; import java.util.ArrayList; import java.util.List; import javax.xml.bind.JAXBException; -import gov.noaa.nws.ncep.common.dataplugin.nctaf.NcTafRecord; - import com.raytheon.uf.common.dataplugin.PluginException; import com.raytheon.uf.common.dataplugin.persist.IPersistable; import com.raytheon.uf.common.dataquery.db.QueryParam; import com.raytheon.uf.common.datastorage.IDataStore; +import com.raytheon.uf.common.pointdata.PointDataDescription; +import com.raytheon.uf.common.pointdata.spatial.ObStation; +import com.raytheon.uf.common.serialization.SerializationException; import com.raytheon.uf.edex.database.DataAccessLayerException; import com.raytheon.uf.edex.database.query.DatabaseQuery; import com.raytheon.uf.edex.pointdata.PointDataDbDescription; import com.raytheon.uf.edex.pointdata.PointDataPluginDao; import com.raytheon.uf.edex.pointdata.spatial.ObStationDao; -import com.raytheon.uf.common.pointdata.PointDataDescription; -import com.raytheon.uf.common.pointdata.spatial.ObStation; /** * Set of DAO methods for TAF data. * *
- *
+ * 
  * SOFTWARE HISTORY
- *
+ * 
  * Date         Ticket#    Engineer    Description
  * ------------ ---------- ----------- --------------------------
  * 09/09/2011   458			sgurung	   Initial Creation from Raytheon's taf plugin
@@ -44,12 +45,13 @@ import com.raytheon.uf.common.pointdata.spatial.ObStation;
 
 public class NcTafDao extends PointDataPluginDao<NcTafRecord> {
 
-	/** The station dao */
+    /** The station dao */
     private ObStationDao obDao = new ObStationDao();
-      
+
     /**
      * Creates a new NcTafDao
-     * @throws PluginException 
+     * 
+     * @throws PluginException
      */
     public NcTafDao(String pluginName) throws PluginException {
         super(pluginName);
@@ -61,7 +63,7 @@ public class NcTafDao extends PointDataPluginDao {
         // TODO Auto-generated method stub
         return null;
     }
-    
+
     public List queryBySpatialBox(double upperLeftLat, double upperLeftLon,
             double lowerRightLat, double lowerRightLon)
             throws DataAccessLayerException {
@@ -95,8 +97,8 @@ public class NcTafDao extends PointDataPluginDao {
         query.addQueryParam("location.stationId", icaos,
                 QueryParam.QueryOperand.IN);
         return queryByCriteria(query);
-    } 
-    
+    }
+
     /**
     * Retrieves an nctaf report using the dataURI.
      * 
@@ -105,26 +107,26 @@ public class NcTafDao extends PointDataPluginDao {
      * @return The report record if it exists.
      */
     public NcTafRecord queryByDataURI(String dataURI) {
-    	NcTafRecord report = null;
+        NcTafRecord report = null;
         List obs = null;
         try {
             obs = queryBySingleCriteria("dataURI", dataURI);
         } catch (DataAccessLayerException e) {
             e.printStackTrace();
         }
-        if((obs != null)&&(obs.size() > 0)) {
+        if ((obs != null) && (obs.size() > 0)) {
             report = (NcTafRecord) obs.get(0);
         }
         return report;
     }
-    
+
     /**
     * Queries to determine if a given data uri exists on the nctaf table.
      * 
      * @param dataUri
      *            The DataURI to find.
      * @return An array of objects. If not null, there should only be a single
-     * element.
+     *         element.
      */
     public Object[] queryDataUriColumn(final String dataUri) {
 
@@ -135,7 +137,7 @@ public class NcTafDao extends PointDataPluginDao {
 
         return results;
     }
-    
+
     public ObStationDao getObDao() {
         return obDao;
     }
@@ -143,7 +145,7 @@ public class NcTafDao extends PointDataPluginDao {
     public void setObDao(ObStationDao obDao) {
         this.obDao = obDao;
     }
-    
+
     @Override
     public String[] getKeysRequiredForFileName() {
         return new String[] { "dataTime.refTime" };
@@ -159,15 +161,17 @@ public class NcTafDao extends PointDataPluginDao {
         return new NcTafRecord();
     }
 
-    public PointDataDescription getPointDataDescription() throws JAXBException {
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
         if (hdf5DataDescription == null) {
-        	hdf5DataDescription = PointDataDescription.fromStream(this.getClass()
-                    .getResourceAsStream("/res/pointdata/nctaf.xml"));
-            
+            hdf5DataDescription = PointDataDescription
+                    .fromStream(this.getClass().getResourceAsStream(
+                            "/res/pointdata/nctaf.xml"));
+
         }
         return hdf5DataDescription;
     }
-    
+
     @Override
     public PointDataDbDescription getPointDataDbDescription() {
         if (dbDataDescription == null) {
@@ -185,6 +189,5 @@ public class NcTafDao extends PointDataPluginDao {
         }
         return dbDataDescription;
     }
-    
 
 }
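For reference, a minimal caller sketch (not part of this patch) showing the effect of the signature change above: callers of NcTafDao.getPointDataDescription() now catch SerializationException rather than JAXBException. The NcTafDescriptionExample class, the dao parameter, and the printed message are illustrative; the NcTafDao import is omitted.

import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.serialization.SerializationException;

public class NcTafDescriptionExample {
    // Hypothetical helper: prints how many parameters the nctaf description defines.
    public static void logParameterCount(NcTafDao dao) {
        try {
            PointDataDescription pdd = dao.getPointDataDescription();
            System.out.println("nctaf parameters: "
                    + pdd.getParameterNames().length);
        } catch (SerializationException e) {
            // SerializationException is the new checked type surfaced to callers
            e.printStackTrace();
        }
    }
}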
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncuair/src/gov/noaa/nws/ncep/common/dataplugin/ncuair/dao/NcUairDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncuair/src/gov/noaa/nws/ncep/common/dataplugin/ncuair/dao/NcUairDao.java
index 9d933b755c..32bb9c56f5 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.ncuair/src/gov/noaa/nws/ncep/common/dataplugin/ncuair/dao/NcUairDao.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.ncuair/src/gov/noaa/nws/ncep/common/dataplugin/ncuair/dao/NcUairDao.java
@@ -24,123 +24,120 @@
 
 package gov.noaa.nws.ncep.common.dataplugin.ncuair.dao;
 
-import java.io.InputStream;
-import java.util.List;
-import javax.xml.bind.JAXBException;
-import com.raytheon.uf.common.dataplugin.PluginException;
-import com.raytheon.uf.common.pointdata.PointDataDescription;
-import com.raytheon.uf.edex.database.DataAccessLayerException;
-import com.raytheon.uf.edex.pointdata.PointDataDbDescription;
-
 import gov.noaa.nws.ncep.common.dataplugin.ncuair.NcUairRecord;
 import gov.noaa.nws.ncep.edex.common.dao.NcepPointDataPluginDao;
 
+import java.io.InputStream;
+import java.util.List;
+
+import javax.xml.bind.JAXBException;
+
+import com.raytheon.uf.common.dataplugin.PluginException;
+import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
+import com.raytheon.uf.edex.database.DataAccessLayerException;
+import com.raytheon.uf.edex.pointdata.PointDataDbDescription;
+
 public class NcUairDao extends NcepPointDataPluginDao {
 
-	   private PointDataDescription pdd;
-	   
-	    /**
-	     * Creates a new ReccoDao
-	     * 
-	     * @throws PluginException
-	     */
-	    public NcUairDao(String pluginName) throws PluginException {
-	        super(pluginName);
-	    }
+    private PointDataDescription pdd;
 
-	    /**
-	     * Retrieves an sfcobs report using the datauri .
-	     * 
-	     * @param dataURI
-	     *            The dataURI to match against.
-	     * @return The report record if it exists.
-	     */
-	    public NcUairRecord queryByDataURI(String dataURI) {
-	        NcUairRecord report = null;
-	        List obs = null;
-	        try {
-	            obs = queryBySingleCriteria("dataURI", dataURI);
-	        } catch (DataAccessLayerException e) {
-	            e.printStackTrace();
-	        }
-	        if ((obs != null) && (obs.size() > 0)) {
-	            report = (NcUairRecord) obs.get(0);
-	        }
-	        return report;
-	    }
+    /**
+     * Creates a new NcUairDao
+     * 
+     * @throws PluginException
+     */
+    public NcUairDao(String pluginName) throws PluginException {
+        super(pluginName);
+    }
 
-	    /**
-	     * Queries for to determine if a given data uri exists on the sfcobs table.
-	     * 
-	     * @param dataUri
-	     *            The DataURI to find.
-	     * @return An array of objects. If not null, there should only be a single
-	     *         element.
-	     */
-	    public Object[] queryDataUriColumn(final String dataUri) {
+    /**
+     * Retrieves an ncuair report using the dataURI.
+     * 
+     * @param dataURI
+     *            The dataURI to match against.
+     * @return The report record if it exists.
+     */
+    public NcUairRecord queryByDataURI(String dataURI) {
+        NcUairRecord report = null;
+        List obs = null;
+        try {
+            obs = queryBySingleCriteria("dataURI", dataURI);
+        } catch (DataAccessLayerException e) {
+            e.printStackTrace();
+        }
+        if ((obs != null) && (obs.size() > 0)) {
+            report = (NcUairRecord) obs.get(0);
+        }
+        return report;
+    }
 
-	        String sql = "select datauri from awips.ncuair where datauri='"
-	                + dataUri + "';";
+    /**
+     * Queries to determine if a given data uri exists on the ncuair table.
+     * 
+     * @param dataUri
+     *            The DataURI to find.
+     * @return An array of objects. If not null, there should only be a single
+     *         element.
+     */
+    public Object[] queryDataUriColumn(final String dataUri) {
 
-	        Object[] results = executeSQLQuery(sql);
+        String sql = "select datauri from awips.ncuair where datauri='"
+                + dataUri + "';";
 
-	        return results;
-	    }
-	    
-	    @Override
-	    public String[] getKeysRequiredForFileName() {
-	        return new String[] { "dataTime.refTime" };
-	    }
+        Object[] results = executeSQLQuery(sql);
 
-	    @Override
-	    public String getPointDataFileName(NcUairRecord p) {
-	        return "ncuairs.h5";
-	    }
+        return results;
+    }
 
-	    @Override
-	    public NcUairRecord newObject() {
-	        return new NcUairRecord();
-	    }
+    @Override
+    public String[] getKeysRequiredForFileName() {
+        return new String[] { "dataTime.refTime" };
+    }
 
-	    /*
-	    @Override
-	    public String[] getParameters(File file) throws StorageException,
-	            FileNotFoundException {
-	        try {
-	            // This should be faster than hitting the datastore.
-	            return getPointDataDescription().getParameterNames();
-	        } catch (Exception e) {
-	            // let super handle it
-	            return super.getParameters(file);
-	        }
-	    }
-	    */
+    @Override
+    public String getPointDataFileName(NcUairRecord p) {
+        return "ncuairs.h5";
+    }
 
-	    public PointDataDescription getPointDataDescription() throws JAXBException {
-	        if (pdd == null) {
-	            pdd = PointDataDescription.fromStream(this.getClass()
-	                    .getResourceAsStream("/res/pointdata/ncuair.xml"));
-	        }
-	        return pdd;
-	    }
-	   
-	    @Override
-	    public PointDataDbDescription getPointDataDbDescription() {
-	        if (dbDataDescription == null) {
-	            InputStream stream = this.getClass().getResourceAsStream(
-	                    "/res/pointdata/ncuairdb.xml");
-	            if (stream != null) {
-	                try {
-	                    dbDataDescription = PointDataDbDescription
-	                            .fromStream(stream);
-	                } catch (JAXBException e) {
-	                    logger.error("Unable to load " + pluginName
-	                            + " Point Data Database Description", e);
-	                }
-	            }
-	        }
-	        return dbDataDescription;
-	    }
-	    
-	    
-	}
+    @Override
+    public NcUairRecord newObject() {
+        return new NcUairRecord();
+    }
+
+    /*
+     * @Override public String[] getParameters(File file) throws
+     * StorageException, FileNotFoundException { try { // This should be faster
+     * than hitting the datastore. return
+     * getPointDataDescription().getParameterNames(); } catch (Exception e) { //
+     * let super handle it return super.getParameters(file); } }
+     */
+
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
+        if (pdd == null) {
+            pdd = PointDataDescription.fromStream(this.getClass()
+                    .getResourceAsStream("/res/pointdata/ncuair.xml"));
+        }
+        return pdd;
+    }
+
+    @Override
+    public PointDataDbDescription getPointDataDbDescription() {
+        if (dbDataDescription == null) {
+            InputStream stream = this.getClass().getResourceAsStream(
+                    "/res/pointdata/ncuairdb.xml");
+            if (stream != null) {
+                try {
+                    dbDataDescription = PointDataDbDescription
+                            .fromStream(stream);
+                } catch (JAXBException e) {
+                    logger.error("Unable to load " + pluginName
+                            + " Point Data Database Description", e);
+                }
+            }
+        }
+        return dbDataDescription;
+    }
+
+}
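The lazy, load-once pattern above recurs in each DAO touched by this patch. A minimal standalone sketch of that pattern, assuming only PointDataDescription.fromStream(InputStream) and the SerializationException it now declares; the DescriptionCache class name and the resourcePath parameter are illustrative.

import java.io.InputStream;

import com.raytheon.uf.common.pointdata.PointDataDescription;
import com.raytheon.uf.common.serialization.SerializationException;

public class DescriptionCache {

    private PointDataDescription pdd;

    // Load the description from a classpath resource once, then reuse it.
    public synchronized PointDataDescription get(String resourcePath)
            throws SerializationException {
        if (pdd == null) {
            // e.g. "/res/pointdata/ncuair.xml", as in NcUairDao above
            InputStream stream = getClass().getResourceAsStream(resourcePath);
            pdd = PointDataDescription.fromStream(stream);
        }
        return pdd;
    }
}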
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.pirep/src/gov/noaa/nws/ncep/common/dataplugin/pirep/dao/PirepDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.pirep/src/gov/noaa/nws/ncep/common/dataplugin/pirep/dao/PirepDao.java
index 174b85814c..1cfe0fb39f 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.pirep/src/gov/noaa/nws/ncep/common/dataplugin/pirep/dao/PirepDao.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.pirep/src/gov/noaa/nws/ncep/common/dataplugin/pirep/dao/PirepDao.java
@@ -10,12 +10,11 @@ import gov.noaa.nws.ncep.common.dataplugin.pirep.PirepRecord;
 
 import java.util.List;
 
-import javax.xml.bind.JAXBException;
-
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.dataplugin.persist.IPersistable;
 import com.raytheon.uf.common.datastorage.IDataStore;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 
@@ -40,20 +39,22 @@ import com.raytheon.uf.edex.pointdata.PointDataPluginDao;
 public class PirepDao extends PointDataPluginDao {
 
     private PointDataDescription pdd;
+
     /**
      * Creates a new PirepDao
-     *
+     * 
      * @throws PluginException
      */
     public PirepDao(String pluginName) throws PluginException {
         super(pluginName);
     }
+
     @Override
     protected IDataStore populateDataStore(IDataStore dataStore,
             IPersistable obj) throws Exception {
         return null;
     }
-    
+
     /**
      * Retrieves an Pirep report using the datauri .
      * 
@@ -92,44 +93,39 @@ public class PirepDao extends PointDataPluginDao {
 
         return results;
     }
-	@Override
-	public String[] getKeysRequiredForFileName() {
-		return new String[] { "dataTime.refTime" };
-	}
 
-	@Override
-	public PirepRecord newObject() {
-		return new PirepRecord();
-	}
-
-	@Override
-	public String getPointDataFileName(PirepRecord p) {
-        return "pirep.h5";
-	}
-	
-	/*
     @Override
-    public String[] getParameters(File file) throws StorageException,
-            FileNotFoundException {
-    	
-        try {
-            // This should be faster than hitting the datastore.
-            return getPointDataDescription().getParameterNames();
-        } catch (Exception e) {
-            // let super handle it
-            return super.getParameters(file);
-        }
+    public String[] getKeysRequiredForFileName() {
+        return new String[] { "dataTime.refTime" };
     }
-    */
 
-    public PointDataDescription getPointDataDescription() throws JAXBException {
-    	if (pdd == null) {
+    @Override
+    public PirepRecord newObject() {
+        return new PirepRecord();
+    }
 
-    		pdd = PointDataDescription.fromStream(this.getClass()
+    @Override
+    public String getPointDataFileName(PirepRecord p) {
+        return "pirep.h5";
+    }
+
+    /*
+     * @Override public String[] getParameters(File file) throws
+     * StorageException, FileNotFoundException {
+     * 
+     * try { // This should be faster than hitting the datastore. return
+     * getPointDataDescription().getParameterNames(); } catch (Exception e) { //
+     * let super handle it return super.getParameters(file); } }
+     */
+
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
+        if (pdd == null) {
+
+            pdd = PointDataDescription.fromStream(this.getClass()
                     .getResourceAsStream("/res/pointdata/pirep.xml"));
-    	}
-    	return pdd;
+        }
+        return pdd;
     }
 
 }
-
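A hypothetical usage sketch for the DAO above, assuming the queryByDataURI method documented in this file; the plugin name "pirep", the example dataURI string, and the PirepLookupExample class are illustrative only.

import gov.noaa.nws.ncep.common.dataplugin.pirep.PirepRecord;
import gov.noaa.nws.ncep.common.dataplugin.pirep.dao.PirepDao;

import com.raytheon.uf.common.dataplugin.PluginException;

public class PirepLookupExample {
    public static void main(String[] args) throws PluginException {
        // Plugin name is assumed for illustration.
        PirepDao dao = new PirepDao("pirep");
        // queryByDataURI returns null when no matching record exists.
        PirepRecord report = dao.queryByDataURI("/pirep/example-data-uri");
        System.out.println(report != null ? "found: " + report : "no match");
    }
}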
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwh/src/gov/noaa/nws/ncep/common/dataplugin/sgwh/dao/SgwhDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwh/src/gov/noaa/nws/ncep/common/dataplugin/sgwh/dao/SgwhDao.java
index 31ff863243..4855513db0 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwh/src/gov/noaa/nws/ncep/common/dataplugin/sgwh/dao/SgwhDao.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwh/src/gov/noaa/nws/ncep/common/dataplugin/sgwh/dao/SgwhDao.java
@@ -16,22 +16,24 @@
  **/
 package gov.noaa.nws.ncep.common.dataplugin.sgwh.dao;
 
-import java.util.List;
-
-import javax.xml.bind.JAXBException;
-
-import com.raytheon.uf.common.dataplugin.PluginException;
-import com.raytheon.uf.common.pointdata.PointDataDescription;
-import com.raytheon.uf.edex.database.DataAccessLayerException;
 import gov.noaa.nws.ncep.common.dataplugin.sgwh.SgwhRecord;
 import gov.noaa.nws.ncep.edex.common.dao.NcepPointDataPluginDao;
 
+import java.util.List;
+
+import com.raytheon.uf.common.dataplugin.PluginException;
+import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
+import com.raytheon.uf.edex.database.DataAccessLayerException;
+
 public class SgwhDao extends NcepPointDataPluginDao {
 
-	private PointDataDescription pdd;
+    private PointDataDescription pdd;
+
     /**
      * Creates a new ReccoDao
-     * @throws PluginException 
+     * 
+     * @throws PluginException
      */
     public SgwhDao(String pluginName) throws PluginException {
         super(pluginName);
@@ -52,30 +54,32 @@ public class SgwhDao extends NcepPointDataPluginDao {
         } catch (DataAccessLayerException e) {
             e.printStackTrace();
         }
-        if((obs != null)&&(obs.size() > 0)) {
+        if ((obs != null) && (obs.size() > 0)) {
             report = (SgwhRecord) obs.get(0);
         }
         return report;
     }
-    
+
     /**
      * Queries for to determine if a given data uri exists on the sgwh table.
      * 
      * @param dataUri
      *            The DataURI to find.
      * @return An array of objects. If not null, there should only be a single
-     * element.
+     *         element.
      */
     public Object[] queryDataUriColumn(final String dataUri) {
 
-        String sql = "select datauri from awips.sgwh where datauri='"
-                + dataUri + "';";
+        String sql = "select datauri from awips.sgwh where datauri='" + dataUri
+                + "';";
 
         Object[] results = executeSQLQuery(sql);
 
         return results;
     }
-    public PointDataDescription getPointDataDescription() throws JAXBException {
+
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
         if (pdd == null) {
             pdd = PointDataDescription.fromStream(this.getClass()
                     .getResourceAsStream("/res/pointdata/sgwh.xml"));
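A minimal sketch, meant to sit inside one of these DAO classes and assuming queryBySingleCriteria is visible to subclasses, of a dataURI existence check that reuses the criteria-based query instead of concatenating the URI into raw SQL as queryDataUriColumn does; the dataUriExists name is illustrative.

    // Hypothetical alternative to queryDataUriColumn: query by criteria rather
    // than by hand-built SQL.
    public boolean dataUriExists(String dataUri) throws DataAccessLayerException {
        List<?> hits = queryBySingleCriteria("dataURI", dataUri);
        return (hits != null) && !hits.isEmpty();
    }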
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwhv/src/gov/noaa/nws/ncep/common/dataplugin/sgwhv/dao/SgwhvDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwhv/src/gov/noaa/nws/ncep/common/dataplugin/sgwhv/dao/SgwhvDao.java
index a07218e0f6..6e53cd2269 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwhv/src/gov/noaa/nws/ncep/common/dataplugin/sgwhv/dao/SgwhvDao.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.sgwhv/src/gov/noaa/nws/ncep/common/dataplugin/sgwhv/dao/SgwhvDao.java
@@ -16,23 +16,23 @@
  **/
 package gov.noaa.nws.ncep.common.dataplugin.sgwhv.dao;
 
-import java.util.List;
-
-import javax.xml.bind.JAXBException;
-
 import gov.noaa.nws.ncep.common.dataplugin.sgwhv.SgwhvRecord;
 import gov.noaa.nws.ncep.edex.common.dao.NcepPointDataPluginDao;
 
+import java.util.List;
+
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 
-public class SgwhvDao extends NcepPointDataPluginDao  {
-	private PointDataDescription pdd;
-    
+public class SgwhvDao extends NcepPointDataPluginDao {
+    private PointDataDescription pdd;
+
     /**
      * Creates a new ReccoDao
-     * @throws PluginException 
+     * 
+     * @throws PluginException
      */
     public SgwhvDao(String pluginName) throws PluginException {
         super(pluginName);
@@ -53,19 +53,20 @@ public class SgwhvDao extends NcepPointDataPluginDao  {
         } catch (DataAccessLayerException e) {
             e.printStackTrace();
         }
-        if((obs != null)&&(obs.size() > 0)) {
+        if ((obs != null) && (obs.size() > 0)) {
             report = (SgwhvRecord) obs.get(0);
         }
         return report;
     }
-    
+
     /**
-     * Queries for to determine if a given data uri exists on the Bufrsgwhv table.
+     * Queries to determine if a given data uri exists on the Bufrsgwhv
+     * table.
      * 
      * @param dataUri
      *            The DataURI to find.
      * @return An array of objects. If not null, there should only be a single
-     * element.
+     *         element.
      */
     public Object[] queryDataUriColumn(final String dataUri) {
 
@@ -76,13 +77,16 @@ public class SgwhvDao extends NcepPointDataPluginDao  {
 
         return results;
     }
-    public PointDataDescription getPointDataDescription() throws JAXBException {
+
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
         if (pdd == null) {
             pdd = PointDataDescription.fromStream(this.getClass()
                     .getResourceAsStream("/res/pointdata/sgwhv.xml"));
         }
         return pdd;
     }
+
     @Override
     public String[] getKeysRequiredForFileName() {
         return new String[] { "dataTime.refTime" };
diff --git a/ncep/gov.noaa.nws.ncep.common.dataplugin.ssha/src/gov/noaa/nws/ncep/common/dataplugin/ssha/dao/SshaDao.java b/ncep/gov.noaa.nws.ncep.common.dataplugin.ssha/src/gov/noaa/nws/ncep/common/dataplugin/ssha/dao/SshaDao.java
index 24c5b988a4..b0aec08e80 100644
--- a/ncep/gov.noaa.nws.ncep.common.dataplugin.ssha/src/gov/noaa/nws/ncep/common/dataplugin/ssha/dao/SshaDao.java
+++ b/ncep/gov.noaa.nws.ncep.common.dataplugin.ssha/src/gov/noaa/nws/ncep/common/dataplugin/ssha/dao/SshaDao.java
@@ -16,23 +16,23 @@
  **/
 package gov.noaa.nws.ncep.common.dataplugin.ssha.dao;
 
-import java.util.List;
-
-import javax.xml.bind.JAXBException;
-
 import gov.noaa.nws.ncep.common.dataplugin.ssha.SshaRecord;
 import gov.noaa.nws.ncep.edex.common.dao.NcepPointDataPluginDao;
 
+import java.util.List;
+
 import com.raytheon.uf.common.dataplugin.PluginException;
 import com.raytheon.uf.common.pointdata.PointDataDescription;
+import com.raytheon.uf.common.serialization.SerializationException;
 import com.raytheon.uf.edex.database.DataAccessLayerException;
 
-public class SshaDao extends NcepPointDataPluginDao  {
-	private PointDataDescription pdd;
-    
+public class SshaDao extends NcepPointDataPluginDao {
+    private PointDataDescription pdd;
+
     /**
      * Creates a new BufrsshaDao
-     * @throws PluginException 
+     * 
+     * @throws PluginException
      */
     public SshaDao(String pluginName) throws PluginException {
         super(pluginName);
@@ -53,37 +53,41 @@ public class SshaDao extends NcepPointDataPluginDao  {
         } catch (DataAccessLayerException e) {
             e.printStackTrace();
         }
-        if((obs != null)&&(obs.size() > 0)) {
+        if ((obs != null) && (obs.size() > 0)) {
             report = (SshaRecord) obs.get(0);
         }
         return report;
     }
-    
-    /**
-     * Queries for to determine if a given data uri exists on the Bufrssha table.import gov.noaa.nws.ncep.common.dataplugin.sgwh.SgwhRecord;
 
+    /**
+     * Queries to determine if a given data uri exists on the Bufrssha
+     * table.
+     * 
      * 
      * @param dataUri
      *            The DataURI to find.
      * @return An array of objects. If not null, there should only be a single
-     * element.
+     *         element.
      */
     public Object[] queryDataUriColumn(final String dataUri) {
 
-        String sql = "select datauri from awips.ssha where datauri='"
-                + dataUri + "';";
+        String sql = "select datauri from awips.ssha where datauri='" + dataUri
+                + "';";
 
         Object[] results = executeSQLQuery(sql);
 
         return results;
     }
-    public PointDataDescription getPointDataDescription() throws JAXBException {
+
+    public PointDataDescription getPointDataDescription()
+            throws SerializationException {
         if (pdd == null) {
             pdd = PointDataDescription.fromStream(this.getClass()
                     .getResourceAsStream("/res/pointdata/ssha.xml"));
         }
         return pdd;
     }
+
     @Override
     public String[] getKeysRequiredForFileName() {
         return new String[] { "dataTime.refTime" };