paramMap = new HashMap<>(8, 1);
+ paramMap.put("avgVal", avgVal);
+ paramMap.put("qc", DEFAULT_QC_VALUE);
+ paramMap.put("validDate", validDate);
+ paramMap.put("postDate", postDate);
+ paramMap.put("areaId", areaId);
+ paramMap.put("dur", dur);
+ paramMap.put("validDate", validDate);
+ paramMap.put("basisTime", basisTime);
dao.executeSQLUpdate(updateSql);
}
@@ -317,8 +325,8 @@ public class GAFFDB {
public Object[] getLineSegs(String areaId) {
CoreDao dao = null;
dao = new CoreDao(DaoConfig.forDatabase(IHFS));
- Object[] rs = dao.executeSQLQuery(LINESEGS_QUERY + " where area_Id = '"
- + areaId + "'");
+ Object[] rs = dao.executeSQLQuery(LINESEGS_QUERY
+ + " where area_Id = :areaId", "areaId", areaId);
return rs;
}
diff --git a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java
index 7d51b1072c..e9deaff095 100644
--- a/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java
+++ b/edexOsgi/com.raytheon.uf.edex.ohd/src/com/raytheon/uf/edex/ohd/pproc/MpeLightningSrv.java
@@ -32,8 +32,6 @@ import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.LightningConstants;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
-import com.raytheon.uf.common.dataquery.db.QueryResult;
-import com.raytheon.uf.common.dataquery.db.QueryResultRow;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
@@ -49,26 +47,26 @@ import com.vividsolutions.jts.geom.Coordinate;
/**
* Service implementation for gathering the lightning datasets from files in
* HDF5 format and inserting them into the ifhs lightning table.
- *
+ *
*
- *
+ *
* SOFTWARE HISTORY
- * Date Ticket# Engineer Description
+ * Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
- * Jan 06, 2011 5951 jnjanga Initial creation
- * Jan 10, 2013 1448 bgonzale Added app context check in runOnSchedule().
- * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
- * Mar 28, 2014 2952 mpduff Changed to use UFStatus for logging.
- * Jun 05, 2014 3226 bclement BinLightning refactor
- * Aug 20, 2014 3549 njensen Fixed spelling in exceptions
- * Sep 17, 2014 3015 bclement improved exception handling
- * Dec 04, 2014 3015 njensen Corrected usage of Coordinate(x, y)
- * Feb 25, 2015 3992 nabowle Limit getMostRecentStrikes to NLDN.
- * Deduplicate lightning data in a
- * single BinLightningRecord.
- *
+ * Jan 06, 2011 5951 jnjanga Initial creation
+ * Jan 10, 2013 1448 bgonzale Added app context check in runOnSchedule().
+ * Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
+ * Mar 28, 2014 2952 mpduff Changed to use UFStatus for logging.
+ * Jun 05, 2014 3226 bclement BinLightning refactor
+ * Aug 20, 2014 3549 njensen Fixed spelling in exceptions
+ * Sep 17, 2014 3015 bclement improved exception handling
+ * Dec 04, 2014 3015 njensen Corrected usage of Coordinate(x, y)
+ * Feb 25, 2015 3992 nabowle Limit getMostRecentStrikes to NLDN.
+ * Deduplicate lightning data in a
+ * single BinLightningRecord.
+ * Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
*
- *
+ *
* @author jnjanga
* @version 1.0
*/
@@ -96,27 +94,30 @@ public class MpeLightningSrv {
/**
* Check the metadata Database for new lightning entries.
- *
+ *
* @return rows returned from the query
*/
- private QueryResultRow[] getMostRecentStrikes() throws EdexException {
- QueryResult rs = null;
+ private Object[] getMostRecentStrikes() throws EdexException {
CoreDao coreDao = new CoreDao(DaoConfig.DEFAULT);
+ /*
+ * TODO: This can miss data, should use insertTime and track last pull
+ * time
+ */
final String lgtSQL = "select datauri from binlightning "
+ "where reftime > (now()- interval \'30 minutes \')"
- + "and source = '" + LightningConstants.DEFAULT_SOURCE + "'";
+ + "and source = :source";
try {
- rs = (QueryResult) coreDao.executeNativeSql(lgtSQL, true);
+ return coreDao.executeSQLQuery(lgtSQL, "source",
+ LightningConstants.DEFAULT_SOURCE);
} catch (Exception e) {
throw new EdexException("Couldn't get BinLightning records from"
+ " metadata database. Failed SQL: " + lgtSQL, e);
}
- return rs.getRows();
}
/**
* Inserts a single record into ihfs's lightning table.
- *
+ *
* @param dataURI
* @throws EdexException
*/
@@ -231,48 +232,47 @@ public class MpeLightningSrv {
/**
* Populates ifhs' lightning table with the resultset obtained from querying
* metadata's binlighting table.
- *
+ *
* @param rows
* @throws EdexException
*/
- private void ifhsInsertMostRecentStrikes(QueryResultRow[] rows)
+ private void ifhsInsertMostRecentStrikes(Object[] rows)
throws EdexException {
if (rows.length == 0) {
logger.info("No new lightning records to insert in ifhs. ");
}
- for (QueryResultRow row : rows) {
- String dataURI = (String) row.getColumn(0);
+ for (Object col : rows) {
+ String dataURI = (String) col;
ifhsInsertLightRecord(dataURI);
}
}
/**
* run at scheduled timer.
- *
+ *
* @throws EdexException
*/
public void runOnSchedule() throws EdexException {
if (!AppsDefaults.getInstance().setAppContext(this)) {
return;
}
- QueryResultRow[] rows = getMostRecentStrikes();
+ Object[] rows = getMostRecentStrikes();
ifhsInsertMostRecentStrikes(rows);
}
-
/**
* Class to simplify deduplicating lightning data in a
* {@link BinLightningRecord} that generate the same ihfs lightning primary
* key.
*/
private static class LightningData {
- private short x;
+ private final short x;
- private short y;
+ private final short y;
- private long obstime;
+ private final long obstime;
- private byte strikes;
+ private final byte strikes;
public LightningData(short x, short y, long time, byte strikes) {
super();
@@ -310,7 +310,6 @@ public class MpeLightningSrv {
return strikes;
}
-
/**
* Generate a hashcode using the ihfs primary key fields: x, y, and
* time.
@@ -319,9 +318,9 @@ public class MpeLightningSrv {
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result + (int) (obstime ^ (obstime >>> 32));
- result = prime * result + x;
- result = prime * result + y;
+ result = (prime * result) + (int) (obstime ^ (obstime >>> 32));
+ result = (prime * result) + x;
+ result = (prime * result) + y;
return result;
}
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.grid/META-INF/MANIFEST.MF
index 0f1ab86d17..1fc6f5bcc9 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/META-INF/MANIFEST.MF
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/META-INF/MANIFEST.MF
@@ -32,4 +32,5 @@ Require-Bundle: com.raytheon.uf.common.parameter;bundle-version="1.0.0",
org.springframework;bundle-version="2.5.6",
javax.measure;bundle-version="1.0.0",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
- com.raytheon.uf.common.comm;bundle-version="1.12.1174"
+ com.raytheon.uf.common.comm;bundle-version="1.12.1174",
+ org.hibernate
diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/dao/GridDao.java b/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/dao/GridDao.java
index a22ec5b8b7..eb16b29325 100644
--- a/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/dao/GridDao.java
+++ b/edexOsgi/com.raytheon.uf.edex.plugin.grid/src/com/raytheon/uf/edex/plugin/grid/dao/GridDao.java
@@ -28,6 +28,11 @@ import java.util.Set;
import javax.measure.converter.UnitConverter;
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.springframework.transaction.TransactionStatus;
+import org.springframework.transaction.support.TransactionCallback;
+
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
@@ -37,7 +42,6 @@ import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
-import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
@@ -70,7 +74,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
* Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore.
* Mar 27, 2013 1821 bsteffen Speed up GridInfoCache.
* Mar 20, 2013 2910 bsteffen Clear dataURI after loading cached info.
- *
+ * Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
*
*
* @author bphillip
@@ -99,7 +103,7 @@ public class GridDao extends PluginDao {
GridRecord gridRec = (GridRecord) obj;
Object messageData = gridRec.getMessageData();
GridCoverage location = gridRec.getLocation();
- if (location != null && messageData instanceof float[]) {
+ if ((location != null) && (messageData instanceof float[])) {
long[] sizes = new long[] { location.getNx(), location.getNy() };
String abbrev = gridRec.getParameter().getAbbreviation();
String group = gridRec.getDataURI();
@@ -109,8 +113,7 @@ public class GridDao extends PluginDao {
datasetName = abbrev;
}
AbstractStorageRecord storageRecord = new FloatDataRecord(
- datasetName,
- group, (float[]) messageData, 2, sizes);
+ datasetName, group, (float[]) messageData, 2, sizes);
storageRecord.setCorrelationObject(gridRec);
StorageProperties sp = new StorageProperties();
@@ -136,8 +139,8 @@ public class GridDao extends PluginDao {
records.length);
for (PluginDataObject record : records) {
GridRecord rec = (GridRecord) record;
- if (rec.getParameter() == null
- || rec.getParameter().getName() == null
+ if ((rec.getParameter() == null)
+ || (rec.getParameter().getName() == null)
|| rec.getParameter().getName().equals("Missing")) {
logger.info("Discarding record due to missing or unknown parameter mapping: "
+ record);
@@ -148,7 +151,7 @@ public class GridDao extends PluginDao {
if (level != null) {
MasterLevel ml = level.getMasterLevel();
- if (ml != null
+ if ((ml != null)
&& !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
validLevel = true;
}
@@ -242,7 +245,8 @@ public class GridDao extends PluginDao {
record.setInfo(GridInfoCache.getInstance().getGridInfo(
record.getInfo()));
} catch (DataAccessLayerException e) {
- logger.handle(Priority.PROBLEM,
+ logger.handle(
+ Priority.PROBLEM,
"Cannot load GridInfoRecord from DB for: "
+ record.getDataURI(), e);
return false;
@@ -285,7 +289,8 @@ public class GridDao extends PluginDao {
// // match, but currently we will persist it anyway.
// // result = false;
// } else
- if (converter != null && converter != UnitConverter.IDENTITY) {
+ if ((converter != null)
+ && (converter != UnitConverter.IDENTITY)) {
Object messageData = record.getMessageData();
if (messageData instanceof float[]) {
float[] data = (float[]) messageData;
@@ -338,7 +343,8 @@ public class GridDao extends PluginDao {
if (level != null) {
MasterLevel ml = level.getMasterLevel();
- if (ml != null && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
+ if ((ml != null)
+ && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
result = true;
}
}
@@ -354,60 +360,59 @@ public class GridDao extends PluginDao {
* Overridden to clean up orphan GridInfoRecords.
*/
@Override
- public void delete(List objs) {
+ public void delete(final List objs) {
super.delete(objs);
- Set orphanedIds = new HashSet(objs.size());
- StringBuilder sqlString = new StringBuilder(objs.size() * 15 + 80);
- sqlString
- .append("select distinct info_id from awips.grid where info_id in (");
- for (PluginDataObject pdo : objs) {
- if (pdo instanceof GridRecord) {
- Integer id = ((GridRecord) pdo).getInfo().getId();
- if (orphanedIds.add(id)) {
- if (orphanedIds.size() > 1) {
- sqlString.append(", ");
- }
- sqlString.append(id);
- }
- }
- }
- sqlString.append(");");
try {
- QueryResult result = (QueryResult) this.executeNativeSql(sqlString
- .toString());
- for (int i = 0; i < result.getResultCount(); i++) {
- orphanedIds.remove(result.getRowColumnValue(i, 0));
- }
- if (!orphanedIds.isEmpty()) {
- sqlString = new StringBuilder(orphanedIds.size() * 15 + 60);
- sqlString.append("delete from awips.grid_info where id in (");
- boolean first = true;
- for (Integer id : orphanedIds) {
- if (!first) {
- sqlString.append(", ");
- } else {
- first = false;
+ txTemplate.execute(new TransactionCallback() {
+ @Override
+ public Integer doInTransaction(TransactionStatus status) {
+ Set orphanedIds = new HashSet(objs.size());
+ for (PluginDataObject pdo : objs) {
+ if (pdo instanceof GridRecord) {
+ Integer id = ((GridRecord) pdo).getInfo().getId();
+ orphanedIds.add(id);
+ }
}
- sqlString.append(id);
+ int rowsDeleted = 0;
+ if (!orphanedIds.isEmpty()) {
+ Session sess = getCurrentSession();
+ Query query = sess
+ .createQuery("select distinct info.id from GridRecord where info.id in (:ids)");
+ query.setParameterList("ids", orphanedIds);
+
+ List<?> idsToKeep = query.list();
+ if (idsToKeep != null) {
+ orphanedIds.removeAll(idsToKeep);
+ }
+ if (!orphanedIds.isEmpty()) {
+ if (purgeModelCacheTopic != null) {
+ query = sess
+ .createQuery("delete from GridInfoRecord where id in (:ids)");
+ query.setParameterList("ids", orphanedIds);
+ rowsDeleted = query.executeUpdate();
+
+ try {
+ EDEXUtil.getMessageProducer().sendAsyncUri(
+ purgeModelCacheTopic, orphanedIds);
+ } catch (EdexException e) {
+ logger.error(
+ "Error sending message to purge grid info topic",
+ e);
+ }
+ } else {
+ GridInfoCache.getInstance().purgeCache(
+ new ArrayList(orphanedIds));
+ logger.warn("Unable to purge model cache of clustered edices");
+ }
+
+ }
+ }
+
+ return rowsDeleted;
}
- sqlString.append(");");
- if (purgeModelCacheTopic != null) {
- this.executeNativeSql(sqlString.toString());
- EDEXUtil.getMessageProducer().sendAsyncUri(
- purgeModelCacheTopic, orphanedIds);
- } else {
- GridInfoCache.getInstance().purgeCache(
- new ArrayList(orphanedIds));
- logger
- .warn("Unable to purge model cache of clustered edices");
- }
- }
- } catch (DataAccessLayerException e1) {
- logger.error("Error purging orphaned grid info entries", e1);
- } catch (EdexException e) {
- logger.error(
- "Error sending message to purge grid info topic", e);
+ });
+ } catch (Exception e) {
+ logger.error("Error purging orphaned grid info entries", e);
}
}
-
}