Omaha #4500 - Prevent SQL Injection

Change-Id: Ia61d5283ac60d01af892440218cb199365a3f79d

Former-commit-id: 70b8f0d02904abaf7633dfd131672f139f37dc8f
This commit is contained in:
Richard Peter 2015-07-09 14:36:47 -05:00
parent e37e8f93b4
commit badc21926a
10 changed files with 238 additions and 234 deletions

View file

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Eclipse .project descriptor for the com.raytheon.edex.plugin.bufrmos
     OSGi plug-in: standard Java + PDE builders and natures, no custom
     build arguments. (Shown here as fully deleted by this commit.) -->
<projectDescription>
<name>com.raytheon.edex.plugin.bufrmos</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>

View file

@ -23,10 +23,13 @@ package com.raytheon.edex.plugin.radar.dao;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Expression;
import org.hibernate.criterion.Restrictions;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import com.raytheon.uf.common.dataplugin.radar.RadarStation;
import com.raytheon.uf.common.dataquery.db.QueryResult;
@ -46,9 +49,9 @@ import com.vividsolutions.jts.geom.Coordinate;
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 7/24/07 353 bphillip Initial Check in
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
* 10/16/2014 3454 bphillip Upgrading to Hibernate 4
* 10/28/2014 3454 bphillip Fix usage of getSession()
*
* Jul 09, 2015 4500 rjpeter Add setSridOnAllRadarStation.
* </pre>
*
* @author bphillip
@ -108,7 +111,7 @@ public class RadarStationDao extends CoreDao {
public List<RadarStation> queryByWfo(String wfo)
throws DataAccessLayerException {
List<?> stations = queryBySingleCriteria("wfoId", wfo);
if (stations == null || stations.isEmpty()) {
if ((stations == null) || stations.isEmpty()) {
return null;
} else {
return (List<RadarStation>) stations;
@ -162,11 +165,11 @@ public class RadarStationDao extends CoreDao {
DetachedCriteria crit = DetachedCriteria
.forClass(RadarStation.class);
Disjunction stationEq = Expression.disjunction();
for (int i = 0; i < names.length; i++) {
if (((Object[]) names[i])[0] != null) {
stationEq.add(Expression.eq("wfoId",
((Object[]) names[i])[0].toString()));
Disjunction stationEq = Restrictions.disjunction();
for (Object name : names) {
if (((Object[]) name)[0] != null) {
stationEq.add(Restrictions.eq("wfoId",
((Object[]) name)[0].toString()));
}
}
crit.add(stationEq);
@ -174,7 +177,7 @@ public class RadarStationDao extends CoreDao {
try {
return crit.getExecutableCriteria(session).list();
} finally {
if (session != null){
if (session != null) {
session.close();
}
}
@ -221,4 +224,20 @@ public class RadarStationDao extends CoreDao {
return rdaIds;
}
/**
 * Sets the station field on all radar spatial entries to setsrid of the
 * geometry.
 *
 * Runs a constant native UPDATE inside a transaction; no user input is
 * interpolated into the SQL, which is the point of this 4500 fix
 * (replaces the string-built executeNativeSql call in
 * Import88DLocationsUtil).
 *
 * NOTE(review): this statement sets the {@code station} column, while the
 * query it replaces updated {@code the_geom} directly
 * ("set the_geom=st_setsrid(the_geom, 4326)") — confirm {@code station}
 * is the intended geometry column and not a typo for {@code the_geom}.
 */
public void setSridOnAllRadarStation() {
// Execute within the DAO's transaction template so the update is committed
// (or rolled back) as a unit; returns the affected row count internally.
txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
// Session is transaction-bound; Spring closes it — do not close here.
Session sess = getCurrentSession();
SQLQuery query = sess
.createSQLQuery("update radar_spatial set station=st_setsrid(the_geom, 4326)");
return query.executeUpdate();
}
});
}
}

View file

@ -25,7 +25,6 @@ import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.ndm.ingest.INationalDatasetSubscriber;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;
@ -40,13 +39,12 @@ import com.vividsolutions.jts.io.WKTReader;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 10Oct2011 10520 JWork Initial check-in.
* 10Oct2011 10520 JWork Initial check-in.
* 09/11/2012 DR 15366 D. Friedman Set SRID on radar stations.
* Mar 06, 2014 2876 mpduff Moved NationalDatasetSubscriber.
* Mar 06, 2014 2876 mpduff Moved NationalDatasetSubscriber.
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
*/
public class Import88DLocationsUtil implements INationalDatasetSubscriber {
private static final IUFStatusHandler statusHandler = UFStatus
.getHandler(Import88DLocationsUtil.class);
@ -292,11 +290,10 @@ public class Import88DLocationsUtil implements INationalDatasetSubscriber {
* Once GetTools is updated/fixed, this should be removed.
*/
try {
radarStationDAO
.executeNativeSql("update radar_spatial set the_geom=st_setsrid(the_geom, 4326)");
} catch (DataAccessLayerException e) {
radarStationDAO.setSridOnAllRadarStation();
} catch (Exception e) {
statusHandler.handle(Priority.ERROR,
"Failed to update the SRIDs in the radar_spatial_table", e);
"Failed to update the SRIDs in the radar_spatial table", e);
}
if (statusHandler.isPriorityEnabled(Priority.INFO)) {

View file

@ -117,7 +117,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Apr 28, 2015 #4027 randerso Expunged Calendar from ActiveTableRecord,
* fixed next ETN query to query for >= Jan 1
* May 22, 2015 4522 randerso Create proper primary key for ActiveTableRecord
*
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
* @author njensen
@ -743,12 +743,11 @@ public class ActiveTable {
throws DataAccessLayerException {
CoreDao dao = practiceDao;
String sql = "delete from practice_activetable;";
dao.executeNativeSql(sql);
dao.executeSQLUpdate(sql);
sql = "delete from cluster_task where name ='"
+ GetNextEtnUtil.getEtnClusterLockName(requestedSiteId,
ActiveTableMode.PRACTICE) + "';";
dao.executeNativeSql(sql);
sql = "delete from cluster_task where name = :name";
dao.executeSQLUpdate(sql, "name", GetNextEtnUtil.getEtnClusterLockName(
requestedSiteId, ActiveTableMode.PRACTICE));
}
/**

View file

@ -28,8 +28,6 @@ import java.io.FileWriter;
import java.io.IOException;
import com.raytheon.uf.common.awipstools.GetWfoCenterPoint;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.dataquery.db.QueryResultRow;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
@ -38,7 +36,6 @@ import com.raytheon.uf.common.serialization.comm.IRequestHandler;
import com.raytheon.uf.common.status.IUFStatusHandler;
import com.raytheon.uf.common.status.UFStatus;
import com.raytheon.uf.common.status.UFStatus.Priority;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.vividsolutions.jts.geom.Coordinate;
@ -52,8 +49,8 @@ import com.vividsolutions.jts.geom.Coordinate;
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jun 9, 2010 mschenke Initial creation
*
* Jun 9, 2010 mschenke Initial creation
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
* @author mschenke
@ -98,25 +95,22 @@ public class GetWfoCenterHandler implements IRequestHandler<GetWfoCenterPoint> {
private static Coordinate lookupInWfoCenter(String wfoId) {
Coordinate loc = null;
try {
String query = String.format(
"SELECT lat,lon FROM mapdata.cwa WHERE wfo = '%s' LIMIT 1",
wfoId);
QueryResult result = (QueryResult) new CoreDao(
DaoConfig.forDatabase("maps")).executeNativeSql(query);
CoreDao dao = new CoreDao(DaoConfig.forDatabase("maps"));
Object[] rows = dao.executeSQLQuery(
"SELECT lat,lon FROM mapdata.cwa WHERE wfo = :wfo LIMIT 1",
"wfo", wfoId);
if (result.getRows().length == 0) {
query = String
.format("select ST_Y(theCentroid) as lat, ST_X(theCentroid) as lon from (select ST_CENTROID(theUnion) as theCentroid from (select ST_Union(the_geom) as theUnion from mapdata.rfc where site_id = '%s') as dummyAlias) as dummyAlias;",
wfoId);
result = (QueryResult) new CoreDao(
DaoConfig.forDatabase("maps")).executeNativeSql(query);
if ((rows == null) || (rows.length == 0)) {
rows = dao
.executeSQLQuery(
"select ST_Y(theCentroid) as lat, ST_X(theCentroid) as lon from (select ST_CENTROID(theUnion) as theCentroid from (select ST_Union(the_geom) as theUnion from mapdata.rfc where site_id = :site) as dummyAlias) as dummyAlias",
"site", wfoId);
}
if (result.getRows().length > 0) {
QueryResultRow row = result.getRows()[0];
Double lat = ((Number) row.getColumn(0)).doubleValue();
Double lon = ((Number) row.getColumn(1)).doubleValue();
if ((rows != null) && (rows.length > 0)) {
Object[] row = (Object[]) rows[0];
Double lat = ((Number) row[0]).doubleValue();
Double lon = ((Number) row[1]).doubleValue();
loc = new Coordinate(lon, lat);
}
@ -124,7 +118,7 @@ public class GetWfoCenterHandler implements IRequestHandler<GetWfoCenterPoint> {
statusHandler.handle(Priority.PROBLEM,
"No location information found for wfo: " + wfoId);
}
} catch (DataAccessLayerException e) {
} catch (Exception e) {
statusHandler.handle(Priority.PROBLEM,
"Error executing query for wfo center point", e);
}
@ -152,7 +146,8 @@ public class GetWfoCenterHandler implements IRequestHandler<GetWfoCenterPoint> {
double lat = Double.parseDouble(line.substring(0, p));
double lon = Double.parseDouble(line.substring(p));
if (lat > 90.0 || lat < -90.0 || lon > 180.0 || lon < -180.0) {
if ((lat > 90.0) || (lat < -90.0) || (lon > 180.0)
|| (lon < -180.0)) {
statusHandler
.handle(Priority.PROBLEM,
"Invalid lat/lon in wfo center point file, using default");

View file

@ -21,13 +21,18 @@ package com.raytheon.uf.edex.metartohmdb.dao;
import java.util.Calendar;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import com.raytheon.uf.common.time.util.TimeUtil;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.raytheon.uf.edex.decodertools.time.TimeTools;
/**
* TODO Add Description
* Dao for HMDBReport.
*
* <pre>
*
@ -37,7 +42,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
* ------------ ---------- ----------- --------------------------
* Jun 29, 2009 jkorman Initial creation
* Sep 18, 2014 #3627 mapeters Updated deprecated {@link TimeTools} usage.
*
* Jun 18, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
* @author jkorman
@ -46,7 +51,7 @@ import com.raytheon.uf.edex.decodertools.time.TimeTools;
public class HMDBRptDao extends CoreDao {
private static final Object LOCK = new Object();
public HMDBRptDao() {
super(DaoConfig.forClass("hmdb", HMDBReport.class));
}
@ -58,43 +63,47 @@ public class HMDBRptDao extends CoreDao {
*/
public boolean storeToTable(HMDBReport report) {
boolean status = true;
synchronized(LOCK) {
synchronized (LOCK) {
logger.debug("SQL = " + report.toInsertSQL());
try {
status = (executeSQLUpdate(report.toInsertSQL()) == 1);
} catch(Exception e) {
} catch (Exception e) {
logger.error("Insert query = " + report.toInsertSQL());
logger.error("Error writing to rpt table", e);
}
}
return status;
}
/**
*
* @return
*/
public boolean purgeTable(int purgeHours) {
public boolean purgeTable(final int purgeHours) {
boolean status = true;
Calendar c = TimeUtil.newGmtCalendar();
c.add(Calendar.HOUR_OF_DAY,-purgeHours);
StringBuilder sb = new StringBuilder("delete from rpt where nominal < ");
sb.append(String.format(HMDBReport.DTFMT, c));
sb.append(";");
String query = sb.toString();
final StringBuilder queryString = new StringBuilder();
try {
logger.debug("Delete query = " + query);
executeNativeSql(query);
} catch(Exception e) {
logger.error("Purge query = " + query);
logger.error("Error in purging hmdb.rpt",e);
txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
Calendar c = TimeUtil.newGmtCalendar();
c.add(Calendar.HOUR_OF_DAY, -purgeHours);
Session sess = getCurrentSession();
Query query = sess
.createQuery("delete from HMDBReport where nominal < :nominal");
query.setCalendar("nominal", c);
query.getQueryString();
queryString.append(query.getQueryString());
return query.executeUpdate();
}
});
} catch (Exception e) {
logger.error("Purge query = " + queryString);
logger.error("Error in purging hmdb.rpt", e);
status = false;
}
return status;
}
}

View file

@ -23,7 +23,9 @@ import java.math.BigInteger;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
@ -47,7 +49,7 @@ import com.raytheon.uf.edex.database.query.DatabaseQuery;
* Jan 11, 2011 mpduff Initial creation
* Mar 28, 2014 2952 mpduff Changed to use UFStatus for logging.
* Apr 21, 2014 2060 njensen Remove dependency on grid dataURI column
*
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
* @author mpduff
@ -92,16 +94,17 @@ public class GAFFDB {
cal.setTimeInMillis(start);
String startTime = sdf.format(cal.getTime());
String process = processName;
String sql = "insert into perflog (process, start_time, num_processed, "
+ "num_reads, num_inserts, num_updates, num_deletes, "
+ "elapsed_time, cpu_time, io_time) values ('"
+ process
+ "', " + " '" + startTime + "', 0, 0, 0, 0, 0, 0, 0, 0)";
+ "elapsed_time, cpu_time, io_time) values (:process,"
+ " :startTime, 0, 0, 0, 0, 0, 0, 0, 0)";
Map<String, Object> paramMap = new HashMap<>(2, 1);
paramMap.put("process", processName);
paramMap.put("startTime", startTime);
CoreDao dao = null;
dao = new CoreDao(DaoConfig.forDatabase(IHFS));
dao.executeNativeSql(sql, false);
dao.executeSQLUpdate(sql, paramMap);
}
@ -179,7 +182,7 @@ public class GAFFDB {
List<?> rs = dao.queryByCriteria(query);
if ((rs != null) && (!rs.isEmpty())) {
Object result = rs.get(0);
if (result != null && result instanceof GridRecord) {
if ((result != null) && (result instanceof GridRecord)) {
rec = ((GridRecord) result);
}
}
@ -250,35 +253,40 @@ public class GAFFDB {
String sql = "insert into contingencyvalue (lid, pe, dur, ts, "
+ "extremum, probability, validtime, basistime, value, "
+ "shef_qual_code, quality_code, revision, product_id, "
+ "producttime, postingtime) values ('" + areaId + "', "
+ "'PP', " + dur + ", 'CP', 'Z', -1.0, '" + validDate
+ "', '" + basisTime + "', " + avgVal + ", 'Z', "
+ DEFAULT_QC_VALUE + ", " + "0, 'GRIDFFG', '" + validDate
+ "', '" + postDate + "')";
+ "producttime, postingtime) values (:areaId, 'PP', "
+ ":dur, 'CP', 'Z', -1.0, :validDate, :basisTime, :avgVal, "
+ "'Z', :qc, 0, 'GRIDFFG', :validDate, :postDate)";
log.debug(sql);
Map<String, Object> paramMap = new HashMap<>(8, 1);
paramMap.put("areaId", areaId);
paramMap.put("dur", dur);
paramMap.put("validDate", validDate);
paramMap.put("basisTime", basisTime);
paramMap.put("avgVal", avgVal);
paramMap.put("qc", DEFAULT_QC_VALUE);
paramMap.put("validDate", validDate);
paramMap.put("postDate", postDate);
dao.executeNativeSql(sql, false);
dao.executeSQLUpdate(sql, paramMap);
} else {
// Need to do an update to the row
String updateSql = "update contingencyvalue set value = "
+ avgVal
+ ", shef_qual_code = 'Z', quality_code = "
+ DEFAULT_QC_VALUE
+ ", revision = 0, product_id = "
+ "'GRIDFFG', producttime = '"
+ validDate
+ "', "
+ " postingtime = '"
+ postDate
+ "' where "
+ "lid = '"
+ areaId
+ "' and pe = 'PP' and dur = "
+ dur
+ " and ts = 'CP' and extremum = 'Z' and probability = -1.0 "
+ " and validtime = '" + validDate + "' and basistime = '"
+ basisTime + "'";
String updateSql = "update contingencyvalue set value = :avgVal, "
+ "shef_qual_code = 'Z', quality_code = :qc, revision = 0, "
+ "product_id = 'GRIDFFG', producttime = :validDate, "
+ "postingtime = :postDate where lid = :areaId and pe = 'PP' "
+ "and dur = :dur and ts = 'CP' and extremum = 'Z' and probability = -1.0 "
+ " and validtime = :validDate and basistime = :basisTime";
Map<String, Object> paramMap = new HashMap<>(8, 1);
paramMap.put("avgVal", avgVal);
paramMap.put("qc", DEFAULT_QC_VALUE);
paramMap.put("validDate", validDate);
paramMap.put("postDate", postDate);
paramMap.put("areaId", areaId);
paramMap.put("dur", dur);
paramMap.put("validDate", validDate);
paramMap.put("basisTime", basisTime);
dao.executeSQLUpdate(updateSql);
}
@ -317,8 +325,8 @@ public class GAFFDB {
public Object[] getLineSegs(String areaId) {
CoreDao dao = null;
dao = new CoreDao(DaoConfig.forDatabase(IHFS));
Object[] rs = dao.executeSQLQuery(LINESEGS_QUERY + " where area_Id = '"
+ areaId + "'");
Object[] rs = dao.executeSQLQuery(LINESEGS_QUERY
+ " where area_Id = :areaId", "areaId", areaId);
return rs;
}

View file

@ -32,8 +32,6 @@ import com.raytheon.uf.common.dataplugin.binlightning.BinLightningRecord;
import com.raytheon.uf.common.dataplugin.binlightning.LightningConstants;
import com.raytheon.uf.common.dataplugin.persist.DefaultPathProvider;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.dataquery.db.QueryResultRow;
import com.raytheon.uf.common.datastorage.DataStoreFactory;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.StorageException;
@ -49,26 +47,26 @@ import com.vividsolutions.jts.geom.Coordinate;
/**
* Service implementation for gathering the lightning datasets from files in
* HDF5 format and inserting them into the ifhs lightning table.
*
*
* <pre>
*
*
* SOFTWARE HISTORY
* Date Ticket# Engineer Description
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jan 06, 2011 5951 jnjanga Initial creation
* Jan 10, 2013 1448 bgonzale Added app context check in runOnSchedule().
* Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
* Mar 28, 2014 2952 mpduff Changed to use UFStatus for logging.
* Jun 05, 2014 3226 bclement BinLightning refactor
* Aug 20, 2014 3549 njensen Fixed spelling in exceptions
* Sep 17, 2014 3015 bclement improved exception handling
* Dec 04, 2014 3015 njensen Corrected usage of Coordinate(x, y)
* Feb 25, 2015 3992 nabowle Limit getMostRecentStrikes to NLDN.
* Deduplicate lightning data in a
* single BinLightningRecord.
*
* Jan 06, 2011 5951 jnjanga Initial creation
* Jan 10, 2013 1448 bgonzale Added app context check in runOnSchedule().
* Jan 18, 2013 1469 bkowal Removed the hdf5 data directory.
* Mar 28, 2014 2952 mpduff Changed to use UFStatus for logging.
* Jun 05, 2014 3226 bclement BinLightning refactor
* Aug 20, 2014 3549 njensen Fixed spelling in exceptions
* Sep 17, 2014 3015 bclement improved exception handling
* Dec 04, 2014 3015 njensen Corrected usage of Coordinate(x, y)
* Feb 25, 2015 3992 nabowle Limit getMostRecentStrikes to NLDN.
* Deduplicate lightning data in a
* single BinLightningRecord.
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
*
* @author jnjanga
* @version 1.0
*/
@ -96,27 +94,30 @@ public class MpeLightningSrv {
/**
* Check the metadata Database for new lightning entries.
*
*
* @return rows returned from the query
*/
private QueryResultRow[] getMostRecentStrikes() throws EdexException {
QueryResult rs = null;
private Object[] getMostRecentStrikes() throws EdexException {
CoreDao coreDao = new CoreDao(DaoConfig.DEFAULT);
/*
* TODO: This can miss data, should use insertTime and track last pull
* time
*/
final String lgtSQL = "select datauri from binlightning "
+ "where reftime > (now()- interval \'30 minutes \')"
+ "and source = '" + LightningConstants.DEFAULT_SOURCE + "'";
+ "and source = :source";
try {
rs = (QueryResult) coreDao.executeNativeSql(lgtSQL, true);
return coreDao.executeSQLQuery(lgtSQL, "source",
LightningConstants.DEFAULT_SOURCE);
} catch (Exception e) {
throw new EdexException("Couldn't get BinLightning records from"
+ " metadata database. Failed SQL: " + lgtSQL, e);
}
return rs.getRows();
}
/**
* Inserts a single record into ihfs's lightning table.
*
*
* @param dataURI
* @throws EdexException
*/
@ -231,48 +232,47 @@ public class MpeLightningSrv {
/**
* Populates ifhs' lightning table with the resultset obtained from querying
* metadata's binlighting table.
*
*
* @param rows
* @throws EdexException
*/
private void ifhsInsertMostRecentStrikes(QueryResultRow[] rows)
private void ifhsInsertMostRecentStrikes(Object[] rows)
throws EdexException {
if (rows.length == 0) {
logger.info("No new lightning records to insert in ifhs. ");
}
for (QueryResultRow row : rows) {
String dataURI = (String) row.getColumn(0);
for (Object col : rows) {
String dataURI = (String) col;
ifhsInsertLightRecord(dataURI);
}
}
/**
* run at scheduled timer.
*
*
* @throws EdexException
*/
public void runOnSchedule() throws EdexException {
if (!AppsDefaults.getInstance().setAppContext(this)) {
return;
}
QueryResultRow[] rows = getMostRecentStrikes();
Object[] rows = getMostRecentStrikes();
ifhsInsertMostRecentStrikes(rows);
}
/**
* Class to simplify deduplicating lightning data in a
* {@link BinLightningRecord} that generate the same ihfs lightning primary
* key.
*/
private static class LightningData {
private short x;
private final short x;
private short y;
private final short y;
private long obstime;
private final long obstime;
private byte strikes;
private final byte strikes;
public LightningData(short x, short y, long time, byte strikes) {
super();
@ -310,7 +310,6 @@ public class MpeLightningSrv {
return strikes;
}
/**
* Generate a hashcode using the ihfs primary key fields: x, y, and
* time.
@ -319,9 +318,9 @@ public class MpeLightningSrv {
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (obstime ^ (obstime >>> 32));
result = prime * result + x;
result = prime * result + y;
result = (prime * result) + (int) (obstime ^ (obstime >>> 32));
result = (prime * result) + x;
result = (prime * result) + y;
return result;
}

View file

@ -32,4 +32,5 @@ Require-Bundle: com.raytheon.uf.common.parameter;bundle-version="1.0.0",
org.springframework;bundle-version="2.5.6",
javax.measure;bundle-version="1.0.0",
com.raytheon.uf.common.status;bundle-version="1.12.1174",
com.raytheon.uf.common.comm;bundle-version="1.12.1174"
com.raytheon.uf.common.comm;bundle-version="1.12.1174",
org.hibernate

View file

@ -28,6 +28,11 @@ import java.util.Set;
import javax.measure.converter.UnitConverter;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import com.raytheon.uf.common.dataplugin.PluginDataObject;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.common.dataplugin.grid.GridConstants;
@ -37,7 +42,6 @@ import com.raytheon.uf.common.dataplugin.level.Level;
import com.raytheon.uf.common.dataplugin.level.LevelFactory;
import com.raytheon.uf.common.dataplugin.level.MasterLevel;
import com.raytheon.uf.common.dataplugin.persist.IPersistable;
import com.raytheon.uf.common.dataquery.db.QueryResult;
import com.raytheon.uf.common.datastorage.IDataStore;
import com.raytheon.uf.common.datastorage.Request;
import com.raytheon.uf.common.datastorage.StorageException;
@ -70,7 +74,7 @@ import com.raytheon.uf.edex.database.plugin.PluginDao;
* Mar 14, 2013 1587 bsteffen Fix static data persisting to datastore.
* Mar 27, 2013 1821 bsteffen Speed up GridInfoCache.
* Mar 20, 2013 2910 bsteffen Clear dataURI after loading cached info.
*
* Jul 09, 2015 4500 rjpeter Fix SQL Injection concern.
* </pre>
*
* @author bphillip
@ -99,7 +103,7 @@ public class GridDao extends PluginDao {
GridRecord gridRec = (GridRecord) obj;
Object messageData = gridRec.getMessageData();
GridCoverage location = gridRec.getLocation();
if (location != null && messageData instanceof float[]) {
if ((location != null) && (messageData instanceof float[])) {
long[] sizes = new long[] { location.getNx(), location.getNy() };
String abbrev = gridRec.getParameter().getAbbreviation();
String group = gridRec.getDataURI();
@ -109,8 +113,7 @@ public class GridDao extends PluginDao {
datasetName = abbrev;
}
AbstractStorageRecord storageRecord = new FloatDataRecord(
datasetName,
group, (float[]) messageData, 2, sizes);
datasetName, group, (float[]) messageData, 2, sizes);
storageRecord.setCorrelationObject(gridRec);
StorageProperties sp = new StorageProperties();
@ -136,8 +139,8 @@ public class GridDao extends PluginDao {
records.length);
for (PluginDataObject record : records) {
GridRecord rec = (GridRecord) record;
if (rec.getParameter() == null
|| rec.getParameter().getName() == null
if ((rec.getParameter() == null)
|| (rec.getParameter().getName() == null)
|| rec.getParameter().getName().equals("Missing")) {
logger.info("Discarding record due to missing or unknown parameter mapping: "
+ record);
@ -148,7 +151,7 @@ public class GridDao extends PluginDao {
if (level != null) {
MasterLevel ml = level.getMasterLevel();
if (ml != null
if ((ml != null)
&& !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
validLevel = true;
}
@ -242,7 +245,8 @@ public class GridDao extends PluginDao {
record.setInfo(GridInfoCache.getInstance().getGridInfo(
record.getInfo()));
} catch (DataAccessLayerException e) {
logger.handle(Priority.PROBLEM,
logger.handle(
Priority.PROBLEM,
"Cannot load GridInfoRecord from DB for: "
+ record.getDataURI(), e);
return false;
@ -285,7 +289,8 @@ public class GridDao extends PluginDao {
// // match, but currently we will persist it anyway.
// // result = false;
// } else
if (converter != null && converter != UnitConverter.IDENTITY) {
if ((converter != null)
&& (converter != UnitConverter.IDENTITY)) {
Object messageData = record.getMessageData();
if (messageData instanceof float[]) {
float[] data = (float[]) messageData;
@ -338,7 +343,8 @@ public class GridDao extends PluginDao {
if (level != null) {
MasterLevel ml = level.getMasterLevel();
if (ml != null && !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
if ((ml != null)
&& !LevelFactory.UNKNOWN_LEVEL.equals(ml.getName())) {
result = true;
}
}
@ -354,60 +360,59 @@ public class GridDao extends PluginDao {
* Overridden to clean up orphan GridInfoRecords.
*/
@Override
public void delete(List<PluginDataObject> objs) {
public void delete(final List<PluginDataObject> objs) {
super.delete(objs);
Set<Integer> orphanedIds = new HashSet<Integer>(objs.size());
StringBuilder sqlString = new StringBuilder(objs.size() * 15 + 80);
sqlString
.append("select distinct info_id from awips.grid where info_id in (");
for (PluginDataObject pdo : objs) {
if (pdo instanceof GridRecord) {
Integer id = ((GridRecord) pdo).getInfo().getId();
if (orphanedIds.add(id)) {
if (orphanedIds.size() > 1) {
sqlString.append(", ");
}
sqlString.append(id);
}
}
}
sqlString.append(");");
try {
QueryResult result = (QueryResult) this.executeNativeSql(sqlString
.toString());
for (int i = 0; i < result.getResultCount(); i++) {
orphanedIds.remove(result.getRowColumnValue(i, 0));
}
if (!orphanedIds.isEmpty()) {
sqlString = new StringBuilder(orphanedIds.size() * 15 + 60);
sqlString.append("delete from awips.grid_info where id in (");
boolean first = true;
for (Integer id : orphanedIds) {
if (!first) {
sqlString.append(", ");
} else {
first = false;
txTemplate.execute(new TransactionCallback<Integer>() {
@Override
public Integer doInTransaction(TransactionStatus status) {
Set<Integer> orphanedIds = new HashSet<Integer>(objs.size());
for (PluginDataObject pdo : objs) {
if (pdo instanceof GridRecord) {
Integer id = ((GridRecord) pdo).getInfo().getId();
orphanedIds.add(id);
}
}
sqlString.append(id);
int rowsDeleted = 0;
if (!orphanedIds.isEmpty()) {
Session sess = getCurrentSession();
Query query = sess
.createQuery("select distinct info.id from GridRecord where info.id in (:ids)");
query.setParameterList("ids", orphanedIds);
List<?> idsToKeep = query.list();
if (idsToKeep != null) {
orphanedIds.removeAll(idsToKeep);
}
if (!orphanedIds.isEmpty()) {
if (purgeModelCacheTopic != null) {
query = sess
.createQuery("delete from GridInfoRecord where id in (:ids)");
query.setParameterList("ids", orphanedIds);
rowsDeleted = query.executeUpdate();
try {
EDEXUtil.getMessageProducer().sendAsyncUri(
purgeModelCacheTopic, orphanedIds);
} catch (EdexException e) {
logger.error(
"Error sending message to purge grid info topic",
e);
}
} else {
GridInfoCache.getInstance().purgeCache(
new ArrayList<Integer>(orphanedIds));
logger.warn("Unable to purge model cache of clustered edices");
}
}
}
return rowsDeleted;
}
sqlString.append(");");
if (purgeModelCacheTopic != null) {
this.executeNativeSql(sqlString.toString());
EDEXUtil.getMessageProducer().sendAsyncUri(
purgeModelCacheTopic, orphanedIds);
} else {
GridInfoCache.getInstance().purgeCache(
new ArrayList<Integer>(orphanedIds));
logger
.warn("Unable to purge model cache of clustered edices");
}
}
} catch (DataAccessLayerException e1) {
logger.error("Error purging orphaned grid info entries", e1);
} catch (EdexException e) {
logger.error(
"Error sending message to purge grid info topic", e);
});
} catch (Exception e) {
logger.error("Error purging orphaned grid info entries", e);
}
}
}