12.5.1-15 baseline

Former-commit-id: 3904c4ccf5 [formerly b13cbb7e00 [formerly 4909e0dd166e43c22a34d96aa744f51db8a7d6c0]]
Former-commit-id: b13cbb7e00
Former-commit-id: 8552902906
Steve Harris 2012-06-08 13:20:42 -05:00
parent e0a22310dc
commit 2f10c7a1e4
1975 changed files with 98440 additions and 19817 deletions

GribSpatialCache.java (new file, 750 lines)

@@ -0,0 +1,750 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.grib.spatial;
import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opengis.metadata.spatial.PixelOrientation;
import com.raytheon.edex.plugin.grib.dao.GribModelDao;
import com.raytheon.edex.plugin.grib.dao.GridCoverageDao;
import com.raytheon.edex.plugin.grib.dao.IGridCoverageDao;
import com.raytheon.edex.site.SiteUtil;
import com.raytheon.uf.common.awipstools.GetWfoCenterPoint;
import com.raytheon.uf.common.dataplugin.grib.exception.GribException;
import com.raytheon.uf.common.dataplugin.grib.spatial.projections.GridCoverage;
import com.raytheon.uf.common.dataplugin.grib.subgrid.SubGrid;
import com.raytheon.uf.common.dataplugin.grib.subgrid.SubGridDef;
import com.raytheon.uf.common.dataplugin.grib.util.GribModelLookup;
import com.raytheon.uf.common.dataplugin.grib.util.GridModel;
import com.raytheon.uf.common.geospatial.MapUtil;
import com.raytheon.uf.common.localization.IPathManager;
import com.raytheon.uf.common.localization.LocalizationContext;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationLevel;
import com.raytheon.uf.common.localization.LocalizationContext.LocalizationType;
import com.raytheon.uf.common.localization.LocalizationFile;
import com.raytheon.uf.common.localization.PathManagerFactory;
import com.raytheon.uf.common.serialization.SerializationException;
import com.raytheon.uf.common.serialization.SerializationUtil;
import com.raytheon.uf.edex.awipstools.GetWfoCenterHandler;
import com.raytheon.uf.edex.core.EDEXUtil;
import com.raytheon.uf.edex.database.DataAccessLayerException;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
import com.vividsolutions.jts.geom.Coordinate;
/**
* Cache used for holding GridCoverage objects. Since creating geometries and
* CRS objects are expensive operations, this cache is used to store
* GridCoverages as they are produced.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* 4/7/09 1994 bphillip Initial Creation
*
* </pre>
*
* @author bphillip
* @version 1
*/
public class GribSpatialCache {
/** The logger */
protected transient Log logger = LogFactory.getLog(getClass());
/** The singleton instance */
private static GribSpatialCache instance = new GribSpatialCache();
/**
* Map containing the GridCoverages<br>
* The key for this map is the id field of the GridCoverage object stored as
* the value of the map
*/
private Map<Integer, GridCoverage> spatialMap;
/**
* Map containing the GridCoverages<br>
* The key for this map is the name field of the GridCoverage object stored
* as the value of the map. This is only used internally for lookup of a
* coverage by name aka gridId.
*/
private Map<String, GridCoverage> spatialNameMap;
/**
* Map containing the subGrid coverage based on a model name.
*/
private Map<String, Integer> subGridCoverageMap;
/**
* Map containing the subGrid definition based on a model name.
*/
private Map<String, SubGrid> definedSubGridMap;
/**
* Gets the singleton instance of GribSpatialCache
*
* @return The singleton instance of the GribSpatialCache
*/
public static GribSpatialCache getInstance() {
return instance;
}
/**
* Creates a new GribSpatialCache
*/
private GribSpatialCache() {
spatialMap = new HashMap<Integer, GridCoverage>();
spatialNameMap = new HashMap<String, GridCoverage>();
definedSubGridMap = new HashMap<String, SubGrid>();
subGridCoverageMap = new HashMap<String, Integer>();
initializeGrids();
}
/**
* Retrieves a grid from the cache. On a cache miss the database is checked
* before giving up; returns null if the grid is unknown.
*
* @param coverage
*            The GridCoverage to look up
* @return The cached GridCoverage object, or null if not present
* @throws GribException
*             If the database query for the coverage fails
*/
public GridCoverage getGrid(GridCoverage coverage) throws GribException {
GridCoverage retVal = spatialMap.get(coverage.getId());
if (retVal == null) {
/*
* Coverage not found in cache, but the values provided in the GDS
* may be slightly different than those for the grid in the cache.
* Check the database to be sure.
*/
try {
retVal = ((IGridCoverageDao) EDEXUtil.getESBComponent(coverage
.getProjectionType().replaceAll(" ", "") + "Dao"))
.checkGrid(coverage);
} catch (DataAccessLayerException e) {
throw new GribException("Error querying for grib coverage!", e);
}
if (retVal != null) {
spatialMap.put(coverage.getId(), retVal);
spatialNameMap.put(coverage.getName(), retVal);
}
}
return retVal;
}
public GridCoverage getGrid(int id) {
return spatialMap.get(id);
}
public GridCoverage getGrid(String modelName) {
GridCoverage rval = null;
if (modelName != null) {
if (subGridCoverageMap.containsKey(modelName)) {
rval = spatialMap.get(subGridCoverageMap.get(modelName));
} else {
GridModel model = GribModelLookup.getInstance().getModelByName(
modelName);
if (model != null) {
rval = spatialNameMap.get(model.getGrid().toString());
}
}
}
return rval;
}
public GridCoverage getGridByName(String name) {
return spatialNameMap.get(name);
}
/**
* Puts a grid into the GribSpatialCache.
*
* @param grid
* The grid to store
* @param initializeGrid
*            True if the grid's geometry, CRS, and id fields should be
*            initialized before caching
* @param persistToDb
* True if this GridCoverage object is to be persisted to the
* database
* @throws GribException
* If problems occur while initializing the grid
*/
public void putGrid(GridCoverage grid, boolean initializeGrid,
boolean persistToDb) throws GribException {
if (initializeGrid) {
/*
* Prepare the grid to be stored into the cache. Initializes the
* geometry and crs objects and generates the id field
*/
grid.initialize();
if (grid.getName() == null) {
grid.generateName();
}
}
// Persist to the database if desired
if (persistToDb) {
new CoreDao(DaoConfig.DEFAULT).saveOrUpdate(grid);
}
spatialMap.put(grid.getId(), grid);
spatialNameMap.put(grid.getName(), grid);
}
public SubGrid getSubGrid(String modelName) {
return definedSubGridMap.get(modelName);
}
public GridCoverage getSubGridCoverage(String modelName) {
GridCoverage rval = null;
if (subGridCoverageMap.containsKey(modelName)) {
rval = spatialMap.get(subGridCoverageMap.get(modelName));
}
return rval;
}
/**
* Initializes the predefined set of grids. The grids are stored in xml
* format in the utility folder so the localization service has access to
* them.<br>
* GridCoverage objects are created from the XML via JAXB and placed in the cache
*/
private void initializeGrids() {
ClusterTask ct = null;
do {
ct = ClusterLockUtils.lock("grib", "spatialCache", 120000, true);
} while (!LockState.SUCCESSFUL.equals(ct.getLockState()));
try {
// pull all the coverage from the database
GridCoverageDao dao = new GridCoverageDao();
FileDataList previousFdl = getPreviousFileDataList();
FileDataList currentFdl = generateFileDataList();
if (isDefinitionChanged(previousFdl, currentFdl)) {
processBaseGridsChanged(dao, currentFdl);
saveFileDataList(currentFdl);
} else {
List<? extends GridCoverage> baseCoverages = dao
.loadBaseGrids();
if (baseCoverages != null && baseCoverages.size() > 0) {
for (Object obj : baseCoverages) {
try {
putGrid((GridCoverage) obj, false, false);
} catch (Exception e) {
// Log error but do not throw an exception; the failure
// can technically only come from initialize(), which is
// not called here
logger.error(
"Unable to load grid coverage into cache "
+ obj, e);
}
}
} else {
// database wiped or plugin re-initialized; need to repopulate
processBaseGridsChanged(dao, currentFdl);
saveFileDataList(currentFdl);
}
}
processUnknownGrids(dao);
processSubGrids(dao, currentFdl);
} finally {
ClusterLockUtils.unlock(ct, false);
}
}
/**
* Handles a non-subgridded definition that has been added, deleted, or
* changed. A change or delete removes all records, models, and the
* coverage definition; a change or add then inserts a new coverage
* definition.
*
* TODO: Post process Unknown definitions to see if they are now known. If
* now known, delete the unknown definitions.
*
* @param dao
* @param currentFdl
*/
private void processBaseGridsChanged(GridCoverageDao dao,
FileDataList currentFdl) {
List<? extends GridCoverage> baseCoverages = dao.loadBaseGrids();
Map<String, GridCoverage> fileCoverageMap = loadGridDefinitionsFromDisk(currentFdl);
// an update needs to delete all hdf5 just like a delete, so an
// update is treated as a delete followed by an add; this
// simplifies the logic and handles primary key changes
List<GridCoverage> coveragesToDelete = new LinkedList<GridCoverage>();
HashSet<String> validDbCoverageNames = new HashSet<String>(
(int) (baseCoverages.size() * 1.25) + 1);
Iterator<? extends GridCoverage> iter = baseCoverages.iterator();
while (iter.hasNext()) {
GridCoverage dbCov = iter.next();
GridCoverage fileCoverage = fileCoverageMap.get(dbCov.getName());
if (!dbCov.equals(fileCoverage)) {
// coverage not in flat file or coverage has changed,
// delete the old coverage
coveragesToDelete.add(dbCov);
iter.remove();
} else {
// current coverage still valid
validDbCoverageNames.add(dbCov.getName());
}
}
// delete grids, models, coverages, and hdf5 for coveragesToDelete.
for (GridCoverage cov : coveragesToDelete) {
logger.info("GridCoverage " + cov.getName()
+ " has changed. Deleting out of date data");
if (!dao.deleteCoverageAssociatedData(cov, true)) {
logger.warn("Failed to delete GridCoverage " + cov.getName()
+ ". Manual intervention required.");
} else {
logger.info("GridCoverage successfully deleted");
}
}
// remove the valid db coverages from the map
fileCoverageMap.keySet().removeAll(validDbCoverageNames);
// add new grids in bulk
for (GridCoverage cov : fileCoverageMap.values()) {
try {
putGrid(cov, true, false);
} catch (Exception e) {
logger.error(
"Failed to initialize grid definition " + cov.getName(),
e);
}
}
// bulk persist the spatial maps
if (spatialMap.size() > 0) {
dao.persistAll(spatialMap.values());
}
for (GridCoverage cov : baseCoverages) {
try {
putGrid(cov, false, false);
} catch (Exception e) {
logger.error(
"Failed to initialize grid definition " + cov.getName(),
e);
}
}
}
/**
* Handles a subgridded definition that has been added, deleted, or
* changed. A change or delete removes all records, models, and the
* coverage definition; a change or add then inserts a new coverage
* definition and also deletes any data associated with the base model
* definition.
*
* @param dao
* @param currentFdl
*/
private void processSubGrids(GridCoverageDao dao, FileDataList currentFdl) {
List<? extends GridCoverage> oldSubGridCoverages = dao.loadSubGrids();
Map<String, GridCoverage> fileSubGridCoverageMap = loadSubGridDefinitionsFromDisk(currentFdl);
// an update needs to delete all hdf5 just like a delete, so an
// update is treated as a delete followed by an add; this
// simplifies the logic and handles primary key changes
List<GridCoverage> coveragesToDelete = new LinkedList<GridCoverage>();
HashSet<String> validDbCoverageNames = new HashSet<String>(
(int) (oldSubGridCoverages.size() * 1.25) + 1);
Iterator<? extends GridCoverage> iter = oldSubGridCoverages.iterator();
while (iter.hasNext()) {
GridCoverage dbCov = iter.next();
GridCoverage fileCoverage = fileSubGridCoverageMap.get(dbCov
.getName());
if (!dbCov.equals(fileCoverage)) {
// coverage not in flat file or coverage has changed,
// delete coverage
coveragesToDelete.add(dbCov);
iter.remove();
} else {
// current coverage still valid
validDbCoverageNames.add(dbCov.getName());
}
}
// delete grids, models, coverages, and hdf5 for coveragesToDelete.
for (GridCoverage cov : coveragesToDelete) {
logger.info("Model "
+ cov.getSubGridModel()
+ " has changed subGrid definition, deleting out of date data");
if (!dao.deleteCoverageAssociatedData(cov, true)) {
logger.warn("Failed to delete GridCoverage " + cov.getName()
+ ". Manual intervention required.");
} else {
logger.info("GridModel successfully deleted");
}
}
// remove the valid db coverages from the map
fileSubGridCoverageMap.keySet().removeAll(validDbCoverageNames);
// need to delete model information for new adds, as old grid may not
// have been subgridded
GribModelDao modelDao = new GribModelDao();
for (GridCoverage cov : fileSubGridCoverageMap.values()) {
logger.info("Model "
+ cov.getSubGridModel()
+ " has changed subGrid definition, deleting out of date data");
// look up parent
if (modelDao.deleteModelAndAssociatedData(cov.getSubGridModel()) < 0) {
logger.warn("Failed to delete SubGrid Model "
+ cov.getSubGridModel()
+ ". Manual intervention required.");
} else {
logger.info("GridModel successfully deleted");
}
}
// add new grids, persisting individually
for (GridCoverage cov : fileSubGridCoverageMap.values()) {
try {
putGrid(cov, true, true);
subGridCoverageMap.put(cov.getSubGridModel(), cov.getId());
} catch (Exception e) {
logger.error(
"Failed to initialize grid definition " + cov.getName(),
e);
}
}
// put database grids into map
for (GridCoverage cov : oldSubGridCoverages) {
try {
putGrid(cov, true, true);
subGridCoverageMap.put(cov.getSubGridModel(), cov.getId());
} catch (Exception e) {
logger.error(
"Failed to initialize grid definition " + cov.getName(),
e);
}
}
}
private void processUnknownGrids(GridCoverageDao dao) {
List<? extends GridCoverage> unknownGrids = dao.loadUnknownGrids();
for (GridCoverage cov : unknownGrids) {
try {
GridCoverage dbCov = getGrid(cov);
if (!cov.getName().equals(dbCov.getName())) {
logger.info("Unknown grid " + cov.getName()
+ " is now mapped by " + dbCov.getName()
+ ". Deleting unknown grid");
dao.deleteCoverageAssociatedData(cov, true);
}
} catch (Exception e) {
logger.error("Error occurred scanning unknown grids", e);
}
}
}
private Map<String, GridCoverage> loadSubGridDefinitionsFromDisk(
FileDataList currentFdl) {
GribModelLookup gribModelLUT = GribModelLookup.getInstance();
List<FileData> subGridDefs = currentFdl.getSubGridFileList();
Map<String, GridCoverage> subGrids = null;
if (subGridDefs != null && subGridDefs.size() > 0) {
subGrids = new HashMap<String, GridCoverage>(subGridDefs.size() * 3);
Coordinate wfoCenterPoint = null;
String wfo = SiteUtil.getSite();
GetWfoCenterPoint centerPointRequest = new GetWfoCenterPoint(wfo);
try {
wfoCenterPoint = new GetWfoCenterHandler()
.handleRequest(centerPointRequest);
} catch (Exception e) {
logger.error(
"Failed to generate sub grid definitions. Unable to lookup WFO Center Point",
e);
return new HashMap<String, GridCoverage>(0);
}
for (FileData fd : subGridDefs) {
try {
SubGridDef subGridDef = loadSubGridDef(fd.getFilePath());
if (subGridDef != null) {
String referenceModel = subGridDef.getReferenceModel();
GridCoverage gridCoverage = getGrid(referenceModel);
if (gridCoverage != null) {
Coordinate wfoCenter = MapUtil
.latLonToGridCoordinate(wfoCenterPoint,
PixelOrientation.CENTER,
gridCoverage);
double xCenterPoint = wfoCenter.x;
double yCenterPoint = wfoCenter.y;
double xDistance = subGridDef.getNx() / 2;
double yDistance = subGridDef.getNy() / 2;
Coordinate lowerLeftPosition = new Coordinate(
xCenterPoint - xDistance, yCenterPoint
+ yDistance);
Coordinate upperRightPosition = new Coordinate(
xCenterPoint + xDistance, yCenterPoint
- yDistance);
lowerLeftPosition = MapUtil.gridCoordinateToLatLon(
lowerLeftPosition, PixelOrientation.CENTER,
gridCoverage);
upperRightPosition = MapUtil
.gridCoordinateToLatLon(upperRightPosition,
PixelOrientation.CENTER,
gridCoverage);
subGridDef.setLowerLeftLon(lowerLeftPosition.x);
subGridDef.setLowerLeftLat(lowerLeftPosition.y);
subGridDef.setUpperRightLon(upperRightPosition.x);
subGridDef.setUpperRightLat(upperRightPosition.y);
// verify numbers in -180 -> 180 range
subGridDef.setLowerLeftLon(MapUtil
.correctLon(subGridDef.getLowerLeftLon()));
subGridDef.setUpperRightLon(MapUtil
.correctLon(subGridDef.getUpperRightLon()));
// do a reverse lookup of the model name to get its
// associated grid id
for (String modelName : subGridDef.getModelNames()) {
GridModel model = gribModelLUT
.getModelByName(modelName);
if (model != null) {
GridCoverage baseCoverage = spatialNameMap
.get(model.getGrid().toString());
if (baseCoverage != null) {
SubGrid subGrid = new SubGrid();
subGrid.setModelName(modelName);
GridCoverage subGridCoverage = baseCoverage
.trim(subGridDef, subGrid);
if (subGridCoverage != null) {
subGrids.put(
subGridCoverage.getName(),
subGridCoverage);
definedSubGridMap.put(modelName,
subGrid);
}
}
}
}
} else {
logger.error("Failed to generate sub grid for "
+ fd.getFilePath()
+ ". Unable to determine coverage for referenceModel ["
+ referenceModel + "]");
}
}
} catch (Exception e) {
// Log error but do not throw exception
logger.error(
"Failed processing sub grid file: "
+ fd.getFilePath(), e);
}
}
} else {
subGrids = new HashMap<String, GridCoverage>(0);
}
return subGrids;
}
/**
* Loads and validates the SubGridDef pointed to by filePath. If the
* definition is empty or invalid, returns null.
*
* @param filePath
* @return the SubGridDef, or null if the file is empty or invalid
*/
private SubGridDef loadSubGridDef(String filePath) {
SubGridDef rval = null;
File f = new File(filePath);
if (f.length() > 0) {
try {
rval = (SubGridDef) SerializationUtil
.jaxbUnmarshalFromXmlFile(f);
if (rval.getReferenceModel() == null
|| rval.getModelNames() == null
|| rval.getModelNames().size() == 0) {
// sub grid didn't have required definitions
rval = null;
}
} catch (SerializationException e) {
logger.error("Failed reading sub grid file: " + filePath, e);
}
}
return rval;
}
private static boolean isDefinitionChanged(FileDataList previousFdl,
FileDataList currentFdl) {
boolean rval = true;
if (currentFdl != null) {
rval = !currentFdl.equals(previousFdl);
} else {
rval = previousFdl != null;
}
return rval;
}
private FileDataList generateFileDataList() {
/*
* Retrieve the list of files from the localization service
*/
IPathManager pm = PathManagerFactory.getPathManager();
FileDataList fileList = new FileDataList();
LocalizationContext[] contexts = pm
.getLocalSearchHierarchy(LocalizationType.EDEX_STATIC);
fileList.addCoverageFiles(pm.listFiles(contexts, "/grib/grids",
new String[] { "xml" }, true, true));
fileList.addSubGridFiles(pm.listFiles(contexts, "/grib/subgrids",
new String[] { "xml" }, true, true));
return fileList;
}
private FileDataList getPreviousFileDataList() {
IPathManager pm = PathManagerFactory.getPathManager();
File previousFileData = pm.getFile(pm.getContext(
LocalizationType.EDEX_STATIC, LocalizationLevel.CONFIGURED),
"/grib/gridDefFileListing.xml");
FileDataList rval = null;
if (previousFileData.exists() && previousFileData.length() > 0) {
try {
Object obj = SerializationUtil
.jaxbUnmarshalFromXmlFile(previousFileData);
if (obj instanceof FileDataList) {
rval = (FileDataList) obj;
} else {
logger.error("Error occurred deserializing "
+ previousFileData.getAbsolutePath()
+ ", expected type " + FileDataList.class
+ " received " + obj.getClass());
}
} catch (Exception e) {
logger.error(
"Error occurred deserializing "
+ previousFileData.getAbsolutePath(), e);
}
}
return rval;
}
private Map<String, GridCoverage> loadGridDefinitionsFromDisk(
FileDataList currentFdl) {
List<FileData> coverageFiles = currentFdl.getCoverageFileList();
Map<String, GridCoverage> fileCoverageMap = new HashMap<String, GridCoverage>(
(int) (coverageFiles.size() * 1.25) + 1);
/*
* Iterate over file list. Unmarshal to GridCoverage object
*/
for (FileData fd : coverageFiles) {
try {
GridCoverage grid = (GridCoverage) SerializationUtil
.jaxbUnmarshalFromXmlFile(fd.getFilePath());
GridCoverage previousGrid = fileCoverageMap.put(grid.getName(),
grid);
if (previousGrid != null) {
for (FileData fd2 : coverageFiles) {
GridCoverage grid2 = (GridCoverage) SerializationUtil
.jaxbUnmarshalFromXmlFile(fd2.getFilePath());
if (grid.getName().equals(grid2.getName())) {
logger.error("Grid " + grid.getName()
+ " has already been defined. "
+ fd2.getFilePath() + " and "
+ fd.getFilePath()
+ " have the same name. Using "
+ fd.getFilePath());
break;
}
}
}
} catch (Exception e) {
// Log error but do not throw exception
logger.error(
"Unable to read default grids file: "
+ fd.getFilePath(), e);
}
}
return fileCoverageMap;
}
private void saveFileDataList(FileDataList fdl) {
try {
IPathManager pm = PathManagerFactory.getPathManager();
LocalizationFile lf = pm.getLocalizationFile(
pm.getContext(LocalizationType.EDEX_STATIC,
LocalizationLevel.CONFIGURED),
"/grib/gridDefFileListing.xml");
SerializationUtil.jaxbMarshalToXmlFile(fdl, lf.getFile()
.getAbsolutePath());
lf.save();
} catch (Exception e) {
logger.error(
"Failed to save coverage file data list, coverages may be reloaded on next restart",
e);
}
}
public static void reinitialize() {
GribSpatialCache newInstance = new GribSpatialCache();
instance = newInstance;
}
}
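
A minimal usage sketch, not part of this commit: how a decoder might resolve a coverage through the cache above, assuming the AWIPS II classes in this file are on the classpath. The class and method names (GribSpatialCacheUsage, resolve) are illustrative; getInstance(), getGrid(GridCoverage), and putGrid(...) are the methods defined above.

import com.raytheon.edex.plugin.grib.spatial.GribSpatialCache;
import com.raytheon.uf.common.dataplugin.grib.exception.GribException;
import com.raytheon.uf.common.dataplugin.grib.spatial.projections.GridCoverage;

public class GribSpatialCacheUsage {
    /**
     * Resolves a decoded coverage against the cache. getGrid(GridCoverage)
     * already falls back to a database check on a cache miss; if that also
     * misses, the coverage is initialized and cached without being
     * persisted (persistToDb = false).
     */
    public static GridCoverage resolve(GridCoverage decoded) throws GribException {
        GribSpatialCache cache = GribSpatialCache.getInstance();
        GridCoverage known = cache.getGrid(decoded);
        if (known == null) {
            cache.putGrid(decoded, true, false);
            known = decoded;
        }
        return known;
    }
}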

PurgeDao.java (new file, 282 lines)

@@ -0,0 +1,282 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.purgesrv;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.hibernate.Query;
import org.hibernate.Session;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import com.raytheon.uf.edex.database.dao.CoreDao;
import com.raytheon.uf.edex.database.dao.DaoConfig;
/**
*
* Data access object for accessing purge job status objects
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* May 1, 2012 #470 bphillip Initial creation
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class PurgeDao extends CoreDao {
/**
* Constructs a new purge data access object
*/
public PurgeDao() {
super(DaoConfig.forClass(PurgeJobStatus.class));
}
/**
* Gets the number of purge jobs currently running on the cluster. A job is
* considered running if the 'running' flag is set to true and the job has
* been started since validStartTime and whose failure count has not
* exceeded failedCount.
*
* @param validStartTime
* @param failedCount
* @return The number of purge jobs currently running on the cluster
*/
public int getRunningClusterJobs(final Date validStartTime,
final int failedCount) {
final String query = "from "
+ daoClass.getName()
+ " obj where obj.running = true and obj.startTime > :startTime and obj.failedCount <= :failedCount";
return (Integer) txTemplate.execute(new TransactionCallback() {
@Override
public Object doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
hibQuery.setTimestamp("startTime", validStartTime);
hibQuery.setInteger("failedCount", failedCount);
List<?> queryResult = hibQuery.list();
if (queryResult == null) {
return 0;
} else {
return queryResult.size();
}
}
});
}
/**
* Returns the jobs that have met or exceeded the failed count.
*
* @param failedCount
* @return
*/
@SuppressWarnings("unchecked")
public List<PurgeJobStatus> getFailedJobs(final int failedCount) {
final String query = "from " + daoClass.getName()
+ " obj where obj.failedCount >= :failedCount";
return (List<PurgeJobStatus>) txTemplate
.execute(new TransactionCallback() {
@Override
public List<PurgeJobStatus> doInTransaction(
TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
hibQuery.setInteger("failedCount", failedCount);
return hibQuery.list();
}
});
}
@SuppressWarnings("unchecked")
public List<PurgeJobStatus> getTimedOutJobs(final Date validStartTime) {
final String query = "from "
+ daoClass.getName()
+ " obj where obj.running = true and obj.startTime <= :startTime";
return (List<PurgeJobStatus>) txTemplate
.execute(new TransactionCallback() {
@Override
public List<PurgeJobStatus> doInTransaction(
TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
hibQuery.setTimestamp("startTime", validStartTime);
return hibQuery.list();
}
});
}
@SuppressWarnings("unchecked")
public Map<String, List<PurgeJobStatus>> getRunningServerJobs() {
final String query = "from "
+ daoClass.getName()
+ " obj where obj.running = true and obj.timedOut = false and obj.failed = false and obj.id.server=':SERVER'";
return (Map<String, List<PurgeJobStatus>>) txTemplate
.execute(new TransactionCallback() {
@Override
public Map<String, List<PurgeJobStatus>> doInTransaction(
TransactionStatus status) {
Map<String, List<PurgeJobStatus>> serverMap = new HashMap<String, List<PurgeJobStatus>>();
Query serverQuery = getSession(false).createQuery(
"select distinct obj.id.server from "
+ daoClass.getName()
+ " obj order by obj.id.server asc");
List<String> result = serverQuery.list();
for (String server : result) {
Query query2 = getSession(false).createQuery(
query.replace(":SERVER", server));
serverMap.put(server, query2.list());
}
return serverMap;
}
});
}
/**
* Gets the amount of time in milliseconds since the last purge of a given
* plugin
*
* @param plugin
* The plugin name
* @return Number of milliseconds since the purge job was run for the given
* plugin
*/
public long getTimeSinceLastPurge(String plugin) {
final String query = "select obj.startTime from " + daoClass.getName()
+ " obj where obj.id.plugin='" + plugin + "'";
return (Long) txTemplate.execute(new TransactionCallback() {
@Override
public Object doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
Timestamp queryResult = (Timestamp) hibQuery.uniqueResult();
if (queryResult == null) {
return -1L; // must box as a Long; the result is cast to (Long) above
} else {
return System.currentTimeMillis() - queryResult.getTime();
}
}
});
}
/**
* Gets the purge job status for a plugin
*
* @param plugin
* The plugin to get the purge job status for
* @return The purge job statuses
*/
public PurgeJobStatus getJobForPlugin(String plugin) {
final String query = "from " + daoClass.getName()
+ " obj where obj.id.plugin='" + plugin + "'";
return (PurgeJobStatus) txTemplate.execute(new TransactionCallback() {
@Override
public PurgeJobStatus doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
PurgeJobStatus queryResult = (PurgeJobStatus) hibQuery
.uniqueResult();
return queryResult;
}
});
}
/**
* Sets a purge job to running status and sets the startTime to current
* time. If it was previously running, the failed count is incremented.
*
* @param plugin
* The plugin row to update
*/
public void startJob(final String plugin) {
final String query = "from " + daoClass.getName()
+ " obj where obj.id.plugin='" + plugin + "'";
txTemplate.execute(new TransactionCallback() {
@Override
public PurgeJobStatus doInTransaction(TransactionStatus status) {
Session sess = getSession(false);
Query hibQuery = sess.createQuery(query);
PurgeJobStatus queryResult = (PurgeJobStatus) hibQuery
.uniqueResult();
if (queryResult == null) {
queryResult = new PurgeJobStatus();
queryResult.setFailedCount(0);
queryResult.setPlugin(plugin);
queryResult.setRunning(false);
sess.save(queryResult);
}
if (queryResult.isRunning()) {
// job was previously running, update the failed count
queryResult.incrementFailedCount();
}
queryResult.setStartTime(Calendar.getInstance(
TimeZone.getTimeZone("GMT")).getTime());
queryResult.setRunning(true);
sess.update(queryResult);
return queryResult;
}
});
}
/**
* Retrieves the plugin names ordered by purge start time, ascending.
*
* @return The list of plugin names
*/
@SuppressWarnings("unchecked")
public List<String> getPluginsByPurgeTime() {
final String query = "select obj.id.plugin from " + daoClass.getName()
+ " obj order by obj.startTime asc, obj.plugin asc";
return (List<String>) txTemplate.execute(new TransactionCallback() {
@Override
public List<String> doInTransaction(TransactionStatus status) {
Query hibQuery = getSession(false).createQuery(query);
List<String> result = (List<String>) hibQuery.list();
return result;
}
});
}
/**
* Updates a purge job status object
*
* @param jobStatus
* The object to update
*/
public void update(final PurgeJobStatus jobStatus) {
txTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
public void doInTransactionWithoutResult(TransactionStatus status) {
getHibernateTemplate().update(jobStatus);
}
});
}
}
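
A sketch of querying purge status with the DAO above, not part of this commit; it assumes a configured EDEX database. The PurgeStatusCheck class is illustrative, and the 20-minute window and failure limit of 3 mirror the PurgeManager defaults later in this commit.

import java.util.Calendar;
import java.util.TimeZone;

import com.raytheon.uf.edex.purgesrv.PurgeDao;

public class PurgeStatusCheck {
    public static void main(String[] args) {
        PurgeDao dao = new PurgeDao();
        // Jobs started within the last 20 minutes count as running.
        Calendar validStart = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        validStart.add(Calendar.MINUTE, -20);
        int running = dao.getRunningClusterJobs(validStart.getTime(), 3);
        System.out.println(running + " purge job(s) running on the cluster");
        long sinceLast = dao.getTimeSinceLastPurge("grib");
        System.out.println(sinceLast < 0
                ? "grib has never been purged"
                : "grib last purged " + (sinceLast / 60000) + " minutes ago");
    }
}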

PurgeJob.java (new file, 302 lines)

@@ -0,0 +1,302 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.purgesrv;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.TimeZone;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.plugin.PluginDao;
import com.raytheon.uf.edex.database.plugin.PluginFactory;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
/**
*
* This class encapsulates the purge activity for a plugin into a cluster task.
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 19, 2012 #470 bphillip Initial creation
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class PurgeJob extends Thread {
/** The type of purge */
public enum PURGE_JOB_TYPE {
PURGE_ALL, PURGE_EXPIRED
}
private long startTime;
/** The cluster task name to use for purge jobs */
public static final String TASK_NAME = "Purge Plugin Data";
/** The plugin associated with this purge job */
private String pluginName;
/** The type of purge job being executed */
private PURGE_JOB_TYPE purgeType;
/** Last time job has printed a timed out message */
private long lastTimeOutMessage = 0;
/**
* Creates a new Purge job for the specified plugin.
*
* @param pluginName
* The plugin to be purged
* @param purgeType
* The type of purge to be executed
*/
public PurgeJob(String pluginName, PURGE_JOB_TYPE purgeType) {
// Give the thread a name
this.setName("Purge-" + pluginName.toUpperCase() + "-Thread");
this.pluginName = pluginName;
this.purgeType = purgeType;
}
@Override
public void run() {
// Flag used to track if this job has failed
boolean failed = false;
startTime = System.currentTimeMillis();
PurgeLogger.logInfo("Purging expired data...", pluginName);
PluginDao dao = null;
try {
dao = PluginFactory.getInstance().getPluginDao(pluginName);
if (dao.getDaoClass() != null) {
dao.purgeExpiredData();
PurgeLogger.logInfo("Data successfully Purged!", pluginName);
} else {
Method m = dao.getClass().getMethod("purgeExpiredData",
new Class[] {});
if (m != null) {
if (m.getDeclaringClass().equals(PluginDao.class)) {
PurgeLogger
.logWarn(
"Unable to purge data. This plugin does not specify a record class and does not implement a custom purger.",
pluginName);
} else {
if (this.purgeType.equals(PURGE_JOB_TYPE.PURGE_EXPIRED)) {
dao.purgeExpiredData();
} else {
dao.purgeAllData();
}
PurgeLogger.logInfo("Data successfully Purged!",
pluginName);
}
}
}
} catch (Exception e) {
failed = true;
// walk the cause chain, logging each SQLException's
// getNextException() so the underlying database error is visible
PurgeLogger
.logError("Error purging expired data!\n", pluginName, e);
Throwable t = e.getCause();
while (t != null) {
if (t instanceof SQLException) {
SQLException se = ((SQLException) t).getNextException();
PurgeLogger.logError("Next exception:", pluginName, se);
}
t = t.getCause();
}
} finally {
ClusterTask purgeLock = PurgeManager.getInstance().getPurgeLock();
try {
/*
* Update the status accordingly if the purge failed or
* succeeded
*/
PurgeDao purgeDao = new PurgeDao();
PurgeJobStatus status = purgeDao
.getJobForPlugin(this.pluginName);
if (status == null) {
PurgeLogger.logError(
"Purge job completed but no status object found!",
this.pluginName);
} else {
if (failed) {
status.incrementFailedCount();
if (status.getFailedCount() >= PurgeManager
.getInstance().getFatalFailureCount()) {
PurgeLogger
.logFatal(
"Purger for this plugin has reached or exceeded consecutive failure limit of "
+ PurgeManager
.getInstance()
.getFatalFailureCount()
+ ". Data will no longer be purged for this plugin.",
pluginName);
} else {
PurgeLogger.logError("Purge job has failed "
+ status.getFailedCount()
+ " consecutive times.", this.pluginName);
// Back the start time off by half an hour so the purge
// is retried soon; don't start immediately so it doesn't
// ping-pong between servers in a time-out scenario
Date startTime = status.getStartTime();
startTime.setTime(startTime.getTime() - (1800000));
}
} else {
status.setFailedCount(0);
}
/*
* If this purger thread exceeded the time out duration but
* finally finished, log the recovery and update the status
*/
int deadPurgeJobAge = PurgeManager.getInstance()
.getDeadPurgeJobAge();
Calendar purgeTimeOutLimit = Calendar.getInstance();
purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
if (startTime < purgeTimeOutLimit.getTimeInMillis()) {
PurgeLogger
.logInfo(
"Purge job has recovered from timed out state!!",
pluginName);
}
status.setRunning(false);
purgeDao.update(status);
/*
* Log execution times
*/
long executionTime = getAge();
long execTimeInMinutes = executionTime / 60000;
if (execTimeInMinutes > 0) {
PurgeLogger.logInfo("Purge run time: " + executionTime
+ " ms (" + execTimeInMinutes + " minutes)",
this.pluginName);
} else {
PurgeLogger.logInfo("Purge run time: " + executionTime
+ " ms", this.pluginName);
}
}
} catch (Throwable e) {
PurgeLogger
.logError(
"An unexpected error occurred upon completion of the purge job",
this.pluginName, e);
} finally {
ClusterLockUtils.unlock(purgeLock, false);
}
}
}
public void printTimedOutMessage(int deadPurgeJobAge) {
// only print message every 5 minutes
if (System.currentTimeMillis() - lastTimeOutMessage > 300000) {
PurgeLogger.logFatal(
"Purger running time has exceeded timeout duration of "
+ deadPurgeJobAge
+ " minutes. Current running time: "
+ (getAge() / 60000) + " minutes", pluginName);
printStackTrace();
}
}
/**
* Prints the stack trace for this job thread.
*/
public void printStackTrace() {
StringBuffer buffer = new StringBuffer();
buffer.append("Stack trace for Purge Job Thread:\n");
buffer.append(getStackTrace(this));
// If this thread is blocked, output the stack traces for the
// other blocked threads to assist in determining the source
// of the deadlocked threads
if (this.getState().equals(State.BLOCKED)) {
buffer.append("\tDUMPING OTHER BLOCKED THREADS\n");
buffer.append(getBlockedStackTraces());
}
PurgeLogger.logError(buffer.toString(), this.pluginName);
}
/**
* Gets the stack traces for all other threads in the BLOCKED state in the
* JVM
*
* @return The stack traces for all other threads in the BLOCKED state in
* the JVM
*/
private String getBlockedStackTraces() {
StringBuffer buffer = new StringBuffer();
Map<Thread, StackTraceElement[]> threads = Thread.getAllStackTraces();
for (Thread t : threads.keySet()) {
if (t.getState().equals(State.BLOCKED)) {
if (t.getId() != this.getId()) {
buffer.append(getStackTrace(t));
}
}
}
return buffer.toString();
}
/**
* Gets the stack trace for the given thread
*
* @param thread
* The thread to get the stack trace for
* @return The stack trace as a String
*/
private String getStackTrace(Thread thread) {
StringBuffer buffer = new StringBuffer();
StackTraceElement[] stack = Thread.getAllStackTraces().get(thread);
buffer.append("\tThread ID: ").append(thread.getId())
.append(" Thread state: ").append(thread.getState())
.append("\n");
if (stack == null) {
buffer.append("No stack trace could be retrieved for this thread");
} else {
for (int i = 0; i < stack.length; i++) {
buffer.append("\t\t").append(stack[i]).append("\n");
}
}
return buffer.toString();
}
public long getStartTime() {
return startTime;
}
public long getAge() {
return System.currentTimeMillis() - startTime;
}
}
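
A sketch of a caller-side watchdog for the job thread above, not part of this commit. PurgeManager is the next file in this commit; purgeExpiredData(String), getDeadPurgeJobAge(), getAge(), and printTimedOutMessage(int) all exist in these files, while the PurgeWatchdog class itself is illustrative.

import com.raytheon.uf.edex.purgesrv.PurgeJob;
import com.raytheon.uf.edex.purgesrv.PurgeManager;

public class PurgeWatchdog {
    public static void watch(String plugin) throws InterruptedException {
        PurgeManager mgr = PurgeManager.getInstance();
        PurgeJob job = mgr.purgeExpiredData(plugin);
        int deadAgeMinutes = mgr.getDeadPurgeJobAge();
        while (job.isAlive()) {
            Thread.sleep(60000L);
            if (job.getAge() > deadAgeMinutes * 60000L) {
                // printTimedOutMessage rate-limits itself to one log
                // every five minutes and dumps the job's stack trace.
                job.printTimedOutMessage(deadAgeMinutes);
            }
        }
    }
}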

PurgeManager.java (new file, 488 lines)

@@ -0,0 +1,488 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.uf.edex.purgesrv;
import java.lang.Thread.State;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import com.raytheon.uf.edex.core.dataplugin.PluginRegistry;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils;
import com.raytheon.uf.edex.database.cluster.ClusterLockUtils.LockState;
import com.raytheon.uf.edex.database.cluster.ClusterTask;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
import com.raytheon.uf.edex.database.status.StatusConstants;
import com.raytheon.uf.edex.purgesrv.PurgeJob.PURGE_JOB_TYPE;
/**
*
* Object for managing purge jobs. The purge manager relies on the purgejobs
* table to coordinate information. The executePurge() method on this class is
* executed every minute via a quartz timer defined in the purge-spring.xml
* Spring configuration file.
* <p>
* The purge manager is designed to adhere to the following rules:
* <p>
* · The cluster may have no more than 6 purge jobs running simultaneously by
* default. This property is configurable in the project.properties file<br>
* · Any given server may have no more than 2 purge jobs running simultaneously
* by default. This property is configurable in the project.properties file<br>
* · A purge job for a plugin is considered 'hung' if it has been running for
* more than 20 minutes by default. This property is configurable in the
* project.properties file <br>
* · If a purge job that was previously determined to be hung actually finishes
* its execution, the cluster lock is updated appropriately and the purge job
* is able to resume normal operation. This is in place so if a hung purge
* process goes unnoticed for a period of time, the server will still try to
* recover autonomously if it can. <br>
* · If a purge job is determined to be hung, the stack trace for the thread
* executing the job is output to the log. Furthermore, if the job is in the
* BLOCKED state, the stack traces for all other BLOCKED threads are output to
* the purge log as part of a rudimentary deadlock detection strategy to be used
* by personnel attempting to remedy the situation.<br>
* · By default, a fatal condition occurs if a given plugin's purge job fails 3
* consecutive times.<br>
* · If a purge job hangs on one server in the cluster, it will try to run on
* another cluster member at the next purge interval.<br>
* · If the purge manager attempts to purge a plugin that has been running for
* longer than the 20 minute threshold, it is considered a failure, and the
* failure count is updated.
* <p>
*
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Apr 18, 2012 #470 bphillip Initial creation
*
* </pre>
*
* @author bphillip
* @version 1.0
*/
public class PurgeManager {
/** Purge Manager task name */
private static final String PURGE_TASK_NAME = "Purge Manager";
/** Purge Manager task details */
private static final String PURGE_TASK_DETAILS = "Purge Manager Job";
/** Purge Manager task override timeout. Currently 2 minutes */
private static final long PURGE_MANAGER_TIMEOUT = 120000;
/**
* The cluster limit property to be set via Spring with the value defined in
* project.properties
*/
private int clusterLimit = 6;
/**
* The server limit property to be set via Spring with the value defined in
* project.properties
*/
private int serverLimit = 2;
/**
* The time in minutes at which a purge job is considered 'dead' or 'hung'
* set via Spring with the value defined in project.properties
*/
private int deadPurgeJobAge = 20;
/**
* The frequency, in minutes, that a plugin may be purged set via Spring
* with the value defined in project.properties
*/
private int purgeFrequency = 60;
/**
* How many times a purger is allowed to fail before it is considered fatal.
* Set via Spring with the value defined in project.properties
*/
private int fatalFailureCount = 3;
/**
* The master switch defined in project.properties that enables and disables
* data purging
*/
private boolean purgeEnabled = true;
/** Map of purge jobs */
private Map<String, PurgeJob> purgeJobs = new ConcurrentHashMap<String, PurgeJob>();
private PurgeDao dao = new PurgeDao();
private static PurgeManager instance = new PurgeManager();
public static PurgeManager getInstance() {
return instance;
}
/**
* Creates a new PurgeManager
*/
private PurgeManager() {
}
/**
* Executes the purge routine
*/
public void executePurge() {
if (!purgeEnabled) {
PurgeLogger.logWarn(
"Data purging has been disabled. No data will be purged.",
null);
return;
}
ClusterTask purgeMgrTask = getPurgeLock();
try {
// Prune the job map
Iterator<PurgeJob> iter = purgeJobs.values().iterator();
while (iter.hasNext()) {
if (!iter.next().isAlive()) {
iter.remove();
}
}
Calendar purgeTimeOutLimit = Calendar.getInstance();
purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
Calendar purgeFrequencyLimit = Calendar.getInstance();
purgeFrequencyLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
purgeFrequencyLimit.add(Calendar.MINUTE, -purgeFrequency);
// Gets the list of plugins in ascending order by the last time they
// were purged
List<String> pluginList = dao.getPluginsByPurgeTime();
// check for any new plugins, or for a purged database whose
// entries need to be recreated
Set<String> availablePlugins = new HashSet<String>(PluginRegistry
.getInstance().getRegisteredObjects());
// Merge the lists
availablePlugins.removeAll(pluginList);
if (availablePlugins.size() > 0) {
// generate new list with them at the beginning
List<String> newSortedPlugins = new ArrayList<String>(
availablePlugins);
Collections.sort(newSortedPlugins);
newSortedPlugins.addAll(pluginList);
pluginList = newSortedPlugins;
}
boolean canPurge = true;
int jobsStarted = 0;
int maxNumberOfJobsToStart = Math.min(
clusterLimit
- dao.getRunningClusterJobs(
purgeTimeOutLimit.getTime(),
fatalFailureCount), serverLimit
- getNumberRunningJobsOnServer(purgeTimeOutLimit));
for (String plugin : pluginList) {
try {
// initialize canPurge based on number of jobs started
canPurge = jobsStarted < maxNumberOfJobsToStart;
PurgeJob jobThread = purgeJobs.get(plugin);
PurgeJobStatus job = dao.getJobForPlugin(plugin);
if (job == null) {
// no job in database, generate empty job
try {
job = new PurgeJobStatus();
job.setPlugin(plugin);
job.setFailedCount(0);
job.setRunning(false);
job.setStartTime(new Date(0));
dao.create(job);
} catch (Throwable e) {
PurgeLogger.logError(
"Failed to create new purge job entry",
plugin, e);
}
}
// Check to see if this job has met the fatal failure count
if (job.getFailedCount() >= fatalFailureCount) {
canPurge = false;
PurgeLogger
.logFatal(
"Purger for this plugin has reached or exceeded consecutive failure limit of "
+ fatalFailureCount
+ ". Data will no longer be purged for this plugin.",
plugin);
}
// is purge job currently running on this server
if (jobThread != null) {
// job currently running on our server, don't start
// another
canPurge = false;
if (purgeTimeOutLimit.getTimeInMillis() > jobThread
.getStartTime()) {
jobThread.printTimedOutMessage(deadPurgeJobAge);
}
} else {
if (job.isRunning()) {
// check if job has timed out
if (purgeTimeOutLimit.getTime().before(
job.getStartTime())) {
canPurge = false;
}
// otherwise, if nothing else sets canPurge = false,
// purging will start on this server
} else {
// not currently running, check if need to be purged
Date startTime = job.getStartTime();
if (startTime != null
&& startTime.after(purgeFrequencyLimit
.getTime())) {
canPurge = false;
}
}
}
if (canPurge) {
purgeJobs.put(plugin, purgeExpiredData(plugin));
jobsStarted++;
}
} catch (Throwable e) {
PurgeLogger
.logError(
"An unexpected error occurred during the purge job check for plugin",
plugin, e);
}
}
} catch (Throwable e) {
PurgeLogger
.logError(
"An unexpected error occurred during the data purge process",
StatusConstants.CATEGORY_PURGE, e);
} finally {
// Unlock the purge task to allow other servers to run.
ClusterLockUtils.unlock(purgeMgrTask, false);
// PurgeLogger.logInfo(getPurgeStatus(true), null);
}
}
@SuppressWarnings("unused")
private String getPurgeStatus(boolean verbose) {
Calendar purgeTimeOutLimit = Calendar.getInstance();
purgeTimeOutLimit.setTimeZone(TimeZone.getTimeZone("GMT"));
purgeTimeOutLimit.add(Calendar.MINUTE, -deadPurgeJobAge);
StringBuilder builder = new StringBuilder();
List<PurgeJobStatus> failedJobs = dao.getFailedJobs(fatalFailureCount);
List<PurgeJobStatus> timedOutJobs = dao
.getTimedOutJobs(purgeTimeOutLimit.getTime());
int clusterJobs = dao.getRunningClusterJobs(
purgeTimeOutLimit.getTime(), fatalFailureCount);
Map<String, List<PurgeJobStatus>> serverMap = dao
.getRunningServerJobs();
builder.append("\nPURGE JOB STATUS:");
builder.append("\n\tTotal Jobs Running On Cluster: ").append(
clusterJobs);
List<PurgeJobStatus> jobs = null;
for (String server : serverMap.keySet()) {
jobs = serverMap.get(server);
builder.append("\n\tJobs Running On ").append(server).append(": ")
.append(jobs.size());
if (verbose && !jobs.isEmpty()) {
builder.append(" Plugins: ");
for (int i = 0; i < jobs.size(); i++) {
builder.append(jobs.get(i).getPlugin());
if (i != jobs.size() - 1) {
builder.append(",");
}
}
}
}
if (verbose) {
builder.append("\n\tFailed Jobs: ");
if (failedJobs.isEmpty()) {
builder.append("0");
} else {
PurgeJobStatus currentJob = null;
for (int i = 0; i < failedJobs.size(); i++) {
currentJob = failedJobs.get(i);
builder.append(currentJob.getPlugin());
if (i != failedJobs.size() - 1) {
builder.append(",");
}
}
}
builder.append("\n\tTimed Out Jobs: ");
if (timedOutJobs.isEmpty()) {
builder.append("0");
} else {
PurgeJobStatus currentJob = null;
for (int i = 0; i < timedOutJobs.size(); i++) {
currentJob = timedOutJobs.get(i);
builder.append(currentJob.getPlugin());
if (i != timedOutJobs.size() - 1) {
builder.append(",");
}
}
}
}
return builder.toString();
}
public ClusterTask getPurgeLock() {
// Lock so only one cluster member may start purge processes
ClusterTask purgeMgrTask = ClusterLockUtils.lock(PURGE_TASK_NAME,
PURGE_TASK_DETAILS, PURGE_MANAGER_TIMEOUT, true);
LockState purgeMgrLockState = purgeMgrTask.getLockState();
switch (purgeMgrLockState) {
case FAILED:
PurgeLogger.logError(
"Purge Manager failed to acquire cluster task lock",
StatusConstants.CATEGORY_PURGE);
return null;
case OLD:
PurgeLogger.logWarn("Purge Manager acquired old cluster task lock",
StatusConstants.CATEGORY_PURGE);
break;
case ALREADY_RUNNING:
PurgeLogger
.logWarn(
"Purge Manager acquired currently running cluster task lock",
StatusConstants.CATEGORY_PURGE);
return null;
case SUCCESSFUL:
break;
}
return purgeMgrTask;
}
private int getNumberRunningJobsOnServer(Calendar timeOutTime) {
int rval = 0;
for (PurgeJob job : purgeJobs.values()) {
// if job has not timed out or if the job is not blocked consider it
// running on this server
if (timeOutTime.getTimeInMillis() < job.getStartTime()
|| !job.getState().equals(State.BLOCKED)) {
rval++;
}
}
return rval;
}
/**
* Starts a purge expired data job for the specified plugin. Using this
* method allows for exceeding failure count via a manual purge as well as
* kicking off a second purge for one already running on a server.
*
* @param plugin
* The plugin to purge the expired data for
* @return The PurgeJob that was started
*/
public PurgeJob purgeExpiredData(String plugin) {
dao.startJob(plugin);
PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_EXPIRED);
job.start();
return job;
}
/**
* Starts a purge all data job for the specified plugin. Using this method
* allows for exceeding failure count via a manual purge as well as kicking
* off a second purge for one already running on a server.
*
* @param plugin
* The plugin to purge all data for
* @return The PurgeJob that was started
*/
public PurgeJob purgeAllData(String plugin) {
dao.startJob(plugin);
PurgeJob job = new PurgeJob(plugin, PURGE_JOB_TYPE.PURGE_ALL);
job.start();
return job;
}
public int getClusterLimit() {
return clusterLimit;
}
public void setClusterLimit(int clusterLimit) {
this.clusterLimit = clusterLimit;
}
public int getServerLimit() {
return serverLimit;
}
public void setServerLimit(int serverLimit) {
this.serverLimit = serverLimit;
}
public int getDeadPurgeJobAge() {
return deadPurgeJobAge;
}
public void setDeadPurgeJobAge(int deadPurgeJobAge) {
this.deadPurgeJobAge = deadPurgeJobAge;
}
public int getPurgeFrequency() {
return purgeFrequency;
}
public void setPurgeFrequency(int purgeFrequency) {
this.purgeFrequency = purgeFrequency;
}
public int getFatalFailureCount() {
return this.fatalFailureCount;
}
public void setFatalFailureCount(int fatalFailureCount) {
this.fatalFailureCount = fatalFailureCount;
}
public void setPurgeEnabled(boolean purgeEnabled) {
this.purgeEnabled = purgeEnabled;
}
public boolean getPurgeEnabled() {
return purgeEnabled;
}
}
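
The javadoc above notes that these limits are normally injected from project.properties via Spring; here is a sketch, not part of this commit, of wiring the same defaults programmatically using only the setters defined above. The PurgeManagerSetup class is illustrative; the values shown are the field initializers' defaults.

import com.raytheon.uf.edex.purgesrv.PurgeManager;

public class PurgeManagerSetup {
    public static void configureAndRun() {
        PurgeManager mgr = PurgeManager.getInstance();
        mgr.setClusterLimit(6);      // max purge jobs cluster-wide
        mgr.setServerLimit(2);       // max purge jobs per server
        mgr.setDeadPurgeJobAge(20);  // minutes before a job counts as hung
        mgr.setPurgeFrequency(60);   // minutes between purges of one plugin
        mgr.setFatalFailureCount(3); // consecutive failures before fatal
        mgr.setPurgeEnabled(true);
        mgr.executePurge();          // normally fired every minute by quartz
    }
}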

@@ -5,6 +5,7 @@
     <available file="../../../../../build.edex"
         property="build.dir.location"
         value="../../../../../build.edex"/>
+    <available file="../build.edex" property="build.dir.location" value="../build.edex"/>
     <import file="${build.dir.location}/basebuilds/component_deploy_base.xml" />
 </project>

@@ -32,6 +32,20 @@ import com.raytheon.rcm.message.GSM;
 import com.raytheon.rcm.request.Request;
 import com.raytheon.rcm.request.RpsList;
+/**
+ * Utility class for dealing with AWIPS 1 RPS lists.
+ *
+ * <pre>
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * 2009                    dfriedma    Initial version
+ * 2012-04-30   DR 14908   D. Friedman Require radar name for valid RPS
+ *                                     file names.
+ * </pre>
+ *
+ */
 public class Awips1RpsListUtil {
     // is 'maint' an opMode??
     public static class Selector {
@@ -187,7 +201,7 @@ public class Awips1RpsListUtil {
     }
     protected static final Pattern selectorPattern = Pattern
-            .compile("^(?:(.+)\\.)?(.+)\\.VCP(\\d+)(?:\\.(.*))?$");
+            .compile("^(.+)\\.(.+)\\.VCP(\\d+)(?:\\.(.*))?$");
     protected static final Pattern maintPattern = Pattern
             .compile("^([^\\.]+)\\.maint(?:\\.(.*))?$");
@@ -197,9 +211,7 @@ public class Awips1RpsListUtil {
         if (m.matches()) {
             Selector sel = new Selector();
-            if (m.group(1) != null) {
-                sel.radar = m.group(1).toLowerCase();
-            }
+            sel.radar = m.group(1).toLowerCase();
             String opModeString = m.group(2).toLowerCase();
             if (opModeString.equals("clear-air"))
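
A small standalone check of the pattern change above, not part of this commit: the old pattern made the leading radar name optional, while the new one requires it, matching the DR 14908 history entry. The class name is illustrative; the pattern string is copied from the added line of the diff.

import java.util.regex.Pattern;

public class SelectorPatternCheck {
    // New pattern from the diff above: the radar name group is mandatory.
    private static final Pattern NEW_PATTERN = Pattern
            .compile("^(.+)\\.(.+)\\.VCP(\\d+)(?:\\.(.*))?$");

    public static void main(String[] args) {
        // Radar name present: matches under both old and new patterns.
        System.out.println(NEW_PATTERN.matcher("koax.storm.VCP212").matches()); // true
        // No radar name: the old optional group accepted this; rejected now.
        System.out.println(NEW_PATTERN.matcher("storm.VCP212").matches()); // false
    }
}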

@@ -48,6 +48,15 @@ import com.raytheon.rcm.server.Log;
 * com.raytheon.rcm.config.awips1.FXA_LOCAL_SITE property.
 *
 * Note: Does not recognize the FILE_SERVER_DEFAULT_PATHS environment variable.
+ *
+ * <pre>
+ * SOFTWARE HISTORY
+ *
+ * Date         Ticket#    Engineer    Description
+ * ------------ ---------- ----------- --------------------------
+ * 2009                    dfriedma    Initial version
+ * 2012-04-30   DR 14904   D. Friedman Add backup links to dial ORPGs.
+ * </pre>
 */
 public class Awips1ConfigProvider implements ConfigurationProvider {
@@ -410,10 +419,8 @@ public class Awips1ConfigProvider implements ConfigurationProvider {
             String radarName = ls.next().toLowerCase();
             int nexradId = ls.nextInt();
-            RadarConfig rc = radars.get(radarName.toLowerCase()); //config.getConfigForRadar(radarName);
-            // so only getting entries for current purpose .. does not
-            // allow easy switching
-            if (rc == null || rc.isDedicated() != dedicated)
+            RadarConfig rc = radars.get(radarName.toLowerCase());
+            if (rc == null)
                 continue;
             if (nexradId != rc.getNexradID()) {
                 // warn...
@@ -428,8 +435,6 @@ public class Awips1ConfigProvider implements ConfigurationProvider {
             lr.setLinkIndex(ls.nextInt());
             lr.setTcmPassword(ls.next());
             lr.setDedicated(dedicated);
-            // TODO: do something with max rps size?
-            // lr.setBackup(backup);
             if (dedicated) {
                 lr.setMaxRpsListSize(ls.nextInt());

TextDao.java (new file, 72 lines)

@@ -0,0 +1,72 @@
/**
* This software was developed and / or modified by Raytheon Company,
* pursuant to Contract DG133W-05-CQ-1067 with the US Government.
*
* U.S. EXPORT CONTROLLED TECHNICAL DATA
* This software product contains export-restricted data whose
* export/transfer/disclosure is restricted by U.S. law. Dissemination
* to non-U.S. persons whether in the United States or abroad requires
* an export license or other authorization.
*
* Contractor Name: Raytheon Company
* Contractor Address: 6825 Pine Street, Suite 340
* Mail Stop B8
* Omaha, NE 68106
* 402.291.0100
*
* See the AWIPS II Master Rights File ("Master Rights File.pdf") for
* further licensing information.
**/
package com.raytheon.edex.plugin.text.dao;
import java.util.Calendar;
import com.raytheon.edex.db.dao.DefaultPluginDao;
import com.raytheon.edex.textdb.dbapi.impl.TextDB;
import com.raytheon.uf.common.dataplugin.PluginException;
import com.raytheon.uf.edex.database.purge.PurgeLogger;
/**
* DAO for text products
*
* <pre>
*
* SOFTWARE HISTORY
*
* Date Ticket# Engineer Description
* ------------ ---------- ----------- --------------------------
* Jul 10, 2009 2191 rjpeter Update retention time handling.
* Aug 18, 2009 2191 rjpeter Changed to version purging.
* </pre>
*
* @author
* @version 1
*/
public class TextDao extends DefaultPluginDao {
public TextDao(String pluginName) throws PluginException {
super(pluginName);
}
@Override
public void purgeAllData() {
logger.warn("purgeAllData is not implemented for text. No data will be purged.");
}
protected void loadScripts() throws PluginException {
// no op
}
@Override
public void purgeExpiredData() throws PluginException {
int deletedRecords = 0;
// only do full purge every few hours since incremental purge runs every
// minute
if (Calendar.getInstance().get(Calendar.HOUR_OF_DAY) % 3 == 0) {
TextDB.purgeStdTextProducts();
}
PurgeLogger.logInfo("Purged " + deletedRecords + " items total.",
"text");
}
}
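
The purgeExpiredData override above runs the expensive full purge only when the hour of day is a multiple of three, while the incremental purge runs every minute. A standalone sketch of that cadence test, not part of this commit; the PurgeCadence class is illustrative:

import java.util.Calendar;

public class PurgeCadence {
    // Same test as TextDao.purgeExpiredData(): true at 00, 03, 06, ... hours.
    public static boolean fullPurgeDue(Calendar now) {
        return now.get(Calendar.HOUR_OF_DAY) % 3 == 0;
    }

    public static void main(String[] args) {
        System.out.println(fullPurgeDue(Calendar.getInstance()));
    }
}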

after.txt (new file, 13 lines)

@@ -0,0 +1,13 @@
-rw-r--r-- 1 dmsys dmtool 94518 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/FFMPMonitor.java
-rw-r--r-- 1 dmsys dmtool 7156 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java
-rw-r--r-- 1 dmsys dmtool 71285 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FfmpBasinTableDlg.java
-rw-r--r-- 1 dmsys dmtool 9851 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPColorUtils.java
-rw-r--r-- 1 dmsys dmtool 40157 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java
-rw-r--r-- 1 dmsys dmtool 18611 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataLoader.java
-rw-r--r-- 1 dmsys dmtool 147202 May 17 14:24 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java
-rw-r--r-- 1 dmsys dmtool 14664 May 17 14:24 edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPDataContainer.java
-rw-r--r-- 1 dmsys dmtool 26923 May 17 14:24 edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPRecord.java
-rw-r--r-- 1 dmsys dmtool 61981 May 17 14:24 edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java
-rw-r--r-- 1 dmsys dmtool 17730 May 17 14:24 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/FFMPURIFilter.java
-rw-r--r-- 1 dmsys dmtool 65982 May 17 14:24 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java
-rw-r--r-- 1 dmsys dmtool 36163 May 17 14:24 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFTI.java

5
after_purge.out Normal file
View file

@ -0,0 +1,5 @@
-rw-r--r-- 1 dmsys dmtool 24661 May 24 17:52 edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/spatial/GribSpatialCache.java
-rw-r--r-- 1 dmsys dmtool 2197 May 24 17:52 edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/dao/TextDao.java
-rw-r--r-- 1 dmsys dmtool 9250 May 24 17:53 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeDao.java
-rw-r--r-- 1 dmsys dmtool 9574 May 24 17:53 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java
-rw-r--r-- 1 dmsys dmtool 15681 May 24 17:53 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java

13
before.txt Normal file
View file

@ -0,0 +1,13 @@
-rw-r--r-- 1 dmsys dmtool 95993 May 10 11:41 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/FFMPMonitor.java
-r--r--r-- 1 dmsys dmtool 7016 Nov 10 2011 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FFMPTableCellData.java
-rw-r--r-- 1 dmsys dmtool 71722 May 10 11:41 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/dialogs/FfmpBasinTableDlg.java
-r--r--r-- 1 dmsys dmtool 10752 Dec 7 15:05 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPColorUtils.java
-rw-r--r-- 1 dmsys dmtool 40273 May 10 11:41 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataGenerator.java
-r--r--r-- 1 dmsys dmtool 19531 Jan 31 07:54 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPDataLoader.java
-rw-r--r-- 1 dmsys dmtool 147364 May 10 11:41 cave/com.raytheon.uf.viz.monitor.ffmp/src/com/raytheon/uf/viz/monitor/ffmp/ui/rsc/FFMPResource.java
-rw-r--r-- 1 dmsys dmtool 15108 May 10 11:41 edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPDataContainer.java
-r--r--r-- 1 dmsys dmtool 27099 Apr 16 08:06 edexOsgi/com.raytheon.uf.common.dataplugin.ffmp/src/com/raytheon/uf/common/dataplugin/ffmp/FFMPRecord.java
-r--r--r-- 1 dmsys dmtool 61329 Feb 24 14:37 edexOsgi/com.raytheon.uf.common.monitor/src/com/raytheon/uf/common/monitor/scan/ScanUtils.java
-r--r--r-- 1 dmsys dmtool 21327 Apr 18 12:03 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/FFMPURIFilter.java
-rw-r--r-- 1 dmsys dmtool 65837 May 7 10:47 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFMPProcessor.java
-rw-r--r-- 1 dmsys dmtool 36591 May 10 11:41 edexOsgi/com.raytheon.uf.edex.plugin.ffmp/src/com/raytheon/uf/edex/plugin/ffmp/common/FFTI.java

5
before_purge.out Normal file
View file

@ -0,0 +1,5 @@
-rw-r--r-- 1 dmsys dmtool 23911 May 10 11:41 edexOsgi/com.raytheon.edex.plugin.grib/src/com/raytheon/edex/plugin/grib/spatial/GribSpatialCache.java
-r--r--r-- 1 dmsys dmtool 2000 Jun 15 2011 edexOsgi/com.raytheon.edex.plugin.text/src/com/raytheon/edex/plugin/text/dao/TextDao.java
-rw-r--r-- 1 dmsys dmtool 9022 May 10 11:41 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeDao.java
-rw-r--r-- 1 dmsys dmtool 9090 May 10 11:41 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeJob.java
-rw-r--r-- 1 dmsys dmtool 15020 May 10 11:41 edexOsgi/com.raytheon.uf.edex.purgesrv/src/com/raytheon/uf/edex/purgesrv/PurgeManager.java

View file

@ -30,7 +30,7 @@
<default-memory-setting>
<default-max-memory>
<jvm-arg>Xmx</jvm-arg>
<value>1024M</value>
<value>1280M</value>
</default-max-memory>
<default-max-perm>

View file

@ -270,10 +270,6 @@
<param name="feature"
value="com.raytheon.uf.viz.thinclient.feature" />
</antcall>
<antcall target="p2.build.repo">
<param name="feature"
value="com.raytheon.uf.viz.npp.feature" />
</antcall>
<antcall target="cleanup.features" />
</target>
@ -409,4 +405,4 @@
<taskdef resource="net/sf/antcontrib/antlib.xml"
classpath="${basedir}/lib/ant/ant-contrib-1.0b3.jar" />
</project>
</project>

View file

@ -400,9 +400,9 @@ class _NetCDFFile:
# v = self._fh.variables['refTime']
# itime = int(v[records[0]])
import PointDataRetrieve
import ForecastPointDataRetrieve
self.Model = 'ETA'
pdc = PointDataRetrieve.retrieve('modelsounding', ident, PARAMETERS, refTime=refTime, constraint={'reportType':self.Model})
pdc = ForecastPointDataRetrieve.retrieve('modelsounding', ident, PARAMETERS, refTime=refTime, constraint={'reportType':self.Model})
self.NumData = min(self.MaxData, len(pdc.keys()))
keys = pdc.keys()
keys.sort()

View file

@ -698,7 +698,7 @@ class Server(object):
"""
Process the newly arrived profiler data
"""
import PointDataRetrieve, NoDataException
import RefTimePointDataRetrieve, NoDataException
PARAMETERS = ["profilerId", "validTime", "numProfLvls", "height",
"uComponent", "vComponent", "uvQualityCode"]
site = AvnParser.getTafSiteCfg(ident)
@ -706,9 +706,9 @@ class Server(object):
if len(profilerList) > 0:
for profilerName in profilerList:
try :
pdc = PointDataRetrieve.retrieve('profiler', None, PARAMETERS,
pdc = RefTimePointDataRetrieve.retrieve('profiler', None, PARAMETERS,
keyId='validTime', constraint={'profilerId':profilerName},
forecast=False, maxSize=1)
maxSize=1)
except NoDataException.NoDataException:
_Logger.info("Error reading profiler " + profilerName)
profilerList.remove(profilerName)

View file

@ -19,8 +19,9 @@
##
import Avn, MetarDecoder
import PointDataRetrieve
import NoDataException
import HoursRefTimePointDataRetrieve
#
# Retrieves metar data through pointdata interfaces
@ -31,6 +32,7 @@ import NoDataException
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 08/26/09 njensen Initial Creation.
# 26APR2012 14688 rferrel Use HoursRefTimePointDataRetrieve.
#
#
#
@ -41,7 +43,7 @@ def retrieve(siteID, size=1):
if type(siteID) is str:
siteID = [siteID]
try :
pdc = PointDataRetrieve.retrieve('obs', siteID[0], PARAMETERS, keyId='timeObs', maxSize=size, forecast=False)
pdc = HoursRefTimePointDataRetrieve.retrieve('obs', siteID[0], PARAMETERS, keyId='timeObs', maxSize=size)
except NoDataException.NoDataException:
raise NoDataException.NoDataException('No METAR data available for site %s' % siteID[0])
decoder = MetarDecoder.Decoder()

View file

@ -181,7 +181,7 @@ _TokList = [
('autocor', r'AUTO|COR|RTD'),
('wind', r'(VRB|\d{3})\d{2,3}(G\d{2,3})?(KT|MPS)'),
('wind_vrb', r'\d{3}V\d{3}'),
('vsby', r'(M\d/\d|%s|\d{1,4})SM|0|50|\d{3,4}[NEWS]{0,2}' % _Fract),
('vsby', r'(M\d/\d|%s|\d{1,3})SM|\d{1,4}[NEWS]{0,2}' % _Fract),
('rvr', r'R\w+/[MP]?\d{3,4}(V?P?\d{4})?(FT)?'),
('funnel', r'[+]?FC'),
('pcp', r'%s|TS(\s+%s)?' % (_pcptok, _pcptok)),

View file

@ -624,9 +624,9 @@ class _NetCDFFile:
# if recno is None:
# return None
import PointDataRetrieve
import ForecastPointDataRetrieve
# print 'makeData: ident (%s), selfModel(%s) refTime(%s):' % (ident, self.Model, refTime)
pdc = PointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS, refTime=refTime)
pdc = ForecastPointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS, refTime=refTime)
self.NumData = min(self.NumData, len(pdc.keys()))
self.issuetime = pdc.refTime.getTime() / 1000
fcstHrList = pdc.keys()
@ -655,8 +655,8 @@ class _NetCDFFile:
return result
def makeReport(self, ident):
import PointDataRetrieve
pdc = PointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS)
import ForecastPointDataRetrieve
pdc = ForecastPointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS)
self.NumData = min(self.NumData, len(pdc.keys()))
self.issuetime = pdc.refTime.getTime() / 1000
fcstHrList = pdc.keys()
@ -1064,8 +1064,8 @@ class _GfsLampNetCDFFile(_NetCDFFile):
return g
def makeReport(self, ident):
import PointDataRetrieve
pdc = PointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS)
import ForecastPointDataRetrieve
pdc = ForecastPointDataRetrieve.retrieve('bufrmos' + self.Model, ident, PARAMETERS)
self.NumData = min(self.NumData, len(pdc.keys()))
self.issuetime = pdc.refTime.getTime() / 1000
fcstHrList = pdc.keys()

View file

@ -20,7 +20,7 @@
import logging
import Avn
import PointDataRetrieve, NoDataException
import ForecastPointDataRetrieve, NoDataException
#
# Retrieves mos lightning data through pointdata interfaces
@ -31,6 +31,7 @@ import PointDataRetrieve, NoDataException
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 09/15/09 njensen Initial Creation.
# 26APR2012 14688 rferrel Use ForecastPointDataRetrieve.
#
#
#
@ -43,7 +44,7 @@ _Logger = logging.getLogger(Avn.CATEGORY)
def retrieve(siteID):
try:
pdc = PointDataRetrieve.retrieve('bufrmosLAMP', siteID, PARAMETERS)
pdc = ForecastPointDataRetrieve.retrieve('bufrmosLAMP', siteID, PARAMETERS)
except NoDataException.NoDataException:
return None
pots = []

View file

@ -1,231 +0,0 @@
<!-- ====================
This is a colormap file that is read via JAXB to marshal the ColorMap class.
======================-->
<colorMap>
<color r="0.0" g="0.0" b="0.0" a="0.0"/>
<!-- 0.0 -->
<color r="0.0" g="1.0" b="1.0" a="1.0"/>
<color r="0.0" g="1.0" b="1.0" a="1.0"/>
<!-- 0.1 -->
<color r="0.0" g="0.7490196" b="1.0" a="1.0"/>
<color r="0.0" g="0.7490196" b="1.0" a="1.0"/>
<color r="0.0" g="0.7490196" b="1.0" a="1.0"/>
<!-- 0.2 -->
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<color r="0.0" g="0.0" b="1.0" a="1.0"/>
<!-- 0.4 -->
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<color r="0.0" g="1.0" b="0.0" a="1.0"/>
<!-- 0.6 -->
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.79215687" b="0.0" a="1.0"/>
<!-- 0.8 -->
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<color r="0.0" g="0.39215687" b="0.0" a="1.0"/>
<!-- 1.0 -->
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<color r="1.0" g="1.0" b="0.0" a="1.0"/>
<!-- 1.25 -->
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<color r="1.0" g="0.64705884" b="0.0" a="1.0"/>
<!-- 1.5 -->
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<color r="1.0" g="0.54901963" b="0.0" a="1.0"/>
<!-- 1.75 -->
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<color r="1.0" g="0.0" b="0.0" a="1.0"/>
<!-- 2.0 -->
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<color r="0.69803923" g="0.13333334" b="0.13333334" a="1.0"/>
<!-- 3.0 -->
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<color r="0.64705884" g="0.19607843" b="0.19607843" a="1.0"/>
<!-- 4.0 -->
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<color r="1.0" g="0.078431375" b="0.5764706" a="1.0"/>
<!-- 5.0 -->
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<color r="0.5176471" g="0.4392157" b="1.0" a="1.0"/>
<!-- 6.0 -->
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
<color r="1.0" g="1.0" b="1.0" a="1.0"/>
</colorMap>
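The deleted file's header comment says the XML is read via JAXB into the ColorMap class. A hedged sketch of how such a format could map onto a JAXB binding (the class and field names below are assumptions for illustration, not Raytheon's actual ColorMap):

import java.io.File;
import java.util.List;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

// Minimal JAXB binding sketch for the <colorMap> format above; the
// real com.raytheon ColorMap class is more elaborate than this.
@XmlRootElement(name = "colorMap")
class ColorMapSketch {

    static class ColorEntry {
        @XmlAttribute
        float r;
        @XmlAttribute
        float g;
        @XmlAttribute
        float b;
        @XmlAttribute
        float a;
    }

    @XmlElement(name = "color")
    List<ColorEntry> colors;

    public static void main(String[] args) throws JAXBException {
        // Unmarshal a colormap file named on the command line.
        ColorMapSketch map = (ColorMapSketch) JAXBContext
                .newInstance(ColorMapSketch.class).createUnmarshaller()
                .unmarshal(new File(args[0]));
        System.out.println(map.colors.size() + " color entries");
    }
}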

View file

@ -93,7 +93,7 @@
</TableColumn>
<TableColumn>
<ColumnName>GUID</ColumnName>
<ReverseFilter>false</ReverseFilter>
<ReverseFilter>true</ReverseFilter>
<Filter>6.0</Filter>
<Low>1.0</Low>
<Mid>3.0</Mid>

View file

@ -1599,6 +1599,9 @@ Png_legendFormat_LT_end = "%b %d %I:%M %p %Z" # ifpIMAGE only
Scripts = [
"Send Grids to NDFD..:" +
"sendGridsToNDFD.sh {site} &",
"Send Point and Click Grids to Consolidated Web Farm..:" +
"/awips2/GFESuite/bin/rsyncGridsToCWF_client.sh {site} &",
"Png Images...:" +
"ifpIMAGE " +\

View file

@ -525,7 +525,7 @@ IF YOU BECOME CAUGHT IN A RIP CURRENT...DO NOT PANIC. REMAIN CALM AND BEGIN TO S
def ctaSEW(self):
return [
"""A HAZARDOUS SEAS WARNING MEANS HAZARDOUS SEA CONDITIONS ARE IMMINENT OR OCCURING. RECREATIONAL BOATERS SHOULD REMAIN IN PORT...OR TAKE SHELTER UNTIL WAVES SUBSIDE. COMMERCIAL VESSELS SHOULD PREPARE FOR ROUGH SEAS<85>AND CONSIDER REMAINING IN PORT OR TAKING SHELTER IN PORT UNTIL HAZARDOUS SEAS SUBSIDE.""",
"""A HAZARDOUS SEAS WARNING MEANS HAZARDOUS SEA CONDITIONS ARE IMMINENT OR OCCURING. RECREATIONAL BOATERS SHOULD REMAIN IN PORT...OR TAKE SHELTER UNTIL WAVES SUBSIDE. COMMERCIAL VESSELS SHOULD PREPARE FOR ROUGH SEAS AND CONSIDER REMAINING IN PORT OR TAKING SHELTER IN PORT UNTIL HAZARDOUS SEAS SUBSIDE.""",
]
def ctaSIY(self):

View file

@ -793,7 +793,7 @@ class HazardUtils(SmartScript.SmartScript):
def _lockHazards(self):
"Flag the hazards parm as being edited. Return the hazards parm and its grid."
hazParm = self.getParm(MODEL, ELEMENT, LEVEL)
startAbsTime = current()
startAbsTime = AbsTime(int(current().unixTime() /3600)*3600)
endAbsTime = startAbsTime + LOCK_HOURS() * HOUR_SECONDS()
timeRange = TimeRange(startAbsTime, endAbsTime)
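The replacement line floors the current Unix time to the top of the hour before building the lock time range, so locks taken within the same hour share the same boundaries. The arithmetic, sketched in Java with plain epoch seconds standing in for the GFE AbsTime/TimeRange classes:

// Sketch of the hour-flooring arithmetic used for the hazards lock range.
public class HourFloorSketch {
    public static void main(String[] args) {
        long now = System.currentTimeMillis() / 1000L; // Unix seconds
        long startOfHour = (now / 3600L) * 3600L;      // integer division floors to the hour
        System.out.println("now=" + now + " floored=" + startOfHour);
    }
}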

View file

@ -169,7 +169,8 @@ def buildWidgetList(pythonWidgetList):
for widget in pythonWidgetList:
res = None
res = 1.0 # Default resolution
prec = 3 # Default precision
valueList = []
# unpack the tuple
@ -179,22 +180,17 @@ def buildWidgetList(pythonWidgetList):
name,defaultValue,entType,valueList = widget
if len(widget) == 5:
name,defaultValue,entType,valueList,res = widget
if len(widget) == 6:
name,defaultValue,entType,valueList,res,prec = widget
# Handle possibility of (label, variable) tuple
if type(name) is types.TupleType:
desc = name[0]
else:
desc = name
w = FieldDefinition()
w.setName(JUtil.pyValToJavaObj(name))
w.setDescription(desc)
w.setType(FieldType.convertPythonType(entType))
w.setDefaultValue(JUtil.pyValToJavaObj(defaultValue))
w.setValueList(JUtil.pyValToJavaObj(valueList))
if res is not None:
w.setResolution(float(res))
w = FieldDefinition(JUtil.pyValToJavaObj(name),desc,FieldType.convertPythonType(entType),
JUtil.pyValToJavaObj(defaultValue),JUtil.pyValToJavaObj(valueList),
float(res),int(prec))
widgetList.add(w)

View file

@ -24,6 +24,7 @@ import loadConfig
from operator import attrgetter
from com.raytheon.uf.common.time import DataTime
from com.raytheon.uf.viz.core import RGBColors
from com.raytheon.viz.gfe.core.parm import ParmDisplayAttributes_VisMode as VisMode
class PngWriter:
def __init__(self, conf="testIFPImage", userName="", baseTime=None,
@ -47,6 +48,7 @@ class PngWriter:
self.pgons = None
self.imgParm = None
self.ipn = self.getConfig('Png_image', '')
print "ipn:",self.ipn
# user named time range specified?
if usrTimeRange is not None:
@ -79,51 +81,11 @@ class PngWriter:
rval.append(tparm)
return rval
def getVisuals(self, parms, time):
rval = []
for p in parms:
image = AFPS.ParmDspAttr.GRAPHIC
if p.parmID().compositeNameUI() == self.ipn:
image = AFPS.ParmDspAttr.IMAGE
self.imgParm = p
for vt in p.dspAttr().visualizationType(
AFPS.ParmDspAttr.SPATIAL, image):
v = self._createVisual(vt, p, time)
if v is not None:
rval.append(v)
return rval
def adjustAspect(self, width, height, wd):
# Calculate the correct aspect ratio and adjust the width
# and height to fit.
if width is None and height is None:
width = 400 # The default
if width is not None:
height = int((float(width) * wd.extent().y) / wd.extent().x)
else:
width = int((float(height) * wd.extent().x) / wd.extent().y)
return width, height
def getBG(self):
bgColor = self.getConfig('bgColor', "black")
trans = self.getConfig('Png_transBG', 0, int)
return bgColor, trans
def paintBorder(self, i1):
width, height = i1.size()
# paint a border
self.white = i1.colorAllocate((255, 255, 255))
black = i1.colorAllocate((70, 70, 70))
white = i1.colorAllocate((220, 220, 220))
i1.rectangle((0, 0), (width - 1, height - 1), black)
i1.rectangle((1, 1), (width - 2, height - 2), black)
i1.line((0, 0), (width - 1, 0), white)
i1.line((1, 1), (width - 2, 1), white)
i1.line((0, 0), (0, height - 1), white)
i1.line((1, 1), (1, height - 2), white)
def getFileName(self, dir, setime):
# calculate output filename, baseTime is AbsTime
baseTimeFormat = self.getConfig('Png_baseTimeFormat', "%Y%m%d_%H%M")
@ -137,97 +99,10 @@ class PngWriter:
fname = dir + "/" + prefix + timeString
return fname
def writePng(self, dir, setime, visualInfo, i1):
fname = self.getFileName(dir, setime) + '.png'
if len(visualInfo) > 0:
i1.writePng(fname)
cbv = None
def getFileType(self):
ext = self.getConfig('Png_fileType', 'png')
return ext
def paintVisuals(self, visuals, setime, dir, wd, maskBasedOnHistory,
width=None, height=None):
fexten, ftype = self.getFileType()
omitColorBar = self.getConfig('Png_omitColorBar', 0, int)
width, height = self.adjustAspect(width, height, wd)
if self.imgParm is not None and not omitColorBar:
height += 25 # for colorbar
sd = AFPS.CD2Dint(AFPS.CC2Dint(0, 0), AFPS.CC2Dint(width, height))
cbv = Graphics.SEColorBarVisual(self.dbss.msgHandler(),
AFPS.GridID_default(), 0)
mapping = Graphics.Mapping_spatial(sd, wd)
bgColor, trans = self.getBG()
fname = self.getFileName(dir, setime) + '.' + fexten
LogStream.logEvent("painting:", setime, "fname:", fname)
canvas = Graphics.FileCanvas_newCanvas(mapping, fname,
ftype, bgColor, trans)
canvas.reg(visuals)
refMgr = self.dbss.dataManager().referenceSetMgr()
visualInfo = []
for v in visuals:
if hasattr(v, "parm"):
parm = v.parm()
gridid = AFPS.GridID(parm, setime)
griddata = gridid.grid()
if griddata is None:
continue
# set up attributes for painting
AFPS.SETimeChangedMsg_send_mh(self.dbss.msgHandler(), setime)
AFPS.GridVisibilityChangedMsg_send_mh(self.dbss.msgHandler(),
gridid, 1, 0)
if self.imgParm is not None and self.imgParm == v.parm():
AFPS.DisplayTypeChangedMsg_send_mh(self.dbss.msgHandler(),
AFPS.ParmDspAttr.SPATIAL, gridid,
AFPS.ParmDspAttr.IMAGE)
if v.visualType().type() == AFPS.VisualType.IMAGE:
info = (parm.parmID(), griddata.gridTime().startTime(),
griddata.gridTime().endTime(),
'NoColor', 1)
else:
info = (parm.parmID(), griddata.gridTime().startTime(),
griddata.gridTime().endTime(),
parm.dspAttr().baseColor(), 0)
# fit to data special cases
if v.visualType().type() == AFPS.VisualType.IMAGE:
alg = self.getConfig(parm.parmID().compositeNameUI()
+ '_fitToDataColorTable', None)
if alg is not None:
if alg == 'Single Grid over Area':
ct = parm.dspAttr().colorTable()
refarea = refMgr.activeRefSet()
ct.fitToData_gridarea(gridid, refarea)
elif alg == 'Single Grid':
ct = parm.dspAttr().colorTable()
ct.fitToData_grid(gridid)
visualInfo.append(info)
# special masking based on Grid Data History
if maskBasedOnHistory:
parm = v.parm()
bits = refMgr.siteGridpoints(griddata.historySites(), 1)
parm.dspAttr().setDisplayMask_grid2dbit(bits)
canvas.paint(mapping.domain(), 1)
canvas.unreg(visuals)
self.writeInfo(dir, setime, visualInfo)
if ftype != Graphics.FileCanvas.PNG:
canvas.close()
else:
i1 = canvas.getImage()
if not omitColorBar:
self.paintColorBar(width, bgColor, trans, setime, cbv, i1)
newI = self.paintLogo(i1)
self.paintBorder(newI)
self.writePng(dir, setime, visualInfo, newI)
def writeInfo(self, dir, setime, visualInfo):
if len(visualInfo) > 0:
fname = self.getFileName(dir, setime) + ".info"
@ -339,8 +214,8 @@ class PngWriter:
#mmgr = self.dm.mapMgr()
mv = []
mids = []
height = self.getConfig('Png_height', 400.0, float)
width = self.getConfig('Png_width', 400.0, float)
height = self.getConfig('Png_height', None, int)
width = self.getConfig('Png_width', None, int)
localFlag = self.getConfig('Png_localTime', 0, int)
snapshotTime = self.getConfig('Png_snapshotTime', 0, int)
useLegend = self.getConfig('Png_legend', 1, int)
@ -419,13 +294,18 @@ class PngWriter:
overrideColors = {}
for p in prms:
pname = p.getParmID().compositeNameUI()
if pname == self.ipn:
overrideColors[pname] = "White"
color = self.getConfig(pname + "_Legend_color", None)
if color:
overrideColors[pname] = color
lang = self.getConfig('Png_legendLanguage', '');
viz.setupLegend(localTime, snapshotTime, snapshotFmt, descName, durFmt, startFmt, endFmt, overrideColors, lang)
bgColor = self.getConfig('bgColor', None)
#TODO handle transparent background
bgColor, trans = self.getBG()
if not omitColorbar:
viz.enableColorbar()
@ -444,29 +324,31 @@ class PngWriter:
fitToDataAlg = None
for p in prms:
pname = p.getParmID().compositeNameUI()
if pname == self.ipn:
colormap = self.getConfig(pname + '_defaultColorTable', None)
colorMax = self.getConfig(pname + '_maxColorTableValue', None, float)
colorMin = self.getConfig(pname + '_minColorTableValue', None, float)
viz.addImageResource(pname, colormap=colormap, colorMin=colorMin, colorMax=colorMax, smooth=smooth)
fitToDataAlg = self.getConfig(pname + '_fitToDataColorTable', None)
if fitToDataAlg is not None:
from com.raytheon.viz.gfe.rsc.colorbar import FitToData
fit = FitToData(self.dm, p)
if fitToDataAlg == 'All Grids':
fit.fitToData()
fitToDataAlg = None
elif fitToDataAlg == 'All Grids over Area':
fit.fitToData(self.dm.getRefManager().getActiveRefSet())
fitToDataAlg = None
colormap = self.getConfig(pname + '_defaultColorTable', None)
colorMax = self.getConfig(pname + '_maxColorTableValue', None, float)
colorMin = self.getConfig(pname + '_minColorTableValue', None, float)
color = self.getConfig(pname + '_graphicColor', None)
lineWidth = self.getConfig(pname + '_lineWidth', None, int)
viz.addGfeResource(pname, colormap=colormap, colorMin=colorMin, colorMax=colorMax, \
smooth=smooth, color=color, lineWidth=lineWidth)
fitToDataAlg = self.getConfig(pname + '_fitToDataColorTable', None)
if fitToDataAlg is not None:
from com.raytheon.viz.gfe.rsc.colorbar import FitToData
fit = FitToData(self.dm, p)
if fitToDataAlg == 'All Grids':
fit.fitToData()
fitToDataAlg = None
elif fitToDataAlg == 'All Grids over Area':
fit.fitToData(self.dm.getRefManager().getActiveRefSet())
fitToDataAlg = None
if pname == self.ipn:
print "setting",pname,"to IMAGE"
p.getDisplayAttributes().setVisMode(VisMode.IMAGE)
else:
graphicParms.append(pname)
for gp in graphicParms:
color = self.getConfig(gp + '_graphicColor', None)
lineWidth = self.getConfig(gp + '_lineWidth', None, int)
viz.addGraphicResource(gp, color=color, lineWidth=lineWidth)
print "setting",pname,"to GRAPHIC"
p.getDisplayAttributes().setVisMode(VisMode.GRAPHIC)
self.initSamples()
# paint once to get map retrieval started
@ -475,26 +357,35 @@ class PngWriter:
for t in times:
paintTime = t
if paintTime and self.overlapsWithGrids(prms, paintTime):
if paintTime and self.overlapsWithGrids(prms, paintTime):
self.dm.getSpatialDisplayManager().setSpatialEditorTime(paintTime.javaDate())
if fitToDataAlg:
from com.raytheon.viz.gfe.edittool import GridID
gridid = GridID(self.dm.getSpatialDisplayManager().getActivatedParm(), )
if fitToDataAlg == 'Single Grid':
fit.fitToData(gridid)
elif fitToDataAlg == 'Single Grid over Area':
fit.fitToData(gridid, self.dm.getRefManager().getActiveRefSet())
viz.paint(paintTime, backgroundColor=bgColor)
fname = self.getFileName(dir, t) + '.' + fexten
viz.outputFiles(fname, showLogo, logoString)
visualInfo = []
for p in prms:
griddata = p.overlappingGrid(paintTime.javaDate())
if griddata is not None:
info = (p.getParmID().toString(), AbsTime.AbsTime(griddata.getGridTime().getStart()),
AbsTime.AbsTime(griddata.getGridTime().getEnd()),
RGBColors.getColorName(p.getDisplayAttributes().getBaseColor()), p.getDisplayAttributes().getVisMode().toString() == 'Image')
visualInfo.append(info)
if griddata is None:
continue
# fit to data special cases
if p.getDisplayAttributes().getVisMode().toString() == 'Image':
fitToDataAlg = self.getConfig(p.getParmID().compositeNameUI() + '_fitToDataColorTable', None)
if fitToDataAlg:
from com.raytheon.viz.gfe.rsc.colorbar import FitToData
fit = FitToData(self.dm, p)
from com.raytheon.viz.gfe.edittool import GridID
gridid = GridID(p, paintTime.javaDate())
if fitToDataAlg == 'Single Grid':
fit.fitToData(gridid)
elif fitToDataAlg == 'Single Grid over Area':
fit.fitToData(gridid, self.dm.getRefManager().getActiveRefSet())
info = (p.getParmID().toString(), AbsTime.AbsTime(griddata.getGridTime().getStart()),
AbsTime.AbsTime(griddata.getGridTime().getEnd()),
RGBColors.getColorName(p.getDisplayAttributes().getBaseColor()), p.getDisplayAttributes().getVisMode().toString() == 'Image')
visualInfo.append(info)
viz.paint(paintTime, backgroundColor=bgColor)
fname = self.getFileName(dir, t) + '.' + fexten
viz.outputFiles(fname, showLogo, logoString)
self.writeInfo(dir, paintTime, visualInfo)
else:
LogStream.logEvent("No grids to generate for ", `t`)
@ -521,43 +412,6 @@ class PngWriter:
else:
return 0
def toscreen(self, point, wd, sd):
x = int(((float(point.x) - wd.origin().x) * float(sd[0])) \
/ wd.extent().x)
y = int(((float(point.y) - wd.origin().y) * float(sd[1])) \
/ float(wd.extent().y))
return (x, sd[1] - y)
def toworld(self, point, wd, sd):
x = (float(point[0]) * wd.extent().x) / float(sd[0])
y = (float(point[1]) * wd.extent().y) / float(sd[1])
return (wd.origin().x + x, wd.origin().y + y)
def _createVisual(self, vt, parm, gridTime):
visType = vt.type()
if visType == AFPS.VisualType.IMAGE:
return Graphics.ImageVisual(parm, gridTime)
elif visType == AFPS.VisualType.CONTOUR:
return Graphics.ContourVisual(parm, gridTime)
elif visType == AFPS.VisualType.WIND_BARB:
size = self.getConfig('WindBarbDefaultSize', 60, int)
size = self.getConfig(parm.parmID().compositeNameUI()
+ '_windBarbDefaultSize', size, int)
return Graphics.WindBarbGridVisual(parm, gridTime, size)
elif visType == AFPS.VisualType.WIND_ARROW:
size = self.getConfig('WindArrowDefaultSize', 60, int)
size = self.getConfig(parm.parmID().compositeNameUI()
+ '_windArrowDefaultSize', size, int)
return Graphics.WindArrowGridVisual(parm, gridTime, size)
elif visType == AFPS.VisualType.BOUNDED_AREA:
return Graphics.BoundedAreaVisual(parm, gridTime)
else:
LogStream.logBug("PngWriter._createVisual() : ",
"Unknown visual type : ", vt)
return None
def usage():
msg = """
usage: ifpIMAGE [-c config] [-u username] [-h host] [-p port] -o directory

View file

@ -1,6 +1,8 @@
ENS_COMPONENT_WEIGHTS={NAM}
ENS_COMPONENT_WEIGHTS={gfs}
GLEVEL=500
GVCORD=pres
SKIP=
FILTER=y
SCALE=-1
GDPFUN=ens_savg(hght)
TYPE=c

View file

@ -1,6 +1,8 @@
ENS_COMPONENT_WEIGHTS={NAM}
ENS_COMPONENT_WEIGHTS={gfs}
GLEVEL=500
GVCORD=pres
SKIP=
FILTER=y
SCALE=0
GDPFUN=ens_savg(tmpc)
TYPE=c

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=1
elevationNumber=1
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=20
elevationNumber=1
elevationNumber=1
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=2
elevationNumber=2
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=20
elevationNumber=2
elevationNumber=2
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=3
elevationNumber=3
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=20
elevationNumber=3
elevationNumber=3
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=4
elevationNumber=4
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=20
elevationNumber=4
elevationNumber=4
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=5
elevationNumber=5
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=19
elevationNumber=6
elevationNumber=6
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,4 +3,7 @@ colorBar=@dfltRadarColorBar.xml
! 4km CREF
productCode=38
! check elevation
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,4 +3,7 @@ colorBar=@dfltRadarColorBar.xml
! 1km COMP REFL
productCode=37
! check elevation
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,4 +3,7 @@ colorBar=@dfltRadarColorBar.xml
! 4km COMP REFL
productCode=36
! check elevation
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,4 +3,7 @@ colorMapName=lingray
colorBar=@dfltRadarColorBar.xml
! 1km Digital Hybrid Scan Refl.
productCode=32
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,3 +3,6 @@ colorBar=@dfltRadarColorBar.xml
! 1km Enhanced Echo Tops
productCode=135
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_pre
colorBar=@dfltRadarColorBar.xml
! 2km 1 hr precip
productCode=78
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_pre
colorBar=@dfltRadarColorBar.xml
! 2km total precip
productCode=80
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,3 +3,6 @@ colorBar=@dfltRadarColorBar.xml
productCode=101
prodName=PRCPC
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
! Radar Coded Message
productCode=74
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=1
elevationNumber=1
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=2
elevationNumber=2
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=3
elevationNumber=3
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=4
elevationNumber=4
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=5
elevationNumber=5
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=rad_srmvel
colorBar=@dfltRadarColorBar.xml
! 1km Storm Relative Motion
productCode=56
elevationNumber=6
elevationNumber=6
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,3 +3,6 @@ colorBar=@dfltRadarColorBar.xml
! Storm Track Information (Graphic)
productCode=58
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,4 +3,7 @@ colorBar=@dfltRadarColorBar.xml
! 1km Echo Tops
productCode=41
! check elevation
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
! .25km Radial Velocity
productCode=99
elevationNumber=1
elevationNumber=1
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=1
elevationNumber=1
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
! .25km Radial Velocity
productCode=99
elevationNumber=2
elevationNumber=2
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=2
elevationNumber=2
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
! .25km Radial Velocity
productCode=99
elevationNumber=3
elevationNumber=3
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=3
elevationNumber=3
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -2,4 +2,7 @@ colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
! .25km Radial Velocity
productCode=99
elevationNumber=4
elevationNumber=4
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=4
elevationNumber=4
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=5
elevationNumber=5
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=nids_vel16
colorBar=@dfltRadarColorBar.xml
productCode=27
elevationNumber=6
elevationNumber=6
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -3,3 +3,6 @@ colorBar=@dfltRadarColorBar.xml
! 1km DIGITAL VIL
productCode=134
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -1,4 +1,7 @@
colorMapName=osf_ref16
colorBar=@dfltRadarColorBar.xml
productCode=57
elevationNumber=0
elevationNumber=0
alpha=1.0
brightness=1.0
contrast=1.0

View file

@ -11,6 +11,8 @@
!
GLEVEL=4700:10000!700 !700 !850 !30:0
GVCORD=SGMA !PRES !PRES !PRES!pdly
SKIP=
FILTER=y
SCALE=0 !3 !3 !0
GDPFUN=relh !sm9s(omeg)!sm9s(omeg) !tmpc !tmpc
TYPE=f !c

View file

@ -9,6 +9,8 @@
!
GLEVEL=4400:10000!700 !700 !850 !9950
GVCORD=SGMA !PRES !PRES!PRES!SGMA
SKIP=
FILTER=y
SCALE=0 !3 !3 !0
GDPFUN=relh !omeg !omeg!tmpc!tmpc
TYPE=f !c

View file

@ -1,5 +1,7 @@
GLEVEL=500:1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=-1
GDPFUN=sm5s(ldf(hght))
TYPE=c

View file

@ -6,6 +6,8 @@
!
GLEVEL=500:1000!500:1000
GVCORD=pres!pres
SKIP=
FILTER=y
SCALE=-1 ! -1
GDPFUN=ldf(hght)!ldf(hght)
TYPE=c

View file

@ -7,6 +7,8 @@
!
GLEVEL=4400:10000
GVCORD=sgma
SKIP=
FILTER=y
SCALE=0
GDPFUN=relh ! relh
TYPE=c/f!c

View file

@ -6,6 +6,8 @@
!
GLEVEL=850:1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=-1
GDPFUN=sm9s(sub(hght@850,hght@1000))
TYPE=c

View file

@ -6,6 +6,8 @@
!
GLEVEL=850:1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=-1
GDPFUN=(sub(hght@850,hght@1000))
TYPE=c

View file

@ -6,6 +6,8 @@
!
GLEVEL=1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=5 !5 !-1 !0
GDPFUN=(avor(wnd))//v!v !sm9s(hght) !kntv(wnd)
TYPE=c/f !c !c !b

View file

@ -7,6 +7,8 @@
!
GLEVEL=1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=0 !0 !0 !-1
GDPFUN=tmpc !tmpc !tmpc !hght !kntv(wnd)
TYPE=c/f !c !c !c !b

View file

@ -6,6 +6,8 @@
!
GLEVEL=1000
GVCORD=pres
SKIP=
FILTER=y
SCALE=0!0
GDPFUN=mag(kntv(wnd))!kntv(wnd)
TYPE=c/f!s

View file

@ -6,6 +6,8 @@
!
GLEVEL=100
GVCORD=pres
SKIP=
FILTER=y
SCALE=0 !-1
GDPFUN=knts((mag(wnd))) !sm9s(hght)!kntv(wnd)
TYPE=c/f !c !b

View file

@ -6,7 +6,9 @@
!
GLEVEL=150
GVCORD=pres
SCALE=0 !-1
SKIP=
FILTER=y
SCALE=3 !-1
GDPFUN=knts((mag(wnd))) !sm9s(hght)!kntv(wnd)
TYPE=c/f !c !b
CINT=30;50;70;90;110;130;150 !12

View file

@ -6,6 +6,8 @@
!
GLEVEL=200
GVCORD=pres
SKIP=
FILTER=
SCALE=5 !5 !-1
GDPFUN=abs(avor(wnd)) !abs(avor(wnd)) !kntv(wnd)
TYPE=c/f !c !b

View file

@ -6,6 +6,8 @@
!
GLEVEL=200
GVCORD=PRES
SKIP=
FILTER=y
SCALE=0!0!5!5!-1
GDPFUN=mag(kntv(wnd))!mag(kntv(wnd))!div(wnd)!div(wnd)!sm5s(hght)
TYPE=c!c/f!c/f!c!c

View file

@ -9,6 +9,8 @@
!
GLEVEL=200
GVCORD=pres
SKIP=2/2
FILTER=1.0
SCALE=0 !-1
GDPFUN=knts((mag(wnd))) !sm5s(hght)!kntv(wnd)
TYPE=c/f !c !b

View file

@ -7,6 +7,8 @@
!
GLEVEL=200
GVCORD=pres
SKIP=
FILTER=y
SCALE=0 ! -1
GDPFUN=knts((mag(wnd))) !(hght) ! kntv(wnd)
TYPE=c/f !c !b

View file

@ -7,6 +7,8 @@
!
GLEVEL=0 !0 !200 !200 !200 !0
GVCORD=none !none !PRES!pres!pres!none
SKIP=
FILTER=y
SCALE=0 !0 !5 !5 !-1
GDPFUN=quo(pwtr;25.4)//pw!pw !avor(obs)!avor(obs)!sm5s(hght)!kntv(wnd@850%PRES)
TYPE=c !c/f !c !c ! c !b

View file

@ -7,6 +7,8 @@
!
GLEVEL=200
GVCORD=pres
SKIP=
FILTER=y
SCALE=0 ! -1
GDPFUN=knts((mag(wnd))) ! kntv(wnd)
TYPE=c/f ! b

View file

@ -8,6 +8,8 @@
!
GLEVEL=200
GVCORD=pres
SKIP=
FILTER=y
SCALE=0!0!5!5
GDPFUN=mag(kntv(wnd))!mag(kntv(wnd))!div(wnd)!div(wnd) ! obs
TYPE=c!c/f!c/f!c ! b

View file

@ -7,6 +7,8 @@
!
GLEVEL=200
GVCORD=PRES
SKIP=
FILTER=y
SCALE=0 !0
GDPFUN=knts(mag(wnd)) !kntv(wnd)
TYPE=c/f !b

View file

@ -7,6 +7,8 @@
!
GLEVEL=250
GVCORD=PRES
SKIP=
FILTER=y
SCALE=0!0!5/0!5/0!-1 ! 0
GDPFUN=mag(kntv(wnd))//jet!jet!div(wnd)//dvg!dvg!sm5s(hght) ! age(hght)
TYPE=c!c/f!c/f!c!c ! a

View file

@ -7,6 +7,8 @@
!
GLEVEL=250
GVCORD=PRES
SKIP=
FILTER=y
SCALE=0!0!5/0!5/0!-1 ! 0
GDPFUN=mag(kntv(wnd))!mag(kntv(wnd))!div(wnd)!div(wnd)!sm5s(hght) ! age(hght)
TYPE=c!c/f!c/f!c!c ! a

View file

@ -7,6 +7,8 @@
!
GLEVEL=250
GVCORD=pres
SKIP=
FILTER=y
SCALE=5 !5 !-1
GDPFUN=abs(avor(wnd)) !abs(avor(wnd)) !hght
TYPE=c/f !c !c

View file

@ -9,6 +9,8 @@
!
GLEVEL=250
GVCORD=pres
SKIP=2/2
FILTER=1.0
SCALE=0 !-1
GDPFUN=knts((mag(wnd))) !sm5s(hght) !kntv(wnd)
TYPE=c/f !c !b

View file

@ -6,6 +6,8 @@
!
GLEVEL=250
GVCORD=pres
SKIP=
FILTER=y
SCALE=0 ! -1
GDPFUN=knts((mag(wnd))) !(hght) ! kntv(wnd)
TYPE=c !c !b

View file

@ -6,6 +6,8 @@
!
GLEVEL=250
GVCORD=pres
SKIP=
FILTER=y
SCALE=0!0
GDPFUN=mag(kntv(wnd))!kntv(wnd)
TYPE=c/f!s

View file

@ -7,6 +7,8 @@
!
GLEVEL=0 !0 !250 !250 !250 !0
GVCORD=none !none !PRES!pres!pres!none
SKIP=
FILTER=y
SCALE=0 !0 !5 !5 !-1
GDPFUN=quo(pwtr;25.4)//pw!pw !avor(obs)!avor(obs)!sm5s(hght)!kntv(wnd@850%PRES)
TYPE=c !c/f !c !c ! c !b

View file

@ -6,6 +6,8 @@
! J. Carr/HPC 2/99 Changed obs to kntv(wnd)
GLEVEL=250
GVCORD=pres
SKIP=
FILTER=y
SCALE=5
GDPFUN=(vor(wnd)) !(vor(wnd)) !kntv(wnd)
TYPE=c/f ! c ! b

View file

@ -7,6 +7,8 @@
!
GLEVEL=296
GVCORD=thta
SKIP=
FILTER=y
SCALE=3!3!0
GDPFUN=ADV(pres,obs)!ADV(pres,obs)!sm5s(pres)!obs
TYPE=c/f !c !c !b

View file

@ -8,6 +8,8 @@
!
GLEVEL=9950!300:200!300:200!0:9950!0:9950!0:9950!0 !300:200
GVCORD=SGMA!pres !pres !none !none !none !none!pres
SKIP=
FILTER=y
SCALE=0!6!6!0
GDPFUN=thte(pres@0%none;tmpc;dwpc)//te!pvor(thta,wnd)//pv!pv!te!te!te!sm5s(pmsl)!kntv(wnd@300%pres)
TYPE=c/f!c/f!c ! c ! c ! c ! c ! b

View file

@ -7,6 +7,8 @@
!
GLEVEL=300:700
GVCORD=pres
SKIP=
FILTER=y
SCALE=-1/0
GDPFUN=sm5s(ldf(hght))//ldfh!smul(4e9,qvec(ldfh,sm5v(vlav(geo))))
TYPE=c ! a

View file

@ -7,6 +7,8 @@
!
GLEVEL=300!300!300:700
GVCORD=pres
SKIP=
FILTER=y
SCALE=0!0!-1/0
GDPFUN=mag(kntv(wnd))//jet!jet!ldf(hght)//ldfh!smul(4e9,vasv(qvec(ldfh,sm5v(vlav(geo))),kcrs(thrm(hght)))
TYPE=c!c/f!c ! a

Some files were not shown because too many files have changed in this diff Show more