Merge "Issue #1230: Update pypies logging" into development

Former-commit-id: d96e963156 [formerly 938f1e1025] [formerly c10cad326e [formerly 5aaf39c6924905704e5288ecd56ab47be5db7944]]
Former-commit-id: c10cad326e
Former-commit-id: 83cbec74da
Nate Jensen 2012-10-01 13:28:34 -05:00 committed by Gerrit Code Review
commit 9beea59fb6
9 changed files with 222 additions and 67 deletions

View file

@@ -34,6 +34,7 @@
 import fcntl, time, os, logging
 from pypies import logger
+from pypies import timeMap
 
 MAX_TIME_TO_WAIT = 120 # seconds
@@ -52,6 +53,8 @@ def dirCheck(filename):
         os.close(fd)
 
 def getLock(filename, mode):
+    t0 = time.time()
     dirCheck(filename)
     gotLock = False
     startTime = time.time()
@@ -82,12 +85,25 @@ def getLock(filename, mode):
         if logger.isEnabledFor(logging.DEBUG):
             logger.debug(str(os.getpid()) + " failed to get lock")
         os.close(fd)
+    t1=time.time()
+    if timeMap.has_key('getLock'):
+        timeMap['getLock']+=t1-t0
+    else:
+        timeMap['getLock']=t1-t0
     return gotLock, fd
 
 def releaseLock(fd):
+    t0=time.time()
     fcntl.lockf(fd, fcntl.LOCK_UN)
     os.close(fd)
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug('Released lock on ' + str(fd))
+    t1=time.time()
+    if timeMap.has_key('releaseLock'):
+        timeMap['releaseLock']+=t1-t0
+    else:
+        timeMap['releaseLock']=t1-t0
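
Reviewer note: the add-or-initialize blocks repeated throughout this commit are the Python 2 spelling of a simple accumulator (dict.has_key was removed in Python 3). A minimal equivalent sketch using dict.get, not part of the commit:

import time

timeMap = {}
t0 = time.time()
# ... the work being timed ...
t1 = time.time()
# behaves identically to the has_key/else blocks above
timeMap['getLock'] = timeMap.get('getLock', 0.0) + (t1 - t0)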

View file

@@ -33,11 +33,12 @@
 import time, os, logging
 from pypies import logger
+from pypies import timeMap
 
 MAX_TIME_TO_WAIT = 120 # seconds
 ORPHAN_TIMEOUT = 150 # seconds
-MAX_SLEEP_TIME = 0.05
-MIN_SLEEP_TIME = 0.01
+MAX_SLEEP_TIME = 0.025
+MIN_SLEEP_TIME = 0.005
 
 readLockAppend = "_read"
 writeLockAppend = "_write"
@@ -52,6 +53,8 @@ def dirCheck(filename):
         raise e
 
 def getLock(filename, mode):
+    t0 = time.time()
     dirCheck(filename)
     gotLock, fpath = _getLockInternal(filename, mode)
     if gotLock:
@@ -60,6 +63,13 @@ def getLock(filename, mode):
     else:
         if logger.isEnabledFor(logging.DEBUG):
             logger.debug(str(os.getpid()) + " failed to get lock")
+    t1=time.time()
+    if timeMap.has_key('getLock'):
+        timeMap['getLock']+=t1-t0
+    else:
+        timeMap['getLock']=t1-t0
     return gotLock, fpath
@@ -159,10 +169,18 @@ def _getSleepTime(timeWaiting):
         sleepTime = MIN_SLEEP_TIME
     elif sleepTime > MAX_SLEEP_TIME:
         sleepTime = MAX_SLEEP_TIME
+    if timeMap.has_key('approxLockSleepTime'):
+        timeMap['approxLockSleepTime']+=sleepTime
+    else:
+        timeMap['approxLockSleepTime']=sleepTime
     return sleepTime
 
 def releaseLock(lockPath):
+    t0=time.time()
     if lockPath.endswith('.pid'):
         # it was a read
         os.remove(lockPath)
@@ -185,6 +203,12 @@ def releaseLock(lockPath):
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug('Released lock on ' + str(lockPath))
+    t1=time.time()
+    if timeMap.has_key('releaseLock'):
+        timeMap['releaseLock']+=t1-t0
+    else:
+        timeMap['releaseLock']=t1-t0
 
 def _checkForOrphans(filename):
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug('Checking for orphan locks on ' + filename)
@@ -233,6 +257,11 @@ def _checkForOrphans(filename):
             # 2 indicates no such directory, assuming another process removed it
             if e.errno != 2:
                 logger.error('Unable to remove orphaned lock: ' + str(e))
+    if timeMap.has_key('orphanCheck'):
+        timeMap['orphanCheck']+=(time.time() - nowTime)
+    else:
+        timeMap['orphanCheck']=(time.time() - nowTime)
     return orphanRemoved
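
Note that 'approxLockSleepTime' accumulates the sleep target computed by _getSleepTime, not the wall time actually slept, hence the "approx" in the key. A sketch of measuring the real sleep instead (hypothetical helper, assuming the same module-level timeMap):

import time

timeMap = {}

def timedSleep(seconds):
    # record what time.sleep actually took rather than what was requested
    t0 = time.time()
    time.sleep(seconds)
    timeMap['lockSleepTime'] = timeMap.get('lockSleepTime', 0.0) + (time.time() - t0)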

View file

@@ -46,6 +46,7 @@ def getLogger():
     return logger
 
 logger = getLogger()
+timeMap = {}
 
 def pypiesWrapper(request):

View file

@@ -42,6 +42,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.request import *
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *
 
 logger = pypies.logger
+timeMap = pypies.timeMap
 
 from pypies.impl import H5pyDataStore
 datastore = H5pyDataStore.H5pyDataStore()
@@ -61,8 +62,9 @@ datastoreMap = {
 
 @Request.application
 def pypies_response(request):
+    timeMap.clear()
     try:
-        t0 = time.time()
+        startTime = time.time()
         try:
             obj = dynamicserialize.deserialize(request.data)
         except:
@@ -71,6 +73,7 @@ def pypies_response(request):
             resp = ErrorResponse()
             resp.setError(msg)
             return __prepareResponse(resp)
+        timeMap['deserialize']=time.time()-startTime
 
         clz = obj.__class__
         if logger.isEnabledFor(logging.DEBUG):
@@ -90,11 +93,14 @@ def pypies_response(request):
             logger.error(msg)
             resp = ErrorResponse()
             resp.setError(msg)
+        startSerialize = time.time()
         httpResp = __prepareResponse(resp)
         if success:
-            t1 = time.time()
-            logger.info({'request':datastoreMap[clz][1], 'time':t1-t0, 'file':obj.getFilename()})
+            endTime = time.time()
+            timeMap['serialize'] = endTime - startSerialize
+            timeMap['total'] = endTime - startTime
+            logger.info({'request':datastoreMap[clz][1], 'time':timeMap, 'file':obj.getFilename()})
             #logger.info("pid=" + str(os.getpid()) + " " + datastoreMap[clz][1] + " on " + obj.getFilename() + " processed in " + ('%.3f' % (t1-t0)) + " seconds")
         return httpResp
     except:
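
The shape of the change above: timeMap is cleared at the top of each request, each phase writes its elapsed time into it, and the whole dict is logged under 'time' so the stats side can break a request down by section. A condensed sketch of that lifecycle (hypothetical names, not the real pypies server):

import time

timeMap = {}

def handle_request(data, deserialize, process, serialize):
    timeMap.clear()
    startTime = time.time()
    obj = deserialize(data)
    timeMap['deserialize'] = time.time() - startTime
    resp = process(obj)              # handlers add keys like 'store', 'read'
    startSerialize = time.time()
    payload = serialize(resp)
    endTime = time.time()
    timeMap['serialize'] = endTime - startSerialize
    timeMap['total'] = endTime - startTime
    return payload

Since timeMap is a module-level dict, this accounting is only coherent if each worker process handles one request at a time; concurrent requests in the same process would interleave their entries.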

View file

@@ -32,10 +32,11 @@
 #
 #
 
-import numpy, pypies, logging
+import numpy, pypies, logging, time
 from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage import *
 from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import *
 
 logger = pypies.logger
+timeMap = pypies.timeMap
 
 typeToClassMap = {
     numpy.int8: ByteDataRecord,
@@ -48,6 +49,8 @@ typeToClassMap = {
 }
 
 def createStorageRecord(rawData, ds):
+    t0=time.time()
+
     t = typeToClassMap[rawData.dtype.type]
     inst = t()
     name = ds.name
@@ -98,4 +101,10 @@ def createStorageRecord(rawData, ds):
         # TODO downscaled?
     inst.setProps(props)
 
+    t1=time.time()
+    if timeMap.has_key('createRecord'):
+        timeMap['createRecord']+=t1-t0
+    else:
+        timeMap['createRecord']=t1-t0
+
     return inst
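
For reference, typeToClassMap is keyed on numpy scalar types, and rawData.dtype.type is what recovers that key from an array. A self-contained sketch with stand-in record classes (the real ones come from dynamicserialize):

import numpy

class ByteDataRecord(object): pass
class IntDataRecord(object): pass   # hypothetical stand-in

typeToClassMap = {numpy.int8: ByteDataRecord, numpy.int32: IntDataRecord}

rawData = numpy.zeros(4, dtype=numpy.int32)
# dtype.type yields the scalar type object (numpy.int32), which keys the map
assert typeToClassMap[rawData.dtype.type] is IntDataRecord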

View file

@@ -46,6 +46,7 @@ from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import
 from dynamicserialize.dstypes.com.raytheon.uf.common.pypies.response import *
 
 logger = pypies.logger
+timeMap = pypies.timeMap
 
 vlen_str_type = h5py.new_vlen(str)
@@ -82,6 +83,7 @@ class H5pyDataStore(IDataStore.IDataStore):
             exc = []
             failRecs = []
             ss = None
+            t0=time.time()
             for r in recs:
                 try:
                     if r.getProps() and r.getProps().getDownscaled():
@@ -97,14 +99,18 @@ class H5pyDataStore(IDataStore.IDataStore):
                 status.setOperationPerformed(ss['op'])
                 if ss.has_key('index'):
                     status.setIndexOfAppend(ss['index'])
+            t1=time.time()
+            timeMap['store']=t1-t0
             resp = StoreResponse()
             resp.setStatus(status)
             resp.setExceptions(exc)
             resp.setFailedRecords(failRecs)
             return resp
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
@@ -318,7 +324,10 @@ class H5pyDataStore(IDataStore.IDataStore):
             grp = ds.parent
             grp.id.unlink(ds.name)
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
         return resp
@@ -330,8 +339,11 @@ class H5pyDataStore(IDataStore.IDataStore):
         try:
             group = request.getGroup()
             req = request.getRequest()
             if req:
+                t0=time.time()
                 grp = self.__getGroup(f, group)
+                t1=time.time()
+                timeMap['getGroup']=t1-t0
                 result = [self.__retrieveInternal(grp, request.getDataset(), req)]
             else:
                 result = self.__retrieve(f, group, request.getIncludeInterpolated())
@@ -339,8 +351,12 @@ class H5pyDataStore(IDataStore.IDataStore):
             resp.setRecords(result)
             return resp
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
 
     def __retrieve(self, f, group, includeInterpolated=False):
@@ -427,7 +443,10 @@ class H5pyDataStore(IDataStore.IDataStore):
             resp.setRecords(recs)
             return resp
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
 
     def getDatasets(self, request):
@@ -439,7 +458,10 @@ class H5pyDataStore(IDataStore.IDataStore):
             ds = grp.keys()
             return ds
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
 
     def deleteFiles(self, request):
@@ -492,7 +514,10 @@ class H5pyDataStore(IDataStore.IDataStore):
             resp = StoreResponse()
             return resp
         finally:
+            t0=time.time()
             f.close()
+            t1=time.time()
+            timeMap['closeFile']=t1-t0
             LockManager.releaseLock(lock)
 
     def __createDatasetInternal(self, group, datasetName, dtype, szDims,
@@ -566,10 +591,11 @@ class H5pyDataStore(IDataStore.IDataStore):
             if gotLock:
                 LockManager.releaseLock(lock)
 
     def __openFile(self, filename, mode='r'):
         if mode == 'r' and not os.path.exists(filename):
             raise StorageException('File ' + filename + ' does not exist')
         gotLock, fd = LockManager.getLock(filename, mode)
+        t0=time.time()
         if not gotLock:
             raise StorageException('Unable to acquire lock on file ' + filename)
         try:
@@ -581,17 +607,21 @@ class H5pyDataStore(IDataStore.IDataStore):
             logger.error(msg)
             LockManager.releaseLock(fd)
             raise e
+        t1=time.time()
+        timeMap['openFile']=t1-t0
         return f, fd
 
     def __getGroup(self, f, name, create=False):
+        t0=time.time()
         parts = name.split('/')
         grp = None
         for s in parts:
             if not grp:
                 if not s:
                     s = '/'
-                if s in f.keys() or s == '/':
+                if s == '/' or s in f.keys():
                     grp = f[s]
                 else:
                     if create:
@@ -607,7 +637,13 @@ class H5pyDataStore(IDataStore.IDataStore):
                     grp = grp.create_group(s)
                 else:
                     raise StorageException("No group " + name + " found")
+        t1=time.time()
+        if timeMap.has_key('getGroup'):
+            timeMap['getGroup']+=t1-t0
+        else:
+            timeMap['getGroup']=t1-t0
         return grp
 
     def __link(self, group, linkName, dataset):
@@ -649,6 +685,7 @@ class H5pyDataStore(IDataStore.IDataStore):
         return results
 
     def __doRepack(self, filepath, basePath, outDir, compression):
+        t0=time.time()
         # call h5repack to repack the file
         if outDir is None:
             repackedFullPath = filepath + '.repacked'
@@ -673,6 +710,8 @@ class H5pyDataStore(IDataStore.IDataStore):
                 # repack failed, but they wanted the data in a different
                 # directory, so just copy the original data without the repack
                 shutil.copy(filepath, repackedFullPath)
+        t1=time.time()
+        timeMap['repack']=t1-t0
         return success
 
     def __doFileAction(self, filepath, basePath, outputDir, fileAction, response, compression='NONE', timestampCheck=None):
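
The t0/t1 pairs around every f.close() above repeat the same pattern; a small context manager could centralize it. A hedged sketch (hypothetical helper, not in this commit), assuming the module-level timeMap:

import time
from contextlib import contextmanager

timeMap = {}

@contextmanager
def timed(section):
    # accumulate elapsed wall time under the named section
    t0 = time.time()
    try:
        yield
    finally:
        timeMap[section] = timeMap.get(section, 0.0) + (time.time() - t0)

# usage mirroring the finally blocks above:
#     with timed('closeFile'):
#         f.close()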

View file

@@ -32,12 +32,14 @@
 #
 #
 
-import numpy, pypies, logging
+import numpy, pypies, logging, time
 import h5py.selections
 from pypies import StorageException, NotImplementedException
 
 logger = pypies.logger
+timeMap = pypies.timeMap
 
 def read(ds, request):
+    t0=time.time()
     rt = request.getType()
     if logger.isEnabledFor(logging.DEBUG):
         logger.debug('requestType=' + rt)
@@ -100,7 +102,13 @@ def read(ds, request):
     else:
         raise NotImplementedException('Only read requests supported are ' +
                                       'ALL, POINT, XLINE, YLINE, and SLAB')
+    t1=time.time()
+    if timeMap.has_key('read'):
+        timeMap['read']+=t1-t0
+    else:
+        timeMap['read']=t1-t0
+
     return result

View file

@@ -35,6 +35,20 @@
 import threading, time, logging
 
 STORE_DIR = '/awips2/edex/data/hdf5/' # TODO this should be a config file
 STORE_DIR_LEN = len(STORE_DIR)
+SECTION_KEYS=['total',
+              ' deserialize',
+              ' getLock',
+              ' approxLockSleepTime',
+              ' orphanCheck',
+              ' openFile',
+              ' getGroup',
+              ' repack',
+              ' read',
+              ' store',
+              ' createRecord',
+              ' closeFile',
+              ' releaseLock',
+              ' serialize']
 
 class StatsThread(threading.Thread):
@@ -77,7 +91,7 @@ class StatsThread(threading.Thread):
         self.hourStats['lastOutput'] = time.time()
 
     def addRecord(self, rec):
         with self.lock:
             self.minuteStats = self.__addNewStat(self.minuteStats, rec)
             self.hourStats = self.__addNewStat(self.hourStats, rec)
@@ -90,23 +104,37 @@ class StatsThread(threading.Thread):
             plugin = pluginName[0:slashIndex]
         else:
             plugin = pluginName
-        req = rec['request']
-        recTime = rec['time']
         if statDict.has_key(plugin):
-            pluginEntry = statDict[plugin]
+            pluginDict = statDict[plugin]
         else:
-            pluginEntry = {}
-        if not pluginEntry.has_key(req):
-            pluginEntry[req] = {'count':0, 'time':0.0, 'slowest':0.0, 'fastest':9999.0}
-        requestEntry = pluginEntry[req]
-        requestEntry['count'] = requestEntry['count'] + 1
-        requestEntry['time'] = requestEntry['time'] + recTime
-        if recTime > requestEntry['slowest']:
-            requestEntry['slowest'] = recTime
-        if recTime < requestEntry['fastest']:
-            requestEntry['fastest'] = recTime
-        statDict[plugin] = pluginEntry
+            pluginDict = {}
+            statDict[plugin]=pluginDict
+
+        req = rec['request']
+
+        if pluginDict.has_key(req):
+            reqDict=pluginDict[req]
+        else:
+            reqDict={}
+            pluginDict[req] = reqDict
+
+        recTimes = rec['time']
+        for timeKey in recTimes.keys():
+            recTime=recTimes[timeKey]
+            if not reqDict.has_key(timeKey):
+                reqDict[timeKey] = {'count':0, 'time':0.0, 'slowest':0.0, 'fastest':9999.0}
+            requestEntry = reqDict[timeKey]
+            requestEntry['count'] += 1
+            requestEntry['time'] += recTime
+            if recTime > requestEntry['slowest']:
+                requestEntry['slowest'] = recTime
+            if recTime < requestEntry['fastest']:
+                requestEntry['fastest'] = recTime
 
         return statDict
@@ -120,34 +148,34 @@ class StatsThread(threading.Thread):
         if len(statDict):
             stmt += COL + 'plugin'.ljust(20)
             stmt += 'request'.ljust(20) + COL
+            stmt += 'section'.ljust(25) + COL
             stmt += 'count'.rjust(7) + COL
             stmt += 'average'.rjust(8) + COL
             stmt += 'min'.rjust(5) + COL
             stmt += 'max'.rjust(5)
             stmt += '\n'
-            stmt += ('-' * 85) + '\n'
+            stmt += ('-' * 114) + '\n'
             pluginNames = statDict.keys()
             pluginNames.sort()
             for plugin in pluginNames:
-                pluginEntry = statDict[plugin]
-                reqNames = pluginEntry.keys()
+                pluginDict = statDict[plugin]
+                reqNames = pluginDict.keys()
                 reqNames.sort()
                 for req in reqNames:
-                    stmt += COL + plugin.ljust(20)
-                    entry = pluginEntry[req]
-                    avg = '%.3f' % (entry['time'] / entry['count'])
-                    fast = '%.3f' % (entry['fastest'])
-                    slow = '%.3f' % (entry['slowest'])
-                    stmt += req.ljust(20) + COL
-                    stmt += str(entry['count']).rjust(7) + COL + avg.rjust(8) + COL
-                    stmt += fast + COL + slow + '\n'
+                    reqDict = pluginDict[req]
+                    for section in SECTION_KEYS:
+                        timeKey = section.strip()
+                        if reqDict.has_key(timeKey):
+                            stmt += COL + plugin.ljust(20)
+                            entry = reqDict[timeKey]
+                            avg = '%.3f' % (entry['time'] / entry['count'])
+                            fast = '%.3f' % (entry['fastest'])
+                            slow = '%.3f' % (entry['slowest'])
+                            stmt += req.ljust(20) + COL
+                            stmt += section.ljust(25) + COL
+                            stmt += str(entry['count']).rjust(7) + COL + avg.rjust(8) + COL
+                            stmt += fast + COL + slow + '\n'
             stmt += '\n'
         else:
             stmt += COL + 'No transactions reported'
         return stmt
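
For reference, after this change __addNewStat builds a three-level dict, plugin -> request -> section, with one count/time/slowest/fastest entry per section (values below are illustrative only):

statDict = {
    'grid': {
        'StoreRequest': {
            'total':     {'count': 12, 'time': 4.8, 'slowest': 0.9, 'fastest': 0.1},
            'store':     {'count': 12, 'time': 3.1, 'slowest': 0.7, 'fastest': 0.05},
            'closeFile': {'count': 12, 'time': 0.4, 'slowest': 0.1, 'fastest': 0.01},
        }
    }
}

SECTION_KEYS then fixes the display order, and the leading spaces on each key double as indentation in the formatted table.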

View file

@@ -55,7 +55,8 @@ class LogRecordStreamHandler(SocketServer.StreamRequestHandler):
     import StatsThread
 
     statsThread = StatsThread.StatsThread(logCfg)
     statsThread.start()
+    SECTION_KEYS = StatsThread.SECTION_KEYS
 
     def handle(self):
         """
@@ -64,24 +65,42 @@ class LogRecordStreamHandler(SocketServer.StreamRequestHandler):
         according to whatever policy is configured locally.
         """
         while True:
-            chunk = self.connection.recv(4)
-            if len(chunk) < 4:
-                break
-            slen = struct.unpack(">L", chunk)[0]
-            chunk = self.connection.recv(slen)
-            while len(chunk) < slen:
-                chunk = chunk + self.connection.recv(slen - len(chunk))
-            obj = self.unPickle(chunk)
-            msg = obj['msg']
-            if type(msg) is str:
-                record = logging.makeLogRecord(obj)
-                self.handleLogRecord(record)
-            else:
-                self.statsThread.addRecord(msg)
-                if msg['time'] > LOG_THRESHOLD:
-                    obj['msg'] = 'Processed ' + msg['request'] + ' on ' + msg['file'] + ' in ' + ('%.3f' % msg['time']) + ' seconds'
-                    record = logging.makeLogRecord(obj)
-                    self.handleLogRecord(record)
+            try:
+                chunk = self.connection.recv(4)
+                if len(chunk) < 4:
+                    break
+                slen = struct.unpack(">L", chunk)[0]
+                chunk = self.connection.recv(slen)
+                while len(chunk) < slen:
+                    chunk = chunk + self.connection.recv(slen - len(chunk))
+                obj = self.unPickle(chunk)
+                msg = obj['msg']
+                if type(msg) is str:
+                    record = logging.makeLogRecord(obj)
+                    self.handleLogRecord(record)
+                else:
+                    self.statsThread.addRecord(msg)
+                    timeDict = msg['time']
+                    if timeDict['total'] > LOG_THRESHOLD:
+                        #obj['msg'] = 'Processed ' + msg['request'] + ' on ' + msg['file'] + ' in ' + ('%.3f' % msg['time']['total']) + ' seconds'
+                        logMsg = 'Processed ' + msg['request'] + ' on ' + msg['file'] + '. Timing entries in seconds: '
+                        addComma=False
+                        for SECTION in self.SECTION_KEYS:
+                            timeKey=SECTION.strip()
+                            if timeDict.has_key(timeKey):
+                                if addComma:
+                                    logMsg += ','
+                                else:
+                                    addComma = True
+                                logMsg += ' ' + timeKey + ' ' + ('%.3f' % timeDict[timeKey])
+                        obj['msg'] = logMsg
+                        record = logging.makeLogRecord(obj)
+                        self.handleLogRecord(record)
+            except Exception, e:
+                import sys, traceback, string
+                t, v, tb = sys.exc_info()
+                print string.join(traceback.format_exception(t, v, tb))
 
     def unPickle(self, data):
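
For context on the framing this handler reads: the stdlib logging.handlers.SocketHandler sends each record as a 4-byte big-endian length (struct.pack('>L', ...)) followed by a pickled dict of the record's attributes, which is exactly what handle() unpacks above. A minimal sending-side sketch (host/port are assumptions; under Python 2 the object passed as the log message arrives unformatted, which is what lets pypies ship the timing dict this way):

import logging, logging.handlers

sock = logging.handlers.SocketHandler('localhost',
                                      logging.handlers.DEFAULT_TCP_LOGGING_PORT)
log = logging.getLogger('pypies-demo')
log.addHandler(sock)

# a plain string arrives as obj['msg'] of type str
log.info('plain message')

# pypies logs a dict instead, which is why handle() checks type(msg)
log.info({'request': 'StoreRequest', 'time': {'total': 0.42}, 'file': '/tmp/demo.h5'})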