[merge] standard logging branch into trunk

bzr revid: xmo@tinyerp.com-20100302100007-wiiqi416tc3rv6mw
Xavier Morel 2010-03-02 11:00:07 +01:00
commit abf7703985
2 changed files with 26 additions and 25 deletions

View File

@@ -36,6 +36,7 @@ import threading
import time
import xmlrpclib
import release
import warnings
class Service(object):
""" Base class for *Local* services
@@ -129,15 +130,13 @@ class ExportService(object):
LOG_NOTSET = 'notset'
LOG_DEBUG_RPC = 'debug_rpc'
LOG_DEBUG = 'debug'
LOG_DEBUG2 = 'debug2'
LOG_INFO = 'info'
LOG_WARNING = 'warn'
LOG_ERROR = 'error'
LOG_CRITICAL = 'critical'
# add new log level below DEBUG
logging.DEBUG2 = logging.DEBUG - 1
logging.DEBUG_RPC = logging.DEBUG2 - 1
logging.DEBUG_RPC = logging.DEBUG - 2
logging.addLevelName(logging.DEBUG_RPC, 'DEBUG_RPC')
def init_logger():
import os
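The net effect of this hunk is that the DEBUG2 level disappears and only DEBUG_RPC survives, registered directly against the standard logging module instead of a homegrown level table. A minimal sketch of that pattern, with a made-up channel name:

import logging

# Register a custom level two steps below DEBUG, as the merged code does
# (the old DEBUG2 level that used to sit in between is gone).
logging.DEBUG_RPC = logging.DEBUG - 2
logging.addLevelName(logging.DEBUG_RPC, 'DEBUG_RPC')

logging.basicConfig(level=logging.DEBUG_RPC)
log = logging.getLogger('rpc.demo')  # hypothetical channel name, for illustration
log.log(logging.DEBUG_RPC, "dispatching %s.%s", 'res.partner', 'read')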
@@ -197,7 +196,6 @@ def init_logger():
mapping = {
'DEBUG_RPC': ('blue', 'white'),
'DEBUG2': ('green', 'white'),
'DEBUG': ('blue', 'default'),
'INFO': ('green', 'default'),
'WARNING': ('yellow', 'default'),
@@ -211,16 +209,20 @@ def init_logger():
class Logger(object):
def __init__(self):
warnings.warn("The netsvc.Logger API shouldn't be used anymore, please "
"use the standard `logging.getLogger` API instead",
PendingDeprecationWarning, stacklevel=2)
super(Logger, self).__init__()
def notifyChannel(self, name, level, msg):
warnings.warn("notifyChannel API shouldn't be used anymore, please use "
"the standard `logging` module instead",
PendingDeprecationWarning, stacklevel=2)
from service.web_services import common
log = logging.getLogger(tools.ustr(name))
if level == LOG_DEBUG2 and not hasattr(log, level):
fct = lambda msg, *args, **kwargs: log.log(logging.DEBUG2, msg, *args, **kwargs)
setattr(log, LOG_DEBUG2, fct)
if level == LOG_DEBUG_RPC and not hasattr(log, level):
fct = lambda msg, *args, **kwargs: log.log(logging.DEBUG_RPC, msg, *args, **kwargs)
setattr(log, LOG_DEBUG_RPC, fct)
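The legacy netsvc.Logger / notifyChannel entry points are kept, but reduced to deprecation shims over `logging.getLogger`. A rough sketch of that shape (the level-string mapping and the INFO fallback are assumptions of this sketch; the real method in this commit also wires up the custom DEBUG_RPC level):

import logging
import warnings

class Logger(object):
    """Deprecated shim: warn, then hand off to the standard library (sketch)."""
    def __init__(self):
        warnings.warn("The netsvc.Logger API shouldn't be used anymore, please "
                      "use the standard `logging.getLogger` API instead",
                      PendingDeprecationWarning, stacklevel=2)

    def notifyChannel(self, name, level, msg):
        warnings.warn("notifyChannel API shouldn't be used anymore, please use "
                      "the standard `logging` module instead",
                      PendingDeprecationWarning, stacklevel=2)
        # Map the old lowercase level string onto a stdlib level and forward.
        lvl = getattr(logging, str(level).upper(), logging.INFO)
        logging.getLogger(str(name)).log(lvl, msg)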

View File

@@ -21,6 +21,7 @@
__all__ = ['db_connect', 'close_db']
import logging
import netsvc
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
from psycopg2.psycopg1 import cursor as psycopg1cursor
@@ -60,15 +61,11 @@ import re
re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$');
def log(msg, lvl=netsvc.LOG_DEBUG2):
logger = netsvc.Logger()
logger.notifyChannel('sql', lvl, msg)
sql_counter = 0
class Cursor(object):
IN_MAX = 1000
__logger = logging.getLogger('db.cursor')
def check(f):
@wraps(f)
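On the sql_db side, the channel-string helper `log(msg, lvl)` built on notifyChannel gives way to a logger bound once to the 'db.cursor' name; call sites then use the logger's own level methods. In outline:

import logging

_logger = logging.getLogger('db.cursor')   # same channel name the diff introduces

# Old style: log("query: %s" % query) routed through notifyChannel('sql', ...).
# New style: a level-specific method on a named logger, with deferred formatting.
_logger.debug("query: %s", "select id from res_users")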
@@ -102,15 +99,16 @@ class Cursor(object):
# pool, preventing some operation on the database like dropping it.
# This can also lead to a server overload.
msg = "Cursor not closed explicitly\n" \
"Cursor was created at %s:%s" % self.__caller
log(msg, netsvc.LOG_WARNING)
"Cursor was created at %s:%s"
self.__logger.warn(msg, *self.__caller)
self.close()
@check
def execute(self, query, params=None):
if '%d' in query or '%f' in query:
log(query, netsvc.LOG_WARNING)
log("SQL queries cannot contain %d or %f anymore. Use only %s", netsvc.LOG_WARNING)
self.__logger.warn(query)
self.__logger.warn("SQL queries cannot contain %d or %f anymore. "
"Use only %s")
if params:
query = query.replace('%d', '%s').replace('%f', '%s')
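Note the switch from pre-formatting messages with `%` to passing the arguments to the logger: interpolation is deferred until the record is actually emitted, and a message with no arguments is never %-formatted, so a raw query containing `%d` or `%f` can be logged verbatim. A small sketch (the caller tuple is made up):

import logging

logger = logging.getLogger('db.cursor')
caller = ('addons/base/ir/ir_cron.py', 42)   # hypothetical (file, line) origin

# Deferred interpolation: the %s placeholders are only filled in when the
# record is handled, and not at all if WARNING is filtered out.
logger.warning("Cursor not closed explicitly\n"
               "Cursor was created at %s:%s", *caller)

# No args, so no %-formatting: a literal %d in the message is safe to log.
logger.warning("select * from res_users where id = %d")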
@@ -124,16 +122,15 @@ class Cursor(object):
logger= netsvc.Logger()
logger.notifyChannel('sql_db', netsvc.LOG_ERROR, "Programming error: %s, in query %s" % (pe, query))
raise
except Exception, e:
log("bad query: %s" % self._obj.query)
log(e)
except Exception:
self.__logger.exception("bad query: %s", self._obj.query)
raise
if self.sql_log:
delay = mdt.now() - now
delay = delay.seconds * 1E6 + delay.microseconds
log("query: %s" % self._obj.query)
self.__logger.debug("query: %s", self._obj.query)
self.sql_log_count+=1
res_from = re_from.match(query.lower())
if res_from:
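The old two-step pair, log("bad query: ...") followed by log(e), becomes a single logger.exception() call, which logs at ERROR level and appends the active traceback when invoked from an except block. Roughly:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('db.cursor')

query = "select count(*) from missing_table"   # stand-in for self._obj.query
try:
    raise Exception('relation "missing_table" does not exist')  # simulated failure
except Exception:
    # One call records the message, the ERROR level and the full traceback.
    logger.exception("bad query: %s", query)
    # the real code then re-raises, as the diff shows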
@@ -158,14 +155,16 @@ class Cursor(object):
if sqllogs[type]:
sqllogitems = sqllogs[type].items()
sqllogitems.sort(key=lambda k: k[1][1])
log("SQL LOG %s:" % (type,))
self.__logger.debug("SQL LOG %s:", type)
for r in sqllogitems:
delay = timedelta(microseconds=r[1][1])
log("table: %s: %s/%s" %(r[0], str(delay), r[1][0]))
self.__logger.debug("table: %s: %s/%s",
r[0], delay, r[1][0])
sum+= r[1][1]
sqllogs[type].clear()
sum = timedelta(microseconds=sum)
log("SUM %s:%s/%d [%d]" % (type, str(sum), self.sql_log_count, sql_counter))
self.__logger.debug("SUM %s:%s/%d [%d]",
type, sum, self.sql_log_count, sql_counter)
sqllogs[type].clear()
process('from')
process('into')
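With everything routed through named channels ('db.cursor', and the old notifyChannel names via getLogger), verbosity is controlled per logger rather than through netsvc. For instance, the SQL timing summaries above only appear once the 'db.cursor' logger is allowed to emit DEBUG records; a minimal configuration sketch:

import logging

logging.basicConfig(level=logging.INFO)                  # default verbosity
logging.getLogger('db.cursor').setLevel(logging.DEBUG)   # enable the SQL LOG output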