[IMP] logging: use the standard `logging` module instead of netsvc, and use the module `__name__` as the logger name instead of ad-hoc channel names.

bzr revid: vmt@openerp.com-20120125094451-a6mejmnaxa2vp1bu
This commit is contained in:
Vo Minh Thu 2012-01-25 10:44:51 +01:00
parent 7c4e08eb46
commit 57b41aae43
27 changed files with 369 additions and 376 deletions

View File

@ -19,17 +19,19 @@
#
##############################################################################
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
import logging
from operator import itemgetter
import time
import netsvc
import pooler
from osv import fields, osv
import decimal_precision as dp
from osv import fields, osv
import pooler
from tools.translate import _
_logger = logging.getLogger(__name__)
def check_cycle(self, cr, uid, ids, context=None):
""" climbs the ``self._table.parent_id`` chains for 100 levels or
until it can't find any more parent(s)
@ -212,7 +214,6 @@ class account_account(osv.osv):
_name = "account.account"
_description = "Account"
_parent_store = True
logger = netsvc.Logger()
def search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False):
@ -295,8 +296,7 @@ class account_account(osv.osv):
if aml_query.strip():
wheres.append(aml_query.strip())
filters = " AND ".join(wheres)
self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG,
'Filters: %s'%filters)
_logger.debug('Filters: %s', filters)
# IN might not work ideally in case there are too many
# children_and_consolidated, in that case join on a
# values() e.g.:
@ -312,8 +312,7 @@ class account_account(osv.osv):
" GROUP BY l.account_id")
params = (tuple(children_and_consolidated),) + query_params
cr.execute(request, params)
self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG,
'Status: %s'%cr.statusmessage)
_logger.debug('Status: %s', cr.statusmessage)
for res in cr.dictfetchall():
accounts[res['id']] = res
@ -2083,8 +2082,7 @@ class account_tax(osv.osv):
}
def compute(self, cr, uid, taxes, price_unit, quantity, address_id=None, product=None, partner=None):
logger = netsvc.Logger()
logger.notifyChannel("warning", netsvc.LOG_WARNING,
_logger.warning(
"Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
return self._compute(cr, uid, taxes, price_unit, quantity, address_id, product, partner)

View File

@ -22,14 +22,10 @@
import time
from report import report_sxw
import pooler
import netsvc
logger=netsvc.Logger()
class bank_statement_balance_report(report_sxw.rml_parse):
def set_context(self, objects, data, ids, report_type=None):
#logger.notifyChannel('addons.'+__name__, netsvc.LOG_WARNING, 'set_context, objects = %s, data = %s, ids = %s' % (objects, data, ids))
cr = self.cr
uid = self.uid
context = self.context

View File

@ -21,11 +21,10 @@
##############################################################################
import time
from osv import osv, fields
import decimal_precision as dp
import netsvc
from osv import osv, fields
from tools.translate import _
logger=netsvc.Logger()
class coda_bank_account(osv.osv):
_name= 'coda.bank.account'

View File

@ -20,15 +20,16 @@
#
##############################################################################
import time
import base64
import re
from sys import exc_info
import time
from traceback import format_exception
from osv import fields,osv
from tools.translate import _
import netsvc
import re
from traceback import format_exception
from sys import exc_info
logger=netsvc.Logger()
_logger = logging.getLogger(__name__)
class account_coda_import(osv.osv_memory):
_name = 'account.coda.import'
@ -816,7 +817,6 @@ class account_coda_import(osv.osv_memory):
ttype = line['type'] == 'supplier' and 'payment' or 'receipt',
date = line['val_date'],
context = context)
#logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'voucher_dict = %s' % voucher_dict)
voucher_line_vals = False
if voucher_dict['value']['line_ids']:
for line_dict in voucher_dict['value']['line_ids']:
@ -889,22 +889,22 @@ class account_coda_import(osv.osv_memory):
nb_err += 1
err_string += _('\nError ! ') + str(e)
tb = ''.join(format_exception(*exc_info()))
logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
'Application Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
_logger.error('Application Error while processing Statement %s\n%s',
statement.get('name', '/'), tb)
except Exception, e:
cr.rollback()
nb_err += 1
err_string += _('\nSystem Error : ') + str(e)
tb = ''.join(format_exception(*exc_info()))
logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
'System Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
_logger.error('System Error while processing Statement %s\n%s',
statement.get('name', '/'), tb)
except :
cr.rollback()
nb_err += 1
err_string = _('\nUnknown Error : ') + str(e)
tb = ''.join(format_exception(*exc_info()))
logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
'Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
_logger.error('Unknown Error while processing Statement %s\n%s',
statement.get('name', '/'), tb)
# end 'for statement in coda_statements'

View File

@ -19,11 +19,14 @@
#
##############################################################################
import logging
import time
from osv import osv, fields
import netsvc
_logger = logging.getLogger(__name__)
class payment_mode(osv.osv):
_name= 'payment.mode'
_description= 'Payment Mode'
@ -70,9 +73,7 @@ class payment_order(osv.osv):
#dead code
def get_wizard(self, type):
logger = netsvc.Logger()
logger.notifyChannel("warning", netsvc.LOG_WARNING,
"No wizard found for the payment type '%s'." % type)
_logger.warning("No wizard found for the payment type '%s'.", type)
return None
def _total(self, cursor, user, ids, name, args, context=None):

View File

@ -18,17 +18,18 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from os.path import join
import base64
import tempfile
import tarfile
import httplib
import logging
import os
from os.path import join
import tarfile
import tempfile
import netsvc
import wizard
import pooler
import os
import tools
_logger = logging.getLogger(__name__)
choose_file_form = '''<?xml version="1.0"?>
<form string="Create Technical Guide in rst format">
@ -99,9 +100,8 @@ class RstDoc(object):
if res.status in (200, ):
status_good = True
except (Exception, ), e:
logger = netsvc.Logger()
msg = "error connecting to server '%s' with link '%s'. Error message: %s" % (server, link, str(e))
logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
_logger.error(msg)
status_good = False
return status_good
@ -241,9 +241,8 @@ class RstDoc(object):
def _write_objects(self):
def write_field(field_def):
if not isinstance(field_def, tuple):
logger = netsvc.Logger()
msg = "Error on Object %s: field_def: %s [type: %s]" % (obj_name.encode('utf8'), field_def.encode('utf8'), type(field_def))
logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
_logger.error(msg)
return ""
field_name = field_def[0]
@ -392,9 +391,8 @@ class wizard_tech_guide_rst(wizard.interface):
try:
os.unlink(tgz_tmp_filename)
except Exception, e:
logger = netsvc.Logger()
msg = "Temporary file %s could not be deleted. (%s)" % (tgz_tmp_filename, e)
logger.notifyChannel("warning", netsvc.LOG_WARNING, msg)
_logger.warning(msg)
return {
'rst_file': base64.encodestring(out),
@ -483,9 +481,8 @@ class wizard_tech_guide_rst(wizard.interface):
res = modobj.fields_get(cr, uid).items()
return res
else:
logger = netsvc.Logger()
msg = "Object %s not found" % (obj)
logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
_logger.error(msg)
return ""
states = {

View File

@ -25,6 +25,9 @@ from tools.translate import _
from osv import osv, fields
import logging
import addons
_logger = logging.getLogger(__name__)
class abstract_quality_check(object):
'''
This Class is abstract class for all test
@ -78,7 +81,6 @@ class abstract_quality_check(object):
#This variable used to give message if test result is good or not
self.message = ''
self.log = logging.getLogger('module.quality')
#The tests have to subscribe itselfs in this list, that contains
#all the test that have to be performed.
@ -108,11 +110,11 @@ class abstract_quality_check(object):
model_data = pool.get('ir.model.data').browse(cr, uid, ids2)
for model in model_data:
model_list.append(model.res_id)
self.log.debug('get_objects() model_list: %s', ','.join(map(str, model_list)))
_logger.debug('get_objects() model_list: %s', ','.join(map(str, model_list)))
obj_list = []
for mod in pool.get('ir.model').browse(cr, uid, model_list):
obj_list.append(str(mod.model))
self.log.debug('get_objects() obj_list: %s', ','.join(obj_list))
_logger.debug('get_objects() obj_list: %s', ','.join(obj_list))
return obj_list
def get_model_ids(self, cr, uid, models=[]):
@ -120,7 +122,7 @@ class abstract_quality_check(object):
if not models:
return []
pool = pooler.get_pool(cr.dbname)
self.log.debug('get_model_ids([%s])', ', '.join(models))
_logger.debug('get_model_ids([%s])', ', '.join(models))
return pool.get('ir.model').search(cr, uid, [('model', 'in', models)])
def get_ids(self, cr, uid, object_list):
@ -211,7 +213,6 @@ class module_quality_check(osv.osv):
So here the detail result is in html format and summary will be in text_wiki format.
'''
pool = pooler.get_pool(cr.dbname)
log = logging.getLogger('module.quality')
obj_module = pool.get('ir.module.module')
if not module_state:
module_id = obj_module.search(cr, uid, [('name', '=', module_name)])
@ -223,14 +224,14 @@ class module_quality_check(osv.osv):
ponderation_sum = 0.0
create_ids = []
module_path = addons.get_module_path(module_name)
log.info('Performing quality tests for %s', module_name)
_logger.info('Performing quality tests for %s', module_name)
for test in abstract_obj.tests:
val = test.quality_test()
if not val.active:
log.info('Skipping inactive step %s for %s', val.name, module_name)
_logger.info('Skipping inactive step %s for %s', val.name, module_name)
continue
log.info('Performing step %s for %s', val.name, module_name)
_logger.info('Performing step %s for %s', val.name, module_name)
# Get a separate cursor per test, so that an SQL error in one
# will not block the others.
cr2 = pooler.get_db(cr.dbname).cursor()
@ -269,9 +270,9 @@ class module_quality_check(osv.osv):
'summary': _("The module has to be installed before running this test.")
}
create_ids.append((0, 0, data))
log.info('Finished quality test step')
_logger.info('Finished quality test step')
except Exception, e:
log.exception("Could not finish test step %s due to %s", val.name, e)
_logger.exception("Could not finish test step %s due to %s", val.name, e)
finally:
cr2.rollback()
cr2.close()

View File

@ -30,6 +30,8 @@ from tools.translate import _
import nodes
import logging
_logger = logging.getLogger(__name__)
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore'))
class document_file(osv.osv):
@ -54,7 +56,7 @@ class document_file(osv.osv):
parent_id = self.pool.get('document.directory')._get_root_directory(cr,uid)
if not parent_id:
logging.getLogger('document').warning("at _attach_parent_id(), still not able to set the parent!")
_logger.warning("at _attach_parent_id(), still not able to set the parent!")
return False
if ids is not None:
@ -339,7 +341,7 @@ class document_file(osv.osv):
if r:
unres.append(r)
else:
logging.getLogger('document').warning("Unlinking attachment #%s %s that has no storage",
_logger.warning("Unlinking attachment #%s %s that has no storage",
f.id, f.name)
res = super(document_file, self).unlink(cr, uid, ids, context)
stor.do_unlink(cr, uid, unres)

View File

@ -19,6 +19,7 @@
#
##############################################################################
import logging
from osv import osv, fields
from osv.orm import except_orm
@ -26,6 +27,8 @@ from osv.orm import except_orm
import nodes
from tools.translate import _
_logger = logging.getLogger(__name__)
class document_directory(osv.osv):
_name = 'document.directory'
_description = 'Directory'
@ -78,9 +81,7 @@ class document_directory(osv.osv):
root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
return root_id
except Exception, e:
import netsvc
logger = netsvc.Logger()
logger.notifyChannel("document", netsvc.LOG_WARNING, 'Cannot set directory root:'+ str(e))
_logger.warning('Cannot set directory root:' + str(e))
return False
return objid.browse(cr, uid, mid, context=context).res_id

View File

@ -41,6 +41,8 @@ import pooler
import nodes
from content_index import cntIndex
_logger = logging.getLogger(__name__)
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config.get('root_path'), 'filestore'))
@ -130,7 +132,7 @@ class nodefd_file(nodes.node_descriptor):
mime, icont = cntIndex.doIndex(None, filename=filename,
content_type=None, realfname=fname)
except Exception:
logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
_logger.debug('Cannot index file:', exc_info=True)
pass
try:
@ -150,7 +152,7 @@ class nodefd_file(nodes.node_descriptor):
cr.commit()
cr.close()
except Exception:
logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True)
_logger.warning('Cannot save file indexed content:', exc_info=True)
elif self.mode in ('a', 'a+' ):
try:
@ -164,7 +166,7 @@ class nodefd_file(nodes.node_descriptor):
cr.commit()
cr.close()
except Exception:
logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True)
_logger.warning('Cannot save file appended content:', exc_info=True)
@ -191,7 +193,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
@ -217,7 +219,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
except Exception:
logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
_logger.debug('Cannot index file:', exc_info=True)
pass
try:
@ -241,7 +243,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
(out, len(data), par.file_id))
cr.commit()
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
_logger.exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
cr.close()
@ -271,7 +273,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
@ -297,7 +299,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
except Exception:
logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
_logger.debug('Cannot index file:', exc_info=True)
pass
try:
@ -320,7 +322,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
_logger.exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
cr.close()
@ -339,7 +341,6 @@ class document_storage(osv.osv):
"""
_name = 'document.storage'
_description = 'Storage Media'
_doclog = logging.getLogger('document')
_columns = {
'name': fields.char('Name', size=64, required=True, select=1),
@ -401,8 +402,6 @@ class document_storage(osv.osv):
# npath may contain empty elements, for root directory etc.
npath = filter(lambda x: x is not None, npath)
# if self._debug:
# self._doclog.debug('Npath: %s', npath)
for n in npath:
if n == '..':
raise ValueError("Invalid '..' element in path")
@ -413,7 +412,7 @@ class document_storage(osv.osv):
dpath += npath[:-1]
path = os.path.join(*dpath)
if not os.path.isdir(path):
self._doclog.debug("Create dirs: %s", path)
_logger.debug("Create dirs: %s", path)
os.makedirs(path)
return path, npath
@ -451,7 +450,7 @@ class document_storage(osv.osv):
# try to fix their directory.
if mode in ('r','r+'):
if ira.file_size:
self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
raise IOError(errno.ENOENT, 'No file can be located')
else:
store_fname = self.__get_random_fname(boo.path)
@ -493,7 +492,7 @@ class document_storage(osv.osv):
# On a migrated db, some files may have the wrong storage type
# try to fix their directory.
if ira.file_size:
self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
return None
fpath = os.path.join(boo.path, ira.store_fname)
return file(fpath, 'rb').read()
@ -517,7 +516,7 @@ class document_storage(osv.osv):
# On a migrated db, some files may have the wrong storage type
# try to fix their directory.
if ira.file_size:
self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
_logger.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
# sfname = ira.name
fpath = os.path.join(boo.path,ira.store_fname or ira.name)
if os.path.exists(fpath):
@ -550,7 +549,7 @@ class document_storage(osv.osv):
if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
self._doclog.debug( "Store data for ir.attachment #%d" % ira.id)
_logger.debug( "Store data for ir.attachment #%d" % ira.id)
store_fname = None
fname = None
if boo.type == 'filestore':
@ -563,13 +562,13 @@ class document_storage(osv.osv):
fp.write(data)
finally:
fp.close()
self._doclog.debug( "Saved data to %s" % fname)
_logger.debug( "Saved data to %s" % fname)
filesize = len(data) # os.stat(fname).st_size
# TODO Here, an old file would be left hanging.
except Exception, e:
self._doclog.warning( "Couldn't save data to %s", path, exc_info=True)
_logger.warning( "Couldn't save data to %s", path, exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'db':
filesize = len(data)
@ -592,12 +591,12 @@ class document_storage(osv.osv):
fp.write(data)
finally:
fp.close()
self._doclog.debug("Saved data to %s", fname)
_logger.debug("Saved data to %s", fname)
filesize = len(data) # os.stat(fname).st_size
store_fname = os.path.join(*npath)
# TODO Here, an old file would be left hanging.
except Exception,e :
self._doclog.warning("Couldn't save data:", exc_info=True)
_logger.warning("Couldn't save data:", exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'virtual':
@ -616,7 +615,7 @@ class document_storage(osv.osv):
mime, icont = cntIndex.doIndex(data, ira.datas_fname,
ira.file_type or None, fname)
except Exception:
self._doclog.debug('Cannot index file:', exc_info=True)
_logger.debug('Cannot index file:', exc_info=True)
pass
try:
@ -633,7 +632,7 @@ class document_storage(osv.osv):
file_node.content_type = mime
return True
except Exception, e :
self._doclog.warning("Couldn't save data:", exc_info=True)
_logger.warning("Couldn't save data:", exc_info=True)
# should we really rollback once we have written the actual data?
# at the db case (only), that rollback would be safe
raise except_orm(_('Error at doc write!'), str(e))
@ -671,9 +670,9 @@ class document_storage(osv.osv):
try:
os.unlink(fname)
except Exception:
self._doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
_logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
else:
self._doclog.warning("Unknown unlink key %s" % ktype)
_logger.warning("Unknown unlink key %s" % ktype)
return True
@ -703,9 +702,9 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
self._doclog.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file")
if fname != os.path.join(*npath):
self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
oldpath = os.path.join(path, npath[-1])
newpath = os.path.join(path, new_name)
@ -743,7 +742,7 @@ class document_storage(osv.osv):
break
par = par.parent_id
if file_node.storage_id != psto:
self._doclog.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
_logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move files between storage media')
if sbro.type in ('filestore', 'db', 'db64'):
@ -756,9 +755,9 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
self._doclog.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file")
if fname != os.path.join(*opath):
self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
oldpath = os.path.join(path, opath[-1])
@ -766,12 +765,12 @@ class document_storage(osv.osv):
npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath)
if not os.path.isdir(newdir):
self._doclog.debug("Must create dir %s", newdir)
_logger.debug("Must create dir %s", newdir)
os.makedirs(newdir)
npath.append(opath[-1])
newpath = os.path.join(*npath)
self._doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
_logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],]

View File

@ -42,7 +42,7 @@ from StringIO import StringIO
# root: if we are at the first directory of a ressource
#
logger = logging.getLogger('doc2.nodes')
_logger = logging.getLogger(__name__)
def _str2time(cre):
""" Convert a string with time representation (from db) into time (float)
@ -328,7 +328,7 @@ class node_class(object):
if self.DAV_M_NS.has_key(ns):
prefix = self.DAV_M_NS[ns]
else:
logger.debug('No namespace: %s ("%s")',ns, prop)
_logger.debug('No namespace: %s ("%s")',ns, prop)
return None
mname = prefix + "_" + prop.replace('-','_')
@ -341,7 +341,7 @@ class node_class(object):
r = m(cr)
return r
except AttributeError:
logger.debug('Property %s not supported' % prop, exc_info=True)
_logger.debug('Property %s not supported' % prop, exc_info=True)
return None
def get_dav_resourcetype(self, cr):
@ -384,13 +384,13 @@ class node_class(object):
def create_child(self, cr, path, data=None):
""" Create a regular file under this node
"""
logger.warning("Attempted to create a file under %r, not possible.", self)
_logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create files here")
def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self
"""
logger.warning("Attempted to create a collection under %r, not possible.", self)
_logger.warning("Attempted to create a collection under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create folders here")
def rm(self, cr):
@ -725,7 +725,7 @@ class node_dir(node_database):
assert self.parent
if self.parent != ndir_node:
logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
_logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move dir to another dir')
ret = {}
@ -998,7 +998,7 @@ class node_res_obj(node_class):
def get_dav_eprop_DEPR(self, cr, ns, prop):
# Deprecated!
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
logger.warning("Who asked for %s:%s?" % (ns, prop))
_logger.warning("Who asked for %s:%s?" % (ns, prop))
return None
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -1328,7 +1328,7 @@ class node_file(node_class):
ret = {}
if ndir_node and self.parent != ndir_node:
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
_logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move files between dynamic folders')
if not ndir_obj:
@ -1473,7 +1473,7 @@ class nodefd_content(StringIO, node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
logging.getLogger('document.content').error("Incorrect mode %s specified", mode)
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
@ -1499,7 +1499,7 @@ class nodefd_content(StringIO, node_descriptor):
raise NotImplementedError
cr.commit()
except Exception:
logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:
cr.close()
@ -1526,7 +1526,7 @@ class nodefd_static(StringIO, node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
logging.getLogger('document.nodes').error("Incorrect mode %s specified", mode)
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
@ -1551,7 +1551,7 @@ class nodefd_static(StringIO, node_descriptor):
raise NotImplementedError
cr.commit()
except Exception:
logging.getLogger('document.nodes').exception('Cannot update db content #%d for close:', par.cnt_id)
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:
cr.close()

View File

@ -26,6 +26,8 @@ import odt2txt
import sys, zipfile, xml.dom.minidom
import logging
_logger = logging.getLogger(__name__)
def _to_unicode(s):
try:
return s.decode('utf-8')
@ -101,9 +103,8 @@ class DocIndex(indexer):
(data, _) = pop.communicate()
return _to_unicode(data)
except OSError:
logger = logging.getLogger('document.DocIndex')
logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
_logger.warning("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
_logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
return False
cntIndex.register(DocIndex())

View File

@ -19,6 +19,7 @@
#
##############################################################################
import logging
import threading
import ftpserver
import authorizer
@ -26,6 +27,8 @@ import abstracted_fs
import netsvc
from tools import config
_logger = logging.getLogger(__name__)
def start_server():
HOST = config.get('ftp_server_host', '127.0.0.1')
PORT = int(config.get('ftp_server_port', '8021'))
@ -36,8 +39,7 @@ def start_server():
class ftp_server(threading.Thread):
def log(self, level, message):
logger = netsvc.Logger()
logger.notifyChannel('FTP', level, message)
_logger.log(level, message)
def run(self):
autho = authorizer.authorizer()
@ -56,9 +58,9 @@ def start_server():
ftpd.serve_forever()
if HOST.lower() == 'none':
netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Server FTP Not Started\n")
_logger.info("\n Server FTP Not Started\n")
else:
netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Serving FTP on %s:%s\n" % (HOST, PORT))
_logger.info("\n Serving FTP on %s:%s\n", HOST, PORT)
ds = ftp_server()
ds.daemon = True
ds.start()

View File

@ -28,7 +28,7 @@ from models.edi import EDIMixin, edi_document
try:
import controllers
except ImportError:
logging.getLogger('init.load').warn(
logging.getLogger(__name__).warning(
"""Could not load openerp-web section of EDI, EDI will not behave correctly
To fix, launch openerp-web in embedded mode""")

View File

@ -23,7 +23,7 @@ import logging
import netsvc
import openerp
_logger = logging.getLogger('edi.service')
_logger = logging.getLogger(__name__)
class edi(netsvc.ExportService):

View File

@ -36,6 +36,8 @@ from osv import osv,fields,orm
from tools.translate import _
from tools.safe_eval import safe_eval as eval
_logger = logging.getLogger(__name__)
EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$')
EDI_VIEW_WEB_URL = '%s/edi/view?debug=1&db=%s&token=%s'
EDI_PROTOCOL_VERSION = 1 # arbitrary ever-increasing version number
@ -72,8 +74,6 @@ def last_update_for(record):
return record_log.get('write_date') or record_log.get('create_date') or False
return False
_logger = logging.getLogger('edi')
class edi_document(osv.osv):
_name = 'edi.document'
_description = 'EDI Document'
@ -682,4 +682,4 @@ class EDIMixin(object):
return record_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -25,6 +25,8 @@ from edi import EDIMixin
from openerp import SUPERUSER_ID
from tools.translate import _
_logger = logging.getLogger(__name__)
RES_PARTNER_ADDRESS_EDI_STRUCT = {
'name': True,
'email': True,
@ -72,7 +74,7 @@ class res_partner_address(osv.osv, EDIMixin):
code, label = 'edi_generic', 'Generic Bank Type (auto-created for EDI)'
bank_code_ids = res_partner_bank_type.search(cr, uid, [('code','=',code)], context=context)
if not bank_code_ids:
logging.getLogger('edi.res_partner').info('Normal bank account type is missing, creating '
_logger.info('Normal bank account type is missing, creating '
'a generic bank account type for EDI.')
self.res_partner_bank_type.create(cr, SUPERUSER_ID, {'name': label,
'code': label})
@ -98,7 +100,7 @@ class res_partner_address(osv.osv, EDIMixin):
bank_name, ext_bank_id, context=import_ctx)
except osv.except_osv:
# failed to import it, try again with unrestricted default type
logging.getLogger('edi.res_partner').warning('Failed to import bank account using'
_logger.warning('Failed to import bank account using'
'bank type: %s, ignoring', import_ctx['default_state'],
exc_info=True)
return address_id

View File

@ -39,7 +39,7 @@ from osv import osv, fields
import tools
from tools.translate import _
logger = logging.getLogger('fetchmail')
_logger = logging.getLogger(__name__)
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
@ -151,7 +151,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
logger.exception("Failed to connect to %s server %s", server.type, server.name)
_logger.exception("Failed to connect to %s server %s", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
finally:
try:
@ -177,7 +177,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
logger.info('start checking for new emails on %s server %s', server.type, server.name)
_logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count = 0
if server.type == 'imap':
@ -196,9 +196,9 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
_logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
except Exception, e:
logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
_logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if imap_server:
imap_server.close()
@ -220,9 +220,9 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
pop_server.dele(num)
cr.commit()
logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
_logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
except Exception, e:
logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
_logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if pop_server:
pop_server.quit()

View File

@ -29,11 +29,11 @@ import datetime
import logging
import StringIO
import traceback
_logger = logging.getLogger(__name__)
pp = pprint.PrettyPrinter(indent=4)
class import_framework(Thread):
"""
This class should be extends,
@ -60,7 +60,6 @@ class import_framework(Thread):
self.context = context or {}
self.email = email_to_notify
self.table_list = []
self.logger = logging.getLogger(module_name)
self.initialize()
"""
@ -165,7 +164,7 @@ class import_framework(Thread):
data_i is a map external field_name => value
and each data_i have a external id => in data_id['id']
"""
self.logger.info(' Importing %s into %s' % (table, model))
_logger.info(' Importing %s into %s', table, model)
if not datas:
return (0, 'No data found')
mapping['id'] = 'id_new'
@ -188,7 +187,7 @@ class import_framework(Thread):
model_obj = self.obj.pool.get(model)
if not model_obj:
raise ValueError(_("%s is not a valid model name") % model)
self.logger.debug(_(" fields imported : ") + str(fields))
_logger.debug(_(" fields imported : ") + str(fields))
(p, r, warning, s) = model_obj.import_data(self.cr, self.uid, fields, res, mode='update', current_module=self.module_name, noupdate=True, context=self.context)
for (field, field_name) in self_dependencies:
self._import_self_dependencies(model_obj, field, datas)
@ -431,9 +430,9 @@ class import_framework(Thread):
'auto_delete' : True})
email_obj.send(self.cr, self.uid, [email_id])
if error:
self.logger.error(_("Import failed due to an unexpected error"))
_logger.error(_("Import failed due to an unexpected error"))
else:
self.logger.info(_("Import finished, notification email sended"))
_logger.info(_("Import finished, notification email sended"))
def get_email_subject(self, result, error=False):
"""

View File

@ -1,219 +1,218 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import random
import re
import time

import netsvc
from osv import fields, osv
from tools.translate import _
# Module-level logger named after the module, following the standard
# `logging` convention (replaces the deprecated netsvc.Logger; the public
# name `logger` is kept for backward compatibility).
logger = logging.getLogger(__name__)
"""
account.invoice object:
- Add support for Belgian structured communication
- Rename 'reference' field labels to 'Communication'
"""
class account_invoice(osv.osv):
    """account.invoice extension for Belgian BBA structured communication.

    - Adds a 'bba' entry to the 'reference_type' selection.
    - Validates BBA structured communications: 12 digits where the last two
      equal the first ten modulo 97 (97 standing in for a 0 remainder).
    - Generates references via the 'date', 'partner_ref' or 'random'
      algorithms and enforces their unicity on outgoing invoices.
    - Relabels the 'reference' field as 'Communication'.
    """
    _inherit = 'account.invoice'

    def _get_reference_type(self, cursor, user, context=None):
        """Add BBA Structured Communication Type and change labels from
        'reference' into 'communication'."""
        res = super(account_invoice, self)._get_reference_type(cursor, user,
                context=context)
        # relabel the standard 'none' entry, then append the BBA type
        res[[i for i, x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
        res.append(('bba', 'BBA Structured Communication'))
        return res

    def check_bbacomm(self, val):
        """Return True if `val` is a valid BBA structured communication.

        Formatting characters ('+', '*', '/', space) are ignored; the
        remaining string must hold exactly 12 digits whose last two equal
        the first ten modulo 97 (97 replacing a 0 remainder).
        """
        supported_chars = '0-9+*/ '
        pattern = re.compile('[^' + supported_chars + ']')
        if pattern.findall(val or ''):
            return False
        bbacomm = re.sub(r'\D', '', val or '')
        if len(bbacomm) == 12:
            base = int(bbacomm[:10])
            mod = base % 97 or 97
            if mod == int(bbacomm[-2:]):
                return True
        return False

    def _check_communication(self, cr, uid, ids):
        # Constraint: every invoice using the 'bba' type must carry a valid
        # structured communication. (The previous version returned on the
        # first 'bba' invoice and silently skipped the remaining records.)
        for inv in self.browse(cr, uid, ids):
            if inv.reference_type == 'bba' and not self.check_bbacomm(inv.reference):
                return False
        return True

    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
            date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False):
        """Propose a (generated) BBA communication when the selected partner
        requests one on its outgoing invoices."""
        result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
            date_invoice, payment_term, partner_bank_id, company_id)
        reference = False
        reference_type = 'none'
        if partner_id and type == 'out_invoice':
            partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
            reference_type = partner.out_inv_comm_type
            if reference_type:
                algorithm = partner.out_inv_comm_algorithm or 'random'
                reference = self.generate_bbacomm(cr, uid, ids, type, reference_type,
                    algorithm, partner_id, '')['value']['reference']
        result['value'].update({
            'reference_type': reference_type or 'none',
            'reference': reference,
        })
        return result

    def _get_next_bba_seq_nr(self, cr, uid, ref_prefix):
        """Return the next 3-digit sequence number ('001'..'999') for BBA
        references starting with `ref_prefix` (e.g. '+++045/2012/');
        raise once the 999 references per prefix are exhausted."""
        seq_ids = self.search(cr, uid,
            [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
             ('reference', 'like', ref_prefix + '%')], order='reference')
        if not seq_ids:
            return '001'
        # the sequence sits at positions 12-14: '+++XXX/XXXX/SSScc+++'
        prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
        if prev_seq < 999:
            return '%03d' % (prev_seq + 1)
        raise osv.except_osv(_('Warning!'),
            _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
                '\nPlease create manually a unique BBA Structured Communication.'))

    def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference):
        """Return {'value': {'reference': ...}} holding a BBA structured
        communication produced by `algorithm` ('date', 'partner_ref' or
        'random'); an already valid `reference` is kept unchanged."""
        partner_obj = self.pool.get('res.partner')
        reference = reference or ''
        if type == 'out_invoice' and reference_type == 'bba':
            if not algorithm and partner_id:
                algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm
            # single fallback (the original tested `if not algorithm` twice)
            if not algorithm:
                algorithm = 'random'
            if algorithm == 'date':
                if not self.check_bbacomm(reference):
                    doy = time.strftime('%j')
                    year = time.strftime('%Y')
                    seq = self._get_next_bba_seq_nr(cr, uid, '+++%s/%s/' % (doy, year))
                    mod = int(doy + year + seq) % 97 or 97
                    reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
            elif algorithm == 'partner_ref':
                if not self.check_bbacomm(reference):
                    partner_ref = partner_obj.browse(cr, uid, partner_id).ref
                    partner_ref_nr = re.sub(r'\D', '', partner_ref or '')
                    if not (3 <= len(partner_ref_nr) <= 7):
                        raise osv.except_osv(_('Warning!'),
                            _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \
                                '\nPlease correct the Partner record.'))
                    partner_ref_nr = partner_ref_nr.ljust(7, '0')
                    seq = self._get_next_bba_seq_nr(cr, uid,
                        '+++%s/%s/' % (partner_ref_nr[:3], partner_ref_nr[3:]))
                    mod = int(partner_ref_nr + seq) % 97 or 97
                    reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
            elif algorithm == 'random':
                if not self.check_bbacomm(reference):
                    base = random.randint(1, 9999999999)
                    # pad to the full 10-digit base: the former rjust(7)
                    # could yield a reference shorter than the 12 digits
                    # that check_bbacomm() requires
                    bbacomm = str(base).rjust(10, '0')
                    mod = str(base % 97 or 97).rjust(2, '0')
                    reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
            else:
                raise osv.except_osv(_('Error!'),
                    _("Unsupported Structured Communication Type Algorithm '%s' !" \
                        "\nPlease contact your OpenERP support channel.") % algorithm)
        return {'value': {'reference': reference}}

    def create(self, cr, uid, vals, context=None):
        """Normalize the BBA communication to '+++XXX/XXXX/XXXXX+++' and
        enforce its unicity before creating the invoice."""
        if vals.get('reference_type') == 'bba':
            if 'reference' not in vals:
                raise osv.except_osv(_('Warning!'),
                    _('Empty BBA Structured Communication!' \
                        '\nPlease fill in a unique BBA Structured Communication.'))
            bbacomm = vals['reference']
            if self.check_bbacomm(bbacomm):
                digits = re.sub(r'\D', '', bbacomm)
                vals['reference'] = '+++' + digits[0:3] + '/' + digits[3:7] + '/' + digits[7:] + '+++'
                same_ids = self.search(cr, uid,
                    [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                     ('reference', '=', vals['reference'])])
                if same_ids:
                    raise osv.except_osv(_('Warning!'),
                        _('The BBA Structured Communication has already been used!' \
                            '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Re-validate and normalize the BBA communication on updates,
        checking unicity against all other outgoing invoices.

        Note: the mutable default `context={}` was replaced by None.
        """
        if isinstance(ids, (int, long)):
            ids = [ids]
        for inv in self.browse(cr, uid, ids, context):
            if 'reference_type' in vals:
                reference_type = vals['reference_type']
            else:
                reference_type = inv.reference_type or ''
            if reference_type == 'bba':
                if 'reference' in vals:
                    bbacomm = vals['reference']
                else:
                    bbacomm = inv.reference or ''
                if self.check_bbacomm(bbacomm):
                    digits = re.sub(r'\D', '', bbacomm)
                    vals['reference'] = '+++' + digits[0:3] + '/' + digits[3:7] + '/' + digits[7:] + '+++'
                    same_ids = self.search(cr, uid,
                        [('id', '!=', inv.id), ('type', '=', 'out_invoice'),
                         ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])
                    if same_ids:
                        raise osv.except_osv(_('Warning!'),
                            _('The BBA Structured Communication has already been used!' \
                                '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).write(cr, uid, ids, vals, context)

    _columns = {
        'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."),
        'reference_type': fields.selection(_get_reference_type, 'Communication Type',
            required=True),
    }
    _constraints = [
        (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),
    ]

account_invoice()
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import random
import re
import time
from osv import fields, osv
from tools.translate import _
"""
account.invoice object:
- Add support for Belgian structured communication
- Rename 'reference' field labels to 'Communication'
"""
class account_invoice(osv.osv):
    """account.invoice extension for Belgian BBA structured communication.

    - Adds a 'bba' entry to the 'reference_type' selection.
    - Validates BBA structured communications: 12 digits where the last two
      equal the first ten modulo 97 (97 standing in for a 0 remainder).
    - Generates references via the 'date', 'partner_ref' or 'random'
      algorithms and enforces their unicity on outgoing invoices.
    - Relabels the 'reference' field as 'Communication'.
    """
    _inherit = 'account.invoice'

    def _get_reference_type(self, cursor, user, context=None):
        """Add BBA Structured Communication Type and change labels from
        'reference' into 'communication'."""
        res = super(account_invoice, self)._get_reference_type(cursor, user,
                context=context)
        # relabel the standard 'none' entry, then append the BBA type
        res[[i for i, x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
        res.append(('bba', 'BBA Structured Communication'))
        return res

    def check_bbacomm(self, val):
        """Return True if `val` is a valid BBA structured communication.

        Formatting characters ('+', '*', '/', space) are ignored; the
        remaining string must hold exactly 12 digits whose last two equal
        the first ten modulo 97 (97 replacing a 0 remainder).
        """
        supported_chars = '0-9+*/ '
        pattern = re.compile('[^' + supported_chars + ']')
        if pattern.findall(val or ''):
            return False
        bbacomm = re.sub(r'\D', '', val or '')
        if len(bbacomm) == 12:
            base = int(bbacomm[:10])
            mod = base % 97 or 97
            if mod == int(bbacomm[-2:]):
                return True
        return False

    def _check_communication(self, cr, uid, ids):
        # Constraint: every invoice using the 'bba' type must carry a valid
        # structured communication. (The previous version returned on the
        # first 'bba' invoice and silently skipped the remaining records.)
        for inv in self.browse(cr, uid, ids):
            if inv.reference_type == 'bba' and not self.check_bbacomm(inv.reference):
                return False
        return True

    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
            date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False):
        """Propose a (generated) BBA communication when the selected partner
        requests one on its outgoing invoices."""
        result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
            date_invoice, payment_term, partner_bank_id, company_id)
        reference = False
        reference_type = 'none'
        if partner_id and type == 'out_invoice':
            partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
            reference_type = partner.out_inv_comm_type
            if reference_type:
                algorithm = partner.out_inv_comm_algorithm or 'random'
                reference = self.generate_bbacomm(cr, uid, ids, type, reference_type,
                    algorithm, partner_id, '')['value']['reference']
        result['value'].update({
            'reference_type': reference_type or 'none',
            'reference': reference,
        })
        return result

    def _get_next_bba_seq_nr(self, cr, uid, ref_prefix):
        """Return the next 3-digit sequence number ('001'..'999') for BBA
        references starting with `ref_prefix` (e.g. '+++045/2012/');
        raise once the 999 references per prefix are exhausted."""
        seq_ids = self.search(cr, uid,
            [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
             ('reference', 'like', ref_prefix + '%')], order='reference')
        if not seq_ids:
            return '001'
        # the sequence sits at positions 12-14: '+++XXX/XXXX/SSScc+++'
        prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
        if prev_seq < 999:
            return '%03d' % (prev_seq + 1)
        raise osv.except_osv(_('Warning!'),
            _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
                '\nPlease create manually a unique BBA Structured Communication.'))

    def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference):
        """Return {'value': {'reference': ...}} holding a BBA structured
        communication produced by `algorithm` ('date', 'partner_ref' or
        'random'); an already valid `reference` is kept unchanged."""
        partner_obj = self.pool.get('res.partner')
        reference = reference or ''
        if type == 'out_invoice' and reference_type == 'bba':
            if not algorithm and partner_id:
                algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm
            # single fallback (the original tested `if not algorithm` twice)
            if not algorithm:
                algorithm = 'random'
            if algorithm == 'date':
                if not self.check_bbacomm(reference):
                    doy = time.strftime('%j')
                    year = time.strftime('%Y')
                    seq = self._get_next_bba_seq_nr(cr, uid, '+++%s/%s/' % (doy, year))
                    mod = int(doy + year + seq) % 97 or 97
                    reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
            elif algorithm == 'partner_ref':
                if not self.check_bbacomm(reference):
                    partner_ref = partner_obj.browse(cr, uid, partner_id).ref
                    partner_ref_nr = re.sub(r'\D', '', partner_ref or '')
                    if not (3 <= len(partner_ref_nr) <= 7):
                        raise osv.except_osv(_('Warning!'),
                            _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \
                                '\nPlease correct the Partner record.'))
                    partner_ref_nr = partner_ref_nr.ljust(7, '0')
                    seq = self._get_next_bba_seq_nr(cr, uid,
                        '+++%s/%s/' % (partner_ref_nr[:3], partner_ref_nr[3:]))
                    mod = int(partner_ref_nr + seq) % 97 or 97
                    reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
            elif algorithm == 'random':
                if not self.check_bbacomm(reference):
                    base = random.randint(1, 9999999999)
                    # pad to the full 10-digit base: the former rjust(7)
                    # could yield a reference shorter than the 12 digits
                    # that check_bbacomm() requires
                    bbacomm = str(base).rjust(10, '0')
                    mod = str(base % 97 or 97).rjust(2, '0')
                    reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
            else:
                raise osv.except_osv(_('Error!'),
                    _("Unsupported Structured Communication Type Algorithm '%s' !" \
                        "\nPlease contact your OpenERP support channel.") % algorithm)
        return {'value': {'reference': reference}}

    def create(self, cr, uid, vals, context=None):
        """Normalize the BBA communication to '+++XXX/XXXX/XXXXX+++' and
        enforce its unicity before creating the invoice."""
        if vals.get('reference_type') == 'bba':
            if 'reference' not in vals:
                raise osv.except_osv(_('Warning!'),
                    _('Empty BBA Structured Communication!' \
                        '\nPlease fill in a unique BBA Structured Communication.'))
            bbacomm = vals['reference']
            if self.check_bbacomm(bbacomm):
                digits = re.sub(r'\D', '', bbacomm)
                vals['reference'] = '+++' + digits[0:3] + '/' + digits[3:7] + '/' + digits[7:] + '+++'
                same_ids = self.search(cr, uid,
                    [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                     ('reference', '=', vals['reference'])])
                if same_ids:
                    raise osv.except_osv(_('Warning!'),
                        _('The BBA Structured Communication has already been used!' \
                            '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Re-validate and normalize the BBA communication on updates,
        checking unicity against all other outgoing invoices.

        Note: the mutable default `context={}` was replaced by None.
        """
        if isinstance(ids, (int, long)):
            ids = [ids]
        for inv in self.browse(cr, uid, ids, context):
            if 'reference_type' in vals:
                reference_type = vals['reference_type']
            else:
                reference_type = inv.reference_type or ''
            if reference_type == 'bba':
                if 'reference' in vals:
                    bbacomm = vals['reference']
                else:
                    bbacomm = inv.reference or ''
                if self.check_bbacomm(bbacomm):
                    digits = re.sub(r'\D', '', bbacomm)
                    vals['reference'] = '+++' + digits[0:3] + '/' + digits[3:7] + '/' + digits[7:] + '+++'
                    same_ids = self.search(cr, uid,
                        [('id', '!=', inv.id), ('type', '=', 'out_invoice'),
                         ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])
                    if same_ids:
                        raise osv.except_osv(_('Warning!'),
                            _('The BBA Structured Communication has already been used!' \
                                '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).write(cr, uid, ids, vals, context)

    _columns = {
        'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."),
        'reference_type': fields.selection(_get_reference_type, 'Communication Type',
            required=True),
    }
    _constraints = [
        (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),
    ]

account_invoice()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -21,11 +21,10 @@
#
##############################################################################
from osv import fields, osv
import time
from osv import fields, osv
from tools.translate import _
import netsvc
logger=netsvc.Logger()
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """

View File

@ -19,11 +19,13 @@
#
##############################################################################
from osv import fields, osv
import logging
import os
from osv import fields, osv
from tools.translate import _
import netsvc
logger=netsvc.Logger()
_logger = logging.getLogger(__name__)
class wizard_multi_charts_accounts(osv.osv_memory):
"""
@ -80,8 +82,9 @@ class wizard_multi_charts_accounts(osv.osv_memory):
if context.get('lang') == lang:
self.pool.get(out_obj._name).write(cr, uid, out_ids[j], {in_field: value[in_id]})
else:
logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING,
'Language: %s. Translation from template: there is no translation available for %s!' %(lang, src[in_id]))#out_obj._name))
_logger.warning(
'Language: %s. Translation from template: there is no translation available for %s!',
lang, src[in_id])
return True
def execute(self, cr, uid, ids, context=None):

View File

@ -34,7 +34,7 @@ from osv import osv
from osv import fields
from tools.translate import _
_logger = logging.getLogger('mail')
_logger = logging.getLogger(__name__)
def format_date_tz(date, tz=None):
if not date:

View File

@ -25,14 +25,11 @@ import base64
import email
from email.utils import parsedate
import logging
import xmlrpclib
from osv import osv, fields
from tools.translate import _
from mail_message import decode, to_email
_logger = logging.getLogger('mail')
class mail_thread(osv.osv):
'''Mixin model, meant to be inherited by any model that needs to
act as a discussion topic on which messages can be attached.

View File

@ -20,14 +20,12 @@
##############################################################################
from osv import fields, osv
import netsvc
class messages(osv.osv):
"""
Message from one user to another within a project
"""
_name = 'project.messages'
logger = netsvc.Logger()
_columns = {
'create_date': fields.datetime('Creation Date', readonly=True),

View File

@ -41,7 +41,6 @@ from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
import netsvc
import pooler
from report_helper import WebKitHelper
from report.report_sxw import *
@ -50,7 +49,7 @@ import tools
from tools.translate import _
from osv.osv import except_osv
logger = logging.getLogger('report_webkit')
_logger = logging.getLogger(__name__)
def mako_template(text):
"""Build a Mako template.
@ -248,7 +247,7 @@ class WebKitParser(report_sxw):
htmls.append(html)
except Exception, e:
msg = exceptions.text_error_template().render()
logger.error(msg)
_logger.error(msg)
raise except_osv(_('Webkit render'), msg)
else:
try :
@ -259,7 +258,7 @@ class WebKitParser(report_sxw):
htmls.append(html)
except Exception, e:
msg = exceptions.text_error_template().render()
logger.error(msg)
_logger.error(msg)
raise except_osv(_('Webkit render'), msg)
head_mako_tpl = mako_template(header)
try :
@ -281,7 +280,7 @@ class WebKitParser(report_sxw):
**self.parser_instance.localcontext)
except:
msg = exceptions.text_error_template().render()
logger.error(msg)
_logger.error(msg)
raise except_osv(_('Webkit render'), msg)
if report_xml.webkit_debug :
try :
@ -292,7 +291,7 @@ class WebKitParser(report_sxw):
**self.parser_instance.localcontext)
except Exception, e:
msg = exceptions.text_error_template().render()
logger.error(msg)
_logger.error(msg)
raise except_osv(_('Webkit render'), msg)
return (deb, 'html')
bin = self.get_lib(cursor, uid, company.id)

View File

@ -31,6 +31,7 @@ import tools
import decimal_precision as dp
import logging
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
@ -408,9 +409,8 @@ class stock_location(osv.osv):
# so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
# state, we return False as if the products were not available, and log it:
cr.execute("ROLLBACK TO stock_location_product_reserve")
logger = logging.getLogger('stock.location')
logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
_logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
_logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
return False
# XXX TODO: rewrite this with one single query, possibly even the quantity conversion