2009-10-20 10:52:23 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2006-12-07 13:41:40 +00:00
|
|
|
##############################################################################
|
2009-11-20 11:44:41 +00:00
|
|
|
#
|
2008-11-04 06:33:23 +00:00
|
|
|
# OpenERP, Open Source Management Solution
|
2009-10-14 12:32:15 +00:00
|
|
|
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
# This program is free software: you can redistribute it and/or modify
|
2009-10-14 12:32:15 +00:00
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
2009-10-14 12:32:15 +00:00
|
|
|
# GNU Affero General Public License for more details.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2009-10-14 12:32:15 +00:00
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
2009-11-20 11:44:41 +00:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
##############################################################################
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
"""
|
2008-09-10 08:46:40 +00:00
|
|
|
Miscellaneous tools used by OpenERP.
|
2006-12-07 13:41:40 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
import os, time, sys
|
|
|
|
import inspect
|
|
|
|
|
|
|
|
from config import config
|
|
|
|
|
2007-04-21 13:32:18 +00:00
|
|
|
import zipfile
|
2007-07-30 13:35:27 +00:00
|
|
|
import release
|
2007-08-01 14:04:09 +00:00
|
|
|
import socket
|
2010-01-05 08:13:29 +00:00
|
|
|
import re
|
2007-04-21 13:32:18 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
if sys.version_info[:2] < (2, 4):
|
2008-07-22 14:24:36 +00:00
|
|
|
from threadinglocal import local
|
2006-12-07 13:41:40 +00:00
|
|
|
else:
|
2008-07-22 14:24:36 +00:00
|
|
|
from threading import local
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2008-08-14 09:51:20 +00:00
|
|
|
from itertools import izip
|
|
|
|
|
2008-09-16 13:32:15 +00:00
|
|
|
# initialize a database with base/base.sql
|
2006-12-07 13:41:40 +00:00
|
|
|
def init_db(cr):
    """Initialize a fresh database.

    Loads base/base.sql statement by statement, then scans every addon
    module and registers it (with its category tree, metadata and
    dependencies) in ir_module_module / ir_module_category.

    @param cr: database cursor
    """
    import addons

    # Bootstrap the schema from base/base.sql, one statement at a time.
    f = addons.get_module_resource('base', 'base.sql')
    for line in file_open(f).read().split(';'):
        if (len(line) > 0) and (not line.isspace()):
            cr.execute(line)
    cr.commit()

    for i in addons.get_modules():
        terp_file = addons.get_module_resource(i, '__terp__.py')
        mod_path = addons.get_module_path(i)
        if not mod_path:
            continue
        info = False
        if os.path.isfile(terp_file) or os.path.isfile(mod_path + '.zip'):
            # NOTE(review): eval() of the module descriptor — acceptable only
            # because the addons directory is trusted, server-local input.
            info = eval(file_open(terp_file).read())
        if info:
            # Walk/create the category hierarchy "A/B/C", one level at a time.
            categs = info.get('category', 'Uncategorized').split('/')
            p_id = None
            while categs:
                if p_id is not None:
                    cr.execute('select id \
                            from ir_module_category \
                            where name=%s and parent_id=%s', (categs[0], p_id))
                else:
                    cr.execute('select id \
                            from ir_module_category \
                            where name=%s and parent_id is NULL', (categs[0],))
                c_id = cr.fetchone()
                if not c_id:
                    # Category missing at this level: create it.
                    cr.execute('select nextval(\'ir_module_category_id_seq\')')
                    c_id = cr.fetchone()[0]
                    cr.execute('insert into ir_module_category \
                            (id, name, parent_id) \
                            values (%s, %s, %s)', (c_id, categs[0], p_id))
                else:
                    c_id = c_id[0]
                p_id = c_id
                categs = categs[1:]

            # Translate the descriptor flags into an installation state.
            active = info.get('active', False)
            installable = info.get('installable', True)
            if installable:
                if active:
                    state = 'to install'
                else:
                    state = 'uninstalled'
            else:
                state = 'uninstallable'

            cr.execute('select nextval(\'ir_module_module_id_seq\')')
            # Renamed from "id" to avoid shadowing the builtin.
            module_id = cr.fetchone()[0]
            cr.execute('insert into ir_module_module \
                    (id, author, website, name, shortdesc, description, \
                        category_id, state, certificate) \
                    values (%s, %s, %s, %s, %s, %s, %s, %s, %s)', (
                module_id, info.get('author', ''),
                info.get('website', ''), i, info.get('name', False),
                info.get('description', ''), p_id, state, info.get('certificate')))
            cr.execute('insert into ir_model_data \
                (name,model,module, res_id, noupdate) values (%s,%s,%s,%s,%s)', (
                    'module_meta_information', 'ir.module.module', i, module_id, True))
            dependencies = info.get('depends', [])
            for d in dependencies:
                cr.execute('insert into ir_module_module_dependency \
                        (module_id,name) values (%s, %s)', (module_id, d))
            cr.commit()
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def find_in_path(name):
    """Search the PATH environment variable for a file called *name*.

    @param name: bare file name to look for
    @return: full path of the first match (regular file or symlink),
             or None when nothing is found
    """
    # os.pathsep is ';' on Windows and ':' elsewhere — the same values the
    # previous hand-rolled os.name test produced.
    path_dirs = [d for d in os.environ['PATH'].split(os.pathsep)
                 if os.path.isdir(d)]
    for d in path_dirs:
        val = os.path.join(d, name)
        # islink() as well: a symlink (even dangling) still counts, as before.
        if os.path.isfile(val) or os.path.islink(val):
            return val
    return None
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def find_pg_tool(name):
    """Locate a PostgreSQL client tool.

    Uses the configured 'pg_path' directory when set (and not the literal
    string 'None'); otherwise falls back to a PATH search.
    """
    pg_path = config['pg_path']
    if not pg_path or pg_path == 'None':
        return find_in_path(name)
    return os.path.join(pg_path, name)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def exec_pg_command(name, *args):
    """Run a PostgreSQL tool synchronously and return its exit status.

    Raises Exception when the tool cannot be located.
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # argv[0] is conventionally the program's base name.
    argv = (os.path.basename(prog),) + args
    return os.spawnv(os.P_WAIT, prog, argv)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def exec_pg_command_pipe(name, *args):
    """Run a PostgreSQL tool and return its (stdin, stdout) binary pipes.

    Raises Exception when the tool cannot be located.
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # Quote the executable on Windows: its path may contain spaces.
    if os.name == "nt":
        executable = '"' + prog + '"'
    else:
        executable = prog
    cmd = executable + ' ' + ' '.join(args)
    return os.popen2(cmd, 'b')
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-03-15 12:26:40 +00:00
|
|
|
def exec_command_pipe(name, *args):
    """Locate *name* on the PATH and run it, returning the (stdin, stdout)
    binary pipe pair from os.popen2.

    Raises Exception when the executable cannot be found.
    """
    prog = find_in_path(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # Quote the executable on Windows, where the path may contain spaces.
    if os.name == "nt":
        head = '"' + prog + '"'
    else:
        head = prog
    return os.popen2(head + ' ' + ' '.join(args), 'b')
|
2007-03-15 12:26:40 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# File paths
|
|
|
|
#----------------------------------------------------------
|
|
|
|
#file_path_root = os.getcwd()
|
|
|
|
#file_path_addons = os.path.join(file_path_root, 'addons')
|
|
|
|
|
2008-07-17 09:55:16 +00:00
|
|
|
def file_open(name, mode="r", subdir='addons', pathinfo=False):
    """Open a file from the OpenERP root, using a subdir folder.

    >>> file_open('hr/report/timesheer.xsl')
    >>> file_open('addons/hr/report/timesheet.xsl')
    >>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)

    @param name: name of the file
    @param mode: file open mode
    @param subdir: subdirectory
    @param pathinfo: if True returns tupple (fileobject, filepath)

    @return: fileobject if pathinfo is False else (fileobject, filepath)
    """
    import addons
    adps = addons.ad_paths
    rtp = os.path.normcase(os.path.abspath(config['root_path']))

    # A leading "addons/" in the name is an implicit subdir specification.
    if name.replace(os.path.sep, '/').startswith('addons/'):
        subdir = 'addons'
        name = name[7:]

    # First try to locate in addons_path
    if subdir:
        subdir2 = subdir
        if subdir2.replace(os.path.sep, '/').startswith('addons/'):
            subdir2 = subdir2[7:]

        # A bare 'addons' subdir means "directly under an addons path".
        subdir2 = (subdir2 != 'addons' or None) and subdir2

        for adp in adps:
            try:
                if subdir2:
                    fn = os.path.join(adp, subdir2, name)
                else:
                    fn = os.path.join(adp, name)
                fn = os.path.normpath(fn)
                # Recurse with subdir=None so only this exact path is tried.
                fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
                if pathinfo:
                    return fo, fn
                return fo
            except IOError:
                # Not under this addons path; try the next one.
                # ("except IOError, e" replaced: e was unused and the old
                # form is invalid under Python 3.)
                pass

    if subdir:
        name = os.path.join(rtp, subdir, name)
    else:
        name = os.path.join(rtp, name)

    name = os.path.normpath(name)

    # Check for a zipfile in the path: walk upwards, looking for a
    # "<head>.zip" archive containing the remaining tail as a member.
    head = name
    zipname = False
    name2 = False
    while True:
        head, tail = os.path.split(head)
        if not tail:
            break
        if zipname:
            zipname = os.path.join(tail, zipname)
        else:
            zipname = tail
        if zipfile.is_zipfile(head + '.zip'):
            from cStringIO import StringIO
            zfile = zipfile.ZipFile(head + '.zip')
            try:
                fo = StringIO()
                fo.write(zfile.read(os.path.join(
                    os.path.basename(head), zipname).replace(
                        os.sep, '/')))
                fo.seek(0)
                if pathinfo:
                    return fo, name
                return fo
            except Exception:
                # Narrowed from a bare except (which also swallowed
                # KeyboardInterrupt): member missing from the archive —
                # remember the zip-based candidate and fall through.
                name2 = os.path.normpath(os.path.join(head + '.zip', zipname))

    for i in (name2, name):
        if i and os.path.isfile(i):
            # open() instead of the Python-2-only file() builtin.
            fo = open(i, mode)
            if pathinfo:
                return fo, i
            return fo
    # raise IOError(...) call form: equivalent on Python 2, valid on 3.
    if os.path.splitext(name)[1] == '.rml':
        raise IOError('Report %s doesn\'t exist or deleted : ' % str(name))
    raise IOError('File not found : ' + str(name))
|
2007-05-03 13:34:39 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# iterables
|
|
|
|
#----------------------------------------------------------
|
|
|
|
def flatten(list):
    """Flatten nested iterables into a single flat list.

    (The parameter keeps its historical name ``list`` — it shadows the
    builtin, but renaming it would break keyword callers.)

    Author: Christophe Simonis (christophe@tinyerp.com)

    Examples:
    >>> flatten(['a'])
    ['a']
    >>> flatten('b')
    ['b']
    >>> flatten( [] )
    []
    >>> flatten( [[], [[]]] )
    []
    >>> flatten( [[['a','b'], 'c'], 'd', ['e', [], 'f']] )
    ['a', 'b', 'c', 'd', 'e', 'f']
    >>> t = (1,2,(3,), [4, 5, [6, [7], (8, 9), ([10, 11, (12, 13)]), [14, [], (15,)], []]])
    >>> flatten(t)
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
    """
    def isiterable(x):
        # Strings are excluded explicitly: on Python 2 they have no
        # __iter__ so the check was implicit, but on Python 3 str gained
        # __iter__ and a bare hasattr test would recurse forever.
        return hasattr(x, "__iter__") and not isinstance(x, str)

    r = []
    for e in list:
        if isiterable(e):
            # extend() instead of map(r.append, flatten(e)): identical
            # result, and still correct on Python 3 where map() is lazy.
            r.extend(flatten(e))
        else:
            r.append(e)
    return r
|
|
|
|
|
2008-08-18 07:56:02 +00:00
|
|
|
def reverse_enumerate(l):
    """Like enumerate, but yielding (index, item) pairs from the end
    of the sequence backwards.

    >>> a = ['a', 'b', 'c']
    >>> it = reverse_enumerate(a)
    >>> next(it)
    (2, 'c')
    >>> next(it)
    (1, 'b')
    >>> next(it)
    (0, 'a')
    >>> next(it)
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    StopIteration
    """
    # Generator expression instead of izip(xrange(...), reversed(l)):
    # yields the same lazy pairs without the Python-2-only izip/xrange.
    last = len(l) - 1
    return ((last - i, x) for i, x in enumerate(reversed(l)))
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# Emails
|
|
|
|
#----------------------------------------------------------
|
2010-01-05 08:13:29 +00:00
|
|
|
# Regexp matching a bare email address; the single group captures it.
email_re = re.compile(r"""
    ([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
    @ # mandatory @ sign
    [a-zA-Z0-9][\w\.-]* # domain must start with a letter ... Ged> why do we include a 0-9 then?
    \.
    [a-z]{2,3} # TLD
    )
    """, re.VERBOSE)
# "[123]": numeric resource id embedded in a subject line.
res_re = re.compile(r"\[([0-9]+)\]", re.UNICODE)
# "Set-<field>: <value>" command lines in inbound mail bodies.
command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
# Message-Id of the form "<...-openobject-<res_id>@<host>>",
# as generated by email_send() when openobject_id is given.
reference_re = re.compile("<.*-openobject-(\\d+)@(.*)>", re.UNICODE)

# X-Priority header values used by email_send(); keyed by the
# one-character priority code it receives.
priorities = {
    '1': '1 (Highest)',
    '2': '2 (High)',
    '3': '3 (Normal)',
    '4': '4 (Low)',
    '5': '5 (Lowest)',
    }
|
|
|
|
|
|
|
|
def html2plaintext(html, body_id=None, encoding='utf-8'):
    ## (c) Fry-IT, www.fry-it.com, 2007
    ## <peter@fry-it.com>
    ## download here: http://www.peterbe.com/plog/html2plaintext

    """ from an HTML text, convert the HTML to plain text.
    If @body_id is provided then this is the tag where the
    body (not necessarily <body>) starts.
    If BeautifulSoup is not installed, the input is returned unchanged.
    """
    try:
        from BeautifulSoup import BeautifulSoup, SoupStrainer, Comment
    except ImportError:
        # Narrowed from a bare except: only a missing BeautifulSoup should
        # trigger the "return input unchanged" fallback.
        return html

    urls = []
    if body_id is not None:
        strainer = SoupStrainer(id=body_id)
    else:
        strainer = SoupStrainer('body')

    soup = BeautifulSoup(html, parseOnlyThese=strainer, fromEncoding=encoding)
    # Collect every hyperlink: (href, original tag markup, link text).
    for link in soup.findAll('a'):
        title = link.renderContents()
        for url in [x[1] for x in link.attrs if x[0]=='href']:
            urls.append(dict(url=url, tag=str(link), title=title))

    html = str(soup)

    # Replace each <a> tag by its URL when the text IS the URL, otherwise
    # by "title [n]" with the URL kept for a footnote list.
    url_index = []
    i = 0
    for d in urls:
        if d['title'] == d['url'] or 'http://'+d['title'] == d['url']:
            html = html.replace(d['tag'], d['url'])
        else:
            i += 1
            html = html.replace(d['tag'], '%s [%s]' % (d['title'], i))
            url_index.append(d['url'])

    # Translate simple emphasis/heading tags into text markers.
    html = html.replace('<strong>','*').replace('</strong>','*')
    html = html.replace('<b>','*').replace('</b>','*')
    html = html.replace('<h3>','*').replace('</h3>','*')
    html = html.replace('<h2>','**').replace('</h2>','**')
    html = html.replace('<h1>','**').replace('</h1>','**')
    html = html.replace('<em>','/').replace('</em>','/')

    # the only line breaks we respect is those of ending tags and
    # breaks

    html = html.replace('\n',' ')
    html = html.replace('<br>', '\n')
    html = html.replace('<tr>', '\n')
    html = html.replace('</p>', '\n\n')
    # Raw string for the pattern (same bytes; avoids the invalid-escape
    # warning newer Pythons emit for '\s' in a plain literal).
    html = re.sub(r'<br\s*/>', '\n', html)
    html = html.replace(' ' * 2, ' ')

    # for all other tags we failed to clean up, just remove then and
    # complain about them on the stderr
    def desperate_fixer(g):
        #print >>sys.stderr, "failed to clean up %s" % str(g.group())
        return ' '

    html = re.sub('<.*?>', desperate_fixer, html)

    # lstrip all lines
    html = '\n'.join([x.lstrip() for x in html.splitlines()])

    # Append the footnote list of collected URLs, if any.
    for i, url in enumerate(url_index):
        if i == 0:
            html += '\n\n'
        html += '[%s] %s\n' % (i+1, url)
    return html
|
|
|
|
|
2009-03-02 14:01:35 +00:00
|
|
|
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
        attach=None, openobject_id=False, ssl=False, debug=False, subtype='plain', x_headers=None, priority='3'):

    """Send an email.

    @param email_from: sender address (falls back to config['email_from'])
    @param email_to: list of recipient addresses
    @param subject: subject line
    @param body: message body (text); encoded to utf-8 when possible
    @param email_cc: list of Cc addresses
    @param email_bcc: list of Bcc addresses
    @param reply_to: Reply-To address (defaults to the From address)
    @param attach: list of (filename, content) attachment tuples
    @param openobject_id: resource id embedded in the Message-Id header
    @param ssl: use STARTTLS (falls back to config['smtp_ssl'])
    @param debug: route smtplib debug output through the server logger
    @param subtype: MIME subtype of the body ('plain' by default)
    @param x_headers: dict of extra X-OpenERP-* headers
    @param priority: '1'..'5' mapped through the module-level priorities dict
    @return: True on success, False on failure (errors are logged, not raised)
    """
    import smtplib
    from email.MIMEText import MIMEText
    from email.MIMEBase import MIMEBase
    from email.MIMEMultipart import MIMEMultipart
    from email.Header import Header
    from email.Utils import formatdate, COMMASPACE
    # (duplicated import below kept as-is; it is harmless)
    from email.Utils import formatdate, COMMASPACE
    from email import Encoders
    import netsvc

    # Mutable default avoided: x_headers defaults to a fresh dict.
    if x_headers is None:
        x_headers = {}

    if not ssl: ssl = config.get('smtp_ssl', False)

    if not (email_from or config['email_from']):
        raise ValueError("Sending an email requires either providing a sender "
                         "address or having configured one")

    if not email_from: email_from = config.get('email_from', False)

    if not email_cc: email_cc = []
    if not email_bcc: email_bcc = []
    if not body: body = u''
    # Best-effort utf-8 encoding; fall back to the raw body on failure.
    try: email_body = body.encode('utf-8')
    except (UnicodeEncodeError, UnicodeDecodeError):
        email_body = body

    email_text = MIMEText(email_body, _subtype=subtype, _charset='utf-8')

    # With attachments, the text part becomes one leg of a multipart message.
    if attach: msg = MIMEMultipart()
    else: msg = email_text

    # ustr() is defined elsewhere in this module — presumably a
    # to-unicode coercion helper; not visible here.
    msg['Subject'] = Header(ustr(subject), 'utf-8')
    msg['From'] = email_from
    # Defensive delete before setting Reply-To (no-op on a fresh message).
    del msg['Reply-To']
    if reply_to:
        msg['Reply-To'] = reply_to
    else:
        msg['Reply-To'] = msg['From']
    msg['To'] = COMMASPACE.join(email_to)
    if email_cc:
        msg['Cc'] = COMMASPACE.join(email_cc)
    if email_bcc:
        msg['Bcc'] = COMMASPACE.join(email_bcc)
    msg['Date'] = formatdate(localtime=True)

    # Add OpenERP Server information
    msg['X-Generated-By'] = 'OpenERP (http://www.openerp.com)'
    msg['X-OpenERP-Server-Host'] = socket.gethostname()
    msg['X-OpenERP-Server-Version'] = release.version

    msg['X-Priority'] = priorities.get(priority, '3 (Normal)')

    # Add dynamic X Header
    for key, value in x_headers.iteritems():
        msg['X-OpenERP-%s' % key] = str(value)

    # Message-Id format matched back by the module-level reference_re.
    if openobject_id:
        msg['Message-Id'] = "<%s-openobject-%s@%s>" % (time.time(), openobject_id, socket.gethostname())

    if attach:
        msg.attach(email_text)
        for (fname,fcontent) in attach:
            part = MIMEBase('application', "octet-stream")
            part.set_payload( fcontent )
            Encoders.encode_base64(part)
            part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
            msg.attach(part)

    # File-like adapter that redirects smtplib's debug output to the logger.
    class WriteToLogger(object):
        def __init__(self):
            self.logger = netsvc.Logger()

        def write(self, s):
            self.logger.notifyChannel('email_send', netsvc.LOG_DEBUG, s)

    smtp_server = config['smtp_server']
    # "maildir:/path" pseudo-server: deliver to a local Maildir instead of
    # SMTP. [8:] keeps the character at index 8 (the '/'), so the path
    # stays absolute.
    if smtp_server.startswith('maildir:/'):
        from mailbox import Maildir
        maildir_path = smtp_server[8:]
        try:
            mdir = Maildir(maildir_path,factory=None, create = True)
            mdir.add(msg.as_string(True))
            return True
        except Exception,e:
            netsvc.Logger().notifyChannel('email_send (maildir)', netsvc.LOG_ERROR, e)
            return False

    try:
        oldstderr = smtplib.stderr
        s = smtplib.SMTP()
        try:
            # in case of debug, the messages are printed to stderr.
            if debug:
                smtplib.stderr = WriteToLogger()

            s.set_debuglevel(int(bool(debug))) # 0 or 1
            s.connect(smtp_server, config['smtp_port'])
            if ssl:
                # STARTTLS handshake: EHLO before and after upgrading.
                s.ehlo()
                s.starttls()
                s.ehlo()

            if config['smtp_user'] or config['smtp_password']:
                s.login(config['smtp_user'], config['smtp_password'])
            # Envelope recipients: To + Cc + Bcc, flattened into one list.
            s.sendmail(email_from,
                       flatten([email_to, email_cc, email_bcc]),
                       msg.as_string()
                       )
        finally:
            s.quit()
            if debug:
                smtplib.stderr = oldstderr

    except Exception, e:
        # Best-effort API: failures are logged and reported as False.
        netsvc.Logger().notifyChannel('email_send', netsvc.LOG_ERROR, e)
        return False

    return True
|
2007-04-13 05:03:14 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# SMS
|
|
|
|
#----------------------------------------------------------
|
|
|
|
# text must be latin-1 encoded
|
|
|
|
def sms_send(user, password, api_id, text, to):
    """Send an SMS through the urlsms.com HTTP gateway.

    @param user: gateway account user id
    @param password: gateway account password
    @param api_id: sender id passed to the gateway
    @param text: message body (must be latin-1 encoded, per the module note)
    @param to: recipient mobile number
    @return: always True — the gateway response is not checked

    NOTE(review): credentials are sent in a GET query string over plain
    HTTP, and the response object f is never inspected.
    """
    import urllib
    url = "http://api.urlsms.com/SendSMS.aspx"
    #url = "http://196.7.150.220/http/sendmsg"
    params = urllib.urlencode({'UserID': user, 'Password': password, 'SenderID': api_id, 'MsgText': text, 'RecipientMobileNo':to})
    f = urllib.urlopen(url+"?"+params)
    # FIXME: Use the logger if there is an error
    return True
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
#---------------------------------------------------------
|
|
|
|
# Class that stores an updateable string (used in wizards)
|
|
|
|
#---------------------------------------------------------
|
|
|
|
class UpdateableStr(local):
    """Thread-local holder for an updateable string (used in wizards)."""

    def __init__(self, string=''):
        # Single payload attribute; thread-isolated via the local base class.
        self.string = string

    def __str__(self):
        return str(self.string)

    def __repr__(self):
        # repr intentionally mirrors str(): the raw payload, unquoted.
        return self.__str__()

    def __nonzero__(self):
        # Truthiness follows the payload (empty string -> False).
        return bool(self.string)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-10-11 13:56:36 +00:00
|
|
|
|
|
|
|
class UpdateableDict(local):
    '''Stores an updateable dict to use in wizards

    Thread-local wrapper around a plain dict: every operation is delegated
    to the wrapped self.dict.
    '''

    def __init__(self, dict=None):
        # The parameter keeps its historical name "dict" (shadows the
        # builtin) so keyword callers are not broken; a fresh {} is used
        # per instance to avoid the shared-mutable-default pitfall.
        if dict is None:
            dict = {}
        self.dict = dict

    def __str__(self):
        return str(self.dict)

    def __repr__(self):
        return str(self.dict)

    # --- plain dict API, delegated to the wrapped dict -------------------

    def clear(self):
        return self.dict.clear()

    def keys(self):
        return self.dict.keys()

    def __setitem__(self, i, y):
        self.dict.__setitem__(i, y)

    def __getitem__(self, i):
        return self.dict.__getitem__(i)

    def copy(self):
        return self.dict.copy()

    def iteritems(self):
        return self.dict.iteritems()

    def iterkeys(self):
        return self.dict.iterkeys()

    def itervalues(self):
        return self.dict.itervalues()

    def pop(self, k, d=None):
        # NOTE: unlike dict.pop, a missing key returns None (the default)
        # instead of raising KeyError — existing interface, kept.
        return self.dict.pop(k, d)

    def popitem(self):
        return self.dict.popitem()

    def setdefault(self, k, d=None):
        return self.dict.setdefault(k, d)

    def update(self, E, **F):
        # Bug fix: F must be expanded as keyword arguments. The previous
        # "self.dict.update(E, F)" passed F positionally, which raises
        # TypeError (dict.update accepts at most one positional argument).
        return self.dict.update(E, **F)

    def values(self):
        return self.dict.values()

    def get(self, k, d=None):
        return self.dict.get(k, d)

    def has_key(self, k):
        return self.dict.has_key(k)

    def items(self):
        return self.dict.items()

    # --- comparison / protocol dunders, delegated ------------------------

    def __cmp__(self, y):
        return self.dict.__cmp__(y)

    def __contains__(self, k):
        return self.dict.__contains__(k)

    def __delitem__(self, y):
        return self.dict.__delitem__(y)

    def __eq__(self, y):
        return self.dict.__eq__(y)

    def __ge__(self, y):
        return self.dict.__ge__(y)

    def __gt__(self, y):
        return self.dict.__gt__(y)

    def __hash__(self):
        return self.dict.__hash__()

    def __iter__(self):
        return self.dict.__iter__()

    def __le__(self, y):
        return self.dict.__le__(y)

    def __len__(self):
        return self.dict.__len__()

    def __lt__(self, y):
        return self.dict.__lt__(y)

    def __ne__(self, y):
        return self.dict.__ne__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
|
|
|
|
2006-12-28 09:44:56 +00:00
|
|
|
# Don't use ! Use res.currency.round()
|
2007-10-11 13:56:36 +00:00
|
|
|
class currency(float):
    """Float subclass carrying a rounding step and digit accuracy.

    Deprecated — the comment above this class says to use
    res.currency.round() instead; kept for backward compatibility.
    """

    def __new__(cls, value, accuracy=2, rounding=None):
        # The float value itself is fixed at construction, rounded to the
        # requested number of digits.
        return float.__new__(cls, round(value, accuracy))

    def __init__(self, value, accuracy=2, rounding=None):
        # Default rounding step is one unit of the last kept digit.
        if rounding is None:
            rounding = 10 ** -accuracy
        self.rounding = rounding
        self.accuracy = accuracy
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
|
2008-10-16 18:28:16 +00:00
|
|
|
def is_hashable(h):
    """Return True when hash(h) succeeds, False when it raises TypeError."""
    try:
        hash(h)
    except TypeError:
        return False
    return True
|
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
class cache(object):
|
2008-12-15 11:58:01 +00:00
|
|
|
"""
|
|
|
|
Use it as a decorator of the function you plan to cache
|
|
|
|
Timeout: 0 = no timeout, otherwise in seconds
|
|
|
|
"""
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2008-12-24 00:24:18 +00:00
|
|
|
__caches = []
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2008-12-24 00:24:18 +00:00
|
|
|
    def __init__(self, timeout=None, skiparg=2, multi=None):
        """Configure the cache decorator.

        @param timeout: entry lifetime in seconds; 0 means no timeout.
            Defaults to config['cache_timeout'].
        @param skiparg: number of leading positional arguments to ignore
            when building cache keys (at least 2: self and cr).
        @param multi: name of the argument holding a list of ids; when set,
            each id is cached under its own key.
        """
        assert skiparg >= 2 # at least self and cr
        if timeout is None:
            self.timeout = config['cache_timeout']
        else:
            self.timeout = timeout
        self.skiparg = skiparg
        self.multi = multi
        # Timestamp of the last expiry sweep.
        self.lasttime = time.time()
        # key -> (result, insertion time)
        self.cache = {}
        # The wrapped function; set once in __call__.
        self.fun = None
        # Register globally so clean_caches_for_db can reach every instance.
        cache.__caches.append(self)
|
|
|
|
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2009-01-05 21:17:46 +00:00
|
|
|
    def _generate_keys(self, dbname, kwargs2):
        """
        Generate (key, id) pairs depending on the arguments and the
        self.multi value.

        Without multi: yields a single (key, None). With multi: yields one
        key per id in kwargs2[self.multi], mutating kwargs2 in passing.
        """

        def to_tuple(d):
            # Canonicalize a kwargs dict into a hashable, order-stable
            # tuple of (name, value) pairs.
            pairs = d.items()
            pairs.sort(key=lambda (k,v): k)
            for i, (k, v) in enumerate(pairs):
                if isinstance(v, dict):
                    pairs[i] = (k, to_tuple(v))
                if isinstance(v, (list, set)):
                    pairs[i] = (k, tuple(v))
                elif not is_hashable(v):
                    # Last resort: fall back to the repr of the value.
                    pairs[i] = (k, repr(v))
            return tuple(pairs)

        if not self.multi:
            key = (('dbname', dbname),) + to_tuple(kwargs2)
            yield key, None
        else:
            # Copy the id list first: kwargs2[self.multi] is overwritten
            # per-id while iterating.
            multis = kwargs2[self.multi][:]
            for id in multis:
                kwargs2[self.multi] = (id,)
                key = (('dbname', dbname),) + to_tuple(kwargs2)
                yield key, id
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2009-01-05 21:17:46 +00:00
|
|
|
    def _unify_args(self, *args, **kwargs):
        """Merge defaults, keyword and positional arguments into one dict
        keyed by parameter name, skipping the first skiparg positionals.
        """
        # Update named arguments with positional argument values (without self and cr)
        kwargs2 = self.fun_default_values.copy()
        kwargs2.update(kwargs)
        # args excludes self and cr already, hence the skiparg-2 offset.
        kwargs2.update(dict(zip(self.fun_arg_names, args[self.skiparg-2:])))
        return kwargs2
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2009-01-05 21:17:46 +00:00
|
|
|
    def clear(self, dbname, *args, **kwargs):
        """clear the cache for database dbname
        if *args and **kwargs are both empty, clear all the keys related to this database
        """
        if not args and not kwargs:
            # key[0] is the ('dbname', <name>) pair prepended by
            # _generate_keys, so key[0][1] is the database name.
            keys_to_del = [key for key in self.cache if key[0][1] == dbname]
        else:
            # Rebuild the exact keys for these arguments and drop only those.
            kwargs2 = self._unify_args(*args, **kwargs)
            keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache]

        for key in keys_to_del:
            del self.cache[key]
|
2009-11-20 11:44:41 +00:00
|
|
|
|
2008-12-31 16:06:10 +00:00
|
|
|
@classmethod
|
2009-01-05 21:17:46 +00:00
|
|
|
def clean_caches_for_db(cls, dbname):
|
|
|
|
for c in cls.__caches:
|
|
|
|
c.clear(dbname)
|
2008-07-22 14:24:36 +00:00
|
|
|
|
|
|
|
    def __call__(self, fn):
        """Decorator entry point: wrap *fn* with a caching proxy.

        A cache instance may only decorate a single function.  The returned
        wrapper keys results by database name plus normalized call
        arguments, and exposes ``clear_cache`` for invalidation.
        """
        if self.fun is not None:
            raise Exception("Can not use a cache instance on more than one function")
        self.fun = fn

        # Record the wrapped function's signature so later calls can be
        # normalized into a single kwargs dict (see _unify_args).
        argspec = inspect.getargspec(fn)
        self.fun_arg_names = argspec[0][self.skiparg:]
        self.fun_default_values = {}
        if argspec[3]:
            # argspec[3] holds the default values of the trailing parameters
            self.fun_default_values = dict(zip(self.fun_arg_names[-len(argspec[3]):], argspec[3]))

        def cached_result(self2, cr, *args, **kwargs):
            # Expire entries older than the timeout; done at most once per
            # timeout interval (lasttime) to keep the common path cheap.
            if time.time()-int(self.timeout) > self.lasttime:
                self.lasttime = time.time()
                t = time.time()-int(self.timeout)
                old_keys = [key for key in self.cache if self.cache[key][1] < t]
                for key in old_keys:
                    del self.cache[key]

            kwargs2 = self._unify_args(*args, **kwargs)

            # Split the requested keys into cache hits and misses.
            result = {}
            notincache = {}
            for key, id in self._generate_keys(cr.dbname, kwargs2):
                if key in self.cache:
                    result[id] = self.cache[key][0]
                else:
                    notincache[id] = key

            if notincache:
                if self.multi:
                    # only recompute the ids that actually missed the cache
                    kwargs2[self.multi] = notincache.keys()

                result2 = fn(self2, cr, *args[:self.skiparg-2], **kwargs2)
                if not self.multi:
                    # single-result mode: one key, stored under id None
                    key = notincache[None]
                    self.cache[key] = (result2, time.time())
                    result[None] = result2
                else:
                    # NOTE(review): ids missing from result2 stay uncached
                    # and will be recomputed on the next call.
                    for id in result2:
                        key = notincache[id]
                        self.cache[key] = (result2[id], time.time())
                    result.update(result2)

            if not self.multi:
                return result[None]
            return result

        # let callers invalidate entries through the wrapper itself
        cached_result.clear_cache = self.clear
        return cached_result
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2008-03-03 11:54:45 +00:00
|
|
|
def to_xml(s):
    """Escape the XML markup characters ``&``, ``<`` and ``>`` in *s*.

    The ampersand is replaced first; otherwise the entities introduced
    for ``<`` and ``>`` would themselves be re-escaped.
    """
    return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
|
2008-03-03 11:54:45 +00:00
|
|
|
|
2009-02-05 13:40:49 +00:00
|
|
|
def ustr(value):
    """This method is similar to the builtin `str` method, except
    it will return Unicode string.

    Decoding is attempted as utf-8 first, then iso-8859-15, and finally
    the default system locale encoding.

    @param value: the value to convert

    @rtype: unicode
    @return: unicode string
    """
    # already unicode: nothing to do
    if isinstance(value, unicode):
        return value

    # objects that know how to render themselves as unicode
    if hasattr(value, '__unicode__'):
        return unicode(value)

    # coerce non-strings (numbers, exceptions, ...) to a byte string first
    if not isinstance(value, str):
        value = str(value)

    try: # first try utf-8
        return unicode(value, 'utf-8')
    except UnicodeError:
        # narrowed from a bare `except:`: decoding a str can only fail with
        # a UnicodeDecodeError, and the bare clause also swallowed
        # KeyboardInterrupt/SystemExit
        pass

    try: # then extended iso-8859-15
        return unicode(value, 'iso-8859-15')
    except UnicodeError:
        pass

    # else use default system locale
    from locale import getlocale
    return unicode(value, getlocale()[1])
|
2008-12-09 13:35:40 +00:00
|
|
|
|
2009-01-07 18:44:27 +00:00
|
|
|
def exception_to_unicode(e):
    """Return a best-effort unicode representation of exception *e*.

    Never raises: if the exception cannot be converted at all, a generic
    placeholder message is returned instead.
    """
    # Python < 2.6 exceptions carry the (later deprecated) `message` attribute
    if (sys.version_info[:2] < (2,6)) and hasattr(e, 'message'):
        return ustr(e.message)
    if hasattr(e, 'args'):
        return "\n".join((ustr(a) for a in e.args))
    try:
        return ustr(e)
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # not swallowed, while error reporting itself still never fails
        return u"Unknown message"
|
2009-01-07 18:44:27 +00:00
|
|
|
|
|
|
|
|
2008-12-26 10:18:58 +00:00
|
|
|
# to be compatible with python 2.4
import __builtin__
if not hasattr(__builtin__, 'all'):
    # Backport of the `all()` builtin introduced in Python 2.5:
    # True iff every element is truthy (vacuously True for an empty iterable).
    def all(iterable):
        for element in iterable:
            if not element:
                return False
        return True

    # Publish the backport as a real builtin so the rest of the code base can
    # use it unconditionally, then drop the temporary module-level name.
    __builtin__.all = all
    del all
|
2009-11-17 07:20:14 +00:00
|
|
|
|
2008-12-26 10:18:58 +00:00
|
|
|
if not hasattr(__builtin__, 'any'):
    # Backport of the `any()` builtin introduced in Python 2.5:
    # True iff at least one element is truthy (False for an empty iterable).
    def any(iterable):
        for element in iterable:
            if element:
                return True
        return False

    # Publish the backport as a real builtin, then drop the module-level name.
    __builtin__.any = any
    del any
|
|
|
|
|
2009-11-20 11:44:41 +00:00
|
|
|
# Locale codes that must be collapsed to a bare ISO 639 language code
# (consumed by get_iso_codes below).
get_iso = {'ca_ES':'ca',
           'cs_CZ': 'cs',
           'et_EE': 'et',
           'sv_SE': 'sv',
           'sq_AL': 'sq',
           'uk_UA': 'uk',
           'vi_VN': 'vi',
           'af_ZA': 'af',
           'be_BY': 'be',
           'ja_JP': 'ja',
           'ko_KR': 'ko'
           }

def get_iso_codes(lang):
    """Reduce locale code *lang* to its shortest unambiguous form.

    Explicit entries of the get_iso table win; otherwise a code of the
    form xx_XX (same language and territory) is shortened to xx, and
    anything else is returned unchanged.
    """
    short = get_iso.get(lang)
    if short is not None:
        return short
    if '_' in lang:
        base, _unused, territory = lang.partition('_')
        if base == territory.lower():
            return base
    return lang
|
2008-12-26 10:18:58 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
def get_languages():
    """Return the mapping of supported locale codes to display names
    ("English name / native name")."""
    languages={
        'ar_AR': u'Arabic / الْعَرَبيّة',
        'bg_BG': u'Bulgarian / български',
        'bs_BS': u'Bosnian / bosanski jezik',
        'ca_ES': u'Catalan / Català',
        'cs_CZ': u'Czech / Čeština',
        'da_DK': u'Danish / Dansk',
        'de_DE': u'German / Deutsch',
        'el_GR': u'Greek / Ελληνικά',
        'en_CA': u'English (CA)',
        'en_GB': u'English (UK)',
        'en_US': u'English (US)',
        'es_AR': u'Spanish (AR) / Español (AR)',
        'es_ES': u'Spanish / Español',
        'et_EE': u'Estonian / Eesti keel',
        'fi_FI': u'Finland / Suomi',
        'fr_BE': u'French (BE) / Français (BE)',
        'fr_CH': u'French (CH) / Français (CH)',
        'fr_FR': u'French / Français',
        'hr_HR': u'Croatian / hrvatski jezik',
        'hu_HU': u'Hungarian / Magyar',
        'id_ID': u'Indonesian / Bahasa Indonesia',
        'it_IT': u'Italian / Italiano',
        'lt_LT': u'Lithuanian / Lietuvių kalba',
        'nl_NL': u'Dutch / Nederlands',
        'nl_BE': u'Dutch (Belgium) / Nederlands (Belgïe)',
        'pl_PL': u'Polish / Język polski',
        'pt_BR': u'Portugese (BR) / português (BR)',
        'pt_PT': u'Portugese / português',
        'ro_RO': u'Romanian / limba română',
        'ru_RU': u'Russian / русский язык',
        'sl_SL': u'Slovenian / slovenščina',
        'sq_AL': u'Albanian / Shqipëri',
        'sv_SE': u'Swedish / svenska',
        'tr_TR': u'Turkish / Türkçe',
        'vi_VN': u'Vietnam / Cộng hòa xã hội chủ nghĩa Việt Nam',
        'uk_UA': u'Ukrainian / украї́нська мо́ва',
        'zh_CN': u'Chinese (CN) / 简体中文',
        'zh_TW': u'Chinese (TW) / 正體字',
        'th_TH': u'Thai / ภาษาไทย',
        'tlh_TLH': u'Klingon',
    }
    return languages
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def scan_languages():
    """Return [(code, display_name)] for every supported language, sorted
    by display name.

    All languages known to get_languages() are returned; the list is no
    longer filtered against the .po files shipped with the base module.
    """
    return sorted(get_languages().items(), key=lambda pair: pair[1])
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-04-23 13:13:47 +00:00
|
|
|
|
|
|
|
def get_user_companies(cr, user):
    """Return the id of *user*'s company followed by the ids of all of
    that company's descendants (children, grandchildren, ...).

    *cr* is a database cursor; *user* is a res_users id.
    """
    def _children_of(cursor, parent_ids):
        # Recursively collect ids of companies whose parent is in parent_ids.
        if not parent_ids:
            return []
        cursor.execute('SELECT id FROM res_company WHERE parent_id = ANY (%s)', (parent_ids,))
        found = [row[0] for row in cursor.fetchall()]
        return found + _children_of(cursor, found)

    cr.execute('SELECT comp.id FROM res_company AS comp, res_users AS u WHERE u.id = %s AND comp.id = u.company_id', (user,))
    company_ids = [cr.fetchone()[0]]
    company_ids.extend(_children_of(cr, company_ids))
    return company_ids
|
2007-04-23 13:13:47 +00:00
|
|
|
|
2007-10-04 05:59:39 +00:00
|
|
|
def mod10r(number):
    """
    Input number : account or invoice number
    Output return: the same number completed with the recursive mod10
    key (Swiss-style recursive modulo-10 check digit).

    Non-digit characters are kept in place but do not influence the key.
    """
    _CARRY_TABLE = (0, 9, 4, 6, 8, 2, 7, 1, 3, 5)
    carry = 0
    for ch in number:
        if ch.isdigit():
            carry = _CARRY_TABLE[(int(ch) + carry) % 10]
    # the check digit is whatever brings the final carry up to 10 (mod 10)
    return number + str((10 - carry) % 10)
|
2007-10-04 05:59:39 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2008-09-02 10:09:28 +00:00
|
|
|
def human_size(sz):
    """
    Return the size in a human readable format
    (e.g. "1.50 Kb"); a string argument is measured by its length.
    Falsy input (0, '', None) yields False.
    """
    if not sz:
        return False
    units = ('bytes', 'Kb', 'Mb', 'Gb')
    if isinstance(sz, basestring):
        sz = len(sz)
    size = float(sz)
    unit_index = 0
    while size >= 1024 and unit_index < len(units) - 1:
        size /= 1024
        unit_index += 1
    return "%0.2f %s" % (size, units[unit_index])
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2008-12-12 10:51:23 +00:00
|
|
|
def logged(f):
    """Decorator that logs every call to *f* on the 'logged' debug channel:
    arguments, result, and elapsed wall-clock time."""
    from tools.func import wraps

    @wraps(f)
    def wrapper(*args, **kwargs):
        # imported lazily so merely defining a logged function stays cheap
        import netsvc
        from pprint import pformat

        lines = ['Call -> function: %r' % f]
        for idx, value in enumerate(args):
            lines.append(' arg %02d: %s' % (idx, pformat(value)))
        for name, value in kwargs.items():
            lines.append(' kwarg %10s: %s' % (name, pformat(value)))

        started = time.time()
        res = f(*args, **kwargs)

        lines.append(' result: %s' % pformat(res))
        lines.append(' time delta: %s' % (time.time() - started))
        netsvc.Logger().notifyChannel('logged', netsvc.LOG_DEBUG, '\n'.join(lines))
        return res

    return wrapper
|
2008-09-09 16:15:17 +00:00
|
|
|
|
2009-01-26 18:52:11 +00:00
|
|
|
class profile(object):
    """Decorator that runs the wrapped function under cProfile and dumps
    the statistics to a file on every call."""

    def __init__(self, fname=None):
        # target file for the profile dump; defaults to "<funcname>.cprof"
        self.fname = fname

    def __call__(self, f):
        from tools.func import wraps

        @wraps(f)
        def wrapper(*args, **kwargs):
            import cProfile
            # capture the return value through a closure cell so it can be
            # retrieved after cProfile has driven the call
            captured = []
            def runner():
                captured.append(f(*args, **kwargs))
            target = self.fname or ("%s.cprof" % (f.func_name,))
            cProfile.runctx('runner()', globals(), locals(), filename=target)
            return captured[0]

        return wrapper
|
|
|
|
|
2008-12-18 10:20:03 +00:00
|
|
|
def debug(what):
    """
    This method allows you to debug your code without print
    Example:
    >>> def func_foo(bar):
    ...     baz = bar
    ...     debug(baz)
    ...     qnx = (baz, bar)
    ...     debug(qnx)
    ...
    >>> func_foo(42)

    This will output on the logger:

        [Wed Dec 25 00:00:00 2008] DEBUG:func_foo:baz = 42
        [Wed Dec 25 00:00:00 2008] DEBUG:func_foo:qnx = (42, 42)

    To view the DEBUG lines in the logger you must start the server with the option
        --log-level=debug

    """
    import netsvc
    from inspect import stack
    import re
    from pprint import pformat
    # Inspect the caller's frame to recover the literal text of the
    # argument expression, i.e. the source line that invoked debug(...).
    st = stack()[1]
    param = re.split("debug *\((.+)\)", st[4][0].strip())[1].strip()
    # Trim trailing ')' captured from enclosing expressions so the
    # parentheses in the recovered expression stay balanced.
    while param.count(')') > param.count('('): param = param[:param.rfind(')')]
    what = pformat(what)
    # Only prefix "<expr> = " when the expression differs from its value
    # (e.g. debug(42) logs just "42", not "42 = 42").
    if param != what:
        what = "%s = %s" % (param, what)
    # st[3] is the caller's function name, used as the log channel.
    netsvc.Logger().notifyChannel(st[3], netsvc.LOG_DEBUG, what)
|
2008-12-18 10:20:03 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2008-09-16 12:26:07 +00:00
|
|
|
# Identity (value, label) pairs of the known icon identifiers; presumably
# consumed as the selection list of icon fields -- confirm against callers.
icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
    'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
    'STOCK_CONNECT', 'STOCK_CONVERT', 'STOCK_COPY', 'STOCK_CUT', 'STOCK_DELETE',
    'STOCK_DIALOG_AUTHENTICATION', 'STOCK_DIALOG_ERROR', 'STOCK_DIALOG_INFO',
    'STOCK_DIALOG_QUESTION', 'STOCK_DIALOG_WARNING', 'STOCK_DIRECTORY', 'STOCK_DISCONNECT',
    'STOCK_DND', 'STOCK_DND_MULTIPLE', 'STOCK_EDIT', 'STOCK_EXECUTE', 'STOCK_FILE',
    'STOCK_FIND', 'STOCK_FIND_AND_REPLACE', 'STOCK_FLOPPY', 'STOCK_GOTO_BOTTOM',
    'STOCK_GOTO_FIRST', 'STOCK_GOTO_LAST', 'STOCK_GOTO_TOP', 'STOCK_GO_BACK',
    'STOCK_GO_DOWN', 'STOCK_GO_FORWARD', 'STOCK_GO_UP', 'STOCK_HARDDISK',
    'STOCK_HELP', 'STOCK_HOME', 'STOCK_INDENT', 'STOCK_INDEX', 'STOCK_ITALIC',
    'STOCK_JUMP_TO', 'STOCK_JUSTIFY_CENTER', 'STOCK_JUSTIFY_FILL',
    'STOCK_JUSTIFY_LEFT', 'STOCK_JUSTIFY_RIGHT', 'STOCK_MEDIA_FORWARD',
    'STOCK_MEDIA_NEXT', 'STOCK_MEDIA_PAUSE', 'STOCK_MEDIA_PLAY',
    'STOCK_MEDIA_PREVIOUS', 'STOCK_MEDIA_RECORD', 'STOCK_MEDIA_REWIND',
    'STOCK_MEDIA_STOP', 'STOCK_MISSING_IMAGE', 'STOCK_NETWORK', 'STOCK_NEW',
    'STOCK_NO', 'STOCK_OK', 'STOCK_OPEN', 'STOCK_PASTE', 'STOCK_PREFERENCES',
    'STOCK_PRINT', 'STOCK_PRINT_PREVIEW', 'STOCK_PROPERTIES', 'STOCK_QUIT',
    'STOCK_REDO', 'STOCK_REFRESH', 'STOCK_REMOVE', 'STOCK_REVERT_TO_SAVED',
    'STOCK_SAVE', 'STOCK_SAVE_AS', 'STOCK_SELECT_COLOR', 'STOCK_SELECT_FONT',
    'STOCK_SORT_ASCENDING', 'STOCK_SORT_DESCENDING', 'STOCK_SPELL_CHECK',
    'STOCK_STOP', 'STOCK_STRIKETHROUGH', 'STOCK_UNDELETE', 'STOCK_UNDERLINE',
    'STOCK_UNDO', 'STOCK_UNINDENT', 'STOCK_YES', 'STOCK_ZOOM_100',
    'STOCK_ZOOM_FIT', 'STOCK_ZOOM_IN', 'STOCK_ZOOM_OUT',
    'terp-account', 'terp-crm', 'terp-mrp', 'terp-product', 'terp-purchase',
    'terp-sale', 'terp-tools', 'terp-administration', 'terp-hr', 'terp-partner',
    'terp-project', 'terp-report', 'terp-stock', 'terp-calendar', 'terp-graph',
])
|
|
|
|
|
2009-01-22 13:22:38 +00:00
|
|
|
def extract_zip_file(zip_file, outdirectory):
    """Extract the archive *zip_file* under *outdirectory*, creating
    intermediate directories as needed.

    Directory entries are skipped; file members are written in binary mode.
    """
    import zipfile
    import os

    zf = zipfile.ZipFile(zip_file, 'r')
    try:
        for path in zf.namelist():
            tgt = os.path.join(outdirectory, path)
            tgtdir = os.path.dirname(tgt)
            if not os.path.exists(tgtdir):
                os.makedirs(tgtdir)

            # Zip member names always use '/' as separator, so the previous
            # test against os.sep missed directory entries on Windows; keep
            # the os.sep test as well for full backward compatibility.
            if not (path.endswith('/') or tgt.endswith(os.sep)):
                fp = open(tgt, 'wb')
                try:
                    fp.write(zf.read(path))
                finally:
                    # close even when read/write fails (handle leaked before)
                    fp.close()
    finally:
        # the archive itself was also leaked on any extraction error
        zf.close()
|
|
|
|
|
2009-11-17 07:20:14 +00:00
|
|
|
def detect_ip_addr():
    """Return the IPv4 address of the first non-loopback interface of this
    host, or 'localhost' when it cannot be determined."""
    def _detect_ip_addr():
        from array import array
        import socket
        from struct import pack, unpack

        # fcntl only exists on UNIX; its absence selects the fallback path
        try:
            import fcntl
        except ImportError:
            fcntl = None

        ip_addr = None

        if not fcntl: # not UNIX:
            # fallback: resolve our own hostname (may yield 127.0.0.1
            # depending on the hosts-file configuration -- best effort)
            host = socket.gethostname()
            ip_addr = socket.gethostbyname(host)
        else: # UNIX:
            # get all interfaces: ioctl 0x8912 (SIOCGIFCONF) fills `names`
            # with packed ifreq structs; the offsets used below assume the
            # kernel's ifreq layout -- TODO confirm against the target ABI
            nbytes = 128 * 32
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            names = array('B', '\0' * nbytes)
            outbytes = unpack('iL', fcntl.ioctl( s.fileno(), 0x8912, pack('iL', nbytes, names.buffer_info()[0])))[0]
            namestr = names.tostring()

            # try 64 bit kernel: 40-byte entries, interface name in the
            # first 16 bytes, IPv4 address at bytes 20..24
            for i in range(0, outbytes, 40):
                name = namestr[i:i+16].split('\0', 1)[0]
                if name != 'lo':
                    ip_addr = socket.inet_ntoa(namestr[i+20:i+24])
                    break

            # try 32 bit kernel: 32-byte entries; query each non-loopback
            # interface directly with ioctl 0x8915 (SIOCGIFADDR)
            if ip_addr is None:
                ifaces = filter(None, [namestr[i:i+32].split('\0', 1)[0] for i in range(0, outbytes, 32)])

                for ifname in [iface for iface in ifaces if iface != 'lo']:
                    ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, pack('256s', ifname[:15]))[20:24])
                    break

        return ip_addr or 'localhost'

    # Any failure in the low-level probing degrades to 'localhost'.
    # NOTE(review): the bare except also hides KeyboardInterrupt/SystemExit;
    # consider narrowing to Exception.
    try:
        ip_addr = _detect_ip_addr()
    except:
        ip_addr = 'localhost'
    return ip_addr
|
2008-09-16 12:26:07 +00:00
|
|
|
|
2010-01-05 15:23:12 +00:00
|
|
|
# RATIONALE BEHIND TIMESTAMP CALCULATIONS AND TIMEZONE MANAGEMENT:
|
|
|
|
# The server side never does any timestamp calculation, always
|
|
|
|
# sends them in a naive (timezone agnostic) format supposed to be
|
|
|
|
# expressed within the server timezone, and expects the clients to
|
|
|
|
# provide timestamps in the server timezone as well.
|
|
|
|
# It stores all timestamps in the database in naive format as well,
|
|
|
|
# which also expresses the time in the server timezone.
|
|
|
|
# For this reason the server makes its timezone name available via the
|
|
|
|
# common/timezone_get() rpc method, which clients need to read
|
|
|
|
# to know the appropriate time offset to use when reading/writing
|
|
|
|
# times.
|
|
|
|
def get_win32_timezone():
    """Attempt to return the "standard name" of the current timezone on a win32 system.
    @return: the standard name of the current win32 timezone, or False if it cannot be found.
    """
    res = False
    if (sys.platform == "win32"):
        try:
            import _winreg
            hklm = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
            # KEY_READ instead of KEY_ALL_ACCESS: we only query a value, and
            # requesting full access fails for non-administrator users,
            # silently yielding False.
            current_tz_key = _winreg.OpenKey(hklm, r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation", 0,_winreg.KEY_READ)
            res = str(_winreg.QueryValueEx(current_tz_key,"StandardName")[0]) # [0] is value, [1] is type code
            _winreg.CloseKey(current_tz_key)
            _winreg.CloseKey(hklm)
        except Exception:
            # narrowed from a bare `except:`; detection is best-effort but
            # KeyboardInterrupt/SystemExit must not be swallowed
            pass
    return res
|
|
|
|
|
|
|
|
def detect_server_timezone():
    """Attempt to detect the timezone to use on the server side.
    Defaults to UTC if no working timezone can be found.
    @return: the timezone identifier as expected by pytz.timezone.
    """
    import netsvc
    try:
        import pytz
    except Exception:
        netsvc.Logger().notifyChannel("detect_server_timezone", netsvc.LOG_WARNING,
            "Python pytz module is not available. Timezone will be set to UTC by default.")
        return 'UTC'

    # Candidate sources, in decreasing order of priority.
    # Option 1: the configuration option (did not exist before, so no backwards compatibility issue)
    # Option 2: to be backwards compatible with 5.0 or earlier, the value from time.tzname[0], but only if it is known to pytz
    # Option 3: the environment variable TZ
    sources = [ (config['timezone'], 'OpenERP configuration'),
                (time.tzname[0], 'time.tzname'),
                (os.environ.get('TZ',False),'TZ environment variable'), ]
    # Option 4: OS-specific: /etc/timezone on Unix
    if (os.path.exists("/etc/timezone")):
        tz_value = False
        f = None
        try:
            f = open("/etc/timezone")
            tz_value = f.read(128).strip()
        except Exception:
            # unreadable file: fall through to the remaining sources
            pass
        finally:
            # BUGFIX: if open() itself raised, `f` was unbound and the old
            # unconditional f.close() raised a NameError from this finally
            # block, masking the original error.
            if f is not None:
                f.close()
        sources.append((tz_value,"/etc/timezone file"))
    # Option 5: timezone info from registry on Win32
    if (sys.platform == "win32"):
        # Timezone info is stored in windows registry.
        # However this is not likely to work very well as the standard name
        # of timezones in windows is rarely something that is known to pytz.
        # But that's ok, it is always possible to use a config option to set
        # it explicitly.
        sources.append((get_win32_timezone(),"Windows Registry"))

    # Return the first candidate that pytz recognizes.
    for (value,source) in sources:
        if value:
            try:
                tz = pytz.timezone(value)
                netsvc.Logger().notifyChannel("detect_server_timezone", netsvc.LOG_INFO,
                    "Using timezone %s obtained from %s." % (tz.zone,source))
                return value
            except pytz.UnknownTimeZoneError:
                netsvc.Logger().notifyChannel("detect_server_timezone", netsvc.LOG_WARNING,
                    "The timezone specified in %s (%s) is invalid, ignoring it." % (source,value))

    netsvc.Logger().notifyChannel("detect_server_timezone", netsvc.LOG_WARNING,
        "No valid timezone could be detected, using default UTC timezone. You can specify it explicitly with option 'timezone' in the server configuration.")
    return 'UTC'
|
|
|
|
|
2008-09-16 12:26:07 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
if __name__ == '__main__':
    # Running this module directly executes the doctest examples embedded
    # in the docstrings above (e.g. debug()).
    import doctest
    doctest.testmod()
|
|
|
|
|
|
|
|
|
2008-07-23 15:01:27 +00:00
|
|
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
|
|
|