commit
8a887ad4e0
|
@ -1,6 +1,6 @@
|
||||||
Metadata-Version: 1.1
|
Metadata-Version: 1.1
|
||||||
Name: OpenERP
|
Name: OpenERP
|
||||||
Version: 5.0.4
|
Version: 5.0.7
|
||||||
Author: Tiny.be
|
Author: Tiny.be
|
||||||
Author-email: fp at tiny be
|
Author-email: fp at tiny be
|
||||||
Maintainer: Tiny.be
|
Maintainer: Tiny.be
|
||||||
|
|
|
@ -372,7 +372,10 @@ def init_module_objects(cr, module_name, obj_list):
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: creating or updating database tables' % module_name)
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: creating or updating database tables' % module_name)
|
||||||
todo = []
|
todo = []
|
||||||
for obj in obj_list:
|
for obj in obj_list:
|
||||||
|
try:
|
||||||
result = obj._auto_init(cr, {'module': module_name})
|
result = obj._auto_init(cr, {'module': module_name})
|
||||||
|
except Exception, e:
|
||||||
|
raise
|
||||||
if result:
|
if result:
|
||||||
todo += result
|
todo += result
|
||||||
if hasattr(obj, 'init'):
|
if hasattr(obj, 'init'):
|
||||||
|
@ -744,7 +747,6 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
|
||||||
if new_modules_in_graph == 0:
|
if new_modules_in_graph == 0:
|
||||||
# nothing to load
|
# nothing to load
|
||||||
break
|
break
|
||||||
|
|
||||||
logger.notifyChannel('init', netsvc.LOG_DEBUG, 'Updating graph with %d more modules' % (len(module_list)))
|
logger.notifyChannel('init', netsvc.LOG_DEBUG, 'Updating graph with %d more modules' % (len(module_list)))
|
||||||
r = load_module_graph(cr, graph, status, report=report)
|
r = load_module_graph(cr, graph, status, report=report)
|
||||||
has_updates = has_updates or r
|
has_updates = has_updates or r
|
||||||
|
|
|
@ -120,8 +120,8 @@
|
||||||
<separator colspan="4" string="Legend (for prefix, suffix)"/>
|
<separator colspan="4" string="Legend (for prefix, suffix)"/>
|
||||||
<group col="8" colspan="4">
|
<group col="8" colspan="4">
|
||||||
<group>
|
<group>
|
||||||
<label colspan="4" string="Year with century: %%(year)s"/>
|
<label colspan="4" string="Current Year with Century: %%(year)s"/>
|
||||||
<label colspan="4" string="Year without century: %%(y)s"/>
|
<label colspan="4" string="Current Year without Century: %%(y)s"/>
|
||||||
<label colspan="4" string="Month: %%(month)s"/>
|
<label colspan="4" string="Month: %%(month)s"/>
|
||||||
<label colspan="4" string="Day: %%(day)s"/>
|
<label colspan="4" string="Day: %%(day)s"/>
|
||||||
</group>
|
</group>
|
||||||
|
@ -1280,8 +1280,7 @@
|
||||||
<page string="Multi Actions" attrs="{'invisible':[('state','!=','other')]}">
|
<page string="Multi Actions" attrs="{'invisible':[('state','!=','other')]}">
|
||||||
<separator colspan="4" string="Other Actions Configuration"/>
|
<separator colspan="4" string="Other Actions Configuration"/>
|
||||||
<field name="child_ids" nolabel="1" colspan="4"/>
|
<field name="child_ids" nolabel="1" colspan="4"/>
|
||||||
<label colspan="4" string="Only one client action will be execute, last
|
<label colspan="4" string="Only one client action will be executed, last client action will be considered in case of multiple client actions." align="0.0"/>
|
||||||
clinent action will be consider in case of multiples clients actions" align="0.0"/>
|
|
||||||
</page>
|
</page>
|
||||||
</notebook>
|
</notebook>
|
||||||
<field name="type" readonly="1"/>
|
<field name="type" readonly="1"/>
|
||||||
|
|
|
@ -87,7 +87,7 @@ class ir_model(osv.osv):
|
||||||
if vals.get('state','base')=='manual':
|
if vals.get('state','base')=='manual':
|
||||||
self.instanciate(cr, user, vals['model'], context)
|
self.instanciate(cr, user, vals['model'], context)
|
||||||
self.pool.get(vals['model']).__init__(self.pool, cr)
|
self.pool.get(vals['model']).__init__(self.pool, cr)
|
||||||
self.pool.get(vals['model'])._auto_init(cr,{})
|
self.pool.get(vals['model'])._auto_init(cr,{'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0')})
|
||||||
#pooler.restart_pool(cr.dbname)
|
#pooler.restart_pool(cr.dbname)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
@ -263,7 +263,8 @@ class ir_model_fields(osv.osv):
|
||||||
|
|
||||||
if self.pool.get(vals['model']):
|
if self.pool.get(vals['model']):
|
||||||
self.pool.get(vals['model']).__init__(self.pool, cr)
|
self.pool.get(vals['model']).__init__(self.pool, cr)
|
||||||
self.pool.get(vals['model'])._auto_init(cr, {})
|
#Added context to _auto_init for special treatment to custom field for select_level
|
||||||
|
self.pool.get(vals['model'])._auto_init(cr, {'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0')})
|
||||||
|
|
||||||
return res
|
return res
|
||||||
ir_model_fields()
|
ir_model_fields()
|
||||||
|
|
|
@ -110,11 +110,10 @@ class ir_rule(osv.osv):
|
||||||
if rule.domain_force:
|
if rule.domain_force:
|
||||||
res[rule.id] = eval(rule.domain_force, eval_user_data)
|
res[rule.id] = eval(rule.domain_force, eval_user_data)
|
||||||
else:
|
else:
|
||||||
if rule.operand.startswith('user.') and rule.operand.count('.') > 1:
|
if rule.operand and rule.operand.startswith('user.') and rule.operand.count('.') > 1:
|
||||||
#Need to check user.field.field1.field2(if field is False,it will break the chain)
|
#Need to check user.field.field1.field2(if field is False,it will break the chain)
|
||||||
op = rule.operand[5:]
|
op = rule.operand[5:]
|
||||||
rule.operand = rule.operand[:5+len(op[:op.find('.')])] +' and '+ rule.operand + ' or False'
|
rule.operand = rule.operand[:5+len(op[:op.find('.')])] +' and '+ rule.operand + ' or False'
|
||||||
|
|
||||||
if rule.operator in ('in', 'child_of'):
|
if rule.operator in ('in', 'child_of'):
|
||||||
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
|
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
|
||||||
eval(rule.operand,eval_user_data)), eval_user_data)
|
eval(rule.operand,eval_user_data)), eval_user_data)
|
||||||
|
|
|
@ -235,7 +235,7 @@ class ir_values(osv.osv):
|
||||||
#ir_del(cr, uid, x[0])
|
#ir_del(cr, uid, x[0])
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
datas = pickle.loads(str(x[2]))
|
datas = pickle.loads(str(x[2].encode('utf-8')))
|
||||||
if meta:
|
if meta:
|
||||||
meta2 = pickle.loads(x[4])
|
meta2 = pickle.loads(x[4])
|
||||||
return (x[0],x[1],datas,meta2)
|
return (x[0],x[1],datas,meta2)
|
||||||
|
|
|
@ -196,10 +196,18 @@ class module(osv.osv):
|
||||||
return True
|
return True
|
||||||
if isinstance(ids, (int, long)):
|
if isinstance(ids, (int, long)):
|
||||||
ids = [ids]
|
ids = [ids]
|
||||||
for mod in self.read(cr, uid, ids, ['state'], context):
|
mod_names = []
|
||||||
|
for mod in self.read(cr, uid, ids, ['state','name'], context):
|
||||||
if mod['state'] in ('installed', 'to upgrade', 'to remove', 'to install'):
|
if mod['state'] in ('installed', 'to upgrade', 'to remove', 'to install'):
|
||||||
raise orm.except_orm(_('Error'),
|
raise orm.except_orm(_('Error'),
|
||||||
_('You try to remove a module that is installed or will be installed'))
|
_('You try to remove a module that is installed or will be installed'))
|
||||||
|
mod_names.append(mod['name'])
|
||||||
|
#Removing the entry from ir_model_data
|
||||||
|
ids_meta = self.pool.get('ir.model.data').search(cr, uid, [('name', '=', 'module_meta_information'), ('module', 'in', mod_names)])
|
||||||
|
|
||||||
|
if ids_meta:
|
||||||
|
self.pool.get('ir.model.data').unlink(cr, uid, ids_meta, context)
|
||||||
|
|
||||||
return super(module, self).unlink(cr, uid, ids, context=context)
|
return super(module, self).unlink(cr, uid, ids, context=context)
|
||||||
|
|
||||||
def state_update(self, cr, uid, ids, newstate, states_to_update, context={}, level=100):
|
def state_update(self, cr, uid, ids, newstate, states_to_update, context={}, level=100):
|
||||||
|
|
|
@ -115,7 +115,7 @@ def _partner_title_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.partner.title')
|
obj = self.pool.get('res.partner.title')
|
||||||
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
|
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
|
||||||
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
||||||
return [(r['shortcut'], r['name']) for r in res]
|
return [(r['shortcut'], r['name']) for r in res] + [('','')]
|
||||||
|
|
||||||
def _lang_get(self, cr, uid, context={}):
|
def _lang_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.lang')
|
obj = self.pool.get('res.lang')
|
||||||
|
|
|
@ -82,7 +82,7 @@ class res_currency(osv.osv):
|
||||||
if currency.rounding == 0:
|
if currency.rounding == 0:
|
||||||
return 0.0
|
return 0.0
|
||||||
else:
|
else:
|
||||||
return round(amount / currency.rounding) * currency.rounding
|
return round(amount / currency.rounding,6) * currency.rounding
|
||||||
|
|
||||||
def is_zero(self, cr, uid, currency, amount):
|
def is_zero(self, cr, uid, currency, amount):
|
||||||
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
|
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
|
||||||
|
|
|
@ -344,6 +344,7 @@
|
||||||
<rng:ref name="group"/>
|
<rng:ref name="group"/>
|
||||||
<rng:ref name="graph"/>
|
<rng:ref name="graph"/>
|
||||||
<rng:ref name="filter"/>
|
<rng:ref name="filter"/>
|
||||||
|
<rng:ref name="notebook"/>
|
||||||
<rng:element name="newline"><rng:empty/></rng:element>
|
<rng:element name="newline"><rng:empty/></rng:element>
|
||||||
</rng:choice>
|
</rng:choice>
|
||||||
</rng:zeroOrMore>
|
</rng:zeroOrMore>
|
||||||
|
|
183
bin/netsvc.py
183
bin/netsvc.py
|
@ -1,3 +1,4 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
##############################################################################
|
##############################################################################
|
||||||
#
|
#
|
||||||
|
@ -386,4 +387,186 @@ class OpenERPDispatcher:
|
||||||
pdb.post_mortem(tb[2])
|
pdb.post_mortem(tb[2])
|
||||||
raise OpenERPDispatcherException(e, tb_s)
|
raise OpenERPDispatcherException(e, tb_s)
|
||||||
|
|
||||||
|
class GenericXMLRPCRequestHandler(OpenERPDispatcher):
|
||||||
|
def _dispatch(self, method, params):
|
||||||
|
try:
|
||||||
|
service_name = self.path.split("/")[-1]
|
||||||
|
return self.dispatch(service_name, method, params)
|
||||||
|
except OpenERPDispatcherException, e:
|
||||||
|
raise xmlrpclib.Fault(tools.exception_to_unicode(e.exception), e.traceback)
|
||||||
|
|
||||||
|
class SSLSocket(object):
|
||||||
|
def __init__(self, socket):
|
||||||
|
if not hasattr(socket, 'sock_shutdown'):
|
||||||
|
from OpenSSL import SSL
|
||||||
|
ctx = SSL.Context(SSL.SSLv23_METHOD)
|
||||||
|
ctx.use_privatekey_file(tools.config['secure_pkey_file'])
|
||||||
|
ctx.use_certificate_file(tools.config['secure_cert_file'])
|
||||||
|
self.socket = SSL.Connection(ctx, socket)
|
||||||
|
else:
|
||||||
|
self.socket = socket
|
||||||
|
|
||||||
|
def shutdown(self, how):
|
||||||
|
return self.socket.sock_shutdown(how)
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
return getattr(self.socket, name)
|
||||||
|
|
||||||
|
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler, SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
|
||||||
|
rpc_paths = map(lambda s: '/xmlrpc/%s' % s, GROUPS.get('web-services', {}).keys())
|
||||||
|
|
||||||
|
class SecureXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
|
||||||
|
def setup(self):
|
||||||
|
self.connection = SSLSocket(self.request)
|
||||||
|
self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
|
||||||
|
self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
|
||||||
|
|
||||||
|
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn, SimpleXMLRPCServer.SimpleXMLRPCServer):
|
||||||
|
def server_bind(self):
|
||||||
|
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
|
||||||
|
|
||||||
|
class SecureThreadedXMLRPCServer(SimpleThreadedXMLRPCServer):
|
||||||
|
def __init__(self, server_address, HandlerClass, logRequests=1):
|
||||||
|
SimpleThreadedXMLRPCServer.__init__(self, server_address, HandlerClass, logRequests)
|
||||||
|
self.socket = SSLSocket(socket.socket(self.address_family, self.socket_type))
|
||||||
|
self.server_bind()
|
||||||
|
self.server_activate()
|
||||||
|
|
||||||
|
class HttpDaemon(threading.Thread):
|
||||||
|
def __init__(self, interface, port, secure=False):
|
||||||
|
threading.Thread.__init__(self)
|
||||||
|
self.__port = port
|
||||||
|
self.__interface = interface
|
||||||
|
self.secure = bool(secure)
|
||||||
|
handler_class = (SimpleXMLRPCRequestHandler, SecureXMLRPCRequestHandler)[self.secure]
|
||||||
|
server_class = (SimpleThreadedXMLRPCServer, SecureThreadedXMLRPCServer)[self.secure]
|
||||||
|
|
||||||
|
if self.secure:
|
||||||
|
from OpenSSL.SSL import Error as SSLError
|
||||||
|
else:
|
||||||
|
class SSLError(Exception): pass
|
||||||
|
try:
|
||||||
|
self.server = server_class((interface, port), handler_class, 0)
|
||||||
|
except SSLError, e:
|
||||||
|
Logger().notifyChannel('xml-rpc-ssl', LOG_CRITICAL, "Can not load the certificate and/or the private key files")
|
||||||
|
sys.exit(1)
|
||||||
|
except Exception, e:
|
||||||
|
Logger().notifyChannel('xml-rpc', LOG_CRITICAL, "Error occur when starting the server daemon: %s" % (e,))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def attach(self, path, gw):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.running = False
|
||||||
|
if os.name != 'nt':
|
||||||
|
try:
|
||||||
|
self.server.socket.shutdown(
|
||||||
|
hasattr(socket, 'SHUT_RDWR') and socket.SHUT_RDWR or 2)
|
||||||
|
except socket.error, e:
|
||||||
|
if e.errno != 57: raise
|
||||||
|
# OSX, socket shutdowns both sides if any side closes it
|
||||||
|
# causing an error 57 'Socket is not connected' on shutdown
|
||||||
|
# of the other side (or something), see
|
||||||
|
# http://bugs.python.org/issue4397
|
||||||
|
Logger().notifyChannel(
|
||||||
|
'server', LOG_DEBUG,
|
||||||
|
'"%s" when shutting down server socket, '
|
||||||
|
'this is normal under OS X'%e)
|
||||||
|
self.server.socket.close()
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
self.server.register_introspection_functions()
|
||||||
|
|
||||||
|
self.running = True
|
||||||
|
while self.running:
|
||||||
|
self.server.handle_request()
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If the server need to be run recursively
|
||||||
|
#
|
||||||
|
#signal.signal(signal.SIGALRM, self.my_handler)
|
||||||
|
#signal.alarm(6)
|
||||||
|
#while True:
|
||||||
|
# self.server.handle_request()
|
||||||
|
#signal.alarm(0) # Disable the alarm
|
||||||
|
|
||||||
|
import tiny_socket
|
||||||
|
class TinySocketClientThread(threading.Thread, OpenERPDispatcher):
|
||||||
|
def __init__(self, sock, threads):
|
||||||
|
threading.Thread.__init__(self)
|
||||||
|
self.sock = sock
|
||||||
|
self.threads = threads
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
import select
|
||||||
|
self.running = True
|
||||||
|
try:
|
||||||
|
ts = tiny_socket.mysocket(self.sock)
|
||||||
|
except:
|
||||||
|
self.sock.close()
|
||||||
|
self.threads.remove(self)
|
||||||
|
return False
|
||||||
|
while self.running:
|
||||||
|
try:
|
||||||
|
msg = ts.myreceive()
|
||||||
|
except:
|
||||||
|
self.sock.close()
|
||||||
|
self.threads.remove(self)
|
||||||
|
return False
|
||||||
|
try:
|
||||||
|
result = self.dispatch(msg[0], msg[1], msg[2:])
|
||||||
|
ts.mysend(result)
|
||||||
|
except OpenERPDispatcherException, e:
|
||||||
|
new_e = Exception(tools.exception_to_unicode(e.exception)) # avoid problems of pickeling
|
||||||
|
ts.mysend(new_e, exception=True, traceback=e.traceback)
|
||||||
|
|
||||||
|
self.sock.close()
|
||||||
|
self.threads.remove(self)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.running = False
|
||||||
|
|
||||||
|
|
||||||
|
class TinySocketServerThread(threading.Thread):
|
||||||
|
def __init__(self, interface, port, secure=False):
|
||||||
|
threading.Thread.__init__(self)
|
||||||
|
self.__port = port
|
||||||
|
self.__interface = interface
|
||||||
|
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
|
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
self.socket.bind((self.__interface, self.__port))
|
||||||
|
self.socket.listen(5)
|
||||||
|
self.threads = []
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
import select
|
||||||
|
try:
|
||||||
|
self.running = True
|
||||||
|
while self.running:
|
||||||
|
(clientsocket, address) = self.socket.accept()
|
||||||
|
ct = TinySocketClientThread(clientsocket, self.threads)
|
||||||
|
self.threads.append(ct)
|
||||||
|
ct.start()
|
||||||
|
self.socket.close()
|
||||||
|
except Exception, e:
|
||||||
|
self.socket.close()
|
||||||
|
return False
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.running = False
|
||||||
|
for t in self.threads:
|
||||||
|
t.stop()
|
||||||
|
try:
|
||||||
|
if hasattr(socket, 'SHUT_RDWR'):
|
||||||
|
self.socket.shutdown(socket.SHUT_RDWR)
|
||||||
|
else:
|
||||||
|
self.socket.shutdown(2)
|
||||||
|
self.socket.close()
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||||
|
|
|
@ -155,14 +155,13 @@ class expression(object):
|
||||||
# values in the database, so we must ignore it : we generate a dummy leaf
|
# values in the database, so we must ignore it : we generate a dummy leaf
|
||||||
self.__exp[i] = self.__DUMMY_LEAF
|
self.__exp[i] = self.__DUMMY_LEAF
|
||||||
else:
|
else:
|
||||||
subexp = field.search(cr, uid, table, left, [self.__exp[i]])
|
subexp = field.search(cr, uid, table, left, [self.__exp[i]], context=context)
|
||||||
# we assume that the expression is valid
|
# we assume that the expression is valid
|
||||||
# we create a dummy leaf for forcing the parsing of the resulting expression
|
# we create a dummy leaf for forcing the parsing of the resulting expression
|
||||||
self.__exp[i] = '&'
|
self.__exp[i] = '&'
|
||||||
self.__exp.insert(i + 1, self.__DUMMY_LEAF)
|
self.__exp.insert(i + 1, self.__DUMMY_LEAF)
|
||||||
for j, se in enumerate(subexp):
|
for j, se in enumerate(subexp):
|
||||||
self.__exp.insert(i + 2 + j, se)
|
self.__exp.insert(i + 2 + j, se)
|
||||||
|
|
||||||
# else, the value of the field is store in the database, so we search on it
|
# else, the value of the field is store in the database, so we search on it
|
||||||
|
|
||||||
|
|
||||||
|
@ -170,7 +169,7 @@ class expression(object):
|
||||||
# Applying recursivity on field(one2many)
|
# Applying recursivity on field(one2many)
|
||||||
if operator == 'child_of':
|
if operator == 'child_of':
|
||||||
if isinstance(right, basestring):
|
if isinstance(right, basestring):
|
||||||
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], 'like', limit=None)]
|
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], 'like', context=context, limit=None)]
|
||||||
else:
|
else:
|
||||||
ids2 = list(right)
|
ids2 = list(right)
|
||||||
if field._obj != working_table._name:
|
if field._obj != working_table._name:
|
||||||
|
@ -184,7 +183,7 @@ class expression(object):
|
||||||
|
|
||||||
if right:
|
if right:
|
||||||
if isinstance(right, basestring):
|
if isinstance(right, basestring):
|
||||||
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, limit=None)]
|
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context, limit=None)]
|
||||||
operator = 'in'
|
operator = 'in'
|
||||||
else:
|
else:
|
||||||
if not isinstance(right,list):
|
if not isinstance(right,list):
|
||||||
|
@ -211,7 +210,7 @@ class expression(object):
|
||||||
#FIXME
|
#FIXME
|
||||||
if operator == 'child_of':
|
if operator == 'child_of':
|
||||||
if isinstance(right, basestring):
|
if isinstance(right, basestring):
|
||||||
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], 'like', limit=None)]
|
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], 'like', context=context, limit=None)]
|
||||||
else:
|
else:
|
||||||
ids2 = list(right)
|
ids2 = list(right)
|
||||||
|
|
||||||
|
@ -227,7 +226,7 @@ class expression(object):
|
||||||
call_null_m2m = True
|
call_null_m2m = True
|
||||||
if right:
|
if right:
|
||||||
if isinstance(right, basestring):
|
if isinstance(right, basestring):
|
||||||
res_ids = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator)]
|
res_ids = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context)]
|
||||||
operator = 'in'
|
operator = 'in'
|
||||||
else:
|
else:
|
||||||
if not isinstance(right, list):
|
if not isinstance(right, list):
|
||||||
|
@ -333,7 +332,7 @@ class expression(object):
|
||||||
query = '(%s.%s in (%s))' % (table._table, left, right[0])
|
query = '(%s.%s in (%s))' % (table._table, left, right[0])
|
||||||
params = right[1]
|
params = right[1]
|
||||||
elif operator in ['in', 'not in']:
|
elif operator in ['in', 'not in']:
|
||||||
params = right[:]
|
params = right and right[:] or []
|
||||||
len_before = len(params)
|
len_before = len(params)
|
||||||
for i in range(len_before)[::-1]:
|
for i in range(len_before)[::-1]:
|
||||||
if params[i] == False:
|
if params[i] == False:
|
||||||
|
@ -349,7 +348,12 @@ class expression(object):
|
||||||
else:
|
else:
|
||||||
instr = ','.join([table._columns[left]._symbol_set[0]] * len_after)
|
instr = ','.join([table._columns[left]._symbol_set[0]] * len_after)
|
||||||
query = '(%s.%s %s (%s))' % (table._table, left, operator, instr)
|
query = '(%s.%s %s (%s))' % (table._table, left, operator, instr)
|
||||||
|
else:
|
||||||
|
# the case for [field, 'in', []] or [left, 'not in', []]
|
||||||
|
if operator == 'in':
|
||||||
|
query = '(%s.%s IS NULL)' % (table._table, left)
|
||||||
|
else:
|
||||||
|
query = '(%s.%s IS NOT NULL)' % (table._table, left)
|
||||||
if check_nulls:
|
if check_nulls:
|
||||||
query = '(%s OR %s.%s IS NULL)' % (query, table._table, left)
|
query = '(%s OR %s.%s IS NULL)' % (query, table._table, left)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -99,9 +99,9 @@ class _column(object):
|
||||||
def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None):
|
def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None):
|
||||||
raise Exception(_('undefined get method !'))
|
raise Exception(_('undefined get method !'))
|
||||||
|
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None):
|
||||||
ids = obj.search(cr, uid, args+self._domain+[(name, 'ilike', value)], offset, limit)
|
ids = obj.search(cr, uid, args+self._domain+[(name, 'ilike', value)], offset, limit, context=context)
|
||||||
res = obj.read(cr, uid, ids, [name])
|
res = obj.read(cr, uid, ids, [name], context=context)
|
||||||
return [x[name] for x in res]
|
return [x[name] for x in res]
|
||||||
|
|
||||||
def search_memory(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None):
|
def search_memory(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None):
|
||||||
|
@ -267,8 +267,8 @@ class one2one(_column):
|
||||||
id = cr.fetchone()[0]
|
id = cr.fetchone()[0]
|
||||||
obj.write(cr, user, [id], act[1], context=context)
|
obj.write(cr, user, [id], act[1], context=context)
|
||||||
|
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None):
|
||||||
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit)
|
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit, context=context)
|
||||||
|
|
||||||
|
|
||||||
class many2one(_column):
|
class many2one(_column):
|
||||||
|
@ -345,8 +345,8 @@ class many2one(_column):
|
||||||
else:
|
else:
|
||||||
cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,))
|
cr.execute('update '+obj_src._table+' set '+field+'=null where id=%s', (id,))
|
||||||
|
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, context=None):
|
||||||
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit)
|
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', 'like', value)], offset, limit, context=context)
|
||||||
|
|
||||||
|
|
||||||
class one2many(_column):
|
class one2many(_column):
|
||||||
|
@ -463,8 +463,8 @@ class one2many(_column):
|
||||||
obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {})
|
obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {})
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like'):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None):
|
||||||
return obj.pool.get(self._obj).name_search(cr, uid, value, self._domain, offset, limit)
|
return obj.pool.get(self._obj).name_search(cr, uid, value, self._domain, operator, context=context,limit=limit)
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -558,8 +558,8 @@ class many2many(_column):
|
||||||
#
|
#
|
||||||
# TODO: use a name_search
|
# TODO: use a name_search
|
||||||
#
|
#
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like'):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None):
|
||||||
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', operator, value)], offset, limit)
|
return obj.pool.get(self._obj).search(cr, uid, args+self._domain+[('name', operator, value)], offset, limit, context=context)
|
||||||
|
|
||||||
def get_memory(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None):
|
def get_memory(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None):
|
||||||
result = {}
|
result = {}
|
||||||
|
@ -648,11 +648,11 @@ class function(_column):
|
||||||
self._symbol_f = float._symbol_f
|
self._symbol_f = float._symbol_f
|
||||||
self._symbol_set = float._symbol_set
|
self._symbol_set = float._symbol_set
|
||||||
|
|
||||||
def search(self, cr, uid, obj, name, args):
|
def search(self, cr, uid, obj, name, args, context=None):
|
||||||
if not self._fnct_search:
|
if not self._fnct_search:
|
||||||
#CHECKME: should raise an exception
|
#CHECKME: should raise an exception
|
||||||
return []
|
return []
|
||||||
return self._fnct_search(obj, cr, uid, obj, name, args)
|
return self._fnct_search(obj, cr, uid, obj, name, args, context=context)
|
||||||
|
|
||||||
def get(self, cr, obj, ids, name, user=None, context=None, values=None):
|
def get(self, cr, obj, ids, name, user=None, context=None, values=None):
|
||||||
if not context:
|
if not context:
|
||||||
|
|
|
@ -380,11 +380,19 @@ class orm_template(object):
|
||||||
'ttype': f._type,
|
'ttype': f._type,
|
||||||
'relation': f._obj or 'NULL',
|
'relation': f._obj or 'NULL',
|
||||||
'view_load': (f.view_load and 1) or 0,
|
'view_load': (f.view_load and 1) or 0,
|
||||||
'select_level': str(f.select or 0),
|
'select_level': tools.ustr(f.select or 0),
|
||||||
'readonly':(f.readonly and 1) or 0,
|
'readonly':(f.readonly and 1) or 0,
|
||||||
'required':(f.required and 1) or 0,
|
'required':(f.required and 1) or 0,
|
||||||
'selectable' : (f.selectable and 1) or 0,
|
'selectable' : (f.selectable and 1) or 0,
|
||||||
}
|
}
|
||||||
|
# When its a custom field,it does not contain f.select
|
||||||
|
if context.get('field_state','base') == 'manual':
|
||||||
|
if context.get('field_name','') == k:
|
||||||
|
vals['select_level'] = context.get('select','0')
|
||||||
|
#setting value to let the problem NOT occur next time
|
||||||
|
else:
|
||||||
|
vals['select_level'] = cols[k]['select_level']
|
||||||
|
|
||||||
if k not in cols:
|
if k not in cols:
|
||||||
cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
|
cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
|
||||||
id = cr.fetchone()[0]
|
id = cr.fetchone()[0]
|
||||||
|
@ -862,6 +870,7 @@ class orm_template(object):
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
import psycopg2
|
import psycopg2
|
||||||
import osv
|
import osv
|
||||||
|
cr.rollback()
|
||||||
if isinstance(e,psycopg2.IntegrityError):
|
if isinstance(e,psycopg2.IntegrityError):
|
||||||
msg= _('Insertion Failed! ')
|
msg= _('Insertion Failed! ')
|
||||||
for key in self.pool._sql_error.keys():
|
for key in self.pool._sql_error.keys():
|
||||||
|
@ -1918,12 +1927,11 @@ class orm(orm_template):
|
||||||
elif isinstance(f, fields.many2many):
|
elif isinstance(f, fields.many2many):
|
||||||
cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
|
cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (f._rel,))
|
||||||
if not cr.dictfetchall():
|
if not cr.dictfetchall():
|
||||||
#FIXME: Remove this try/except
|
if not self.pool.get(f._obj):
|
||||||
try:
|
raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
|
||||||
ref = self.pool.get(f._obj)._table
|
ref = self.pool.get(f._obj)._table
|
||||||
except AttributeError:
|
# ref = f._obj.replace('.', '_')
|
||||||
ref = f._obj.replace('.', '_')
|
cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE) WITH OIDS' % (f._rel, f._id1, self._table, f._id2, ref))
|
||||||
cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE, "%s" INTEGER NOT NULL REFERENCES "%s" ON DELETE CASCADE) WITHOUT OIDS' % (f._rel, f._id1, self._table, f._id2, ref))
|
|
||||||
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
|
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id1, f._rel, f._id1))
|
||||||
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
|
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (f._rel, f._id2, f._rel, f._id2))
|
||||||
cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
|
cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (f._rel, self._table, ref))
|
||||||
|
@ -1980,11 +1988,10 @@ class orm(orm_template):
|
||||||
|
|
||||||
# and add constraints if needed
|
# and add constraints if needed
|
||||||
if isinstance(f, fields.many2one):
|
if isinstance(f, fields.many2one):
|
||||||
#FIXME: Remove this try/except
|
if not self.pool.get(f._obj):
|
||||||
try:
|
raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
|
||||||
ref = self.pool.get(f._obj)._table
|
ref = self.pool.get(f._obj)._table
|
||||||
except AttributeError:
|
# ref = f._obj.replace('.', '_')
|
||||||
ref = f._obj.replace('.', '_')
|
|
||||||
# ir_actions is inherited so foreign key doesn't work on it
|
# ir_actions is inherited so foreign key doesn't work on it
|
||||||
if ref != 'ir_actions':
|
if ref != 'ir_actions':
|
||||||
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
|
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (self._table, k, ref, f.ondelete))
|
||||||
|
@ -2396,19 +2403,21 @@ class orm(orm_template):
|
||||||
return 'length("%s") as "%s"' % (f, f)
|
return 'length("%s") as "%s"' % (f, f)
|
||||||
return '"%s"' % (f,)
|
return '"%s"' % (f,)
|
||||||
fields_pre2 = map(convert_field, fields_pre)
|
fields_pre2 = map(convert_field, fields_pre)
|
||||||
|
order_by = self._parent_order or self._order
|
||||||
for i in range(0, len(ids), cr.IN_MAX):
|
for i in range(0, len(ids), cr.IN_MAX):
|
||||||
sub_ids = ids[i:i+cr.IN_MAX]
|
sub_ids = ids[i:i+cr.IN_MAX]
|
||||||
if d1:
|
if d1:
|
||||||
cr.execute('SELECT %s FROM %s WHERE %s.id = ANY (%%s) AND %s ORDER BY %s' % \
|
cr.execute('SELECT %s FROM %s WHERE %s.id = ANY (%%s) AND %s ORDER BY %s' % \
|
||||||
(','.join(fields_pre2 + [self._table + '.id']), ','.join(tables), self._table, ' and '.join(d1),
|
(','.join(fields_pre2 + [self._table + '.id']), ','.join(tables), self._table, ' and '.join(d1),
|
||||||
self._order),[sub_ids,]+d2)
|
order_by),[sub_ids,]+d2)
|
||||||
if not cr.rowcount == len({}.fromkeys(sub_ids)):
|
if not cr.rowcount == len({}.fromkeys(sub_ids)):
|
||||||
raise except_orm(_('AccessError'),
|
raise except_orm(_('AccessError'),
|
||||||
_('You try to bypass an access rule while reading (Document type: %s).') % self._description)
|
_('You try to bypass an access rule while reading (Document type: %s).') % self._description)
|
||||||
else:
|
else:
|
||||||
cr.execute('SELECT %s FROM \"%s\" WHERE id = ANY (%%s) ORDER BY %s' % \
|
cr.execute('SELECT %s FROM \"%s\" WHERE id = ANY (%%s) ORDER BY %s' % \
|
||||||
(','.join(fields_pre2 + ['id']), self._table,
|
(','.join(fields_pre2 + ['id']), self._table,
|
||||||
self._order), (sub_ids,))
|
','.join(['%s' for x in sub_ids]),
|
||||||
|
order_by), sub_ids)
|
||||||
res.extend(cr.dictfetchall())
|
res.extend(cr.dictfetchall())
|
||||||
else:
|
else:
|
||||||
res = map(lambda x: {'id': x}, ids)
|
res = map(lambda x: {'id': x}, ids)
|
||||||
|
@ -2729,8 +2738,12 @@ class orm(orm_template):
|
||||||
# TODO: optimize
|
# TODO: optimize
|
||||||
for f in direct:
|
for f in direct:
|
||||||
if self._columns[f].translate:
|
if self._columns[f].translate:
|
||||||
src_trans = self.pool.get(self._name).read(cr,user,ids,[f])
|
src_trans = self.pool.get(self._name).read(cr,user,ids,[f])[0][f]
|
||||||
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans[0][f])
|
if not src_trans:
|
||||||
|
src_trans = vals[f]
|
||||||
|
# Inserting value to DB
|
||||||
|
self.write(cr, user, ids, {f:vals[f]})
|
||||||
|
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
|
||||||
|
|
||||||
|
|
||||||
# call the 'set' method of fields which are not classic_write
|
# call the 'set' method of fields which are not classic_write
|
||||||
|
|
|
@ -21,7 +21,7 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
name = 'openerp-server'
|
name = 'openerp-server'
|
||||||
version = '5.0.6'
|
version = '5.0.7'
|
||||||
major_version = '5.0'
|
major_version = '5.0'
|
||||||
description = 'OpenERP Server'
|
description = 'OpenERP Server'
|
||||||
long_desc = '''\
|
long_desc = '''\
|
||||||
|
|
|
@ -290,7 +290,7 @@ class report_custom(report_int):
|
||||||
|
|
||||||
def _append_node(name, text):
|
def _append_node(name, text):
|
||||||
n = etree.SubElement(config, name)
|
n = etree.SubElement(config, name)
|
||||||
t.text = text
|
n.text = text
|
||||||
|
|
||||||
_append_node('date', time.strftime('%d/%m/%Y'))
|
_append_node('date', time.strftime('%d/%m/%Y'))
|
||||||
_append_node('PageFormat', '%s' % report['print_format'])
|
_append_node('PageFormat', '%s' % report['print_format'])
|
||||||
|
|
|
@ -20,3 +20,5 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
from odt2odt import parseNode
|
from odt2odt import parseNode
|
||||||
|
|
||||||
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
@ -255,10 +255,10 @@ class rml_parse(object):
|
||||||
parse_format = DHM_FORMAT
|
parse_format = DHM_FORMAT
|
||||||
|
|
||||||
# filtering time.strftime('%Y-%m-%d')
|
# filtering time.strftime('%Y-%m-%d')
|
||||||
if type(value) == type(''):
|
# if type(value) == type(''):
|
||||||
parse_format = DHM_FORMAT
|
# parse_format = DHM_FORMAT
|
||||||
if (not date_time):
|
# if (not date_time):
|
||||||
return str(value)
|
# return str(value)
|
||||||
|
|
||||||
if not isinstance(value, time.struct_time):
|
if not isinstance(value, time.struct_time):
|
||||||
try:
|
try:
|
||||||
|
@ -361,8 +361,11 @@ class report_sxw(report_rml, preprocess.report):
|
||||||
elif report_type=='mako2html':
|
elif report_type=='mako2html':
|
||||||
fnct = self.create_source_mako2html
|
fnct = self.create_source_mako2html
|
||||||
else:
|
else:
|
||||||
raise Exception('Unknown Report Type: '+report_type)
|
raise 'Unknown Report Type'
|
||||||
return fnct(cr, uid, ids, data, report_xml, context)
|
fnct_ret = fnct(cr, uid, ids, data, report_xml, context)
|
||||||
|
if not fnct_ret:
|
||||||
|
return (False,False)
|
||||||
|
return fnct_ret
|
||||||
|
|
||||||
def create_source_odt(self, cr, uid, ids, data, report_xml, context=None):
|
def create_source_odt(self, cr, uid, ids, data, report_xml, context=None):
|
||||||
return self.create_single_odt(cr, uid, ids, data, report_xml, context or {})
|
return self.create_single_odt(cr, uid, ids, data, report_xml, context or {})
|
||||||
|
@ -394,6 +397,8 @@ class report_sxw(report_rml, preprocess.report):
|
||||||
results.append((d,'pdf'))
|
results.append((d,'pdf'))
|
||||||
continue
|
continue
|
||||||
result = self.create_single_pdf(cr, uid, [obj.id], data, report_xml, context)
|
result = self.create_single_pdf(cr, uid, [obj.id], data, report_xml, context)
|
||||||
|
if not result:
|
||||||
|
return False
|
||||||
try:
|
try:
|
||||||
if aname:
|
if aname:
|
||||||
name = aname+'.'+result[1]
|
name = aname+'.'+result[1]
|
||||||
|
@ -431,6 +436,9 @@ class report_sxw(report_rml, preprocess.report):
|
||||||
context = context.copy()
|
context = context.copy()
|
||||||
title = report_xml.name
|
title = report_xml.name
|
||||||
rml = report_xml.report_rml_content
|
rml = report_xml.report_rml_content
|
||||||
|
# if no rml file is found
|
||||||
|
if not rml:
|
||||||
|
return False
|
||||||
rml_parser = self.parser(cr, uid, self.name2, context=context)
|
rml_parser = self.parser(cr, uid, self.name2, context=context)
|
||||||
objs = self.getObjects(cr, uid, ids, context)
|
objs = self.getObjects(cr, uid, ids, context)
|
||||||
rml_parser.set_context(objs, data, ids, report_xml.report_type)
|
rml_parser.set_context(objs, data, ids, report_xml.report_type)
|
||||||
|
|
|
@ -32,11 +32,10 @@ class ExceptionNoTb(Exception):
|
||||||
self.args = (msg, '')
|
self.args = (msg, '')
|
||||||
|
|
||||||
def login(db, login, password):
|
def login(db, login, password):
|
||||||
|
if not password:
|
||||||
|
return False
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
if password:
|
|
||||||
cr.execute('select id from res_users where login=%s and password=%s and active', (tools.ustr(login), tools.ustr(password)))
|
cr.execute('select id from res_users where login=%s and password=%s and active', (tools.ustr(login), tools.ustr(password)))
|
||||||
else:
|
|
||||||
cr.execute('select id from res_users where login=%s and password is null and active', (tools.ustr(login),))
|
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
cr.close()
|
cr.close()
|
||||||
if res:
|
if res:
|
||||||
|
@ -51,14 +50,13 @@ def check_super(passwd):
|
||||||
raise ExceptionNoTb('AccessDenied')
|
raise ExceptionNoTb('AccessDenied')
|
||||||
|
|
||||||
def check(db, uid, passwd):
|
def check(db, uid, passwd):
|
||||||
|
if not passwd:
|
||||||
|
return False
|
||||||
cached_pass = _uid_cache.get(db, {}).get(uid)
|
cached_pass = _uid_cache.get(db, {}).get(uid)
|
||||||
if (cached_pass is not None) and cached_pass == passwd:
|
if (cached_pass is not None) and cached_pass == passwd:
|
||||||
return True
|
return True
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
if passwd:
|
|
||||||
cr.execute('select count(1) from res_users where id=%s and password=%s and active=%s', (int(uid), passwd, True))
|
cr.execute('select count(1) from res_users where id=%s and password=%s and active=%s', (int(uid), passwd, True))
|
||||||
else:
|
|
||||||
cr.execute('select count(1) from res_users where id=%s and password is null and active=%s', (int(uid), True))
|
|
||||||
res = cr.fetchone()[0]
|
res = cr.fetchone()[0]
|
||||||
cr.close()
|
cr.close()
|
||||||
if not bool(res):
|
if not bool(res):
|
||||||
|
@ -72,11 +70,10 @@ def check(db, uid, passwd):
|
||||||
return bool(res)
|
return bool(res)
|
||||||
|
|
||||||
def access(db, uid, passwd, sec_level, ids):
|
def access(db, uid, passwd, sec_level, ids):
|
||||||
|
if not passwd:
|
||||||
|
return False
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
if passwd:
|
|
||||||
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
|
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
|
||||||
else:
|
|
||||||
cr.execute('select id from res_users where id=%s and password is null', (uid,))
|
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
cr.close()
|
cr.close()
|
||||||
if not res:
|
if not res:
|
||||||
|
|
|
@ -78,13 +78,16 @@ class db(netsvc.ExportService):
|
||||||
self.actions[id] = {'clean': False}
|
self.actions[id] = {'clean': False}
|
||||||
|
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
|
db.lock()
|
||||||
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
try:
|
try:
|
||||||
cr.autocommit(True) # avoid transaction block
|
cr.autocommit(True) # avoid transaction block
|
||||||
cr.execute('CREATE DATABASE "%s" ENCODING \'unicode\'' % db_name)
|
cr.execute('CREATE DATABASE "%s" ENCODING \'unicode\'' % db_name)
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
del db
|
finally:
|
||||||
|
db.release()
|
||||||
|
|
||||||
class DBInitialize(object):
|
class DBInitialize(object):
|
||||||
def __call__(self, serv, id, db_name, demo, lang, user_password='admin'):
|
def __call__(self, serv, id, db_name, demo, lang, user_password='admin'):
|
||||||
|
@ -158,6 +161,8 @@ class db(netsvc.ExportService):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
|
db.lock()
|
||||||
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
cr.autocommit(True) # avoid transaction block
|
cr.autocommit(True) # avoid transaction block
|
||||||
try:
|
try:
|
||||||
|
@ -172,6 +177,8 @@ class db(netsvc.ExportService):
|
||||||
'DROP DB: %s' % (db_name))
|
'DROP DB: %s' % (db_name))
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
|
finally:
|
||||||
|
db.release()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _set_pg_psw_env_var(self):
|
def _set_pg_psw_env_var(self):
|
||||||
|
@ -223,13 +230,16 @@ class db(netsvc.ExportService):
|
||||||
raise Exception, "Database already exists"
|
raise Exception, "Database already exists"
|
||||||
|
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
|
db.lock()
|
||||||
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
cr.autocommit(True) # avoid transaction block
|
cr.autocommit(True) # avoid transaction block
|
||||||
try:
|
try:
|
||||||
cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % db_name)
|
cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % db_name)
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
del db
|
finally:
|
||||||
|
db.release()
|
||||||
|
|
||||||
cmd = ['pg_restore', '--no-owner']
|
cmd = ['pg_restore', '--no-owner']
|
||||||
if tools.config['db_user']:
|
if tools.config['db_user']:
|
||||||
|
@ -267,6 +277,8 @@ class db(netsvc.ExportService):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
|
db.lock()
|
||||||
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
|
@ -284,6 +296,8 @@ class db(netsvc.ExportService):
|
||||||
'RENAME DB: %s -> %s' % (old_name, new_name))
|
'RENAME DB: %s -> %s' % (old_name, new_name))
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
|
finally:
|
||||||
|
db.release()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def exp_db_exist(self, db_name):
|
def exp_db_exist(self, db_name):
|
||||||
|
@ -291,12 +305,14 @@ class db(netsvc.ExportService):
|
||||||
return bool(sql_db.db_connect(db_name))
|
return bool(sql_db.db_connect(db_name))
|
||||||
|
|
||||||
def exp_list(self):
|
def exp_list(self):
|
||||||
|
if not tools.config['list_db']:
|
||||||
|
raise Exception('AccessDenied')
|
||||||
|
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
|
db.lock()
|
||||||
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
try:
|
try:
|
||||||
list_db = tools.config["list_db"]
|
|
||||||
if list_db == 'False':
|
|
||||||
return []
|
|
||||||
try:
|
try:
|
||||||
db_user = tools.config["db_user"]
|
db_user = tools.config["db_user"]
|
||||||
if not db_user and os.name == 'posix':
|
if not db_user and os.name == 'posix':
|
||||||
|
@ -315,6 +331,8 @@ class db(netsvc.ExportService):
|
||||||
res = []
|
res = []
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
|
finally:
|
||||||
|
db.release()
|
||||||
res.sort()
|
res.sort()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
@ -709,15 +727,19 @@ class report_spool(netsvc.ExportService):
|
||||||
|
|
||||||
def go(id, uid, ids, datas, context):
|
def go(id, uid, ids, datas, context):
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
|
import traceback
|
||||||
|
import sys
|
||||||
try:
|
try:
|
||||||
obj = netsvc.LocalService('report.'+object)
|
obj = netsvc.LocalService('report.'+object)
|
||||||
(result, format) = obj.create(cr, uid, ids, datas, context)
|
(result, format) = obj.create(cr, uid, ids, datas, context)
|
||||||
|
if not result:
|
||||||
|
tb = sys.exc_info()
|
||||||
|
self._reports[id]['exception'] = ExceptionWithTraceback('RML is not available at specified location or not enough data to print!', tb)
|
||||||
self._reports[id]['result'] = result
|
self._reports[id]['result'] = result
|
||||||
self._reports[id]['format'] = format
|
self._reports[id]['format'] = format
|
||||||
self._reports[id]['state'] = True
|
self._reports[id]['state'] = True
|
||||||
except Exception, exception:
|
except Exception, exception:
|
||||||
import traceback
|
|
||||||
import sys
|
|
||||||
tb = sys.exc_info()
|
tb = sys.exc_info()
|
||||||
tb_s = "".join(traceback.format_exception(*tb))
|
tb_s = "".join(traceback.format_exception(*tb))
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
|
@ -299,17 +299,31 @@ class Connection(object):
|
||||||
self.dbname = dbname
|
self.dbname = dbname
|
||||||
self._pool = pool
|
self._pool = pool
|
||||||
self._unique = unique
|
self._unique = unique
|
||||||
if unique:
|
|
||||||
if dbname not in self.__LOCKS:
|
|
||||||
self.__LOCKS[dbname] = threading.Lock()
|
|
||||||
self.__LOCKS[dbname].acquire()
|
|
||||||
|
|
||||||
def __del__(self):
|
def __enter__(self):
|
||||||
if self._unique:
|
if self._unique:
|
||||||
|
self.lock()
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_value, traceback):
|
||||||
|
if self._unique:
|
||||||
|
self.release()
|
||||||
|
|
||||||
|
def lock(self):
|
||||||
|
if self.dbname not in self.__LOCKS:
|
||||||
|
self.__LOCKS[self.dbname] = threading.Lock()
|
||||||
|
self.__LOCKS[self.dbname].acquire()
|
||||||
|
|
||||||
|
def release(self):
|
||||||
close_db(self.dbname)
|
close_db(self.dbname)
|
||||||
self.__LOCKS[self.dbname].release()
|
self.__LOCKS[self.dbname].release()
|
||||||
|
|
||||||
def cursor(self, serialized=False):
|
def cursor(self, serialized=False):
|
||||||
|
if self._unique:
|
||||||
|
lock = self.__LOCKS.get(self.dbname, None)
|
||||||
|
if not (lock and lock.locked()):
|
||||||
|
netsvc.Logger().notifyChannel('Connection', netsvc.LOG_WARNING, 'Unprotected connection to %s' % (self.dbname,))
|
||||||
|
|
||||||
return Cursor(self._pool, self.dbname, serialized=serialized)
|
return Cursor(self._pool, self.dbname, serialized=serialized)
|
||||||
|
|
||||||
def serialized_cursor(self):
|
def serialized_cursor(self):
|
||||||
|
|
|
@ -121,6 +121,8 @@ class configmanager(object):
|
||||||
parser.add_option("--assert-exit-level", dest='assert_exit_level', type="choice", choices=self._LOGLEVELS.keys(),
|
parser.add_option("--assert-exit-level", dest='assert_exit_level', type="choice", choices=self._LOGLEVELS.keys(),
|
||||||
help="specify the level at which a failed assertion will stop the server. Accepted values: %s" % (self._LOGLEVELS.keys(),))
|
help="specify the level at which a failed assertion will stop the server. Accepted values: %s" % (self._LOGLEVELS.keys(),))
|
||||||
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
|
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
|
||||||
|
parser.add_option('--no-database-list', action="store_false", dest='list_db', default=True, help="disable the ability to return the list of databases")
|
||||||
|
|
||||||
if hasSSL:
|
if hasSSL:
|
||||||
group = optparse.OptionGroup(parser, "SSL Configuration")
|
group = optparse.OptionGroup(parser, "SSL Configuration")
|
||||||
group.add_option("-S", "--secure", dest="secure",
|
group.add_option("-S", "--secure", dest="secure",
|
||||||
|
@ -240,7 +242,10 @@ class configmanager(object):
|
||||||
self.options[arg] = getattr(opt, arg)
|
self.options[arg] = getattr(opt, arg)
|
||||||
|
|
||||||
keys = ['language', 'translate_out', 'translate_in', 'debug_mode',
|
keys = ['language', 'translate_out', 'translate_in', 'debug_mode',
|
||||||
'stop_after_init', 'logrotate']
|
'stop_after_init', 'logrotate', 'without_demo', 'netrpc', 'xmlrpc', 'syslog', 'list_db']
|
||||||
|
|
||||||
|
if hasSSL and not self.options['secure']:
|
||||||
|
keys.append('secure')
|
||||||
|
|
||||||
for arg in keys:
|
for arg in keys:
|
||||||
if getattr(opt, arg) is not None:
|
if getattr(opt, arg) is not None:
|
||||||
|
|
|
@ -636,9 +636,6 @@ class UpdateableDict(local):
|
||||||
def __ge__(self, y):
|
def __ge__(self, y):
|
||||||
return self.dict.__ge__(y)
|
return self.dict.__ge__(y)
|
||||||
|
|
||||||
def __getitem__(self, y):
|
|
||||||
return self.dict.__getitem__(y)
|
|
||||||
|
|
||||||
def __gt__(self, y):
|
def __gt__(self, y):
|
||||||
return self.dict.__gt__(y)
|
return self.dict.__gt__(y)
|
||||||
|
|
||||||
|
|
|
@ -154,8 +154,12 @@ _ = GettextAlias()
|
||||||
# class to handle po files
|
# class to handle po files
|
||||||
class TinyPoFile(object):
|
class TinyPoFile(object):
|
||||||
def __init__(self, buffer):
|
def __init__(self, buffer):
|
||||||
|
self.logger = netsvc.Logger()
|
||||||
self.buffer = buffer
|
self.buffer = buffer
|
||||||
|
|
||||||
|
def warn(self, msg):
|
||||||
|
self.logger.notifyChannel("i18n", netsvc.LOG_WARNING, msg)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
self.buffer.seek(0)
|
self.buffer.seek(0)
|
||||||
self.lines = self._get_lines()
|
self.lines = self._get_lines()
|
||||||
|
@ -218,7 +222,6 @@ class TinyPoFile(object):
|
||||||
# This has been a deprecated entry, don't return anything
|
# This has been a deprecated entry, don't return anything
|
||||||
return self.next()
|
return self.next()
|
||||||
|
|
||||||
|
|
||||||
if not line.startswith('msgid'):
|
if not line.startswith('msgid'):
|
||||||
raise Exception("malformed file: bad line: %s" % line)
|
raise Exception("malformed file: bad line: %s" % line)
|
||||||
source = unquote(line[6:])
|
source = unquote(line[6:])
|
||||||
|
@ -251,7 +254,8 @@ class TinyPoFile(object):
|
||||||
|
|
||||||
self.first = False
|
self.first = False
|
||||||
|
|
||||||
if name == None:
|
if name is None:
|
||||||
|
self.warn('Missing "#:" formated comment for the following source:\n\t%s' % (source,))
|
||||||
return self.next()
|
return self.next()
|
||||||
return type, name, res_id, source, trad
|
return type, name, res_id, source, trad
|
||||||
|
|
||||||
|
|
332
doc/Changelog
332
doc/Changelog
|
@ -1,3 +1,335 @@
|
||||||
|
2009-12-03: 5.0.7
|
||||||
|
=================
|
||||||
|
|
||||||
|
Bugfixes (server)
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
* Not linked to a bug report:
|
||||||
|
* translation: modify msg id where it contains python code in translations po files
|
||||||
|
* use env for python shebang rather than hardcoding /usr/bin/python
|
||||||
|
* base: wrong fr_FR translation for 'Delete Permission'
|
||||||
|
* db_exist method works as expected
|
||||||
|
* quality_integration_server : fixed bug on make link
|
||||||
|
* quality_integration_server: wrong web-service name
|
||||||
|
* Upgrade could have failed when its a change in name field,added cascade
|
||||||
|
* Expression : Making search easier for 'child_of'- Recursivity on field used as a Left operand
|
||||||
|
* solve problem of importing data from csv with constraints available on object
|
||||||
|
* act_window False domain problem - produce after dom to lxml conversion
|
||||||
|
* On update, fields with only numeric/int/float could have digits
|
||||||
|
* On Update, if only the size of column is changed,it should reflect into DB
|
||||||
|
* quality_integration_server : fixed problem in integration server on translation checking if translation file is changed on base module
|
||||||
|
* Avoid crash when no args
|
||||||
|
* Config : wrong calculation if cached_timeout comes from config file
|
||||||
|
* Expression : domain calculation failes, max. recursion error protected
|
||||||
|
* recursive calls
|
||||||
|
* allow to call write without ids on osv_memory objects
|
||||||
|
* Specify the name of the argument for the context, to avoir a critical bug
|
||||||
|
* <seq> problem Report Engine
|
||||||
|
* Workflow button desactivation regarding user roles
|
||||||
|
* Improved Previous bad commit for pagelayout error
|
||||||
|
* Import : Context of the action/screen passed,taken into consideration while importing
|
||||||
|
* quality_integration_server: ascci encoding problem in quality html log
|
||||||
|
* recursive child_of on one2many and many2many
|
||||||
|
* default context
|
||||||
|
* https://launchpad.net/bugs/463415
|
||||||
|
* https://launchpad.net/bugs/490604
|
||||||
|
* RAW reports creation corrected
|
||||||
|
* https://launchpad.net/bugs/430133
|
||||||
|
* Name_search() is having now record limit to be 80 instead of None
|
||||||
|
* https://launchpad.net/bugs/434341
|
||||||
|
* Export : Selection field gets imported by its external name if export is not import compatible
|
||||||
|
* https://launchpad.net/bugs/489355
|
||||||
|
* fields: '_fnct_write' should pass context object to the 'write' method
|
||||||
|
* https://launchpad.net/bugs/410191
|
||||||
|
* Record rule : domain evaluation problem corrected
|
||||||
|
* https://launchpad.net/bugs/420507
|
||||||
|
* Domain was getting failed when trying to work upon M2M,O2M field of object
|
||||||
|
* https://launchpad.net/bugs/453269
|
||||||
|
* Amount to text conversions made better
|
||||||
|
* https://launchpad.net/bugs/488234
|
||||||
|
* connection pool when database password is provided (thanks to Syleam Crew)
|
||||||
|
* https://launchpad.net/bugs/430805
|
||||||
|
* Import made successful when field is O2M and it has relation under that.
|
||||||
|
* https://launchpad.net/bugs/480301
|
||||||
|
* M2M : values filtering on set()
|
||||||
|
* https://launchpad.net/bugs/462506
|
||||||
|
* Priority on fields.function with store dictionary made working.
|
||||||
|
* https://launchpad.net/bugs/433886
|
||||||
|
* Update Module : Float8 to float and numeric to float casting made possible
|
||||||
|
* https://launchpad.net/bugs/491365
|
||||||
|
* Translation: Error parsing .po files when application puts comments in a single line
|
||||||
|
* https://launchpad.net/bugs/460560
|
||||||
|
* Ir_attachment : Context updation corrected on preview
|
||||||
|
* https://launchpad.net/bugs/356628
|
||||||
|
* allow related fields to point to one2many and many2many fields
|
||||||
|
* https://launchpad.net/bugs/483527
|
||||||
|
* https://launchpad.net/bugs/435933
|
||||||
|
* Encoding error corrected for client_action(ir_values-tree view)
|
||||||
|
* https://launchpad.net/bugs/430728
|
||||||
|
* Allowing sql keywords as fields(don't use them in order by clause)
|
||||||
|
* https://launchpad.net/bugs/487836
|
||||||
|
* Custom Object xml arch needed encoding.
|
||||||
|
* https://launchpad.net/bugs/487723
|
||||||
|
* Module informtion was not getting updated on upgrading module
|
||||||
|
* https://launchpad.net/bugs/480782
|
||||||
|
* https://launchpad.net/bugs/478724
|
||||||
|
* Export : M2M field name was missing last character
|
||||||
|
* https://launchpad.net/bugs/437729
|
||||||
|
* Export Translation : Warning on Non-existing record instead of breaking flow
|
||||||
|
* https://launchpad.net/bugs/429519
|
||||||
|
* https://launchpad.net/bugs/433395
|
||||||
|
* https://launchpad.net/bugs/479915
|
||||||
|
* Added lxml as required module on setup
|
||||||
|
* https://launchpad.net/bugs/400614
|
||||||
|
* Use an alternative for the locale.RADIXCHAR if this one doesn't exist
|
||||||
|
* https://launchpad.net/bugs/491462
|
||||||
|
* Pickling issue solved with ir_values (get method)
|
||||||
|
|
||||||
|
|
||||||
|
Improvements (server)
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
* add option to disable the database listing
|
||||||
|
* sql_log
|
||||||
|
* quality_integration_server: added net_port option in script
|
||||||
|
* better connection pool (global)
|
||||||
|
* ir_cron : added active feild on list view
|
||||||
|
* Allow to specify the view via the context
|
||||||
|
* cron: check the arguments to avoid security issues
|
||||||
|
* cron: failed jobs are logged
|
||||||
|
* cron: clean code
|
||||||
|
* base_quality_interrrogation: put message if the score of quality less then minimal and remove unit test condition
|
||||||
|
* quality_integration_server: quality log : add new option to specify qualitylog path to store html pages of log
|
||||||
|
* default value for module
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes (addons)
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
* Not linked to a bug report:
|
||||||
|
* Project : remove unused import causing a DeprecationWarning under Python 2.6
|
||||||
|
* l10n_be: corrected internal type of vat account in l10n_be: set other instead of payable/receivable
|
||||||
|
* Hr_timesheet_sheet : setting type=workflow to 2 buttons
|
||||||
|
* Account: text made translatable
|
||||||
|
* Stock : Added Product UoS to the Stock Move view
|
||||||
|
* fix the problem of menus uring installation
|
||||||
|
* Base_contact : Making Email field of address visible on Address form view
|
||||||
|
* Stock : Added translations
|
||||||
|
* Account_invoice_layout : Reports improved and SXWs made compatible to RML
|
||||||
|
* membership: invoice membership wizard now calculate tax corectly and on product membership fields make readonly to false
|
||||||
|
* membership: change membership product date from and date to value with current year on demo data
|
||||||
|
* membership: solve problem of deleting membership invoice created with old membership product
|
||||||
|
* Account: Wizards were missing translation import statement
|
||||||
|
* Residual amount into invoice is now correct in every case and avoid rounding trouble even if rating has changed
|
||||||
|
* account: avoid translate tool missing import on strictier Python 2.6 versions; bare in mind that mx.Datetime should be eraticated from the surface of the earth
|
||||||
|
* account: removed pdb now that things are claimed fixed (after the commits messages)
|
||||||
|
* account_analytic_plans: Use etree instead of xml.dom
|
||||||
|
* fix the problem that appers in to the account move due to accout voucher
|
||||||
|
* Account_date_check : correction on a method
|
||||||
|
* Account_voucher : Typo corrected for Voucher type.
|
||||||
|
* Account : Corrected the malformed report of Partner Ledger
|
||||||
|
* purchase_manual: Import OSV
|
||||||
|
* python2.6 compatibility
|
||||||
|
* AuditTrail : View Logs should only show logs of current object and resources
|
||||||
|
* fix the problem of the reports account voucher
|
||||||
|
* #TODO: fix me sale module can not be used here,
|
||||||
|
* Import OSV in the wizard
|
||||||
|
* purchase: Use the price_accuracy
|
||||||
|
* Base_Report_Creator : xml record was malformed, corrected
|
||||||
|
* hr_expense: corrected error message
|
||||||
|
* account: avoid ZeroDivisionError
|
||||||
|
* Specify the name of the argument for the context, to avoir a critical bug
|
||||||
|
* CRM : Mailgate script added option -he was conflicting,improved
|
||||||
|
* Account :Invoice report had Partner name displayed at wrong position
|
||||||
|
* bugfix residual computation and multi-currencies
|
||||||
|
* Base_report_creator : Calendar view problems on custom report
|
||||||
|
* typo
|
||||||
|
* workcenter load graph
|
||||||
|
* set the access right
|
||||||
|
* base_module_quality: speed test if read method has exception
|
||||||
|
* account_analytic_plans: avoid encoding errors
|
||||||
|
* https://launchpad.net/bugs/458415
|
||||||
|
* document_ics : solved accent problem
|
||||||
|
* https://launchpad.net/bugs/447402
|
||||||
|
* Project_timesheet : Wrong synchro on analytic line creation fro tasks(for name).
|
||||||
|
* https://launchpad.net/bugs/490318
|
||||||
|
* Account : Ondelete=cascade added to bank statement line
|
||||||
|
* https://launchpad.net/bugs/446520
|
||||||
|
* [account_followup] wrong sender for email
|
||||||
|
* https://launchpad.net/bugs/454536
|
||||||
|
* Warning : Onchange Methods were malfunctioned.Corrected and made messages available for translations
|
||||||
|
* https://launchpad.net/bugs/450180
|
||||||
|
* Hr_timesheet_invoice : Wrong domain was sent for opening invoices created
|
||||||
|
* https://launchpad.net/bugs/383057
|
||||||
|
* Stock-MRP : Split production wizard made individual to mrp if mrp is not installed
|
||||||
|
* https://launchpad.net/bugs/488869
|
||||||
|
* Stock/MRP : Track line wizard improved
|
||||||
|
* https://launchpad.net/bugs/446681
|
||||||
|
* Account : Refund wuzard malfunctioned with modify invoice option
|
||||||
|
* https://launchpad.net/bugs/446391
|
||||||
|
* Base_report_creator : Allowing to use current userid
|
||||||
|
* https://launchpad.net/bugs/479747
|
||||||
|
* https://launchpad.net/bugs/470359
|
||||||
|
* https://launchpad.net/bugs/460701
|
||||||
|
* Document : Attachment with document can now be deleted
|
||||||
|
* https://launchpad.net/bugs/439469
|
||||||
|
* Product : Pricelist types getting translated on Pricelist
|
||||||
|
* https://launchpad.net/bugs/489355
|
||||||
|
* account, invoice, sale: description (product sold, invoice line and account entry line) was not translated
|
||||||
|
* https://launchpad.net/bugs/435160
|
||||||
|
* [IMP] Correct write-off date, add analytic account, better interface between all way to reconcile
|
||||||
|
* https://launchpad.net/bugs/481524
|
||||||
|
* Stock : Forecast report: unicode error corrected
|
||||||
|
* https://launchpad.net/bugs/458553
|
||||||
|
* Account : Onchange of amount/base amount of Invoice tax corrected
|
||||||
|
* https://launchpad.net/bugs/438705
|
||||||
|
* Stock : Stock move lines on Production Order well-structured
|
||||||
|
* https://launchpad.net/bugs/441609
|
||||||
|
* Account : Fiscal Position Template was missing a required field in form view
|
||||||
|
* https://launchpad.net/bugs/467880
|
||||||
|
* MRP : Procurement can only be confirmed when qty>0.0
|
||||||
|
* https://launchpad.net/bugs/486783
|
||||||
|
* Point_of_sale : Report for Receipt corrected
|
||||||
|
* https://launchpad.net/bugs/448591
|
||||||
|
* Account : Refund wizard wasn't calculating taxes for new invoices
|
||||||
|
* https://launchpad.net/bugs/395160
|
||||||
|
* Project_timesheet : Analytic line creation/edition takes user based information
|
||||||
|
* https://launchpad.net/bugs/436008
|
||||||
|
* Sale: Passing Contact address for invoices from SO
|
||||||
|
* https://launchpad.net/bugs/428926
|
||||||
|
* account_payment when importing payment lines (currency not set correctly)
|
||||||
|
* https://launchpad.net/bugs/439041
|
||||||
|
* Report_project :wrong average of closing days counting
|
||||||
|
* https://launchpad.net/bugs/396637
|
||||||
|
* account_analytic_analysis : Analytic account functional field methods corrected.
|
||||||
|
* Account : Analytic account functional field methods corrected.
|
||||||
|
* https://launchpad.net/bugs/445547
|
||||||
|
* Stock: Picking report correction
|
||||||
|
* https://launchpad.net/bugs/443069
|
||||||
|
* Project: wrong domain for 'Tasks in Progress' menuitem
|
||||||
|
* https://launchpad.net/bugs/483723
|
||||||
|
* CRM : Case form view priority issue resolved
|
||||||
|
* https://launchpad.net/bugs/427869
|
||||||
|
* Residual amount in invoice when currency rating change
|
||||||
|
* https://launchpad.net/bugs/416810
|
||||||
|
* Document: attachment linked with Files
|
||||||
|
* https://launchpad.net/bugs/461801
|
||||||
|
* Sale : Workflow behaviour fixed when order is set to draft
|
||||||
|
* https://launchpad.net/bugs/461720
|
||||||
|
* Scrum : Wrong view,widgetless fields on scrum view made correct
|
||||||
|
* https://launchpad.net/bugs/474337
|
||||||
|
* [CRITICAL] warning : missing import statement for "_"
|
||||||
|
* https://launchpad.net/bugs/443132
|
||||||
|
* Sale : Passing customer ref. of picking to invoice
|
||||||
|
* https://launchpad.net/bugs/488809
|
||||||
|
* Sale : State was written wrongly
|
||||||
|
* https://launchpad.net/bugs/465010
|
||||||
|
* Stock : Notification Message made translatable
|
||||||
|
* https://launchpad.net/bugs/490342
|
||||||
|
* Account : making the default_get() eTiny compatible
|
||||||
|
* https://launchpad.net/bugs/453030
|
||||||
|
* Avoid display write-off in pay invoice wizard : take care of partial payment
|
||||||
|
* Avoid display write-off in pay invoice wizard. Put the right date and currency for conversion
|
||||||
|
* https://launchpad.net/bugs/445267
|
||||||
|
* Mrp_subproduct : Wrong calculation of QTYs corrected(Product qty,UOS qty)
|
||||||
|
* https://launchpad.net/bugs/401035
|
||||||
|
* Audittrail : Assigned Access Rights to non-admin user.
|
||||||
|
* https://launchpad.net/bugs/421636
|
||||||
|
* Account : Restricting Payment term lines percentage insertion from 0 to 1
|
||||||
|
* https://launchpad.net/bugs/425671
|
||||||
|
* Stock : Moves offer onchange on UOM to affect UOS
|
||||||
|
* https://launchpad.net/bugs/489083
|
||||||
|
* Account :customer refund was displaying supplier invoice view, corrected
|
||||||
|
* https://launchpad.net/bugs/480856
|
||||||
|
* Warning : the super of onchange may return {},covered the same
|
||||||
|
* https://launchpad.net/bugs/459196
|
||||||
|
* Account : Partner Ledger Report formatting problem solved
|
||||||
|
* https://launchpad.net/bugs/483583
|
||||||
|
* Sale/Purchase : Function fields did not have digits attribute for precision accuracy
|
||||||
|
* https://launchpad.net/bugs/475135
|
||||||
|
* Account : Warning message was missing _ import
|
||||||
|
* https://launchpad.net/bugs/457188
|
||||||
|
* Account_analytic_analysis : Summary of Months calculation Corrected
|
||||||
|
* https://launchpad.net/bugs/435178
|
||||||
|
* [CRITICAL]subscription: crash subscription process
|
||||||
|
* https://launchpad.net/bugs/480035
|
||||||
|
* CRM : action name improved
|
||||||
|
* https://launchpad.net/bugs/487091
|
||||||
|
* Stock :Invoice created from manual picking might miss UoS.
|
||||||
|
* https://launchpad.net/bugs/435298
|
||||||
|
* Subscription : Disallowed to change the Object linked to the document type.
|
||||||
|
* https://launchpad.net/bugs/436651
|
||||||
|
* mrp : workcentre load report had a query malformed
|
||||||
|
* https://launchpad.net/bugs/479886
|
||||||
|
* Account : Total field on supplier invoice set to 0.0 to make it Web-client compatible
|
||||||
|
* https://launchpad.net/bugs/479195
|
||||||
|
* Base_vat : Romania VAT validation corrected
|
||||||
|
* https://launchpad.net/bugs/351083
|
||||||
|
* Account : Partner Balance report gets printed with respect to the company selected
|
||||||
|
* https://launchpad.net/bugs/451310
|
||||||
|
* https://launchpad.net/bugs/449583
|
||||||
|
* Sale : Better error message when account is missing from SOL for Invoice
|
||||||
|
* https://launchpad.net/bugs/486794
|
||||||
|
* Hr_holidays: Spell mistake corrected
|
||||||
|
* https://launchpad.net/bugs/351167
|
||||||
|
* https://launchpad.net/bugs/438725
|
||||||
|
* Purchase : Setting the value of payment term while creating invoice from PO
|
||||||
|
* https://launchpad.net/bugs/471052
|
||||||
|
* Product : Pricelist Item cannot use Main pricelist as the other pricelist
|
||||||
|
* https://launchpad.net/bugs/481372
|
||||||
|
* Project : If company has no Project time unit,it would have crashed.
|
||||||
|
* https://launchpad.net/bugs/458030
|
||||||
|
* Account : Ledger Report Landscape report adjusted for A4.
|
||||||
|
* https://launchpad.net/bugs/446205
|
||||||
|
* CRM : Mailgate script has host as parameter now onwards
|
||||||
|
* https://launchpad.net/bugs/487641
|
||||||
|
* Purchase : MOQ-pricing problem corrected
|
||||||
|
* https://launchpad.net/bugs/476428
|
||||||
|
* Stock : Partial Picking wizard was missing translation import statement
|
||||||
|
* https://launchpad.net/bugs/366944
|
||||||
|
* Base_vat : Spanish numbers validation corrected
|
||||||
|
* https://launchpad.net/bugs/466658
|
||||||
|
* Account : Action window of analytic entries was missing name
|
||||||
|
* https://launchpad.net/bugs/440557
|
||||||
|
* Purchase : POL had a wrong tree view which is unused till now
|
||||||
|
* https://launchpad.net/bugs/452854
|
||||||
|
* Correct the validate function for balanced move into account.py (after Fabien Warning)
|
||||||
|
* Use price_accuracy to verify balanced entry instead of fixed '0.0001'. Add price_accuracy on debit and credit move lines
|
||||||
|
* https://launchpad.net/bugs/491241
|
||||||
|
* Stock : Removal of picking should affect product stock
|
||||||
|
* https://launchpad.net/bugs/490327
|
||||||
|
* Reverted bad commit from Joel@CamptoCamp
|
||||||
|
* https://launchpad.net/bugs/440734
|
||||||
|
* Stock : Picking did not have 'type' field on display(inconsistent behavior from eTiny,koo) to work upon domain.
|
||||||
|
* https://launchpad.net/bugs/436174
|
||||||
|
* Account: Supplier invoices was not taking product price on onchange of product
|
||||||
|
* https://launchpad.net/bugs/474340
|
||||||
|
* Stock : Wizard improvements from Lionel
|
||||||
|
* https://launchpad.net/bugs/440711
|
||||||
|
* Purchase : PO with different pricelists should not be merged
|
||||||
|
* https://launchpad.net/bugs/439208
|
||||||
|
* Hr_timesheet : Allowing user to enter Analytic Journal if not linked with employee(Working hours tree view)
|
||||||
|
* https://launchpad.net/bugs/379118
|
||||||
|
* Stock : Partial Picking wizard was making the back order reference jump to 2 numbers,notification given on packing done.
|
||||||
|
* https://launchpad.net/bugs/476343
|
||||||
|
* https://launchpad.net/bugs/419720
|
||||||
|
* Sale : Delivery Date delay computation corrected when days are passed with fractions
|
||||||
|
|
||||||
|
|
||||||
|
Improvements (addons)
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
* MRP : mrp.routing.workcenter made deletable on cascade effect
|
||||||
|
* Account : Entry Line action Name Improved
|
||||||
|
* Stock : Improved names of Stock move tree view
|
||||||
|
* account: make comment mandatory. Since it has a default value anyway, it doesn't really change anything, but makes the reconciliation UI look consistent across the whole OpenERP (eg invoice payment)
|
||||||
|
* Add support of analytic account into bank statement to be convenient with other method
|
||||||
|
* Add account field into bank statement line
|
||||||
|
* better python2.5/2.6 compatibility handling
|
||||||
|
* base_module_quality: styles on different tests
|
||||||
|
* base_module_quality: modify all tests result display for buildpot, use class on tag instead of css file
|
||||||
|
* base_module_quality: pylint test result display for buildpot, use class on tag instead of css file
|
||||||
|
|
||||||
|
|
||||||
2009-09-22: 5.0.6
|
2009-09-22: 5.0.6
|
||||||
=================
|
=================
|
||||||
|
|
Loading…
Reference in New Issue