[FIX] api: environment recomputation
In a workflow context (for instance, in the invoice workflow), context is not passed. Therefore, relying on the 'recompute' key being in the context in order to not recompute the fields does not work with workflows. It leads to huge performance issues, as fields are recomputed recursively (instead of sequentially) when several records are involved. For instance, when reconciling several invoices with one payment (100 invoices with 1 payment, for instance), records of each invoice are recomputed uselessly in each workflow call (for each "confirm_paid" method done for each invoice). With a significant number of invoices (100, for instance), it even leads to a "Maximum recursion depth reached" error. closes #4905
This commit is contained in:
parent
a67747f77e
commit
c9154e08aa
|
@ -1391,7 +1391,8 @@ class account_move_line(osv.osv):
|
||||||
self.create(cr, uid, data, context)
|
self.create(cr, uid, data, context)
|
||||||
del vals['account_tax_id']
|
del vals['account_tax_id']
|
||||||
|
|
||||||
if check and not context.get('novalidate') and (context.get('recompute', True) or journal.entry_posted):
|
recompute = journal.env.recompute and context.get('recompute', True)
|
||||||
|
if check and not context.get('novalidate') and (recompute or journal.entry_posted):
|
||||||
tmp = move_obj.validate(cr, uid, [vals['move_id']], context)
|
tmp = move_obj.validate(cr, uid, [vals['move_id']], context)
|
||||||
if journal.entry_posted and tmp:
|
if journal.entry_posted and tmp:
|
||||||
move_obj.button_validate(cr,uid, [vals['move_id']], context)
|
move_obj.button_validate(cr,uid, [vals['move_id']], context)
|
||||||
|
|
|
@ -890,6 +890,19 @@ class Environment(object):
|
||||||
if invalids:
|
if invalids:
|
||||||
raise Warning('Invalid cache for fields\n' + pformat(invalids))
|
raise Warning('Invalid cache for fields\n' + pformat(invalids))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def recompute(self):
|
||||||
|
return self.all.recompute
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def norecompute(self):
|
||||||
|
tmp = self.all.recompute
|
||||||
|
self.all.recompute = False
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
self.all.recompute = tmp
|
||||||
|
|
||||||
|
|
||||||
class Environments(object):
|
class Environments(object):
|
||||||
""" A common object for all environments in a request. """
|
""" A common object for all environments in a request. """
|
||||||
|
@ -897,6 +910,7 @@ class Environments(object):
|
||||||
self.envs = WeakSet() # weak set of environments
|
self.envs = WeakSet() # weak set of environments
|
||||||
self.todo = {} # recomputations {field: [records]}
|
self.todo = {} # recomputations {field: [records]}
|
||||||
self.mode = False # flag for draft/onchange
|
self.mode = False # flag for draft/onchange
|
||||||
|
self.recompute = True
|
||||||
|
|
||||||
def add(self, env):
|
def add(self, env):
|
||||||
""" Add the environment `env`. """
|
""" Add the environment `env`. """
|
||||||
|
|
|
@ -3973,7 +3973,7 @@ class BaseModel(object):
|
||||||
self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context)
|
self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context)
|
||||||
|
|
||||||
# recompute new-style fields
|
# recompute new-style fields
|
||||||
if context.get('recompute', True):
|
if recs.env.recompute and context.get('recompute', True):
|
||||||
recs.recompute()
|
recs.recompute()
|
||||||
|
|
||||||
self.step_workflow(cr, user, ids, context=context)
|
self.step_workflow(cr, user, ids, context=context)
|
||||||
|
@ -4220,7 +4220,7 @@ class BaseModel(object):
|
||||||
# check Python constraints
|
# check Python constraints
|
||||||
recs._validate_fields(vals)
|
recs._validate_fields(vals)
|
||||||
|
|
||||||
if context.get('recompute', True):
|
if recs.env.recompute and context.get('recompute', True):
|
||||||
result += self._store_get_values(cr, user, [id_new],
|
result += self._store_get_values(cr, user, [id_new],
|
||||||
list(set(vals.keys() + self._inherits.values())),
|
list(set(vals.keys() + self._inherits.values())),
|
||||||
context)
|
context)
|
||||||
|
@ -4233,7 +4233,7 @@ class BaseModel(object):
|
||||||
# recompute new-style fields
|
# recompute new-style fields
|
||||||
recs.recompute()
|
recs.recompute()
|
||||||
|
|
||||||
if self._log_create and context.get('recompute', True):
|
if self._log_create and recs.env.recompute and context.get('recompute', True):
|
||||||
message = self._description + \
|
message = self._description + \
|
||||||
" '" + \
|
" '" + \
|
||||||
self.name_get(cr, user, [id_new], context=context)[0][1] + \
|
self.name_get(cr, user, [id_new], context=context)[0][1] + \
|
||||||
|
@ -5629,12 +5629,13 @@ class BaseModel(object):
|
||||||
field, recs = self.env.get_todo()
|
field, recs = self.env.get_todo()
|
||||||
# evaluate the fields to recompute, and save them to database
|
# evaluate the fields to recompute, and save them to database
|
||||||
names = [f.name for f in field.computed_fields if f.store]
|
names = [f.name for f in field.computed_fields if f.store]
|
||||||
for rec, rec1 in zip(recs, recs.with_context(recompute=False)):
|
for rec in recs:
|
||||||
try:
|
try:
|
||||||
values = rec._convert_to_write({
|
values = rec._convert_to_write({
|
||||||
name: rec[name] for name in names
|
name: rec[name] for name in names
|
||||||
})
|
})
|
||||||
rec1._write(values)
|
with rec.env.norecompute():
|
||||||
|
rec._write(values)
|
||||||
except MissingError:
|
except MissingError:
|
||||||
pass
|
pass
|
||||||
# mark the computed fields as done
|
# mark the computed fields as done
|
||||||
|
|
|
@ -736,57 +736,58 @@ class one2many(_column):
|
||||||
result = []
|
result = []
|
||||||
context = dict(context or {})
|
context = dict(context or {})
|
||||||
context.update(self._context)
|
context.update(self._context)
|
||||||
context['recompute'] = False # recomputation is done by outer create/write
|
|
||||||
if not values:
|
if not values:
|
||||||
return
|
return
|
||||||
obj = obj.pool[self._obj]
|
obj = obj.pool[self._obj]
|
||||||
_table = obj._table
|
rec = obj.browse(cr, user, [], context=context)
|
||||||
for act in values:
|
with rec.env.norecompute():
|
||||||
if act[0] == 0:
|
_table = obj._table
|
||||||
act[2][self._fields_id] = id
|
for act in values:
|
||||||
id_new = obj.create(cr, user, act[2], context=context)
|
if act[0] == 0:
|
||||||
result += obj._store_get_values(cr, user, [id_new], act[2].keys(), context)
|
act[2][self._fields_id] = id
|
||||||
elif act[0] == 1:
|
id_new = obj.create(cr, user, act[2], context=context)
|
||||||
obj.write(cr, user, [act[1]], act[2], context=context)
|
result += obj._store_get_values(cr, user, [id_new], act[2].keys(), context)
|
||||||
elif act[0] == 2:
|
elif act[0] == 1:
|
||||||
obj.unlink(cr, user, [act[1]], context=context)
|
obj.write(cr, user, [act[1]], act[2], context=context)
|
||||||
elif act[0] == 3:
|
elif act[0] == 2:
|
||||||
inverse_field = obj._fields.get(self._fields_id)
|
|
||||||
assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o'
|
|
||||||
# if the model has on delete cascade, just delete the row
|
|
||||||
if inverse_field.ondelete == "cascade":
|
|
||||||
obj.unlink(cr, user, [act[1]], context=context)
|
obj.unlink(cr, user, [act[1]], context=context)
|
||||||
else:
|
elif act[0] == 3:
|
||||||
cr.execute('update '+_table+' set '+self._fields_id+'=null where id=%s', (act[1],))
|
inverse_field = obj._fields.get(self._fields_id)
|
||||||
elif act[0] == 4:
|
assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o'
|
||||||
# table of the field (parent_model in case of inherit)
|
# if the model has on delete cascade, just delete the row
|
||||||
field = obj.pool[self._obj]._fields[self._fields_id]
|
if inverse_field.ondelete == "cascade":
|
||||||
field_model = field.base_field.model_name
|
obj.unlink(cr, user, [act[1]], context=context)
|
||||||
field_table = obj.pool[field_model]._table
|
else:
|
||||||
cr.execute("select 1 from {0} where id=%s and {1}=%s".format(field_table, self._fields_id), (act[1], id))
|
cr.execute('update '+_table+' set '+self._fields_id+'=null where id=%s', (act[1],))
|
||||||
if not cr.fetchone():
|
elif act[0] == 4:
|
||||||
# Must use write() to recompute parent_store structure if needed and check access rules
|
# table of the field (parent_model in case of inherit)
|
||||||
obj.write(cr, user, [act[1]], {self._fields_id:id}, context=context or {})
|
field = obj.pool[self._obj]._fields[self._fields_id]
|
||||||
elif act[0] == 5:
|
field_model = field.base_field.model_name
|
||||||
inverse_field = obj._fields.get(self._fields_id)
|
field_table = obj.pool[field_model]._table
|
||||||
assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o'
|
cr.execute("select 1 from {0} where id=%s and {1}=%s".format(field_table, self._fields_id), (act[1], id))
|
||||||
# if the o2m has a static domain we must respect it when unlinking
|
if not cr.fetchone():
|
||||||
domain = self._domain(obj) if callable(self._domain) else self._domain
|
# Must use write() to recompute parent_store structure if needed and check access rules
|
||||||
extra_domain = domain or []
|
obj.write(cr, user, [act[1]], {self._fields_id:id}, context=context or {})
|
||||||
ids_to_unlink = obj.search(cr, user, [(self._fields_id,'=',id)] + extra_domain, context=context)
|
elif act[0] == 5:
|
||||||
# If the model has cascade deletion, we delete the rows because it is the intended behavior,
|
inverse_field = obj._fields.get(self._fields_id)
|
||||||
# otherwise we only nullify the reverse foreign key column.
|
assert inverse_field, 'Trying to unlink the content of a o2m but the pointed model does not have a m2o'
|
||||||
if inverse_field.ondelete == "cascade":
|
# if the o2m has a static domain we must respect it when unlinking
|
||||||
obj.unlink(cr, user, ids_to_unlink, context=context)
|
domain = self._domain(obj) if callable(self._domain) else self._domain
|
||||||
else:
|
extra_domain = domain or []
|
||||||
obj.write(cr, user, ids_to_unlink, {self._fields_id: False}, context=context)
|
ids_to_unlink = obj.search(cr, user, [(self._fields_id,'=',id)] + extra_domain, context=context)
|
||||||
elif act[0] == 6:
|
# If the model has cascade deletion, we delete the rows because it is the intended behavior,
|
||||||
# Must use write() to recompute parent_store structure if needed
|
# otherwise we only nullify the reverse foreign key column.
|
||||||
obj.write(cr, user, act[2], {self._fields_id:id}, context=context or {})
|
if inverse_field.ondelete == "cascade":
|
||||||
ids2 = act[2] or [0]
|
obj.unlink(cr, user, ids_to_unlink, context=context)
|
||||||
cr.execute('select id from '+_table+' where '+self._fields_id+'=%s and id <> ALL (%s)', (id,ids2))
|
else:
|
||||||
ids3 = map(lambda x:x[0], cr.fetchall())
|
obj.write(cr, user, ids_to_unlink, {self._fields_id: False}, context=context)
|
||||||
obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {})
|
elif act[0] == 6:
|
||||||
|
# Must use write() to recompute parent_store structure if needed
|
||||||
|
obj.write(cr, user, act[2], {self._fields_id:id}, context=context or {})
|
||||||
|
ids2 = act[2] or [0]
|
||||||
|
cr.execute('select id from '+_table+' where '+self._fields_id+'=%s and id <> ALL (%s)', (id,ids2))
|
||||||
|
ids3 = map(lambda x:x[0], cr.fetchall())
|
||||||
|
obj.write(cr, user, ids3, {self._fields_id:False}, context=context or {})
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None):
|
def search(self, cr, obj, args, name, value, offset=0, limit=None, uid=None, operator='like', context=None):
|
||||||
|
|
Loading…
Reference in New Issue