2014-07-06 14:44:26 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
##############################################################################
|
|
|
|
#
|
|
|
|
# OpenERP, Open Source Management Solution
|
|
|
|
# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>).
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
#
|
|
|
|
##############################################################################
|
|
|
|
|
|
|
|
""" High-level objects for fields. """
|
|
|
|
|
2015-02-11 11:02:21 +00:00
|
|
|
from collections import OrderedDict
|
2014-07-06 14:44:26 +00:00
|
|
|
from datetime import date, datetime
|
|
|
|
from functools import partial
|
|
|
|
from operator import attrgetter
|
2014-10-08 14:32:48 +00:00
|
|
|
from types import NoneType
|
2014-07-06 14:44:26 +00:00
|
|
|
import logging
|
|
|
|
import pytz
|
|
|
|
import xmlrpclib
|
|
|
|
|
2015-06-30 14:23:16 +00:00
|
|
|
from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet
|
2014-07-06 14:44:26 +00:00
|
|
|
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
|
|
|
|
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
|
|
|
|
|
|
|
|
# lengths of the server-side string representations of dates and datetimes,
# used to truncate/split serialized values
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))

# shared immutable empty dict, used as a memory-saving default for `_attrs`
EMPTY_DICT = frozendict()

_logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
class SpecialValue(object):
    """ Marker object stored in the cache in place of an actual field value.

    Cache readers must unwrap it with :meth:`get` (see ``_check_value``).
    """

    def __init__(self, value):
        # keep the wrapped value for later retrieval by get()
        self.value = value

    def get(self):
        """ Return the encapsulated value. """
        return self.value
|
|
|
|
|
|
|
|
class FailedValue(SpecialValue):
    """ Special cache entry wrapping an exception instead of a value.

    Reading the cache entry (via :meth:`get`) re-raises the exception.
    """

    def __init__(self, exception):
        # store the exception itself; it is raised lazily upon access
        self.exception = exception

    def get(self):
        """ Raise the encapsulated exception. """
        raise self.exception
|
|
|
|
|
|
|
|
def _check_value(value):
    """ Unwrap ``value``: call its getter when it is a :class:`SpecialValue`
        (which may raise, for :class:`FailedValue`), otherwise return it as-is.
    """
    if isinstance(value, SpecialValue):
        return value.get()
    return value
|
|
|
|
|
|
|
|
|
|
|
|
def resolve_all_mro(cls, name, reverse=False):
    """ Yield the successively overridden values of attribute ``name`` found
        in the classes of ``cls``'s mro, in mro order (most derived first),
        or in inverse mro order if ``reverse`` is true.
    """
    if reverse:
        klasses = reversed(cls.__mro__)
    else:
        klasses = cls.__mro__
    for klass in klasses:
        # only consider attributes defined directly on the class itself
        try:
            yield klass.__dict__[name]
        except KeyError:
            pass
|
|
|
|
|
|
|
|
|
|
|
|
class MetaField(type):
    """ Metaclass for field classes.

    It merges the ``_slots`` dicts along the inheritance chain, derives
    ``__slots__`` accordingly, registers field classes by ``type`` in
    :attr:`by_type`, and precomputes the ``_column_*`` / ``_related_*`` /
    ``_description_*`` attribute lists on each class.
    """
    # registry mapping field type names (e.g. 'char') to field classes;
    # the first class defined for a given type wins
    by_type = {}

    def __new__(meta, name, bases, attrs):
        """ Merge the ``_slots`` dicts of the base classes with the one given
            in ``attrs``, and derive ``__slots__`` for the new class.
        """
        # gather slot defaults inherited from the bases; iterate bases in
        # reverse so that earlier bases take precedence, as attribute lookup does
        inherited = {}
        for base in reversed(bases):
            inherited.update(getattr(base, '_slots', ()))

        merged = dict(inherited)
        merged.update(attrs.get('_slots', ()))

        # only names not already declared by a base need actual slots here
        attrs['__slots__'] = set(merged) - set(inherited)
        attrs['_slots'] = merged
        return type.__new__(meta, name, bases, attrs)

    def __init__(cls, name, bases, attrs):
        super(MetaField, cls).__init__(name, bases, attrs)
        # register the class for its field type, first definition wins
        if cls.type and cls.type not in MetaField.by_type:
            MetaField.by_type[cls.type] = cls

        # precompute attribute lists so fields never need dir() at runtime
        cls.column_attrs = []
        cls.related_attrs = []
        cls.description_attrs = []
        for attr in dir(cls):
            if attr.startswith('_column_'):
                cls.column_attrs.append((attr[len('_column_'):], attr))
            elif attr.startswith('_related_'):
                cls.related_attrs.append((attr[len('_related_'):], attr))
            elif attr.startswith('_description_'):
                cls.description_attrs.append((attr[len('_description_'):], attr))
|
|
|
|
|
|
|
|
|
|
|
|
class Field(object):
|
|
|
|
""" The field descriptor contains the field definition, and manages accesses
|
|
|
|
and assignments of the corresponding field on records. The following
|
|
|
|
attributes may be provided when instanciating a field:
|
|
|
|
|
|
|
|
:param string: the label of the field seen by users (string); if not
|
|
|
|
set, the ORM takes the field name in the class (capitalized).
|
|
|
|
|
|
|
|
:param help: the tooltip of the field seen by users (string)
|
|
|
|
|
|
|
|
:param readonly: whether the field is readonly (boolean, by default ``False``)
|
|
|
|
|
|
|
|
:param required: whether the value of the field is required (boolean, by
|
|
|
|
default ``False``)
|
|
|
|
|
|
|
|
:param index: whether the field is indexed in database (boolean, by
|
|
|
|
default ``False``)
|
|
|
|
|
|
|
|
:param default: the default value for the field; this is either a static
|
|
|
|
value, or a function taking a recordset and returning a value
|
|
|
|
|
2014-08-04 16:19:47 +00:00
|
|
|
:param states: a dictionary mapping state values to lists of UI attribute-value
|
|
|
|
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
|
|
|
|
Note: Any state-based condition requires the ``state`` field value to be
|
|
|
|
available on the client-side UI. This is typically done by including it in
|
|
|
|
the relevant views, possibly made invisible if not relevant for the
|
|
|
|
end-user.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
:param groups: comma-separated list of group xml ids (string); this
|
|
|
|
restricts the field access to the users of the given groups only
|
|
|
|
|
2014-08-07 10:41:33 +00:00
|
|
|
:param bool copy: whether the field value should be copied when the record
|
|
|
|
is duplicated (default: ``True`` for normal fields, ``False`` for
|
|
|
|
``one2many`` and computed fields, including property fields and
|
|
|
|
related fields)
|
|
|
|
|
2014-11-03 12:39:16 +00:00
|
|
|
:param string oldname: the previous name of this field, so that ORM can rename
|
|
|
|
it automatically at migration
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
.. _field-computed:
|
|
|
|
|
|
|
|
.. rubric:: Computed fields
|
|
|
|
|
|
|
|
One can define a field whose value is computed instead of simply being
|
|
|
|
read from the database. The attributes that are specific to computed
|
|
|
|
fields are given below. To define such a field, simply provide a value
|
2015-07-06 15:39:19 +00:00
|
|
|
for the attribute ``compute``.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
:param compute: name of a method that computes the field
|
|
|
|
|
|
|
|
:param inverse: name of a method that inverses the field (optional)
|
|
|
|
|
|
|
|
:param search: name of a method that implement search on the field (optional)
|
|
|
|
|
|
|
|
:param store: whether the field is stored in database (boolean, by
|
|
|
|
default ``False`` on computed fields)
|
|
|
|
|
2015-02-05 10:48:58 +00:00
|
|
|
:param compute_sudo: whether the field should be recomputed as superuser
|
|
|
|
to bypass access rights (boolean, by default ``False``)
|
|
|
|
|
2015-07-06 15:39:19 +00:00
|
|
|
The methods given for ``compute``, ``inverse`` and ``search`` are model
|
2014-07-06 14:44:26 +00:00
|
|
|
methods. Their signature is shown in the following example::
|
|
|
|
|
|
|
|
upper = fields.Char(compute='_compute_upper',
|
|
|
|
inverse='_inverse_upper',
|
|
|
|
search='_search_upper')
|
|
|
|
|
|
|
|
@api.depends('name')
|
|
|
|
def _compute_upper(self):
|
|
|
|
for rec in self:
|
2015-06-17 07:39:39 +00:00
|
|
|
rec.upper = rec.name.upper() if rec.name else False
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def _inverse_upper(self):
|
|
|
|
for rec in self:
|
2015-06-17 07:39:39 +00:00
|
|
|
rec.name = rec.upper.lower() if rec.upper else False
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def _search_upper(self, operator, value):
|
|
|
|
if operator == 'like':
|
|
|
|
operator = 'ilike'
|
|
|
|
return [('name', operator, value)]
|
|
|
|
|
|
|
|
The compute method has to assign the field on all records of the invoked
|
|
|
|
recordset. The decorator :meth:`openerp.api.depends` must be applied on
|
|
|
|
the compute method to specify the field dependencies; those dependencies
|
|
|
|
are used to determine when to recompute the field; recomputation is
|
|
|
|
automatic and guarantees cache/database consistency. Note that the same
|
|
|
|
method can be used for several fields, you simply have to assign all the
|
|
|
|
given fields in the method; the method will be invoked once for all
|
|
|
|
those fields.
|
|
|
|
|
|
|
|
By default, a computed field is not stored to the database, and is
|
|
|
|
computed on-the-fly. Adding the attribute ``store=True`` will store the
|
|
|
|
field's values in the database. The advantage of a stored field is that
|
|
|
|
searching on that field is done by the database itself. The disadvantage
|
|
|
|
is that it requires database updates when the field must be recomputed.
|
|
|
|
|
|
|
|
The inverse method, as its name says, does the inverse of the compute
|
|
|
|
method: the invoked records have a value for the field, and you must
|
|
|
|
apply the necessary changes on the field dependencies such that the
|
|
|
|
computation gives the expected value. Note that a computed field without
|
|
|
|
an inverse method is readonly by default.
|
|
|
|
|
|
|
|
The search method is invoked when processing domains before doing an
|
|
|
|
actual search on the model. It must return a domain equivalent to the
|
2015-07-06 15:39:19 +00:00
|
|
|
condition: ``field operator value``.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
.. _field-related:
|
|
|
|
|
|
|
|
.. rubric:: Related fields
|
|
|
|
|
|
|
|
The value of a related field is given by following a sequence of
|
|
|
|
relational fields and reading a field on the reached model. The complete
|
|
|
|
sequence of fields to traverse is specified by the attribute
|
|
|
|
|
|
|
|
:param related: sequence of field names
|
|
|
|
|
|
|
|
Some field attributes are automatically copied from the source field if
|
2015-07-06 15:39:19 +00:00
|
|
|
they are not redefined: ``string``, ``help``, ``readonly``, ``required`` (only
|
|
|
|
if all fields in the sequence are required), ``groups``, ``digits``, ``size``,
|
|
|
|
``translate``, ``sanitize``, ``selection``, ``comodel_name``, ``domain``,
|
|
|
|
``context``. All semantic-free attributes are copied from the source
|
|
|
|
field.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
By default, the values of related fields are not stored to the database.
|
|
|
|
Add the attribute ``store=True`` to make it stored, just like computed
|
|
|
|
fields. Related fields are automatically recomputed when their
|
|
|
|
dependencies are modified.
|
|
|
|
|
|
|
|
.. _field-company-dependent:
|
|
|
|
|
|
|
|
.. rubric:: Company-dependent fields
|
|
|
|
|
|
|
|
Formerly known as 'property' fields, the value of those fields depends
|
|
|
|
on the company. In other words, users that belong to different companies
|
|
|
|
may see different values for the field on a given record.
|
|
|
|
|
|
|
|
:param company_dependent: whether the field is company-dependent (boolean)
|
|
|
|
|
|
|
|
.. _field-incremental-definition:
|
|
|
|
|
|
|
|
.. rubric:: Incremental definition
|
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
A field is defined as class attribute on a model class. If the model
|
|
|
|
is extended (see :class:`~openerp.models.Model`), one can also extend
|
|
|
|
the field definition by redefining a field with the same name and same
|
|
|
|
type on the subclass. In that case, the attributes of the field are
|
|
|
|
taken from the parent class and overridden by the ones given in
|
|
|
|
subclasses.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
For instance, the second class below only adds a tooltip on the field
|
|
|
|
``state``::
|
|
|
|
|
|
|
|
class First(models.Model):
|
|
|
|
_name = 'foo'
|
|
|
|
state = fields.Selection([...], required=True)
|
|
|
|
|
|
|
|
class Second(models.Model):
|
|
|
|
_inherit = 'foo'
|
|
|
|
state = fields.Selection(help="Blah blah blah")
|
|
|
|
|
|
|
|
"""
|
|
|
|
__metaclass__ = MetaField
|
|
|
|
|
2015-03-12 15:39:12 +00:00
|
|
|
type = None # type of the field (string)
|
|
|
|
relational = False # whether the field is a relational one
|
|
|
|
|
|
|
|
_slots = {
|
|
|
|
'_attrs': EMPTY_DICT, # dictionary of field attributes; it contains:
|
|
|
|
# - all attributes after __init__()
|
|
|
|
# - free attributes only after set_class_name()
|
|
|
|
|
|
|
|
'automatic': False, # whether the field is automatically created ("magic" field)
|
|
|
|
'inherited': False, # whether the field is inherited (_inherits)
|
|
|
|
'column': None, # the column corresponding to the field
|
|
|
|
'setup_done': False, # whether the field has been set up
|
|
|
|
|
|
|
|
'name': None, # name of the field
|
|
|
|
'model_name': None, # name of the model of this field
|
|
|
|
'comodel_name': None, # name of the model of values (if relational)
|
|
|
|
|
|
|
|
'store': True, # whether the field is stored in database
|
|
|
|
'index': False, # whether the field is indexed in database
|
|
|
|
'manual': False, # whether the field is a custom field
|
|
|
|
'copy': True, # whether the field is copied over by BaseModel.copy()
|
|
|
|
'depends': (), # collection of field dependencies
|
|
|
|
'recursive': False, # whether self depends on itself
|
|
|
|
'compute': None, # compute(recs) computes field on recs
|
|
|
|
'compute_sudo': False, # whether field should be recomputed as admin
|
|
|
|
'inverse': None, # inverse(recs) inverses field on recs
|
|
|
|
'search': None, # search(recs, operator, value) searches on self
|
|
|
|
'related': None, # sequence of field names, for related fields
|
|
|
|
'related_sudo': True, # whether related fields should be read as admin
|
2015-07-06 15:39:19 +00:00
|
|
|
'company_dependent': False, # whether ``self`` is company-dependent (property field)
|
2015-03-12 15:39:12 +00:00
|
|
|
'default': None, # default(recs) returns the default value
|
|
|
|
|
|
|
|
'string': None, # field label
|
|
|
|
'help': None, # field tooltip
|
|
|
|
'readonly': False, # whether the field is readonly
|
|
|
|
'required': False, # whether the field is required
|
|
|
|
'states': None, # set readonly and required depending on state
|
|
|
|
'groups': None, # csv list of group xml ids
|
|
|
|
'change_default': False, # whether the field may trigger a "user-onchange"
|
|
|
|
'deprecated': None, # whether the field is deprecated
|
|
|
|
|
|
|
|
'inverse_fields': (), # collection of inverse fields (objects)
|
|
|
|
'computed_fields': (), # fields computed with the same method as self
|
|
|
|
'related_field': None, # corresponding related field
|
|
|
|
'_triggers': (), # invalidation and recomputation triggers
|
|
|
|
}
|
2015-03-11 09:36:17 +00:00
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
def __init__(self, string=None, **kwargs):
    """ Initialize the field; ``None`` attribute values are dropped so that
        the defaults declared in ``_slots`` apply instead.
    """
    kwargs['string'] = string
    attrs = {}
    for key, val in kwargs.iteritems():
        if val is not None:
            attrs[key] = val
    # share the immutable EMPTY_DICT when no attribute was given
    self._attrs = attrs if attrs else EMPTY_DICT
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-03-11 11:37:10 +00:00
|
|
|
def __getattr__(self, name):
    """ Fall back to the free (non-slot) attributes stored in ``_attrs``. """
    attrs = self._attrs
    if name in attrs:
        return attrs[name]
    raise AttributeError(name)
|
|
|
|
|
|
|
|
def __setattr__(self, name, value):
    """ Assign a slot attribute when one exists, otherwise store the value
        among the free attributes in ``_attrs``.
    """
    try:
        object.__setattr__(self, name, value)
    except AttributeError:
        # ``name`` is not a slot; keep it in _attrs instead
        if not self._attrs:
            # copy-on-write: replace the shared EMPTY_DICT with a real dict
            self._attrs = {name: value}
        else:
            self._attrs[name] = value
|
|
|
|
|
2015-03-11 11:37:10 +00:00
|
|
|
def __delattr__(self, name):
    """ Remove non-slot field attribute ``name``.

    Only free attributes stored in ``_attrs`` can be deleted; a missing key
    is reported as an :class:`AttributeError`, matching normal attribute
    semantics. Slot attributes are never deleted by this method.
    """
    try:
        del self._attrs[name]
    except KeyError:
        raise AttributeError(name)
|
|
|
|
|
2014-10-09 09:01:23 +00:00
|
|
|
def new(self, **kwargs):
    """ Return a fresh field of the same type as ``self``, initialized with
        its own parameters only (nothing is copied from ``self``).
    """
    field_class = type(self)
    return field_class(**kwargs)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def set_class_name(self, cls, name):
    """ Assign the model class and field name of ``self``.

    This resets all slot attributes to their defaults, then applies the
    attributes accumulated over the field's (re)definitions in the class
    hierarchy of ``cls``, and finally determines the field's default value.
    """
    # keep the attributes given at instantiation; reset slots to defaults
    self_attrs = self._attrs
    for attr, value in self._slots.iteritems():
        setattr(self, attr, value)

    self.model_name = cls._name
    self.name = name

    # determine all inherited field attributes: walk the mro from the root
    # down, accumulating _attrs of each redefinition of this field; a
    # redefinition with a different field type discards what came before
    attrs = {}
    for field in resolve_all_mro(cls, name, reverse=True):
        if isinstance(field, type(self)):
            attrs.update(field._attrs)
        else:
            attrs.clear()
    attrs.update(self_attrs)        # necessary in case self is not in cls

    # initialize ``self`` with ``attrs``
    if attrs.get('compute'):
        # by default, computed fields are not stored, not copied and readonly
        # (readonly unless an inverse method makes them writable)
        attrs['store'] = attrs.get('store', False)
        attrs['copy'] = attrs.get('copy', False)
        attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
    if attrs.get('related'):
        # by default, related fields are not stored and not copied
        attrs['store'] = attrs.get('store', False)
        attrs['copy'] = attrs.get('copy', False)

    # fix for function fields overridden by regular columns: drop the
    # inherited 'store' so the regular column's storage applies
    # NOTE(review): ``fields`` here is the old-api columns module,
    # presumably imported elsewhere in this file — confirm
    if not isinstance(attrs.get('column'), (NoneType, fields.function)):
        attrs.pop('store', None)

    for attr, value in attrs.iteritems():
        setattr(self, attr, value)

    if not self.string and not self.related:
        # related fields get their string from their parent field
        self.string = name.replace('_', ' ').capitalize()

    # determine self.default and cls._defaults in a consistent way
    self._determine_default(cls, name)
|
|
|
|
|
|
|
|
def _determine_default(self, cls, name):
    """ Retrieve the default value for ``self`` in the hierarchy of ``cls``, and
        determine ``self.default`` and ``cls._defaults`` accordingly.

    Both the new-api ``default`` field attribute and the old-api
    ``_defaults`` class dict are kept in sync: whichever is found first in
    the mro wins, and the other side is derived from it.
    """
    self.default = None

    # traverse the class hierarchy upwards, and take the first field
    # definition with a default or _defaults for self
    for klass in cls.__mro__:
        if name in klass.__dict__:
            field = klass.__dict__[name]
            if not isinstance(field, type(self)):
                # klass contains another value overridden by self
                return

            if 'default' in field._attrs:
                # take the default in field, and adapt it for cls._defaults
                value = field._attrs['default']
                if callable(value):
                    # local import to avoid a circular import at module load
                    from openerp import api
                    self.default = value
                    # old-api default receives the model; convert the
                    # new-api result to the write format
                    cls._defaults[name] = api.model(
                        lambda recs: self.convert_to_write(value(recs))
                    )
                else:
                    self.default = lambda recs: value
                    cls._defaults[name] = value
                return

        defaults = klass.__dict__.get('_defaults') or {}
        if name in defaults:
            # take the value from _defaults, and adapt it for self.default
            value = defaults[name]
            if callable(value):
                # old-api callables take (model, cr, uid, context)
                func = lambda recs: value(recs._model, recs._cr, recs._uid, recs._context)
            else:
                func = lambda recs: value
            # new-api default must produce a cache-format value
            self.default = lambda recs: self.convert_to_cache(
                func(recs), recs, validate=False,
            )
            cls._defaults[name] = value
            return
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
def __str__(self):
    """ Return the fully-qualified field name, as ``model.field``. """
    return "{0}.{1}".format(self.model_name, self.name)
|
|
|
|
|
|
|
|
def __repr__(self):
    """ Same representation as ``__str__``: the qualified field name. """
    return "{0}.{1}".format(self.model_name, self.name)
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Field setup
|
|
|
|
#
|
|
|
|
|
|
|
|
def setup(self, env):
    """ Make sure that ``self`` is set up, except for recomputation triggers.

    Idempotent: a field already set up is left untouched.
    """
    if self.setup_done:
        return
    # related fields have a dedicated setup path
    if self.related:
        self._setup_related(env)
    else:
        self._setup_regular(env)
    self.setup_done = True
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
#
|
|
|
|
# Setup of non-related fields
|
|
|
|
#
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
def _setup_regular(self, env):
    """ Setup the attributes of a non-related field.

    Resolves the string names given for ``compute``, ``inverse`` and
    ``search`` into the actual (unbound) model methods, and collects the
    field dependencies declared via ``@api.depends``.
    """
    recs = env[self.model_name]

    def make_depends(deps):
        # dependencies may be given as a callable taking the model
        return tuple(deps(recs) if callable(deps) else deps)

    # convert compute into a callable and determine depends
    if isinstance(self.compute, basestring):
        # if the compute method has been overridden, concatenate all their _depends
        self.depends = ()
        for method in resolve_all_mro(type(recs), self.compute, reverse=True):
            self.depends += make_depends(getattr(method, '_depends', ()))
        self.compute = getattr(type(recs), self.compute)
    else:
        self.depends = make_depends(getattr(self.compute, '_depends', ()))

    # convert inverse and search into callables
    if isinstance(self.inverse, basestring):
        self.inverse = getattr(type(recs), self.inverse)
    if isinstance(self.search, basestring):
        self.search = getattr(type(recs), self.search)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
#
|
|
|
|
# Setup of related fields
|
|
|
|
#
|
|
|
|
|
|
|
|
def _setup_related(self, env):
    """ Setup the attributes of a related field.

    Resolves the chain of fields named by ``self.related``, wires up the
    generic related compute/inverse/search methods, and copies attributes
    from the target field when they were not set explicitly on ``self``.
    """
    # fix the type of self.related if necessary
    if isinstance(self.related, basestring):
        self.related = tuple(self.related.split('.'))

    # determine the chain of fields, and make sure they are all set up
    recs = env[self.model_name]
    fields = []
    for name in self.related:
        field = recs._fields[name]
        field.setup(env)
        recs = recs[name]
        fields.append(field)

    # ``field`` is now the last field of the chain, i.e. the source field
    self.related_field = field

    # check type consistency
    if self.type != field.type:
        raise Warning("Type of related field %s is inconsistent with %s" % (self, field))

    # determine dependencies, compute, inverse, and search
    self.depends = ('.'.join(self.related),)
    self.compute = self._compute_related
    if not (self.readonly or field.readonly):
        self.inverse = self._inverse_related
    if field._description_searchable:
        # allow searching on self only if the related field is searchable
        self.search = self._search_related

    # copy attributes from field to self (string, help, etc.)
    for attr, prop in self.related_attrs:
        if not getattr(self, attr):
            setattr(self, attr, getattr(field, prop))

    # copy all semantic-free attributes not explicitly set on self
    for attr, value in field._attrs.iteritems():
        if attr not in self._attrs:
            setattr(self, attr, value)

    # special case for states: copy it only for inherited fields
    if not self.states and self.inherited:
        self.states = field.states

    # special case for required: check if all fields are required
    if not self.store and not self.required:
        self.required = all(field.required for field in fields)
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
def _compute_related(self, records):
    """ Compute the related field ``self`` on ``records``. """
    # when related_sudo is set, read values as superuser to bypass ACLs
    if self.related_sudo:
        others = records.sudo()
    else:
        others = records
    for record, other in zip(records, others):
        if not record.id:
            # draft record, do not switch to another environment
            other = record
        # traverse the intermediate fields; follow the first record at each step
        for fname in self.related[:-1]:
            other = other[fname][:1]
        record[self.name] = other[self.related[-1]]
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def _inverse_related(self, records):
    """ Inverse the related field ``self`` on ``records``. """
    last = self.related[-1]
    for record in records:
        # traverse the intermediate fields, and keep at most one record
        target = record
        for fname in self.related[:-1]:
            target = target[fname][:1]
        # write only when the chain led to an existing record
        if target:
            target[last] = record[self.name]
|
|
|
|
|
|
|
|
def _search_related(self, records, operator, value):
    """ Determine the domain to search on field ``self``: delegate to the
        dotted path of the related chain. """
    path = '.'.join(self.related)
    return [(path, operator, value)]
|
|
|
|
|
|
|
|
# properties used by _setup_related() to copy values from the related
# (source) field; the `_related_` prefix is stripped by MetaField.__init__
# to build `related_attrs`
_related_comodel_name = property(attrgetter('comodel_name'))
_related_string = property(attrgetter('string'))
_related_help = property(attrgetter('help'))
_related_readonly = property(attrgetter('readonly'))
_related_groups = property(attrgetter('groups'))
|
|
|
|
|
[IMP] use model._fields instead of model._all_columns to cover all fields
The old-api model._all_columns contains information about model._columns and
inherited columns. This dictionary is missing new-api computed non-stored
fields, and the new field objects provide a more readable api...
This commit contains the following changes:
- adapt several methods of BaseModel to use fields instead of columns and
_all_columns
- copy all semantic-free attributes of related fields from their source
- add attribute 'group_operator' on integer and float fields
- base, base_action_rule, crm, edi, hr, mail, mass_mailing, pad,
payment_acquirer, share, website, website_crm, website_mail: simply use
_fields instead of _all_columns
- base, decimal_precision, website: adapt qweb rendering methods to use fields
instead of columns
2014-11-03 15:00:50 +00:00
|
|
|
@property
def base_field(self):
    """ Return the base field of an inherited field, or ``self``. """
    if self.inherited:
        # inherited (_inherits) fields delegate to their source field
        return self.related_field.base_field
    return self
|
[IMP] use model._fields instead of model._all_columns to cover all fields
The old-api model._all_columns contains information about model._columns and
inherited columns. This dictionary is missing new-api computed non-stored
fields, and the new field objects provide a more readable api...
This commit contains the following changes:
- adapt several methods of BaseModel to use fields instead of columns and
_all_columns
- copy all semantic-free attributes of related fields from their source
- add attribute 'group_operator' on integer and float fields
- base, base_action_rule, crm, edi, hr, mail, mass_mailing, pad,
payment_acquirer, share, website, website_crm, website_mail: simply use
_fields instead of _all_columns
- base, decimal_precision, website: adapt qweb rendering methods to use fields
instead of columns
2014-11-03 15:00:50 +00:00
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
#
|
2015-01-14 16:17:47 +00:00
|
|
|
# Setup of field triggers
|
2014-07-06 14:44:26 +00:00
|
|
|
#
|
2015-03-11 09:36:17 +00:00
|
|
|
# The triggers is a collection of pairs (field, path) of computed fields
|
2015-07-06 15:39:19 +00:00
|
|
|
# that depend on ``self``. When ``self`` is modified, it invalidates the cache
|
|
|
|
# of each ``field``, and registers the records to recompute based on ``path``.
|
|
|
|
# See method ``modified`` below for details.
|
2015-03-11 09:36:17 +00:00
|
|
|
#
|
|
|
|
|
|
|
|
def add_trigger(self, trigger):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Add a recomputation trigger on ``self``. """
|
2015-03-11 09:36:17 +00:00
|
|
|
if trigger not in self._triggers:
|
|
|
|
self._triggers += (trigger,)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
    def setup_triggers(self, env):
        """ Add the necessary triggers to invalidate/recompute ``self``. """
        model = env[self.model_name]
        # each dependency is a dotted field path starting from self's model
        for path in self.depends:
            self._setup_dependency([], model, path.split('.'))
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
    def _setup_dependency(self, path0, model, path1):
        """ Make ``self`` depend on ``model``; `path0 + path1` is a dependency of
        ``self``, and ``path0`` is the sequence of field names from ``self.model``
        to ``model``.
        """
        env = model.env
        head, tail = path1[0], path1[1:]

        if head == '*':
            # special case: add triggers on all fields of model (except self)
            fields = set(model._fields.itervalues()) - set([self])
        else:
            fields = [model._fields[head]]

        for field in fields:
            if field == self:
                # a field depending on itself: flag it so determine_value()
                # computes it record by record instead of batching
                _logger.debug("Field %s is recursively defined", self)
                self.recursive = True
                continue

            #_logger.debug("Add trigger on %s to recompute %s", field, self)
            # path0 reversed gives the records to recompute from the modified
            # ones; an empty path0 means "the modified records themselves"
            field.add_trigger((self, '.'.join(path0 or ['id'])))

            # add trigger on inverse fields, too
            for invf in field.inverse_fields:
                #_logger.debug("Add trigger on %s to recompute %s", invf, self)
                invf.add_trigger((self, '.'.join(path0 + [head])))

            # recursively traverse the dependency
            if tail:
                comodel = env[field.comodel_name]
                self._setup_dependency(path0 + [head], comodel, tail)
|
|
|
|
|
|
|
|
@property
|
|
|
|
def dependents(self):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Return the computed fields that depend on ``self``. """
|
2014-07-06 14:44:26 +00:00
|
|
|
return (field for field, path in self._triggers)
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Field description
|
|
|
|
#
|
|
|
|
|
|
|
|
def get_description(self, env):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Return a dictionary that describes the field ``self``. """
|
2014-07-06 14:44:26 +00:00
|
|
|
desc = {'type': self.type}
|
|
|
|
for attr, prop in self.description_attrs:
|
|
|
|
value = getattr(self, prop)
|
|
|
|
if callable(value):
|
|
|
|
value = value(env)
|
2014-08-06 12:59:57 +00:00
|
|
|
if value is not None:
|
2014-07-06 14:44:26 +00:00
|
|
|
desc[attr] = value
|
2014-08-06 12:59:57 +00:00
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
return desc
|
|
|
|
|
|
|
|
    # properties used by get_description(); each _description_<attr> simply
    # exposes the field attribute <attr> (None values are skipped there)
    _description_store = property(attrgetter('store'))
    _description_manual = property(attrgetter('manual'))
    _description_depends = property(attrgetter('depends'))
    _description_related = property(attrgetter('related'))
    _description_company_dependent = property(attrgetter('company_dependent'))
    _description_readonly = property(attrgetter('readonly'))
    _description_required = property(attrgetter('required'))
    _description_states = property(attrgetter('states'))
    _description_groups = property(attrgetter('groups'))
    _description_change_default = property(attrgetter('change_default'))
    _description_deprecated = property(attrgetter('deprecated'))
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-10-30 10:00:10 +00:00
|
|
|
@property
|
|
|
|
def _description_searchable(self):
|
|
|
|
return bool(self.store or self.search or (self.column and self.column._fnct_search))
|
|
|
|
|
|
|
|
@property
|
|
|
|
def _description_sortable(self):
|
|
|
|
return self.store or (self.inherited and self.related_field._description_sortable)
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
    def _description_string(self, env):
        """ Return the label of the field, translated to ``env.lang`` when a
        translation exists.
        """
        if self.string and env.lang:
            # translations are attached to the base (non-inherited) field
            field = self.base_field
            name = "%s,%s" % (field.model_name, field.name)
            trans = env['ir.translation']._get_source(name, 'field', env.lang)
            return trans or self.string
        return self.string
|
|
|
|
|
|
|
|
    def _description_help(self, env):
        """ Return the tooltip of the field, translated to ``env.lang`` when a
        translation exists.
        """
        if self.help and env.lang:
            name = "%s,%s" % (self.model_name, self.name)
            trans = env['ir.translation']._get_source(name, 'help', env.lang)
            return trans or self.help
        return self.help
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Conversion to column instance
|
|
|
|
#
|
|
|
|
|
|
|
|
    def to_column(self):
        """ Return a column object corresponding to ``self``, or ``None``. """
        if not self.store and self.compute:
            # non-stored computed fields do not have a corresponding column
            self.column = None
            return None

        # determine column parameters
        #_logger.debug("Create fields._column for Field %s", self)
        # start from the _column_* properties, then let the explicit keyword
        # arguments given at field definition (_attrs) override them
        args = {}
        for attr, prop in self.column_attrs:
            args[attr] = getattr(self, prop)
        for attr, value in self._attrs.iteritems():
            args[attr] = value

        if self.company_dependent:
            # company-dependent fields are mapped to former property fields
            args['type'] = self.type
            args['relation'] = self.comodel_name
            self.column = fields.property(**args)
        elif self.column:
            # let the column provide a valid column for the given parameters
            self.column = self.column.new(_computed_field=bool(self.compute), **args)
        else:
            # create a fresh new column of the right type
            self.column = getattr(fields, self.type)(**args)

        return self.column
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
    # properties used by to_column() to create a column instance; each
    # _column_<attr> provides the value of keyword <attr> for the column
    _column_copy = property(attrgetter('copy'))
    _column_select = property(attrgetter('index'))  # new-api 'index' maps to old-api 'select'
    _column_manual = property(attrgetter('manual'))
    _column_string = property(attrgetter('string'))
    _column_help = property(attrgetter('help'))
    _column_readonly = property(attrgetter('readonly'))
    _column_required = property(attrgetter('required'))
    _column_states = property(attrgetter('states'))
    _column_groups = property(attrgetter('groups'))
    _column_change_default = property(attrgetter('change_default'))
    _column_deprecated = property(attrgetter('deprecated'))
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Conversion of values
|
|
|
|
#
|
|
|
|
|
|
|
|
def null(self, env):
|
|
|
|
""" return the null value for this field in the given environment """
|
|
|
|
return False
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" convert ``value`` to the cache level in ``env``; ``value`` may come from
|
2014-07-06 14:44:26 +00:00
|
|
|
an assignment, or have the format of methods :meth:`BaseModel.read`
|
|
|
|
or :meth:`BaseModel.write`
|
[FIX] fields: do not revalidate field values unless they are being modified
In the previous implementation of the new API fields,
both fields.Selection and fields.Reference were performing
early validation of their `value` as soon as it entered
the cache, either by being read, written, or computed.
This is a source of trouble and performance problems,
and is unnecessary, as we should consider that the database
always contains valid values. If that is not the case it
means it was modified externally and is an exception that
should be handled externally as well.
Revalidating selection/reference values can be expensive
when the domain of values is dynamic and requires extra
database queries, with extra access rights control, etc.
This patch adds a `validate` parameter to `convert_to_cache`,
allowing to turn off the re-validation on demand. The ORM
will turn off validation whenever the value being converted
is supposed to be already validated, such as when reading it
from the database.
The parameter is currently ignored by all other fields,
and defaults to True so validation is performed in all other
caes.
2014-07-23 10:30:24 +00:00
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
:param record: the target record for the assignment, or an empty recordset
|
|
|
|
|
[FIX] fields: do not revalidate field values unless they are being modified
In the previous implementation of the new API fields,
both fields.Selection and fields.Reference were performing
early validation of their `value` as soon as it entered
the cache, either by being read, written, or computed.
This is a source of trouble and performance problems,
and is unnecessary, as we should consider that the database
always contains valid values. If that is not the case it
means it was modified externally and is an exception that
should be handled externally as well.
Revalidating selection/reference values can be expensive
when the domain of values is dynamic and requires extra
database queries, with extra access rights control, etc.
This patch adds a `validate` parameter to `convert_to_cache`,
allowing to turn off the re-validation on demand. The ORM
will turn off validation whenever the value being converted
is supposed to be already validated, such as when reading it
from the database.
The parameter is currently ignored by all other fields,
and defaults to True so validation is performed in all other
caes.
2014-07-23 10:30:24 +00:00
|
|
|
:param bool validate: when True, field-specific validation of
|
2015-07-06 15:39:19 +00:00
|
|
|
``value`` will be performed
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
|
|
|
return value
|
|
|
|
|
|
|
|
def convert_to_read(self, value, use_name_get=True):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" convert ``value`` from the cache to a value as returned by method
|
2014-07-06 14:44:26 +00:00
|
|
|
:meth:`BaseModel.read`
|
[FIX] fields: do not revalidate field values unless they are being modified
In the previous implementation of the new API fields,
both fields.Selection and fields.Reference were performing
early validation of their `value` as soon as it entered
the cache, either by being read, written, or computed.
This is a source of trouble and performance problems,
and is unnecessary, as we should consider that the database
always contains valid values. If that is not the case it
means it was modified externally and is an exception that
should be handled externally as well.
Revalidating selection/reference values can be expensive
when the domain of values is dynamic and requires extra
database queries, with extra access rights control, etc.
This patch adds a `validate` parameter to `convert_to_cache`,
allowing to turn off the re-validation on demand. The ORM
will turn off validation whenever the value being converted
is supposed to be already validated, such as when reading it
from the database.
The parameter is currently ignored by all other fields,
and defaults to True so validation is performed in all other
caes.
2014-07-23 10:30:24 +00:00
|
|
|
|
|
|
|
:param bool use_name_get: when True, value's diplay name will
|
|
|
|
be computed using :meth:`BaseModel.name_get`, if relevant
|
|
|
|
for the field
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
2014-07-20 11:31:50 +00:00
|
|
|
return False if value is None else value
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def convert_to_write(self, value, target=None, fnames=None):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" convert ``value`` from the cache to a valid value for method
|
2014-07-06 14:44:26 +00:00
|
|
|
:meth:`BaseModel.write`.
|
|
|
|
|
|
|
|
:param target: optional, the record to be modified with this value
|
|
|
|
:param fnames: for relational fields only, an optional collection of
|
|
|
|
field names to convert
|
|
|
|
"""
|
|
|
|
return self.convert_to_read(value)
|
|
|
|
|
|
|
|
def convert_to_onchange(self, value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" convert ``value`` from the cache to a valid value for an onchange
|
2014-07-06 14:44:26 +00:00
|
|
|
method v7.
|
|
|
|
"""
|
|
|
|
return self.convert_to_write(value)
|
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" convert ``value`` from the cache to a valid value for export. The
|
|
|
|
parameter ``env`` is given for managing translations.
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
2015-07-10 08:23:41 +00:00
|
|
|
if not value:
|
|
|
|
return ''
|
|
|
|
return value if env.context.get('export_raw_data') else ustr(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-05 14:42:59 +00:00
|
|
|
    def convert_to_display_name(self, value, record=None):
        """ Convert ``value`` from the cache to a suitable display name. """
        # generic case: the unicode representation of the value
        return ustr(value)
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Descriptor methods
|
|
|
|
#
|
|
|
|
|
|
|
|
    def __get__(self, record, owner):
        """ return the value of field ``self`` on ``record`` """
        if record is None:
            return self  # the field is accessed through the owner class

        if not record:
            # null record -> return the null value for this field
            return self.null(record.env)

        # only a single record may be accessed
        record.ensure_one()

        try:
            # fast path: the value is already in cache
            return record._cache[self]
        except KeyError:
            pass

        # cache miss, retrieve value
        if record.id:
            # normal record -> read or compute value for this field
            self.determine_value(record)
        else:
            # draft record -> compute the value or let it be null
            self.determine_draft_value(record)

        # the result should be in cache now
        return record._cache[self]
|
|
|
|
|
|
|
|
    def __set__(self, record, value):
        """ set the value of field ``self`` on ``record`` """
        env = record.env

        # only a single record may be updated
        record.ensure_one()

        # adapt value to the cache level
        value = self.convert_to_cache(value, record)

        if env.in_draft or not record.id:
            # draft mode (e.g. onchange): work only in cache, never write to db

            # determine dependent fields
            spec = self.modified_draft(record)

            # set value in cache, inverse field, and mark record as dirty
            record._cache[self] = value
            if env.in_onchange:
                for invf in self.inverse_fields:
                    invf._update(value, record)
                record._set_dirty(self.name)

            # determine more dependent fields, and invalidate them
            # (for relational fields the dependencies may differ after the
            # assignment, hence the second modified_draft pass)
            if self.relational:
                spec += self.modified_draft(record)
            env.invalidate(spec)

        else:
            # simply write to the database, and update cache
            record.write({self.name: self.convert_to_write(value)})
            record._cache[self] = value
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Computation of field values
|
|
|
|
#
|
|
|
|
|
|
|
|
    def _compute_value(self, records):
        """ Invoke the compute method on ``records``. """
        # initialize the fields to their corresponding null value in cache,
        # and mark the records as "being computed" to break dependency cycles
        for field in self.computed_fields:
            records._cache[field] = field.null(records.env)
            records.env.computed[field].update(records._ids)
        self.compute(records)
        # computation done: unmark the records
        for field in self.computed_fields:
            records.env.computed[field].difference_update(records._ids)
|
|
|
|
|
|
|
|
def compute_value(self, records):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Invoke the compute method on ``records``; the results are in cache. """
|
2014-07-06 14:44:26 +00:00
|
|
|
with records.env.do_in_draft():
|
|
|
|
try:
|
|
|
|
self._compute_value(records)
|
2014-09-11 08:31:10 +00:00
|
|
|
except (AccessError, MissingError):
|
|
|
|
# some record is forbidden or missing, retry record by record
|
|
|
|
for record in records:
|
|
|
|
try:
|
|
|
|
self._compute_value(record)
|
|
|
|
except Exception as exc:
|
|
|
|
record._cache[self.name] = FailedValue(exc)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
    def determine_value(self, record):
        """ Determine the value of ``self`` for ``record``.

        Depending on the field kind, the value is read from the database,
        recomputed, or set to its null value; in every case the result ends
        up in the record's cache.
        """
        env = record.env

        if self.column and not (self.depends and env.in_draft):
            # this is a stored field or an old-style function field
            if self.depends:
                # this is a stored computed field, check for recomputation
                recs = record._recompute_check(self)
                if recs:
                    # recompute the value (only in cache)
                    self.compute_value(recs)
                    # HACK: if result is in the wrong cache, copy values
                    if recs.env != env:
                        for source, target in zip(recs, recs.with_env(env)):
                            try:
                                values = target._convert_to_cache({
                                    f.name: source[f.name] for f in self.computed_fields
                                }, validate=False)
                            except MissingError as e:
                                values = FailedValue(e)
                            target._cache.update(values)
                    # the result is saved to database by BaseModel.recompute()
                    return

            # read the field from database
            record._prefetch_field(self)

        elif self.compute:
            # this is either a non-stored computed field, or a stored computed
            # field in draft mode
            if self.recursive:
                # recursive fields must be computed record by record
                self.compute_value(record)
            else:
                recs = record._in_cache_without(self)
                self.compute_value(recs)

        else:
            # this is a non-stored non-computed field
            record._cache[self] = self.null(env)
|
|
|
|
|
2014-10-09 15:22:42 +00:00
|
|
|
    def determine_draft_value(self, record):
        """ Determine the value of ``self`` for the given draft ``record``. """
        if self.compute:
            self._compute_value(record)
        else:
            # no way to determine it: mark the null value as special in cache
            record._cache[self] = SpecialValue(self.null(record.env))
|
|
|
|
|
|
|
|
def determine_inverse(self, records):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Given the value of ``self`` on ``records``, inverse the computation. """
|
2014-07-06 14:44:26 +00:00
|
|
|
if self.inverse:
|
|
|
|
self.inverse(records)
|
|
|
|
|
|
|
|
def determine_domain(self, records, operator, value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Return a domain representing a condition on ``self``. """
|
2014-07-06 14:44:26 +00:00
|
|
|
if self.search:
|
|
|
|
return self.search(records, operator, value)
|
|
|
|
else:
|
|
|
|
return [(self.name, operator, value)]
|
|
|
|
|
|
|
|
############################################################################
|
|
|
|
#
|
|
|
|
# Notification when fields are modified
|
|
|
|
#
|
|
|
|
|
|
|
|
    def modified(self, records):
        """ Notify that field ``self`` has been modified on ``records``: prepare the
        fields/records to recompute, and return a spec indicating what to
        invalidate.
        """
        # invalidate the fields that depend on self, and prepare recomputation
        spec = [(self, records._ids)]
        for field, path in self._triggers:
            if path and field.store:
                # stored dependent field: find the records to recompute by
                # searching backwards along the dependency path
                # don't move this line to function top, see log
                env = records.env(user=SUPERUSER_ID, context={'active_test': False})
                target = env[field.model_name].search([(path, 'in', records.ids)])
                if target:
                    spec.append((field, target._ids))
                    # recompute field on target in the environment of records,
                    # and as user admin if required
                    if field.compute_sudo:
                        target = target.with_env(records.env(user=SUPERUSER_ID))
                    else:
                        target = target.with_env(records.env)
                    target._recompute_todo(field)
            else:
                # non-stored dependent field: invalidate it on all records
                spec.append((field, None))

        return spec
|
|
|
|
|
|
|
|
    def modified_draft(self, records):
        """ Same as :meth:`modified`, but in draft mode.

        Only cached records are considered (no database search), and the
        result is a pure invalidation spec (no recomputation is scheduled).
        """
        env = records.env

        # invalidate the fields on the records in cache that depend on
        # ``records``, except fields currently being computed
        spec = []
        for field, path in self._triggers:
            target = env[field.model_name]
            computed = target.browse(env.computed[field])
            if path == 'id':
                # direct dependency: the modified records themselves
                target = records - computed
            elif path:
                # keep the cached records that reach ``records`` through path
                target = (target.browse(env.cache[field]) - computed).filtered(
                    lambda rec: rec._mapped_cache(path) & records
                )
            else:
                target = target.browse(env.cache[field]) - computed

            if target:
                spec.append((field, target._ids))

        return spec
|
|
|
|
|
|
|
|
|
|
|
|
class Boolean(Field):
    """ Boolean field; the cached value is always a genuine ``bool``. """
    type = 'boolean'

    def convert_to_cache(self, value, record, validate=True):
        # coerce any truthy/falsy value to a plain bool
        return bool(value)

    def convert_to_export(self, value, env):
        # raw export keeps the bool, otherwise stringify it
        return value if env.context.get('export_raw_data') else ustr(value)
|
|
|
|
|
|
|
|
|
|
|
|
class Integer(Field):
    """ Basic integer field. """
    type = 'integer'
    _slots = {
        'group_operator': None,         # operator for aggregating values
    }

    # group_operator is copied on related fields and passed to the column
    _related_group_operator = property(attrgetter('group_operator'))
    _column_group_operator = property(attrgetter('group_operator'))

    def convert_to_cache(self, value, record, validate=True):
        if isinstance(value, dict):
            # special case, when an integer field is used as inverse for a one2many
            return value.get('id', False)
        return int(value or 0)

    def convert_to_read(self, value, use_name_get=True):
        # Integer values greater than 2^31-1 are not supported in pure XMLRPC,
        # so we have to pass them as floats :-(
        if value and value > xmlrpclib.MAXINT:
            return float(value)
        return value

    def _update(self, records, value):
        # special case, when an integer field is used as inverse for a one2many
        records._cache[self] = value.id or 0

    def convert_to_export(self, value, env):
        # 0 is a meaningful value for export, unlike other falsy values
        if value or value == 0:
            return value if env.context.get('export_raw_data') else ustr(value)
        return ''
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class Float(Field):
    """ Basic float field. The precision digits are given by the attribute

    :param digits: a pair (total, decimal), or a function taking a database
        cursor and returning a pair (total, decimal)
    """
    type = 'float'
    _slots = {
        '_digits': None,                # digits argument passed to class initializer
        'group_operator': None,         # operator for aggregating values
    }

    def __init__(self, string=None, digits=None, **kwargs):
        # store the raw digits argument; it is resolved lazily by ``digits``
        super(Float, self).__init__(string=string, _digits=digits, **kwargs)

    @property
    def digits(self):
        # resolve callable digits with a database cursor
        if callable(self._digits):
            with fields._get_cursor() as cr:
                return self._digits(cr)
        else:
            return self._digits

    def _setup_digits(self, env):
        """ Setup the digits for ``self`` and its corresponding column """
        pass

    def _setup_regular(self, env):
        super(Float, self)._setup_regular(env)
        self._setup_digits(env)

    # the raw _digits and group_operator are copied on related fields
    _related__digits = property(attrgetter('_digits'))
    _related_group_operator = property(attrgetter('group_operator'))

    _description_digits = property(attrgetter('digits'))

    # the column takes either fixed digits or a digits-computing function
    _column_digits = property(lambda self: not callable(self._digits) and self._digits)
    _column_digits_compute = property(lambda self: callable(self._digits) and self._digits)
    _column_group_operator = property(attrgetter('group_operator'))

    def convert_to_cache(self, value, record, validate=True):
        # apply rounding here, otherwise value in cache may be wrong!
        value = float(value or 0.0)
        digits = self.digits
        return float_round(value, precision_digits=digits[1]) if digits else value

    def convert_to_export(self, value, env):
        # 0.0 is a meaningful value for export, unlike other falsy values
        if value or value == 0.0:
            return value if env.context.get('export_raw_data') else ustr(value)
        return ''
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class _String(Field):
|
|
|
|
""" Abstract class for string fields. """
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'translate': False, # whether the field is translated
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
_column_translate = property(attrgetter('translate'))
|
|
|
|
_related_translate = property(attrgetter('translate'))
|
|
|
|
_description_translate = property(attrgetter('translate'))
|
2015-07-08 08:48:03 +00:00
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class Char(_String):
|
2014-08-22 15:51:20 +00:00
|
|
|
""" Basic string field, can be length-limited, usually displayed as a
|
|
|
|
single-line string in clients
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param int size: the maximum size of values stored for that field
|
|
|
|
:param bool translate: whether the values of this field can be translated
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
|
|
|
type = 'char'
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'size': None, # maximum size of values (deprecated)
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
_column_size = property(attrgetter('size'))
|
|
|
|
_related_size = property(attrgetter('size'))
|
|
|
|
_description_size = property(attrgetter('size'))
|
|
|
|
|
2015-03-11 11:37:10 +00:00
|
|
|
def _setup_regular(self, env):
|
|
|
|
super(Char, self)._setup_regular(env)
|
|
|
|
assert isinstance(self.size, (NoneType, int)), \
|
|
|
|
"Char field %s with non-integer size %r" % (self, self.size)
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-08-13 08:44:05 +00:00
|
|
|
if value is None or value is False:
|
|
|
|
return False
|
|
|
|
return ustr(value)[:self.size]
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class Text(_String):
|
2014-11-24 07:52:38 +00:00
|
|
|
""" Very similar to :class:`~.Char` but used for longer contents, does not
|
|
|
|
have a size and usually displayed as a multiline text box.
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param translate: whether the value of this field can be translated
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
|
|
|
type = 'text'
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-08-13 08:44:05 +00:00
|
|
|
if value is None or value is False:
|
|
|
|
return False
|
|
|
|
return ustr(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class Html(_String):
|
|
|
|
type = 'html'
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'sanitize': True, # whether value must be sanitized
|
|
|
|
'strip_style': False, # whether to strip style attributes
|
|
|
|
}
|
2014-08-21 09:40:38 +00:00
|
|
|
|
|
|
|
_column_sanitize = property(attrgetter('sanitize'))
|
|
|
|
_related_sanitize = property(attrgetter('sanitize'))
|
|
|
|
_description_sanitize = property(attrgetter('sanitize'))
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-03-07 00:36:18 +00:00
|
|
|
_column_strip_style = property(attrgetter('strip_style'))
|
|
|
|
_related_strip_style = property(attrgetter('strip_style'))
|
|
|
|
_description_strip_style = property(attrgetter('strip_style'))
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-08-13 08:44:05 +00:00
|
|
|
if value is None or value is False:
|
|
|
|
return False
|
2014-08-21 09:40:38 +00:00
|
|
|
if validate and self.sanitize:
|
2015-03-07 00:36:18 +00:00
|
|
|
return html_sanitize(value, strip_style=self.strip_style)
|
2014-08-21 09:40:38 +00:00
|
|
|
return value
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Date(Field):
|
|
|
|
type = 'date'
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def today(*args):
|
|
|
|
""" Return the current day in the format expected by the ORM.
|
|
|
|
This function may be used to compute default values.
|
|
|
|
"""
|
|
|
|
return date.today().strftime(DATE_FORMAT)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def context_today(record, timestamp=None):
|
|
|
|
""" Return the current date as seen in the client's timezone in a format
|
|
|
|
fit for date fields. This method may be used to compute default
|
|
|
|
values.
|
|
|
|
|
|
|
|
:param datetime timestamp: optional datetime value to use instead of
|
|
|
|
the current date and time (must be a datetime, regular dates
|
|
|
|
can't be converted between timezones.)
|
|
|
|
:rtype: str
|
|
|
|
"""
|
|
|
|
today = timestamp or datetime.now()
|
|
|
|
context_today = None
|
|
|
|
tz_name = record._context.get('tz') or record.env.user.tz
|
|
|
|
if tz_name:
|
|
|
|
try:
|
|
|
|
today_utc = pytz.timezone('UTC').localize(today, is_dst=False) # UTC = no DST
|
|
|
|
context_today = today_utc.astimezone(pytz.timezone(tz_name))
|
|
|
|
except Exception:
|
|
|
|
_logger.debug("failed to compute context/client-specific today date, using UTC value for `today`",
|
|
|
|
exc_info=True)
|
|
|
|
return (context_today or today).strftime(DATE_FORMAT)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def from_string(value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Convert an ORM ``value`` into a :class:`date` value. """
|
2015-09-04 14:43:09 +00:00
|
|
|
if not value:
|
|
|
|
return None
|
2014-07-06 14:44:26 +00:00
|
|
|
value = value[:DATE_LENGTH]
|
|
|
|
return datetime.strptime(value, DATE_FORMAT).date()
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def to_string(value):
|
|
|
|
""" Convert a :class:`date` value into the format expected by the ORM. """
|
2015-09-04 14:43:09 +00:00
|
|
|
return value.strftime(DATE_FORMAT) if value else False
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-07-06 14:44:26 +00:00
|
|
|
if not value:
|
|
|
|
return False
|
|
|
|
if isinstance(value, basestring):
|
2014-08-07 10:56:13 +00:00
|
|
|
if validate:
|
|
|
|
# force parsing for validation
|
|
|
|
self.from_string(value)
|
|
|
|
return value[:DATE_LENGTH]
|
|
|
|
return self.to_string(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-10 08:23:41 +00:00
|
|
|
if not value:
|
|
|
|
return ''
|
|
|
|
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Datetime(Field):
|
|
|
|
type = 'datetime'
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def now(*args):
|
|
|
|
""" Return the current day and time in the format expected by the ORM.
|
|
|
|
This function may be used to compute default values.
|
|
|
|
"""
|
|
|
|
return datetime.now().strftime(DATETIME_FORMAT)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def context_timestamp(record, timestamp):
|
|
|
|
"""Returns the given timestamp converted to the client's timezone.
|
|
|
|
This method is *not* meant for use as a _defaults initializer,
|
|
|
|
because datetime fields are automatically converted upon
|
|
|
|
display on client side. For _defaults you :meth:`fields.datetime.now`
|
|
|
|
should be used instead.
|
|
|
|
|
|
|
|
:param datetime timestamp: naive datetime value (expressed in UTC)
|
|
|
|
to be converted to the client timezone
|
|
|
|
:rtype: datetime
|
|
|
|
:return: timestamp converted to timezone-aware datetime in context
|
|
|
|
timezone
|
|
|
|
"""
|
|
|
|
assert isinstance(timestamp, datetime), 'Datetime instance expected'
|
|
|
|
tz_name = record._context.get('tz') or record.env.user.tz
|
2014-10-29 12:42:45 +00:00
|
|
|
utc_timestamp = pytz.utc.localize(timestamp, is_dst=False) # UTC = no DST
|
2014-07-06 14:44:26 +00:00
|
|
|
if tz_name:
|
|
|
|
try:
|
|
|
|
context_tz = pytz.timezone(tz_name)
|
|
|
|
return utc_timestamp.astimezone(context_tz)
|
|
|
|
except Exception:
|
|
|
|
_logger.debug("failed to compute context/client-specific timestamp, "
|
|
|
|
"using the UTC value",
|
|
|
|
exc_info=True)
|
2014-10-29 12:42:45 +00:00
|
|
|
return utc_timestamp
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def from_string(value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Convert an ORM ``value`` into a :class:`datetime` value. """
|
2015-09-04 14:43:09 +00:00
|
|
|
if not value:
|
|
|
|
return None
|
2014-07-06 14:44:26 +00:00
|
|
|
value = value[:DATETIME_LENGTH]
|
|
|
|
if len(value) == DATE_LENGTH:
|
|
|
|
value += " 00:00:00"
|
|
|
|
return datetime.strptime(value, DATETIME_FORMAT)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def to_string(value):
|
|
|
|
""" Convert a :class:`datetime` value into the format expected by the ORM. """
|
2015-09-04 14:43:09 +00:00
|
|
|
return value.strftime(DATETIME_FORMAT) if value else False
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-07-06 14:44:26 +00:00
|
|
|
if not value:
|
|
|
|
return False
|
|
|
|
if isinstance(value, basestring):
|
2014-08-07 10:56:13 +00:00
|
|
|
if validate:
|
|
|
|
# force parsing for validation
|
|
|
|
self.from_string(value)
|
|
|
|
value = value[:DATETIME_LENGTH]
|
|
|
|
if len(value) == DATE_LENGTH:
|
|
|
|
value += " 00:00:00"
|
|
|
|
return value
|
|
|
|
return self.to_string(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-10 08:23:41 +00:00
|
|
|
if not value:
|
|
|
|
return ''
|
|
|
|
return self.from_string(value) if env.context.get('export_raw_data') else ustr(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-05 14:42:59 +00:00
|
|
|
def convert_to_display_name(self, value, record=None):
|
|
|
|
assert record, 'Record expected'
|
|
|
|
return Datetime.to_string(Datetime.context_timestamp(record, Datetime.from_string(value)))
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class Binary(Field):
|
|
|
|
type = 'binary'
|
|
|
|
|
|
|
|
|
|
|
|
class Selection(Field):
|
2014-08-22 15:51:20 +00:00
|
|
|
"""
|
|
|
|
:param selection: specifies the possible values for this field.
|
2015-07-06 15:39:19 +00:00
|
|
|
It is given as either a list of pairs (``value``, ``string``), or a
|
2014-08-22 15:51:20 +00:00
|
|
|
model method, or a method name.
|
|
|
|
:param selection_add: provides an extension of the selection in the case
|
2015-07-06 15:39:19 +00:00
|
|
|
of an overridden field. It is a list of pairs (``value``, ``string``).
|
2014-08-22 15:51:20 +00:00
|
|
|
|
2015-07-06 15:39:19 +00:00
|
|
|
The attribute ``selection`` is mandatory except in the case of
|
2014-08-22 15:51:20 +00:00
|
|
|
:ref:`related fields <field-related>` or :ref:`field extensions
|
|
|
|
<field-incremental-definition>`.
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
|
|
|
type = 'selection'
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'selection': None, # [(value, string), ...], function or method name
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def __init__(self, selection=None, string=None, **kwargs):
|
|
|
|
if callable(selection):
|
|
|
|
from openerp import api
|
|
|
|
selection = api.expected(api.model, selection)
|
|
|
|
super(Selection, self).__init__(selection=selection, string=string, **kwargs)
|
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
def _setup_regular(self, env):
|
|
|
|
super(Selection, self)._setup_regular(env)
|
2014-10-14 08:11:40 +00:00
|
|
|
assert self.selection is not None, "Field %s without selection" % self
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
def _setup_related(self, env):
|
|
|
|
super(Selection, self)._setup_related(env)
|
|
|
|
# selection must be computed on related field
|
|
|
|
field = self.related_field
|
|
|
|
self.selection = lambda model: field._description_selection(model.env)
|
|
|
|
|
2014-10-08 08:47:25 +00:00
|
|
|
def set_class_name(self, cls, name):
|
|
|
|
super(Selection, self).set_class_name(cls, name)
|
|
|
|
# determine selection (applying 'selection_add' extensions)
|
|
|
|
for field in resolve_all_mro(cls, name, reverse=True):
|
[IMP] fields: add mechanism to extend a selection field
If a selection field is defined by a list as selection, such as:
state = fields.Selection([('a', 'A'), ('b', 'B')])
one can extend it by inheritance by redefining the field, as:
state = fields.Selection(selection_add=[('c', 'C')])
The result is that the selection field will have the list
[('a', 'A'), ('b', 'B'), ('c', 'C')] as selection.
2014-07-10 20:04:03 +00:00
|
|
|
if isinstance(field, type(self)):
|
|
|
|
# We cannot use field.selection or field.selection_add here
|
2015-07-06 15:39:19 +00:00
|
|
|
# because those attributes are overridden by ``set_class_name``.
|
[IMP] fields: add mechanism to extend a selection field
If a selection field is defined by a list as selection, such as:
state = fields.Selection([('a', 'A'), ('b', 'B')])
one can extend it by inheritance by redefining the field, as:
state = fields.Selection(selection_add=[('c', 'C')])
The result is that the selection field will have the list
[('a', 'A'), ('b', 'B'), ('c', 'C')] as selection.
2014-07-10 20:04:03 +00:00
|
|
|
if 'selection' in field._attrs:
|
2015-03-16 14:31:43 +00:00
|
|
|
self.selection = field._attrs['selection']
|
[IMP] fields: add mechanism to extend a selection field
If a selection field is defined by a list as selection, such as:
state = fields.Selection([('a', 'A'), ('b', 'B')])
one can extend it by inheritance by redefining the field, as:
state = fields.Selection(selection_add=[('c', 'C')])
The result is that the selection field will have the list
[('a', 'A'), ('b', 'B'), ('c', 'C')] as selection.
2014-07-10 20:04:03 +00:00
|
|
|
if 'selection_add' in field._attrs:
|
2015-02-11 11:02:21 +00:00
|
|
|
# use an OrderedDict to update existing values
|
|
|
|
selection_add = field._attrs['selection_add']
|
2015-03-16 14:31:43 +00:00
|
|
|
self.selection = OrderedDict(self.selection + selection_add).items()
|
[IMP] fields: add mechanism to extend a selection field
If a selection field is defined by a list as selection, such as:
state = fields.Selection([('a', 'A'), ('b', 'B')])
one can extend it by inheritance by redefining the field, as:
state = fields.Selection(selection_add=[('c', 'C')])
The result is that the selection field will have the list
[('a', 'A'), ('b', 'B'), ('c', 'C')] as selection.
2014-07-10 20:04:03 +00:00
|
|
|
else:
|
2015-03-16 14:31:43 +00:00
|
|
|
self.selection = None
|
[IMP] fields: add mechanism to extend a selection field
If a selection field is defined by a list as selection, such as:
state = fields.Selection([('a', 'A'), ('b', 'B')])
one can extend it by inheritance by redefining the field, as:
state = fields.Selection(selection_add=[('c', 'C')])
The result is that the selection field will have the list
[('a', 'A'), ('b', 'B'), ('c', 'C')] as selection.
2014-07-10 20:04:03 +00:00
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
def _description_selection(self, env):
|
|
|
|
""" return the selection list (pairs (value, label)); labels are
|
|
|
|
translated according to context language
|
|
|
|
"""
|
|
|
|
selection = self.selection
|
|
|
|
if isinstance(selection, basestring):
|
|
|
|
return getattr(env[self.model_name], selection)()
|
|
|
|
if callable(selection):
|
|
|
|
return selection(env[self.model_name])
|
|
|
|
|
|
|
|
# translate selection labels
|
|
|
|
if env.lang:
|
|
|
|
name = "%s,%s" % (self.model_name, self.name)
|
|
|
|
translate = partial(
|
|
|
|
env['ir.translation']._get_source, name, 'selection', env.lang)
|
2014-10-22 09:41:33 +00:00
|
|
|
return [(value, translate(label) if label else label) for value, label in selection]
|
2014-07-06 14:44:26 +00:00
|
|
|
else:
|
|
|
|
return selection
|
|
|
|
|
|
|
|
@property
|
|
|
|
def _column_selection(self):
|
|
|
|
if isinstance(self.selection, basestring):
|
|
|
|
method = self.selection
|
|
|
|
return lambda self, *a, **kw: getattr(self, method)(*a, **kw)
|
|
|
|
else:
|
|
|
|
return self.selection
|
|
|
|
|
|
|
|
def get_values(self, env):
|
|
|
|
""" return a list of the possible values """
|
|
|
|
selection = self.selection
|
|
|
|
if isinstance(selection, basestring):
|
|
|
|
selection = getattr(env[self.model_name], selection)()
|
|
|
|
elif callable(selection):
|
|
|
|
selection = selection(env[self.model_name])
|
|
|
|
return [value for value, _ in selection]
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
[FIX] fields: do not revalidate field values unless they are being modified
In the previous implementation of the new API fields,
both fields.Selection and fields.Reference were performing
early validation of their `value` as soon as it entered
the cache, either by being read, written, or computed.
This is a source of trouble and performance problems,
and is unnecessary, as we should consider that the database
always contains valid values. If that is not the case it
means it was modified externally and is an exception that
should be handled externally as well.
Revalidating selection/reference values can be expensive
when the domain of values is dynamic and requires extra
database queries, with extra access rights control, etc.
This patch adds a `validate` parameter to `convert_to_cache`,
allowing to turn off the re-validation on demand. The ORM
will turn off validation whenever the value being converted
is supposed to be already validated, such as when reading it
from the database.
The parameter is currently ignored by all other fields,
and defaults to True so validation is performed in all other
caes.
2014-07-23 10:30:24 +00:00
|
|
|
if not validate:
|
|
|
|
return value or False
|
2014-07-09 15:06:29 +00:00
|
|
|
if value in self.get_values(record.env):
|
2014-07-06 14:44:26 +00:00
|
|
|
return value
|
|
|
|
elif not value:
|
|
|
|
return False
|
|
|
|
raise ValueError("Wrong value for %s: %r" % (self, value))
|
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
|
|
|
if not isinstance(self.selection, list):
|
|
|
|
# FIXME: this reproduces an existing buggy behavior!
|
2015-07-10 08:23:41 +00:00
|
|
|
return value if value else ''
|
2014-07-06 14:44:26 +00:00
|
|
|
for item in self._description_selection(env):
|
|
|
|
if item[0] == value:
|
|
|
|
return item[1]
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
class Reference(Selection):
|
|
|
|
type = 'reference'
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'size': None, # maximum size of values (deprecated)
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-03-11 11:37:10 +00:00
|
|
|
_related_size = property(attrgetter('size'))
|
|
|
|
_column_size = property(attrgetter('size'))
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
def _setup_regular(self, env):
|
|
|
|
super(Reference, self)._setup_regular(env)
|
2014-10-22 10:49:12 +00:00
|
|
|
assert isinstance(self.size, (NoneType, int)), \
|
2014-10-08 14:32:48 +00:00
|
|
|
"Reference field %s with non-integer size %r" % (self, self.size)
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-07-06 14:44:26 +00:00
|
|
|
if isinstance(value, BaseModel):
|
2014-07-09 15:06:29 +00:00
|
|
|
if ((not validate or value._name in self.get_values(record.env))
|
[FIX] fields: do not revalidate field values unless they are being modified
In the previous implementation of the new API fields,
both fields.Selection and fields.Reference were performing
early validation of their `value` as soon as it entered
the cache, either by being read, written, or computed.
This is a source of trouble and performance problems,
and is unnecessary, as we should consider that the database
always contains valid values. If that is not the case it
means it was modified externally and is an exception that
should be handled externally as well.
Revalidating selection/reference values can be expensive
when the domain of values is dynamic and requires extra
database queries, with extra access rights control, etc.
This patch adds a `validate` parameter to `convert_to_cache`,
allowing to turn off the re-validation on demand. The ORM
will turn off validation whenever the value being converted
is supposed to be already validated, such as when reading it
from the database.
The parameter is currently ignored by all other fields,
and defaults to True so validation is performed in all other
caes.
2014-07-23 10:30:24 +00:00
|
|
|
and len(value) <= 1):
|
2014-07-09 15:06:29 +00:00
|
|
|
return value.with_env(record.env) or False
|
2014-07-06 14:44:26 +00:00
|
|
|
elif isinstance(value, basestring):
|
|
|
|
res_model, res_id = value.split(',')
|
2014-07-09 15:06:29 +00:00
|
|
|
return record.env[res_model].browse(int(res_id))
|
2014-07-06 14:44:26 +00:00
|
|
|
elif not value:
|
|
|
|
return False
|
|
|
|
raise ValueError("Wrong value for %s: %r" % (self, value))
|
|
|
|
|
|
|
|
def convert_to_read(self, value, use_name_get=True):
|
|
|
|
return "%s,%s" % (value._name, value.id) if value else False
|
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-10 08:23:41 +00:00
|
|
|
return value.name_get()[0][1] if value else ''
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-05 14:42:59 +00:00
|
|
|
def convert_to_display_name(self, value, record=None):
|
2014-07-06 14:44:26 +00:00
|
|
|
return ustr(value and value.display_name)
|
|
|
|
|
|
|
|
|
|
|
|
class _Relational(Field):
|
|
|
|
""" Abstract class for relational fields. """
|
|
|
|
relational = True
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'domain': [], # domain for searching values
|
|
|
|
'context': {}, # context for searching values
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-14 16:17:47 +00:00
|
|
|
def _setup_regular(self, env):
|
|
|
|
super(_Relational, self)._setup_regular(env)
|
2014-11-25 16:04:32 +00:00
|
|
|
if self.comodel_name not in env.registry:
|
|
|
|
_logger.warning("Field %s with unknown comodel_name %r"
|
|
|
|
% (self, self.comodel_name))
|
|
|
|
self.comodel_name = '_unknown'
|
2014-10-08 14:32:48 +00:00
|
|
|
|
2014-10-09 09:01:23 +00:00
|
|
|
@property
|
|
|
|
def _related_domain(self):
|
|
|
|
if callable(self.domain):
|
|
|
|
# will be called with another model than self's
|
|
|
|
return lambda recs: self.domain(recs.env[self.model_name])
|
|
|
|
else:
|
|
|
|
# maybe not correct if domain is a string...
|
|
|
|
return self.domain
|
|
|
|
|
|
|
|
_related_context = property(attrgetter('context'))
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
_description_relation = property(attrgetter('comodel_name'))
|
|
|
|
_description_context = property(attrgetter('context'))
|
|
|
|
|
|
|
|
def _description_domain(self, env):
|
|
|
|
return self.domain(env[self.model_name]) if callable(self.domain) else self.domain
|
|
|
|
|
|
|
|
_column_obj = property(attrgetter('comodel_name'))
|
|
|
|
_column_domain = property(attrgetter('domain'))
|
|
|
|
_column_context = property(attrgetter('context'))
|
|
|
|
|
|
|
|
def null(self, env):
|
|
|
|
return env[self.comodel_name]
|
|
|
|
|
|
|
|
def modified(self, records):
|
2014-08-08 12:04:40 +00:00
|
|
|
# Invalidate cache for self.inverse_fields, too. Note that recomputation
|
|
|
|
# of fields that depend on self.inverse_fields is already covered by the
|
2014-07-06 14:44:26 +00:00
|
|
|
# triggers (see above).
|
|
|
|
spec = super(_Relational, self).modified(records)
|
2014-08-08 12:04:40 +00:00
|
|
|
for invf in self.inverse_fields:
|
|
|
|
spec.append((invf, None))
|
2014-07-06 14:44:26 +00:00
|
|
|
return spec
|
|
|
|
|
|
|
|
|
|
|
|
class Many2one(_Relational):
|
2014-08-22 15:51:20 +00:00
|
|
|
""" The value of such a field is a recordset of size 0 (no
|
|
|
|
record) or 1 (a single record).
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param comodel_name: name of the target model (string)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param domain: an optional domain to set on candidate values on the
|
|
|
|
client side (domain or string)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param context: an optional context to use on the client side when
|
|
|
|
handling that field (dictionary)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param ondelete: what to do when the referred record is deleted;
|
|
|
|
possible values are: ``'set null'``, ``'restrict'``, ``'cascade'``
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param auto_join: whether JOINs are generated upon search through that
|
|
|
|
field (boolean, by default ``False``)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-08-22 15:51:20 +00:00
|
|
|
:param delegate: set it to ``True`` to make fields of the target model
|
|
|
|
accessible from the current model (corresponds to ``_inherits``)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-07-06 15:39:19 +00:00
|
|
|
The attribute ``comodel_name`` is mandatory except in the case of related
|
2014-08-22 15:51:20 +00:00
|
|
|
fields or field extensions.
|
2014-07-06 14:44:26 +00:00
|
|
|
"""
|
|
|
|
type = 'many2one'
|
2015-03-12 15:39:12 +00:00
|
|
|
_slots = {
|
|
|
|
'ondelete': 'set null', # what to do when value is deleted
|
|
|
|
'auto_join': False, # whether joins are generated upon search
|
|
|
|
'delegate': False, # whether self implements delegation
|
|
|
|
}
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def __init__(self, comodel_name=None, string=None, **kwargs):
|
|
|
|
super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs)
|
|
|
|
|
2014-10-08 08:47:25 +00:00
|
|
|
def set_class_name(self, cls, name):
|
|
|
|
super(Many2one, self).set_class_name(cls, name)
|
2014-07-06 14:44:26 +00:00
|
|
|
# determine self.delegate
|
2014-10-08 08:47:25 +00:00
|
|
|
if not self.delegate:
|
|
|
|
self.delegate = name in cls._inherits.values()
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
_column_ondelete = property(attrgetter('ondelete'))
|
|
|
|
_column_auto_join = property(attrgetter('auto_join'))
|
|
|
|
|
|
|
|
def _update(self, records, value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Update the cached value of ``self`` for ``records`` with ``value``. """
|
2014-07-06 14:44:26 +00:00
|
|
|
records._cache[self] = value
|
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-11-27 16:05:48 +00:00
|
|
|
if isinstance(value, (NoneType, int, long)):
|
2014-07-09 15:06:29 +00:00
|
|
|
return record.env[self.comodel_name].browse(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
if isinstance(value, BaseModel):
|
|
|
|
if value._name == self.comodel_name and len(value) <= 1:
|
2014-07-09 15:06:29 +00:00
|
|
|
return value.with_env(record.env)
|
2014-07-06 14:44:26 +00:00
|
|
|
raise ValueError("Wrong value for %s: %r" % (self, value))
|
|
|
|
elif isinstance(value, tuple):
|
2014-07-09 15:06:29 +00:00
|
|
|
return record.env[self.comodel_name].browse(value[0])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif isinstance(value, dict):
|
2014-07-09 15:06:29 +00:00
|
|
|
return record.env[self.comodel_name].new(value)
|
2014-07-06 14:44:26 +00:00
|
|
|
else:
|
2014-11-27 16:03:29 +00:00
|
|
|
return self.null(record.env)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def convert_to_read(self, value, use_name_get=True):
|
|
|
|
if use_name_get and value:
|
|
|
|
# evaluate name_get() as superuser, because the visibility of a
|
|
|
|
# many2one field value (id and name) depends on the current record's
|
|
|
|
# access rights, and not the value's access rights.
|
2014-09-16 10:28:32 +00:00
|
|
|
try:
|
2015-03-02 13:26:39 +00:00
|
|
|
value_sudo = value.sudo()
|
|
|
|
# performance trick: make sure that all records of the same
|
|
|
|
# model as value in value.env will be prefetched in value_sudo.env
|
|
|
|
value_sudo.env.prefetch[value._name].update(value.env.prefetch[value._name])
|
|
|
|
return value_sudo.name_get()[0]
|
2014-09-16 10:28:32 +00:00
|
|
|
except MissingError:
|
|
|
|
# Should not happen, unless the foreign key is missing.
|
|
|
|
return False
|
2014-07-06 14:44:26 +00:00
|
|
|
else:
|
|
|
|
return value.id
|
|
|
|
|
|
|
|
def convert_to_write(self, value, target=None, fnames=None):
|
2014-08-04 09:12:39 +00:00
|
|
|
return value.id
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
def convert_to_onchange(self, value):
|
|
|
|
return value.id
|
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-10 08:23:41 +00:00
|
|
|
return value.name_get()[0][1] if value else ''
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-05 14:42:59 +00:00
|
|
|
def convert_to_display_name(self, value, record=None):
|
2014-07-06 14:44:26 +00:00
|
|
|
return ustr(value.display_name)
|
|
|
|
|
|
|
|
|
2014-09-02 10:02:23 +00:00
|
|
|
class UnionUpdate(SpecialValue):
|
|
|
|
""" Placeholder for a value update; when this value is taken from the cache,
|
|
|
|
it returns ``record[field.name] | value`` and stores it in the cache.
|
|
|
|
"""
|
|
|
|
def __init__(self, field, record, value):
|
|
|
|
self.args = (field, record, value)
|
|
|
|
|
|
|
|
def get(self):
|
|
|
|
field, record, value = self.args
|
|
|
|
# in order to read the current field's value, remove self from cache
|
|
|
|
del record._cache[field]
|
|
|
|
# read the current field's value, and update it in cache only
|
|
|
|
record._cache[field] = new_value = record[field.name] | value
|
|
|
|
return new_value
|
|
|
|
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
class _RelationalMulti(_Relational):
|
|
|
|
""" Abstract class for relational fields *2many. """
|
|
|
|
|
|
|
|
def _update(self, records, value):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Update the cached value of ``self`` for ``records`` with ``value``. """
|
2014-07-06 14:44:26 +00:00
|
|
|
for record in records:
|
2014-09-02 10:02:23 +00:00
|
|
|
if self in record._cache:
|
|
|
|
record._cache[self] = record[self.name] | value
|
|
|
|
else:
|
|
|
|
record._cache[self] = UnionUpdate(self, record, value)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2014-07-09 15:06:29 +00:00
|
|
|
def convert_to_cache(self, value, record, validate=True):
|
2014-07-06 14:44:26 +00:00
|
|
|
if isinstance(value, BaseModel):
|
|
|
|
if value._name == self.comodel_name:
|
2014-07-09 15:06:29 +00:00
|
|
|
return value.with_env(record.env)
|
2014-07-06 14:44:26 +00:00
|
|
|
elif isinstance(value, list):
|
|
|
|
# value is a list of record ids or commands
|
2015-06-30 14:23:16 +00:00
|
|
|
comodel = record.env[self.comodel_name]
|
|
|
|
ids = OrderedSet(record[self.name].ids)
|
|
|
|
# modify ids with the commands
|
2014-07-06 14:44:26 +00:00
|
|
|
for command in value:
|
|
|
|
if isinstance(command, (tuple, list)):
|
|
|
|
if command[0] == 0:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.add(comodel.new(command[2]).id)
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 1:
|
2015-06-30 14:23:16 +00:00
|
|
|
comodel.browse(command[1]).update(command[2])
|
|
|
|
ids.add(command[1])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 2:
|
2014-07-09 15:06:29 +00:00
|
|
|
# note: the record will be deleted by write()
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.discard(command[1])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 3:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.discard(command[1])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 4:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.add(command[1])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 5:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.clear()
|
2014-07-06 14:44:26 +00:00
|
|
|
elif command[0] == 6:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids = OrderedSet(command[2])
|
2014-07-06 14:44:26 +00:00
|
|
|
elif isinstance(command, dict):
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.add(comodel.new(command).id)
|
2014-07-06 14:44:26 +00:00
|
|
|
else:
|
2015-06-30 14:23:16 +00:00
|
|
|
ids.add(command)
|
|
|
|
# return result as a recordset
|
|
|
|
return comodel.browse(list(ids))
|
2014-07-06 14:44:26 +00:00
|
|
|
elif not value:
|
2014-07-09 15:06:29 +00:00
|
|
|
return self.null(record.env)
|
2014-07-06 14:44:26 +00:00
|
|
|
raise ValueError("Wrong value for %s: %s" % (self, value))
|
|
|
|
|
|
|
|
def convert_to_read(self, value, use_name_get=True):
|
|
|
|
return value.ids
|
|
|
|
|
|
|
|
def convert_to_write(self, value, target=None, fnames=None):
|
|
|
|
# remove/delete former records
|
|
|
|
if target is None:
|
|
|
|
set_ids = []
|
|
|
|
result = [(6, 0, set_ids)]
|
|
|
|
add_existing = lambda id: set_ids.append(id)
|
|
|
|
else:
|
|
|
|
tag = 2 if self.type == 'one2many' else 3
|
|
|
|
result = [(tag, record.id) for record in target[self.name] - value]
|
|
|
|
add_existing = lambda id: result.append((4, id))
|
|
|
|
|
|
|
|
if fnames is None:
|
2014-08-08 12:04:40 +00:00
|
|
|
# take all fields in cache, except the inverses of self
|
2014-07-06 14:44:26 +00:00
|
|
|
fnames = set(value._fields) - set(MAGIC_COLUMNS)
|
2014-08-08 12:04:40 +00:00
|
|
|
for invf in self.inverse_fields:
|
|
|
|
fnames.discard(invf.name)
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
# add new and existing records
|
|
|
|
for record in value:
|
2014-11-25 10:25:50 +00:00
|
|
|
if not record.id:
|
|
|
|
values = {k: v for k, v in record._cache.iteritems() if k in fnames}
|
2014-07-06 14:44:26 +00:00
|
|
|
values = record._convert_to_write(values)
|
2014-11-25 10:25:50 +00:00
|
|
|
result.append((0, 0, values))
|
|
|
|
elif record._is_dirty():
|
2014-11-24 14:31:18 +00:00
|
|
|
values = {k: record._cache[k] for k in record._get_dirty() if k in fnames}
|
2014-07-06 14:44:26 +00:00
|
|
|
values = record._convert_to_write(values)
|
2014-11-25 10:25:50 +00:00
|
|
|
result.append((1, record.id, values))
|
2014-07-06 14:44:26 +00:00
|
|
|
else:
|
|
|
|
add_existing(record.id)
|
|
|
|
|
|
|
|
return result
|
|
|
|
|
|
|
|
def convert_to_export(self, value, env):
|
2015-07-10 08:23:41 +00:00
|
|
|
return ','.join(name for id, name in value.name_get()) if value else ''
|
2014-07-06 14:44:26 +00:00
|
|
|
|
2015-01-05 14:42:59 +00:00
|
|
|
    def convert_to_display_name(self, value, record=None):
        """ Deliberately unsupported: an x2many value has no single scalar
            display name.
        """
        raise NotImplementedError()
|
|
|
|
|
[FIX] fields: *2many related fields should not be read as superuser
one2many and many2many fields depends on the security rules.
For instance, on products, with the taxes_id many2many fields, you only see the taxes of your own company, thanks to the multi company security rule
With related *2many fields, if you browse it with superuser, you will have all records of the one2many fields, even those you are not allowed to see, as superuser ignores security rules.
For instance, taxes_id of product.product is a related of taxes_id of product_template (through the inherits on product.template), and you should see the same taxes on the product template and on the product product (variant). This is not the case if the fields is read using the superuser
2014-08-20 12:07:43 +00:00
|
|
|
def _compute_related(self, records):
|
2015-07-06 15:39:19 +00:00
|
|
|
""" Compute the related field ``self`` on ``records``. """
|
[FIX] fields: *2many related fields should not be read as superuser
one2many and many2many fields depends on the security rules.
For instance, on products, with the taxes_id many2many fields, you only see the taxes of your own company, thanks to the multi company security rule
With related *2many fields, if you browse it with superuser, you will have all records of the one2many fields, even those you are not allowed to see, as superuser ignores security rules.
For instance, taxes_id of product.product is a related of taxes_id of product_template (through the inherits on product.template), and you should see the same taxes on the product template and on the product product (variant). This is not the case if the fields is read using the superuser
2014-08-20 12:07:43 +00:00
|
|
|
for record in records:
|
|
|
|
value = record
|
|
|
|
# traverse the intermediate fields, and keep at most one record
|
|
|
|
for name in self.related[:-1]:
|
|
|
|
value = value[name][:1]
|
|
|
|
record[self.name] = value[self.related[-1]]
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
|
|
|
|
class One2many(_RelationalMulti):
    """ One2many field; the value of such a field is the recordset of all the
        records in ``comodel_name`` such that the field ``inverse_name`` is
        equal to the current record.

        :param comodel_name: name of the target model (string)

        :param inverse_name: name of the inverse ``Many2one`` field in
            ``comodel_name`` (string)

        :param domain: an optional domain to set on candidate values on the
            client side (domain or string)

        :param context: an optional context to use on the client side when
            handling that field (dictionary)

        :param auto_join: whether JOINs are generated upon search through that
            field (boolean, by default ``False``)

        :param limit: optional limit to use upon read (integer)

        The attributes ``comodel_name`` and ``inverse_name`` are mandatory
        except in the case of related fields or field extensions.
    """
    type = 'one2many'
    # per-instance attribute slots with their default values
    _slots = {
        'inverse_name': None,           # name of the inverse field
        'auto_join': False,             # whether joins are generated upon search
        'limit': None,                  # optional limit to use upon read
        'copy': False,                  # o2m are not copied by default
    }

    def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs):
        # forward the field attributes as named arguments to the generic setup
        super(One2many, self).__init__(
            comodel_name=comodel_name,
            inverse_name=inverse_name,
            string=string,
            **kwargs
        )

    def _setup_regular(self, env):
        """ Complete the setup of ``self``: link it to its inverse field. """
        super(One2many, self)._setup_regular(env)

        if self.inverse_name:
            # link self to its inverse field and vice-versa
            comodel = env[self.comodel_name]
            invf = comodel._fields[self.inverse_name]
            # In some rare cases, a ``One2many`` field can link to ``Int`` field
            # (res_model/res_id pattern). Only inverse the field if this is
            # a ``Many2one`` field.
            if isinstance(invf, Many2one):
                self.inverse_fields += (invf,)
                invf.inverse_fields += (self,)

    # NOTE(review): the ``_description_*`` / ``_column_*`` properties below are
    # presumably picked up by name by the field-description and old-API column
    # machinery defined elsewhere in this file — confirm before renaming.
    _description_relation_field = property(attrgetter('inverse_name'))

    _column_fields_id = property(attrgetter('inverse_name'))
    _column_auto_join = property(attrgetter('auto_join'))
    _column_limit = property(attrgetter('limit'))
|
|
|
|
|
|
|
|
|
|
|
|
class Many2many(_RelationalMulti):
    """ Many2many field; the value of such a field is the recordset.

        :param comodel_name: name of the target model (string)

        The attribute ``comodel_name`` is mandatory except in the case of
        related fields or field extensions.

        :param relation: optional name of the table that stores the relation in
            the database (string)

        :param column1: optional name of the column referring to "these" records
            in the table ``relation`` (string)

        :param column2: optional name of the column referring to "those" records
            in the table ``relation`` (string)

        The attributes ``relation``, ``column1`` and ``column2`` are optional.
        If not given, names are automatically generated from model names,
        provided ``model_name`` and ``comodel_name`` are different!

        :param domain: an optional domain to set on candidate values on the
            client side (domain or string)

        :param context: an optional context to use on the client side when
            handling that field (dictionary)

        :param limit: optional limit to use upon read (integer)
    """
    type = 'many2many'
    # per-instance attribute slots with their default values
    _slots = {
        'relation': None,               # name of table
        'column1': None,                # column of table referring to model
        'column2': None,                # column of table referring to comodel
        'limit': None,                  # optional limit to use upon read
    }

    def __init__(self, comodel_name=None, relation=None, column1=None, column2=None,
                 string=None, **kwargs):
        # forward the field attributes as named arguments to the generic setup
        super(Many2many, self).__init__(
            comodel_name=comodel_name,
            relation=relation,
            column1=column1,
            column2=column2,
            string=string,
            **kwargs
        )

    def _setup_regular(self, env):
        """ Complete the setup of ``self``: determine its relation table and
            link it to its inverse field, if any.
        """
        super(Many2many, self)._setup_regular(env)

        if not self.relation and self.store:
            # retrieve self.relation from the corresponding column
            column = self.to_column()
            if isinstance(column, fields.many2many):
                self.relation, self.column1, self.column2 = \
                    column._sql_names(env[self.model_name])

        if self.relation:
            m2m = env.registry._m2m
            # if inverse field has already been setup, it is present in m2m;
            # the inverse uses the same table with the columns swapped
            invf = m2m.get((self.relation, self.column2, self.column1))
            if invf:
                self.inverse_fields += (invf,)
                invf.inverse_fields += (self,)
            else:
                # add self in m2m, so that its inverse field can find it
                m2m[(self.relation, self.column1, self.column2)] = self

    # NOTE(review): the ``_column_*`` properties below are presumably picked up
    # by name by the old-API column machinery defined elsewhere in this file —
    # confirm before renaming.
    _column_rel = property(attrgetter('relation'))
    _column_id1 = property(attrgetter('column1'))
    _column_id2 = property(attrgetter('column2'))
    _column_limit = property(attrgetter('limit'))
|
|
|
|
|
|
|
|
|
2015-01-06 09:13:54 +00:00
|
|
|
class Serialized(Field):
    """ Minimal support for existing sparse and serialized fields. """
    type = 'serialized'

    def convert_to_cache(self, value, record, validate=True):
        """ Keep the raw value in cache, normalizing falsy values to ``{}``. """
        if value:
            return value
        return {}
|
|
|
|
|
|
|
|
|
2014-07-06 14:44:26 +00:00
|
|
|
class Id(Field):
    """ Special case for field 'id'. """
    type = 'integer'
    # per-instance attribute slots with their default values
    _slots = {
        'string': 'ID',
        'store': True,
        'readonly': True,
    }

    def to_column(self):
        """ Create (and memoize on ``self``) the old-API integer column. """
        self.column = fields.integer(self.string)
        return self.column

    def __get__(self, record, owner):
        """ Descriptor getter: return the database id of ``record``. """
        if record is None:
            return self         # the field is accessed through the class owner
        if not record:
            return False        # empty recordset: there is no id to return
        # ensure_one presumably raises unless the recordset holds exactly one
        # record (defined elsewhere); the id is the single element of _ids
        return record.ensure_one()._ids[0]

    def __set__(self, record, value):
        """ Descriptor setter: the id of a record is immutable by design. """
        raise TypeError("field 'id' cannot be assigned")
|
|
|
|
|
|
|
|
# imported here to avoid dependency cycle issues
|
2015-03-03 13:25:10 +00:00
|
|
|
from openerp import SUPERUSER_ID, registry
|
2014-09-11 08:31:10 +00:00
|
|
|
from .exceptions import Warning, AccessError, MissingError
|
2014-07-06 14:44:26 +00:00
|
|
|
from .models import BaseModel, MAGIC_COLUMNS
|
|
|
|
from .osv import fields
|