2011-03-02 18:56:06 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2011-07-13 10:50:58 +00:00
|
|
|
|
2011-10-05 17:58:26 +00:00
|
|
|
import ast
|
2011-08-04 09:20:43 +00:00
|
|
|
import base64
|
|
|
|
import csv
|
|
|
|
import glob
|
2011-09-05 13:05:38 +00:00
|
|
|
import itertools
|
2012-02-09 16:15:42 +00:00
|
|
|
import logging
|
2011-08-04 09:20:43 +00:00
|
|
|
import operator
|
2012-01-23 10:07:44 +00:00
|
|
|
import datetime
|
2012-02-10 14:00:21 +00:00
|
|
|
import hashlib
|
2011-08-04 09:20:43 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import simplejson
|
2011-08-29 15:52:56 +00:00
|
|
|
import time
|
2011-12-16 00:44:02 +00:00
|
|
|
import urllib2
|
2011-10-05 17:58:26 +00:00
|
|
|
import xmlrpclib
|
2011-09-07 07:28:18 +00:00
|
|
|
import zlib
|
2011-03-10 15:53:45 +00:00
|
|
|
from xml.etree import ElementTree
|
2011-03-24 20:11:25 +00:00
|
|
|
from cStringIO import StringIO
|
2011-03-02 18:56:06 +00:00
|
|
|
|
2011-10-05 17:58:26 +00:00
|
|
|
import babel.messages.pofile
|
2011-12-20 15:05:56 +00:00
|
|
|
import werkzeug.utils
|
2012-02-10 14:00:21 +00:00
|
|
|
import werkzeug.wrappers
|
2012-01-13 09:06:11 +00:00
|
|
|
try:
|
|
|
|
import xlwt
|
|
|
|
except ImportError:
|
|
|
|
xlwt = None
|
2011-10-05 17:58:26 +00:00
|
|
|
|
2012-01-16 10:43:29 +00:00
|
|
|
from .. import common
|
|
|
|
openerpweb = common.http
|
2011-03-03 14:55:52 +00:00
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
2011-09-05 11:03:09 +00:00
|
|
|
# OpenERP Web web Controllers
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
|
2011-11-02 11:11:05 +00:00
|
|
|
|
|
|
|
def concat_xml(file_list):
    """Concatenate xml files

    Reads each file once to feed the checksum, then merges every child of
    each file's root element under a single new root (named after the first
    file's root tag).

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        # empty input: empty document, checksum of nothing
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
            checksum.update(contents)
            fp.seek(0)
            xml = ElementTree.parse(fp).getroot()

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        # iterate the element directly instead of calling getchildren(),
        # which is deprecated and was removed in Python 3.9; behavior is
        # identical (direct children, in document order)
        for child in xml:
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
|
2011-11-02 11:11:05 +00:00
|
|
|
|
|
|
|
|
2012-01-13 15:01:10 +00:00
|
|
|
def concat_files(file_list, reader=None, intersperse=""):
    """ Concatenates contents of all provided files

    :param list(str) file_list: list of files to check
    :param function reader: reading procedure for each file
    :param str intersperse: string to intersperse between file contents
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    digest = hashlib.new('sha1')
    if not file_list:
        # nothing to read: empty result, checksum of nothing
        return '', digest.hexdigest()

    if reader is None:
        # default reader: whole-file slurp
        def reader(path):
            with open(path) as handle:
                return handle.read()

    chunks = []
    for path in file_list:
        chunk = reader(path)
        digest.update(chunk)
        chunks.append(chunk)

    return intersperse.join(chunks), digest.hexdigest()
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-01-22 23:32:02 +00:00
|
|
|
html_template = """<!DOCTYPE html>
|
|
|
|
<html style="height: 100%%">
|
|
|
|
<head>
|
2012-02-13 13:56:11 +00:00
|
|
|
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
|
2012-01-22 23:32:02 +00:00
|
|
|
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
|
|
|
|
<title>OpenERP</title>
|
|
|
|
<link rel="shortcut icon" href="/web/static/src/img/favicon.ico" type="image/x-icon"/>
|
|
|
|
%(css)s
|
|
|
|
%(js)s
|
|
|
|
<script type="text/javascript">
|
|
|
|
$(function() {
|
|
|
|
var s = new openerp.init(%(modules)s);
|
|
|
|
%(init)s
|
|
|
|
});
|
|
|
|
</script>
|
|
|
|
</head>
|
|
|
|
<body class="openerp" id="oe"></body>
|
|
|
|
</html>
|
|
|
|
"""
|
2011-10-18 20:13:37 +00:00
|
|
|
|
2011-07-22 14:28:24 +00:00
|
|
|
class WebClient(openerpweb.Controller):
    """Controller serving the web client itself: the bootstrap HTML page and
    the (optionally concatenated) js/css/qweb assets declared in the addons'
    manifests, plus translations and version info."""
    _cp_path = "/web/webclient"

    def server_wide_modules(self, req):
        """Return the server-wide modules that also exist as web addons."""
        addons = [i for i in req.config.server_wide_modules if i in openerpweb.addons_manifest]
        return addons

    def manifest_glob(self, req, addons, key):
        """Expand the glob patterns listed under ``key`` ('js', 'css' or
        'qweb') in each addon's manifest.

        :param addons: comma-separated addon names, or None to use the
                       server-wide modules
        :returns: list of (filesystem path, web path) pairs
        """
        if addons is None:
            addons = self.server_wide_modules(req)
        else:
            addons = addons.split(',')
        r = []
        for addon in addons:
            manifest = openerpweb.addons_manifest.get(addon, None)
            if not manifest:
                continue
            # ensure does not ends with /
            addons_path = os.path.join(manifest['addons_path'], '')[:-1]
            globlist = manifest.get(key, [])
            for pattern in globlist:
                for path in glob.glob(os.path.normpath(os.path.join(addons_path, addon, pattern))):
                    # web path = fs path with the addons dir prefix stripped
                    r.append( (path, path[len(addons_path):]))
        return r

    def manifest_list(self, req, mods, extension):
        """Return asset URLs for ``extension``: one concatenation endpoint in
        normal mode, one mtime-cache-busted URL per file in debug mode."""
        if not req.debug:
            path = '/web/webclient/' + extension
            if mods is not None:
                path += '?mods=' + mods
            return [path]
        return ['%s?debug=%s' % (wp, os.path.getmtime(fp)) for fp, wp in self.manifest_glob(req, mods, extension)]

    @openerpweb.jsonrequest
    def csslist(self, req, mods=None):
        """List css asset URLs for the given modules."""
        return self.manifest_list(req, mods, 'css')

    @openerpweb.jsonrequest
    def jslist(self, req, mods=None):
        """List js asset URLs for the given modules."""
        return self.manifest_list(req, mods, 'js')

    @openerpweb.jsonrequest
    def qweblist(self, req, mods=None):
        """List qweb template asset URLs for the given modules."""
        return self.manifest_list(req, mods, 'qweb')

    def get_last_modified(self, files):
        """ Returns the modification time of the most recently modified
        file provided

        :param list(str) files: names of files to check
        :return: most recent modification time amongst the fileset
        :rtype: datetime.datetime
        """
        files = list(files)
        if files:
            return max(datetime.datetime.fromtimestamp(os.path.getmtime(f))
                       for f in files)
        # no files: epoch, so any If-Modified-Since header wins
        return datetime.datetime(1970, 1, 1)

    def make_conditional(self, req, response, last_modified=None, etag=None):
        """ Makes the provided response conditional based upon the request,
        and mandates revalidation from clients

        Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
        setting ``last_modified`` and ``etag`` correctly on the response object

        :param req: OpenERP request
        :type req: web.common.http.WebRequest
        :param response: Werkzeug response
        :type response: werkzeug.wrappers.Response
        :param datetime.datetime last_modified: last modification date of the response content
        :param str etag: some sort of checksum of the content (deep etag)
        :return: the response object provided
        :rtype: werkzeug.wrappers.Response
        """
        response.cache_control.must_revalidate = True
        response.cache_control.max_age = 0
        if last_modified:
            response.last_modified = last_modified
        if etag:
            response.set_etag(etag)
        return response.make_conditional(req.httprequest)

    @openerpweb.httprequest
    def css(self, req, mods=None):
        """Serve the concatenation of all css assets, with relative url()/
        @import references rewritten to absolute web paths."""
        files = list(self.manifest_glob(req, mods, 'css'))
        last_modified = self.get_last_modified(f[0] for f in files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        # fs path -> web path, used to absolutify relative uris
        file_map = dict(files)

        # match relative (non-absolute, non-http) @import and url() targets
        rx_import = re.compile(r"""@import\s+('|")(?!'|"|/|https?://)""", re.U)
        rx_url = re.compile(r"""url\s*\(\s*('|"|)(?!'|"|/|https?://)""", re.U)

        def reader(f):
            """read the a css file and absolutify all relative uris"""
            with open(f) as fp:
                data = fp.read()

            path = file_map[f]
            # convert FS path into web path
            web_dir = '/'.join(os.path.dirname(path).split(os.path.sep))

            data = re.sub(
                rx_import,
                r"""@import \1%s/""" % (web_dir,),
                data,
            )

            data = re.sub(
                rx_url,
                r"""url(\1%s/""" % (web_dir,),
                data,
            )
            return data

        content, checksum = concat_files((f[0] for f in files), reader)

        return self.make_conditional(
            req, req.make_response(content, [('Content-Type', 'text/css')]),
            last_modified, checksum)

    @openerpweb.httprequest
    def js(self, req, mods=None):
        """Serve the concatenation of all js assets (';'-joined for safety)."""
        files = [f[0] for f in self.manifest_glob(req, mods, 'js')]
        last_modified = self.get_last_modified(files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        content, checksum = concat_files(files, intersperse=';')

        return self.make_conditional(
            req, req.make_response(content, [('Content-Type', 'application/javascript')]),
            last_modified, checksum)

    @openerpweb.httprequest
    def qweb(self, req, mods=None):
        """Serve the concatenation of all qweb template files as one XML doc."""
        files = [f[0] for f in self.manifest_glob(req, mods, 'qweb')]
        last_modified = self.get_last_modified(files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        content,checksum = concat_xml(files)

        return self.make_conditional(
            req, req.make_response(content, [('Content-Type', 'text/xml')]),
            last_modified, checksum)

    @openerpweb.httprequest
    def home(self, req, s_action=None, **kw):
        """Render the web client bootstrap page from html_template."""
        js = "\n        ".join('<script type="text/javascript" src="%s"></script>'%i for i in self.manifest_list(req, None, 'js'))
        css = "\n        ".join('<link rel="stylesheet" href="%s">'%i for i in self.manifest_list(req, None, 'css'))

        r = html_template % {
            'js': js,
            'css': css,
            'modules': simplejson.dumps(self.server_wide_modules(req)),
            'init': 'new s.web.WebClient().start();',
        }
        return r

    @openerpweb.httprequest
    def login(self, req, db, login, key):
        """Authenticate and redirect to the web client home, setting the
        session cookie on the redirect response."""
        req.session.authenticate(db, login, key, {})
        redirect = werkzeug.utils.redirect('/web/webclient/home', 303)
        cookie_val = urllib2.quote(simplejson.dumps(req.session_id))
        redirect.set_cookie('session0|session_id', cookie_val)
        return redirect

    @openerpweb.jsonrequest
    def translations(self, req, mods, lang):
        """Load, for each module in ``mods``, the PO translations for ``lang``
        (and its parent locales) that are tagged with the 'openerp-web'
        auto-comment, plus the res.lang formatting parameters."""
        lang_model = req.session.model('res.lang')
        ids = lang_model.search([("code", "=", lang)])
        if ids:
            lang_obj = lang_model.read(ids[0], ["direction", "date_format", "time_format",
                                                "grouping", "decimal_point", "thousands_sep"])
        else:
            lang_obj = None

        # "fr_BE" loads fr.po then fr_BE.po; "sr@latin" loads sr.po then
        # sr@latin.po — later files override/extend earlier ones client-side
        if "_" in lang:
            separator = "_"
        else:
            separator = "@"
        langs = lang.split(separator)
        langs = [separator.join(langs[:x]) for x in range(1, len(langs) + 1)]

        transs = {}
        for addon_name in mods:
            transl = {"messages":[]}
            transs[addon_name] = transl
            addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
            for l in langs:
                f_name = os.path.join(addons_path, addon_name, "i18n", l + ".po")
                if not os.path.exists(f_name):
                    continue
                try:
                    with open(f_name) as t_file:
                        po = babel.messages.pofile.read_po(t_file)
                except Exception:
                    # unreadable/malformed PO file: skip it, best-effort
                    continue
                for x in po:
                    # only ship terms flagged for the web client to keep the
                    # payload small (full PO files can be very large)
                    if x.id and x.string and "openerp-web" in x.auto_comments:
                        transl["messages"].append({'id': x.id, 'string': x.string})
        return {"modules": transs,
                "lang_parameters": lang_obj}

    @openerpweb.jsonrequest
    def version_info(self, req):
        """Return the server release version."""
        return {
            "version": common.release.version
        }
|
|
|
|
|
2011-12-15 12:07:32 +00:00
|
|
|
class Proxy(openerpweb.Controller):
    """Controller letting the JSON client fetch arbitrary paths of this same
    WSGI application through a JSON-RPC call."""
    _cp_path = '/web/proxy'

    @openerpweb.jsonrequest
    def load(self, req, path):
        """ Proxies an HTTP request through a JSON request.

        It is strongly recommended to not request binary files through this,
        as the result will be a binary data blob as well.

        :param req: OpenERP request
        :param path: actual request path
        :return: file content
        """
        # local import: werkzeug.test is only needed by this endpoint
        from werkzeug.test import Client
        from werkzeug.wrappers import BaseResponse

        # issue an in-process GET against our own WSGI app
        return Client(req.httprequest.app, BaseResponse).get(path).data
|
2011-12-15 12:07:32 +00:00
|
|
|
|
2011-07-13 10:26:12 +00:00
|
|
|
class Database(openerpweb.Controller):
    """Database management endpoints (list/create/drop/backup/restore and
    super-admin password change), all delegated to the server's "db" service
    over XML-RPC."""
    _cp_path = "/web/database"

    @openerpweb.jsonrequest
    def get_list(self, req):
        """List databases visible to the client, filtered by the configured
        dbfilter pattern (%h = request host, %d = first host component)."""
        proxy = req.session.proxy("db")
        dbs = proxy.list()
        h = req.httprequest.environ['HTTP_HOST'].split(':')[0]
        d = h.split('.')[0]
        r = req.config.dbfilter.replace('%h', h).replace('%d', d)
        dbs = [i for i in dbs if re.match(r, i)]
        return {"db_list": dbs}

    @openerpweb.jsonrequest
    def create(self, req, fields):
        """Create a new database; ``fields`` is a list of {'name','value'}
        dicts from the creation form."""
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        create_attrs = (
            params['super_admin_pwd'],
            params['db_name'],
            bool(params.get('demo_data')),
            params['db_lang'],
            params['create_admin_pwd']
        )

        return req.session.proxy("db").create_database(*create_attrs)

    @openerpweb.jsonrequest
    def drop(self, req, fields):
        """Drop a database; returns an error dict on failure instead of
        raising, so the form can display it."""
        password, db = operator.itemgetter(
            'drop_pwd', 'drop_db')(
                dict(map(operator.itemgetter('name', 'value'), fields)))

        try:
            return req.session.proxy("db").drop(password, db)
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Drop Database'}
        return {'error': 'Could not drop database !', 'title': 'Drop Database'}

    @openerpweb.httprequest
    def backup(self, req, backup_db, backup_pwd, token):
        """Stream a database dump as a timestamped file attachment."""
        db_dump = base64.b64decode(
            req.session.proxy("db").dump(backup_pwd, backup_db))
        filename = "%(db)s_%(timestamp)s.dump" % {
            'db': backup_db,
            'timestamp': datetime.datetime.utcnow().strftime(
                "%Y-%m-%d_%H-%M-%SZ")
        }
        return req.make_response(db_dump,
           [('Content-Type', 'application/octet-stream; charset=binary'),
            ('Content-Disposition', 'attachment; filename="' + filename + '"')],
            {'fileToken': int(token)}
        )

    @openerpweb.httprequest
    def restore(self, req, db_file, restore_pwd, new_db):
        """Restore an uploaded dump file into a new database."""
        try:
            data = base64.b64encode(db_file.read())
            req.session.proxy("db").restore(restore_pwd, new_db, data)
            return ''
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                raise Exception("AccessDenied")

    @openerpweb.jsonrequest
    def change_password(self, req, fields):
        """Change the super-admin password; returns an error dict on failure."""
        old_password, new_password = operator.itemgetter(
            'old_pwd', 'new_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        try:
            return req.session.proxy("db").change_admin_password(old_password, new_password)
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Change Password'}
        return {'error': 'Error, password not changed !', 'title': 'Change Password'}
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2012-03-06 14:20:10 +00:00
|
|
|
def topological_sort(modules):
    """ Return a list of module names sorted so that their dependencies of the
    modules are listed before the module itself

    modules is a dict of {module_name: dependencies}

    :param modules: modules to sort
    :type modules: dict
    :returns: list(str)
    """
    # .values() instead of the py2-only .itervalues(): identical behavior
    # here, and keeps the function working on Python 3 as well
    dependencies = set(itertools.chain.from_iterable(modules.values()))
    # incoming edge: dependency on other module (if a depends on b, a has an
    # incoming edge from b, aka there's an edge from b to a)
    # outgoing edge: other module depending on this one

    # depth-first search, [Tarjan 1976]
    # http://en.wikipedia.org/wiki/Topological_sorting#Algorithms
    # L <- sorted nodes, dependencies-first
    L = []
    # S <- all nodes with no outgoing edges (modules on which no other
    # module depends): the DFS roots
    S = set(module for module in modules if module not in dependencies)

    visited = set()
    def visit(n):
        """Append n's (transitive) dependencies to L, then n itself."""
        if n not in visited:
            visited.add(n)
            if n not in modules:
                # n is not a web module, its deps can not be resolved: ignore
                return
            for m in modules[n]:
                visit(m)
            L.append(n)

    for n in S:
        visit(n)
    return L
|
|
|
|
|
2011-03-10 11:51:23 +00:00
|
|
|
class Session(openerpweb.Controller):
|
2011-09-05 11:03:09 +00:00
|
|
|
_cp_path = "/web/session"
|
2011-03-10 11:51:23 +00:00
|
|
|
|
2011-12-27 19:39:00 +00:00
|
|
|
    def session_info(self, req):
        """Return the dict the client needs to bootstrap its session:
        session id, uid, user context, database and login."""
        req.session.ensure_valid()
        return {
            "session_id": req.session_id,
            "uid": req.session._uid,
            # only evaluate the user context once authenticated
            "context": req.session.get_context() if req.session._uid else {},
            "db": req.session._db,
            "login": req.session._login,
            "openerp_entreprise": req.session.openerp_entreprise(),
        }
|
|
|
|
|
2011-12-27 19:39:00 +00:00
|
|
|
    @openerpweb.jsonrequest
    def get_session_info(self, req):
        """JSON-RPC wrapper around session_info()."""
        return self.session_info(req)
|
|
|
|
|
2011-03-11 13:26:22 +00:00
|
|
|
    @openerpweb.jsonrequest
    def authenticate(self, req, db, login, password, base_location=None):
        """Authenticate the session against ``db`` and return the resulting
        session information (see session_info)."""
        wsgienv = req.httprequest.environ
        release = common.release
        # environment details forwarded to the server for auditing/useragent
        env = dict(
            base_location=base_location,
            HTTP_HOST=wsgienv['HTTP_HOST'],
            REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
            user_agent="%s / %s" % (release.name, release.version),
        )
        req.session.authenticate(db, login, password, env)

        return self.session_info(req)
|
2011-09-27 00:14:35 +00:00
|
|
|
|
2011-09-13 10:23:20 +00:00
|
|
|
    @openerpweb.jsonrequest
    def change_password (self,req,fields):
        """Change the current user's password.

        ``fields`` is a list of {'name','value'} dicts carrying old_pwd,
        new_password and confirm_pwd; returns either {'new_password': ...}
        or an error dict the form can display.
        """
        old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
            dict(map(operator.itemgetter('name', 'value'), fields)))
        if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
            return {'error':'All passwords have to be filled.','title': 'Change Password'}
        if new_password != confirm_password:
            return {'error': 'The new password and its confirmation must be identical.','title': 'Change Password'}
        try:
            if req.session.model('res.users').change_password(
                old_password, new_password):
                return {'new_password':new_password}
        except Exception:
            # server rejected the old password (or the RPC failed)
            return {'error': 'Original password incorrect, your password was not changed.', 'title': 'Change Password'}
        return {'error': 'Error, password not changed !', 'title': 'Change Password'}
|
2011-09-27 00:14:35 +00:00
|
|
|
|
2011-06-10 12:50:06 +00:00
|
|
|
    @openerpweb.jsonrequest
    def sc_list(self, req):
        """Return the current user's menu shortcuts (ir.ui.view_sc)."""
        return req.session.model('ir.ui.view_sc').get_sc(
            req.session._uid, "ir.ui.menu", req.session.eval_context(req.context))
|
|
|
|
|
2011-07-13 10:50:58 +00:00
|
|
|
    @openerpweb.jsonrequest
    def get_lang_list(self, req):
        """Return the list of languages installable on the server, or an
        error dict if the "db" service call fails."""
        try:
            return {
                'lang_list': (req.session.proxy("db").list_lang() or []),
                'error': ""
            }
        except Exception, e:
            return {"error": e, "title": "Languages"}
|
2011-08-03 05:53:58 +00:00
|
|
|
|
2011-03-11 13:26:22 +00:00
|
|
|
    @openerpweb.jsonrequest
    def modules(self, req):
        """Return the web addons to load for the current database, sorted so
        that each module's dependencies come before the module itself;
        already-loaded (server-wide) modules are excluded from the result."""
        # Compute available candidates module
        loadable = openerpweb.addons_manifest
        loaded = set(req.config.server_wide_modules)
        candidates = [mod for mod in loadable if mod not in loaded]

        # already installed modules have no dependencies
        modules = dict.fromkeys(loaded, [])

        # Compute active true modules that might be on the web side only
        modules.update((name, openerpweb.addons_manifest[name].get('depends', []))
                       for name in candidates
                       if openerpweb.addons_manifest[name].get('active'))

        # Retrieve database installed modules
        Modules = req.session.model('ir.module.module')
        for module in Modules.search_read(
                [('state','=','installed'), ('name','in', candidates)],
                ['name', 'dependencies_id']):
            deps = module.get('dependencies_id')
            if deps:
                dependencies = map(
                    operator.itemgetter('name'),
                    req.session.model('ir.module.module.dependency').read(deps, ['name']))
                # merge manifest-declared and database-declared dependencies
                modules[module['name']] = list(
                    set(modules.get(module['name'], []) + dependencies))

        sorted_modules = topological_sort(modules)
        return [module for module in sorted_modules if module not in loaded]
|
2011-03-21 08:13:31 +00:00
|
|
|
|
2011-03-25 12:32:52 +00:00
|
|
|
@openerpweb.jsonrequest
|
2011-03-28 16:39:17 +00:00
|
|
|
def eval_domain_and_context(self, req, contexts, domains,
|
|
|
|
group_by_seq=None):
|
|
|
|
""" Evaluates sequences of domains and contexts, composing them into
|
|
|
|
a single context, domain or group_by sequence.
|
|
|
|
|
|
|
|
:param list contexts: list of contexts to merge together. Contexts are
|
|
|
|
evaluated in sequence, all previous contexts
|
|
|
|
are part of their own evaluation context
|
|
|
|
(starting at the session context).
|
|
|
|
:param list domains: list of domains to merge together. Domains are
|
|
|
|
evaluated in sequence and appended to one another
|
|
|
|
(implicit AND), their evaluation domain is the
|
|
|
|
result of merging all contexts.
|
|
|
|
:param list group_by_seq: list of domains (which may be in a different
|
|
|
|
order than the ``contexts`` parameter),
|
|
|
|
evaluated in sequence, their ``'group_by'``
|
|
|
|
key is extracted if they have one.
|
|
|
|
:returns:
|
|
|
|
a 3-dict of:
|
|
|
|
|
|
|
|
context (``dict``)
|
|
|
|
the global context created by merging all of
|
|
|
|
``contexts``
|
|
|
|
|
|
|
|
domain (``list``)
|
|
|
|
the concatenation of all domains
|
|
|
|
|
|
|
|
group_by (``list``)
|
|
|
|
a list of fields to group by, potentially empty (in which case
|
|
|
|
no group by should be performed)
|
|
|
|
"""
|
2011-06-28 12:17:47 +00:00
|
|
|
context, domain = eval_context_and_domain(req.session,
|
2012-01-16 10:43:29 +00:00
|
|
|
common.nonliterals.CompoundContext(*(contexts or [])),
|
|
|
|
common.nonliterals.CompoundDomain(*(domains or [])))
|
2011-07-22 12:52:14 +00:00
|
|
|
|
2011-06-17 16:08:49 +00:00
|
|
|
group_by_sequence = []
|
|
|
|
for candidate in (group_by_seq or []):
|
|
|
|
ctx = req.session.eval_context(candidate, context)
|
|
|
|
group_by = ctx.get('group_by')
|
|
|
|
if not group_by:
|
|
|
|
continue
|
|
|
|
elif isinstance(group_by, basestring):
|
|
|
|
group_by_sequence.append(group_by)
|
|
|
|
else:
|
|
|
|
group_by_sequence.extend(group_by)
|
2011-07-22 12:52:14 +00:00
|
|
|
|
2011-06-17 16:08:49 +00:00
|
|
|
return {
|
|
|
|
'context': context,
|
|
|
|
'domain': domain,
|
|
|
|
'group_by': group_by_sequence
|
|
|
|
}
|
2011-04-08 15:25:08 +00:00
|
|
|
|
|
|
|
@openerpweb.jsonrequest
|
|
|
|
def save_session_action(self, req, the_action):
|
|
|
|
"""
|
|
|
|
This method store an action object in the session object and returns an integer
|
|
|
|
identifying that action. The method get_session_action() can be used to get
|
|
|
|
back the action.
|
2011-07-22 12:52:14 +00:00
|
|
|
|
2011-04-08 15:25:08 +00:00
|
|
|
:param the_action: The action to save in the session.
|
|
|
|
:type the_action: anything
|
|
|
|
:return: A key identifying the saved action.
|
|
|
|
:rtype: integer
|
|
|
|
"""
|
2011-08-18 18:51:45 +00:00
|
|
|
saved_actions = req.httpsession.get('saved_actions')
|
2011-04-08 15:25:08 +00:00
|
|
|
if not saved_actions:
|
|
|
|
saved_actions = {"next":0, "actions":{}}
|
2011-08-18 18:51:45 +00:00
|
|
|
req.httpsession['saved_actions'] = saved_actions
|
2011-04-08 15:25:08 +00:00
|
|
|
# we don't allow more than 10 stored actions
|
|
|
|
if len(saved_actions["actions"]) >= 10:
|
2012-01-19 08:52:13 +00:00
|
|
|
del saved_actions["actions"][min(saved_actions["actions"])]
|
2011-04-08 15:25:08 +00:00
|
|
|
key = saved_actions["next"]
|
|
|
|
saved_actions["actions"][key] = the_action
|
|
|
|
saved_actions["next"] = key + 1
|
|
|
|
return key
|
|
|
|
|
|
|
|
@openerpweb.jsonrequest
|
|
|
|
def get_session_action(self, req, key):
|
|
|
|
"""
|
|
|
|
Gets back a previously saved action. This method can return None if the action
|
|
|
|
was saved since too much time (this case should be handled in a smart way).
|
2011-07-22 12:52:14 +00:00
|
|
|
|
2011-04-08 15:25:08 +00:00
|
|
|
:param key: The key given by save_session_action()
|
|
|
|
:type key: integer
|
|
|
|
:return: The saved action or None.
|
|
|
|
:rtype: anything
|
|
|
|
"""
|
2011-08-18 18:51:45 +00:00
|
|
|
saved_actions = req.httpsession.get('saved_actions')
|
2011-04-08 15:25:08 +00:00
|
|
|
if not saved_actions:
|
|
|
|
return None
|
|
|
|
return saved_actions["actions"].get(key)
|
2011-07-04 14:18:07 +00:00
|
|
|
|
|
|
|
    @openerpweb.jsonrequest
    def check(self, req):
        """ Assert that the current session is valid; raises otherwise. """
        req.session.assert_valid()
        return None
|
2011-07-14 10:22:43 +00:00
|
|
|
|
2012-02-10 16:43:09 +00:00
|
|
|
    @openerpweb.jsonrequest
    def destroy(self, req):
        """ Mark the current session for destruction (logout). """
        # the session machinery picks this flag up and disposes of the session
        req.session._suicide = True
|
2011-06-17 16:08:49 +00:00
|
|
|
def eval_context_and_domain(session, context, domain=None):
    """ Evaluate a (possibly non-literal) context and domain through the
    session and return the concrete Python values.

    :param session: OpenERP session used for evaluation
    :param context: context (literal or non-literal) to evaluate
    :param domain: domain to evaluate; defaults to an empty domain
    :returns: (evaluated_context, evaluated_domain) pair
    """
    evaluated_context = session.eval_context(context)
    # should we give the evaluated context as an evaluation context to the domain?
    evaluated_domain = session.eval_domain(domain or [])
    return evaluated_context, evaluated_domain
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2011-08-25 17:04:10 +00:00
|
|
|
def load_actions_from_ir_values(req, key, key2, models, meta):
    """ Fetch the actions bound to ``models`` through ir.values and
    normalize each one with :func:`clean_action`.

    :param req: request carrying the OpenERP session
    :param key: ir.values key (e.g. 'action')
    :param key2: ir.values key2 (e.g. 'tree_but_open')
    :param models: list of (model, id) pairs to look up
    :param meta: whether to include meta information
    :returns: list of (id, name, cleaned_action) triples
    """
    context = req.session.eval_context(req.context)
    bindings = req.session.model('ir.values').get(
        key, key2, models, meta, context)

    return [
        (binding_id, binding_name, clean_action(req, bound_action))
        for binding_id, binding_name, bound_action in bindings
    ]
|
2011-03-02 18:56:06 +00:00
|
|
|
|
2011-10-18 15:57:33 +00:00
|
|
|
def clean_action(req, action, do_not_eval=False):
    """ Normalize an action descriptor for the web client: ensure a
    ``flags`` key, turn string contexts/domains into evaluated values
    (or non-literal placeholders), default the action type, and fold
    view modes for window actions.

    :param req: request carrying the OpenERP session
    :param dict action: action descriptor, modified in place
    :param bool do_not_eval: when True, contexts/domains are parsed into
                             literal or non-literal objects instead of
                             being evaluated
    :returns: the (possibly transformed) action
    """
    action.setdefault('flags', {})

    context = req.session.eval_context(req.context)
    eval_ctx = req.session.evaluation_context(context)

    if not do_not_eval:
        # values come from the server, we can just eval them
        # NOTE(review): eval() is safe only as long as these strings really
        # originate server-side; never route user input through this path
        if isinstance(action.get('context'), basestring):
            action['context'] = eval( action['context'], eval_ctx ) or {}

        if isinstance(action.get('domain'), basestring):
            action['domain'] = eval( action['domain'], eval_ctx ) or []
    else:
        if 'context' in action:
            action['context'] = parse_context(action['context'], req.session)
        if 'domain' in action:
            action['domain'] = parse_domain(action['domain'], req.session)

    # a missing type means "close the window"
    action_type = action.setdefault('type', 'ir.actions.act_window_close')
    if action_type == 'ir.actions.act_window':
        return fix_view_modes(action)
    return action
|
2011-04-07 09:59:12 +00:00
|
|
|
|
2011-08-18 18:36:56 +00:00
|
|
|
# TODO: generate_views and fix_view_modes should probably move into the JS ActionManager
|
2011-06-30 06:46:51 +00:00
|
|
|
def generate_views(action):
    """ Synthesize ``action['views']`` for actions that did not come from
    the database.

    Database window actions (``ir.actions.act_window``) carry a computed
    ``views`` sequence, but actions built on the fly (e.g. returned by
    buttons) only provide ``view_mode`` and optionally ``view_id``. The
    web client relies on ``action['views']``, so derive it here.

    Supported shapes:

    * multiple view modes, no view_id
    * a single view mode with an optional view_id

    :param dict action: action descriptor, updated in place
    :raises ValueError: when several view modes are combined with a view_id
    """
    view_id = action.get('view_id', False)
    # m2o reads come back as [id, display_name]; keep just the id
    if isinstance(view_id, (list, tuple)):
        view_id = view_id[0]

    # providing at least one view mode is a requirement, not an option
    modes = action['view_mode'].split(',')

    if len(modes) == 1:
        action['views'] = [(view_id, modes[0])]
        return

    if view_id:
        raise ValueError('Non-db action dictionaries should provide '
                         'either multiple view modes or a single view '
                         'mode and an optional view id.\n\n Got view '
                         'modes %r and view id %r for action %r' % (
                             modes, view_id, action))
    action['views'] = [(False, mode) for mode in modes]
|
|
|
|
|
2011-04-04 14:26:40 +00:00
|
|
|
def fix_view_modes(action):
    """ Fold the legacy ``view_type`` attribute into ``view_mode``.

    Historically OpenERP used the mode ``tree`` for both list views and
    actual tree views, disambiguated by ``view_type``: ``form`` meant a
    list view, ``tree`` a real tree. This function removes ``view_type``
    and, when it was ``form``, rewrites every ``tree`` mode to the new
    ``list`` mode.

    It also inserts a ``page`` view mode right after the first ``form``
    view (sharing its view id), when a form view is present.

    TODO: this should go into the doc, some kind of "peculiarities" section

    :param dict action: action descriptor, modified in place
    :returns: the same action
    """
    if not action.get('views'):
        generate_views(action)

    # duplicate the first form view as a read-only 'page' view
    page_source = None
    for position, (view_identifier, view_mode) in enumerate(action['views']):
        if view_mode == 'form':
            page_source = view_identifier
            break
    if page_source is not None:
        action['views'].insert(position + 1, (page_source, 'page'))

    # a real tree view_type: no tree->list rewrite needed
    if action.pop('view_type', 'form') != 'form':
        return action

    action['views'] = [
        [view_identifier, 'list' if view_mode == 'tree' else view_mode]
        for view_identifier, view_mode in action['views']
    ]

    return action
|
2011-03-02 18:56:06 +00:00
|
|
|
|
|
|
|
class Menu(openerpweb.Controller):
    """ Controller serving the application menu tree to the web client. """
    _cp_path = "/web/menu"

    @openerpweb.jsonrequest
    def load(self, req):
        """ JSON endpoint wrapping :meth:`do_load`. """
        return {'data': self.do_load(req)}

    @openerpweb.jsonrequest
    def get_user_roots(self, req):
        """ JSON endpoint wrapping :meth:`do_get_user_roots`. """
        return self.do_get_user_roots(req)

    def do_get_user_roots(self, req):
        """ Return all root menu ids visible for the session user.

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the root menu ids
        :rtype: list(int)
        """
        s = req.session
        context = s.eval_context(req.context)
        Menus = s.model('ir.ui.menu')
        # If a menu action is defined use its domain to get the root menu items
        user_menu_id = s.model('res.users').read([s._uid], ['menu_id'], context)[0]['menu_id']

        # default: top-level menus only
        menu_domain = [('parent_id', '=', False)]
        if user_menu_id:
            # menu_id is an m2o pair [id, name]; read the action's domain string
            domain_string = s.model('ir.actions.act_window').read([user_menu_id[0]], ['domain'], context)[0]['domain']
            if domain_string:
                # literal_eval: the stored domain must be a literal expression
                menu_domain = ast.literal_eval(domain_string)

        return Menus.search(menu_domain, 0, False, False, context)

    def do_load(self, req):
        """ Loads all menu items (all applications and their sub-menus).

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        context = req.session.eval_context(req.context)
        Menus = req.session.model('ir.ui.menu')

        menu_roots = Menus.read(self.do_get_user_roots(req), ['name', 'sequence', 'parent_id'], context)
        # synthetic root node holding the visible applications as children
        menu_root = {'id': False, 'name': 'root', 'parent_id': [-1, ''], 'children' : menu_roots}

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menu_ids = Menus.search([], 0, False, False, context)
        menu_items = Menus.read(menu_ids, ['name', 'sequence', 'parent_id'], context)
        # adds roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots)

        # make a tree using parent_id
        menu_items_map = dict((menu_item["id"], menu_item) for menu_item in menu_items)
        for menu_item in menu_items:
            if menu_item['parent_id']:
                # parent_id is an m2o pair [id, name]
                parent = menu_item['parent_id'][0]
            else:
                parent = False
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(
                key=operator.itemgetter('sequence'))

        return menu_root

    @openerpweb.jsonrequest
    def action(self, req, menu_id):
        """ Return the window actions bound to the given menu item. """
        actions = load_actions_from_ir_values(req,'action', 'tree_but_open',
                                              [('ir.ui.menu', menu_id)], False)
        return {"action": actions}
|
2011-03-02 18:56:06 +00:00
|
|
|
|
|
|
|
class DataSet(openerpweb.Controller):
    """ Generic model access controller: search/read/write/unlink records
    and call arbitrary model methods on behalf of the web client.
    """
    _cp_path = "/web/dataset"

    @openerpweb.jsonrequest
    def fields(self, req, model):
        """ Return the fields description (fields_get) of ``model``. """
        return {'fields': req.session.model(model).fields_get(
            False, req.session.eval_context(req.context))}

    @openerpweb.jsonrequest
    def search_read(self, req, model, fields=False, offset=0, limit=False, domain=None, sort=None):
        """ JSON endpoint wrapping :meth:`do_search_read`. """
        return self.do_search_read(req, model, fields, offset, limit, domain, sort)

    def do_search_read(self, req, model, fields=False, offset=0, limit=False, domain=None
                       , sort=None):
        """ Performs a search() followed by a read() (if needed) using the
        provided search criteria

        :param req: a JSON-RPC request object
        :type req: openerpweb.JsonRequest
        :param str model: the name of the model to search on
        :param fields: a list of the fields to return in the result records
        :type fields: [str]
        :param int offset: from which index should the results start being returned
        :param int limit: the maximum number of records to return
        :param list domain: the search domain for the query
        :param list sort: sorting directives
        :returns: A structure (dict) with two keys: ids (all the ids matching
                  the (domain, context) pair) and records (paginated records
                  matching fields selection set)
        :rtype: list
        """
        Model = req.session.model(model)

        context, domain = eval_context_and_domain(
            req.session, req.context, domain)

        ids = Model.search(domain, offset or 0, limit or False, sort or False, context)
        # when the page is full there may be more records: count them
        if limit and len(ids) == limit:
            length = Model.search_count(domain, context)
        else:
            length = len(ids) + (offset or 0)
        if fields and fields == ['id']:
            # shortcut read if we only want the ids
            return {
                'ids': ids,
                'length': length,
                'records': [{'id': id} for id in ids]
            }

        records = Model.read(ids, fields or False, context)
        # restore search() ordering; a position map keeps this O(n) instead
        # of the quadratic ids.index() lookup per record
        positions = dict((id, index) for index, id in enumerate(ids))
        records.sort(key=lambda obj: positions[obj['id']])
        return {
            'ids': ids,
            'length': length,
            'records': records
        }

    @openerpweb.jsonrequest
    def read(self, req, model, ids, fields=False):
        # NOTE(review): ``ids`` lands in do_search_read's ``fields`` slot and
        # ``fields`` in its ``offset`` slot — this looks like a latent bug
        # (do_get has the expected semantics); confirm against JS callers
        # before changing the behavior.
        return self.do_search_read(req, model, ids, fields)

    @openerpweb.jsonrequest
    def get(self, req, model, ids, fields=False):
        """ JSON endpoint wrapping :meth:`do_get`. """
        return self.do_get(req, model, ids, fields)

    def do_get(self, req, model, ids, fields=False):
        """ Fetches and returns the records of the model ``model`` whose ids
        are in ``ids``.

        The results are in the same order as the inputs, but elements may be
        missing (if there is no record left for the id)

        :param req: the JSON-RPC2 request object
        :type req: openerpweb.JsonRequest
        :param model: the model to read from
        :type model: str
        :param ids: a list of identifiers
        :type ids: list
        :param fields: a list of fields to fetch, ``False`` or empty to fetch
                       all fields in the model
        :type fields: list | False
        :returns: a list of records, in the same order as the list of ids
        :rtype: list
        """
        Model = req.session.model(model)
        records = Model.read(ids, fields, req.session.eval_context(req.context))

        record_map = dict((record['id'], record) for record in records)

        # preserve input order, silently dropping ids with no record left
        return [record_map[id] for id in ids if record_map.get(id)]

    @openerpweb.jsonrequest
    def load(self, req, model, id, fields):
        """ Read a single record; returns ``{'value': {}}`` when missing. """
        m = req.session.model(model)
        value = {}
        r = m.read([id], False, req.session.eval_context(req.context))
        if r:
            value = r[0]
        return {'value': value}

    @openerpweb.jsonrequest
    def create(self, req, model, data):
        """ Create a record from ``data``; returns its database id. """
        m = req.session.model(model)
        r = m.create(data, req.session.eval_context(req.context))
        return {'result': r}

    @openerpweb.jsonrequest
    def save(self, req, model, id, data):
        """ Write ``data`` on the record ``id``. """
        m = req.session.model(model)
        r = m.write([id], data, req.session.eval_context(req.context))
        return {'result': r}

    @openerpweb.jsonrequest
    def unlink(self, req, model, ids=()):
        """ Delete the records ``ids`` from ``model``. """
        Model = req.session.model(model)
        return Model.unlink(ids, req.session.eval_context(req.context))

    def call_common(self, req, model, method, args, domain_id=None, context_id=None):
        """ Call ``method`` on ``model`` after evaluating the positional
        arguments at indexes ``domain_id``/``context_id`` (when provided
        and within range) as a domain and context respectively.
        """
        has_domain = domain_id is not None and domain_id < len(args)
        has_context = context_id is not None and context_id < len(args)

        domain = args[domain_id] if has_domain else []
        context = args[context_id] if has_context else {}
        c, d = eval_context_and_domain(req.session, context, domain)
        if has_domain:
            args[domain_id] = d
        if has_context:
            args[context_id] = c

        return self._call_kw(req, model, method, args, {})

    def _call_kw(self, req, model, method, args, kwargs):
        """ Evaluate any non-literal contexts/domains in ``args``/``kwargs``
        then dispatch the call to the model method.
        """
        for i in xrange(len(args)):
            if isinstance(args[i], common.nonliterals.BaseContext):
                args[i] = req.session.eval_context(args[i])
            elif isinstance(args[i], common.nonliterals.BaseDomain):
                args[i] = req.session.eval_domain(args[i])
        for k in kwargs.keys():
            if isinstance(kwargs[k], common.nonliterals.BaseContext):
                kwargs[k] = req.session.eval_context(kwargs[k])
            elif isinstance(kwargs[k], common.nonliterals.BaseDomain):
                kwargs[k] = req.session.eval_domain(kwargs[k])

        return getattr(req.session.model(model), method)(*args, **kwargs)

    @openerpweb.jsonrequest
    def onchange(self, req, model, method, args, context_id=None):
        """ Support method for handling onchange calls: behaves much like call
        with the following differences:

        * Does not take a domain_id
        * Is aware of the return value's structure, and will parse the domains
          if needed in order to return either parsed literal domains (in JSON)
          or non-literal domain instances, allowing those domains to be used
          from JS

        :param req:
        :type req: web.common.http.JsonRequest
        :param str model: object type on which to call the method
        :param str method: name of the onchange handler method
        :param list args: arguments to call the onchange handler with
        :param int context_id: index of the context object in the list of
                               arguments
        :return: result of the onchange call with all domains parsed
        """
        result = self.call_common(req, model, method, args, context_id=context_id)
        if not result or 'domain' not in result:
            return result

        result['domain'] = dict(
            (k, parse_domain(v, req.session))
            for k, v in result['domain'].iteritems())

        return result

    @openerpweb.jsonrequest
    def call(self, req, model, method, args, domain_id=None, context_id=None):
        """ Generic model method call with domain/context evaluation. """
        return self.call_common(req, model, method, args, domain_id, context_id)

    @openerpweb.jsonrequest
    def call_kw(self, req, model, method, args, kwargs):
        """ Generic model method call with explicit keyword arguments. """
        return self._call_kw(req, model, method, args, kwargs)

    @openerpweb.jsonrequest
    def call_button(self, req, model, method, args, domain_id=None, context_id=None):
        """ Call a button handler; clean and return the resulting action,
        or ``{'result': False}`` when no action is returned.
        """
        action = self.call_common(req, model, method, args, domain_id, context_id)
        if isinstance(action, dict) and action.get('type') != '':
            return {'result': clean_action(req, action)}
        return {'result': False}

    @openerpweb.jsonrequest
    def exec_workflow(self, req, model, id, signal):
        """ Send workflow ``signal`` to record ``id`` of ``model``. """
        r = req.session.exec_workflow(model, id, signal)
        return {'result': r}

    @openerpweb.jsonrequest
    def default_get(self, req, model, fields):
        """ Return the default values for ``fields`` on ``model``. """
        Model = req.session.model(model)
        return Model.default_get(fields, req.session.eval_context(req.context))

    @openerpweb.jsonrequest
    def name_search(self, req, model, search_str, domain=None, context=None):
        """ Prefix name search (``=ilike`` on ``search_str%``).

        ``domain`` and ``context`` default to None instead of the previous
        mutable ``[]``/``{}`` defaults (shared-state hazard); the effective
        values passed on are identical.
        """
        m = req.session.model(model)
        r = m.name_search(search_str+'%', domain or [], '=ilike', context or {})
        return {'result': r}
|
|
|
|
|
2011-05-10 08:34:20 +00:00
|
|
|
class DataGroup(openerpweb.Controller):
    """ Controller exposing grouped reads (read_group) to the web client. """
    _cp_path = "/web/group"

    @openerpweb.jsonrequest
    def read(self, req, model, fields, group_by_fields, domain=None, sort=None):
        """ Evaluate the request context and domain, then delegate to the
        model's ``read_group`` with ``group_by`` injected in the context.
        """
        GroupModel = req.session.model(model)
        context, domain = eval_context_and_domain(req.session, req.context, domain)

        grouping_context = dict(context, group_by=group_by_fields)
        return GroupModel.read_group(
            domain or [], fields, group_by_fields, 0, False,
            grouping_context, sort or False)
|
2011-05-10 08:34:20 +00:00
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class View(openerpweb.Controller):
    """ Controller loading and transforming view definitions
    (fields_view_get) for the web client.
    """
    _cp_path = "/web/view"

    def fields_view_get(self, req, model, view_id, view_type,
                        transform=True, toolbar=False, submenu=False):
        """ Fetch a view description from the server and post-process it
        (arch conversion to JSON, domain/context parsing, toolbar cleanup).
        """
        Model = req.session.model(model)
        context = req.session.eval_context(req.context)
        fvg = Model.fields_view_get(view_id, view_type, context, toolbar, submenu)
        # todo fme?: check that we should pass the evaluated context here
        # kanban views preserve whitespaces in their arch
        self.process_view(req.session, fvg, context, transform, (view_type == 'kanban'))
        if toolbar and transform:
            self.process_toolbar(req, fvg['toolbar'])
        return fvg

    def process_view(self, session, fvg, context, transform, preserve_whitespaces=False):
        """ Convert a fields_view_get result in place: parse the XML arch
        into a JSON-able structure and normalize the field descriptors
        (recursing into embedded sub-views).
        """
        # depending on how it feels, xmlrpclib.ServerProxy can translate
        # XML-RPC strings to ``str`` or ``unicode``. ElementTree does not
        # enjoy unicode strings which can not be trivially converted to
        # strings, and it blows up during parsing.

        # So normalize by converting the view xml back to byte strings.
        if isinstance(fvg['arch'], unicode):
            arch = fvg['arch'].encode('utf-8')
        else:
            arch = fvg['arch']

        if transform:
            evaluation_context = session.evaluation_context(context or {})
            xml = self.transform_view(arch, session, evaluation_context)
        else:
            xml = ElementTree.fromstring(arch)
        fvg['arch'] = common.xml2json.from_elementtree(xml, preserve_whitespaces)

        if 'id' in fvg['fields']:
            # Special case for id's: expose them with a dedicated 'id' type
            id_field = fvg['fields']['id']
            id_field['original_type'] = id_field['type']
            id_field['type'] = 'id'

        for field in fvg['fields'].itervalues():
            if field.get('views'):
                # recurse into embedded sub-views (o2m/m2m inline views)
                for view in field["views"].itervalues():
                    self.process_view(session, view, None, transform)
            if field.get('domain'):
                field["domain"] = parse_domain(field["domain"], session)
            if field.get('context'):
                field["context"] = parse_context(field["context"], session)

    def process_toolbar(self, req, toolbar):
        """
        The toolbar is a mapping of section_key: [action_descriptor]

        We need to clean all those actions in order to ensure correct
        round-tripping
        """
        for actions in toolbar.itervalues():
            for action in actions:
                if 'context' in action:
                    action['context'] = parse_context(
                        action['context'], req.session)
                if 'domain' in action:
                    action['domain'] = parse_domain(
                        action['domain'], req.session)

    @openerpweb.jsonrequest
    def add_custom(self, req, view_id, arch):
        """ Store a per-user customized arch for view ``view_id``. """
        CustomView = req.session.model('ir.ui.view.custom')
        CustomView.create({
            'user_id': req.session._uid,
            'ref_id': view_id,
            'arch': arch
        }, req.session.eval_context(req.context))
        return {'result': True}

    @openerpweb.jsonrequest
    def undo_custom(self, req, view_id, reset=False):
        """ Remove user customizations of view ``view_id``: the most recent
        one, or all of them when ``reset`` is True.
        """
        CustomView = req.session.model('ir.ui.view.custom')
        context = req.session.eval_context(req.context)
        vcustom = CustomView.search([('user_id', '=', req.session._uid), ('ref_id' ,'=', view_id)],
                                    0, False, False, context)
        if vcustom:
            if reset:
                CustomView.unlink(vcustom, context)
            else:
                # search results are newest-first: drop only the latest one
                CustomView.unlink([vcustom[0]], context)
            return {'result': True}
        return {'result': False}

    def transform_view(self, view_string, session, context=None):
        """ Parse a view arch, rewriting domain/context attributes of each
        node as it is encountered.
        """
        # transform nodes on the fly via iterparse, instead of
        # doing it statically on the parsing result
        parser = ElementTree.iterparse(StringIO(view_string), events=("start",))
        root = None
        for event, elem in parser:
            if event == "start":
                if root is None:
                    root = elem
                self.parse_domains_and_contexts(elem, session)
        return root

    def parse_domains_and_contexts(self, elem, session):
        """ Converts domains and contexts from the view into Python objects,
        either literals if they can be parsed by literal_eval or a special
        placeholder object if the domain or context refers to free variables.

        :param elem: the current node being parsed
        :type param: xml.etree.ElementTree.Element
        :param session: OpenERP session object, used to store and retrieve
                        non-literal objects
        :type session: openerpweb.openerpweb.OpenERPSession
        """
        for el in ['domain', 'filter_domain']:
            domain = elem.get(el, '').strip()
            if domain:
                elem.set(el, parse_domain(domain, session))
                # keep the original string under a *_string attribute
                elem.set(el + '_string', domain)
        for el in ['context', 'default_get']:
            context_string = elem.get(el, '').strip()
            if context_string:
                elem.set(el, parse_context(context_string, session))
                elem.set(el + '_string', context_string)

    @openerpweb.jsonrequest
    def load(self, req, model, view_id, view_type, toolbar=False):
        """ JSON endpoint wrapping :meth:`fields_view_get`. """
        return self.fields_view_get(req, model, view_id, view_type, toolbar=toolbar)
|
2011-03-21 08:13:31 +00:00
|
|
|
|
2011-10-18 15:57:33 +00:00
|
|
|
def parse_domain(domain, session):
    """ Parses an arbitrary string containing a domain, transforms it
    to either a literal domain or a :class:`common.nonliterals.Domain`

    :param domain: the domain to parse, if the domain is not a string it
                   is assumed to be a literal domain and is returned as-is
    :param session: Current OpenERP session
    :type session: openerpweb.openerpweb.OpenERPSession
    """
    if not isinstance(domain, basestring):
        return domain
    try:
        return ast.literal_eval(domain)
    except (ValueError, SyntaxError):
        # not a literal: ast.literal_eval raises SyntaxError (not only
        # ValueError) on expressions with free variables, e.g.
        # "[('id', 'in', ids)]" -- wrap those for deferred evaluation
        return common.nonliterals.Domain(session, domain)
|
2011-10-18 15:57:33 +00:00
|
|
|
|
|
|
|
def parse_context(context, session):
    """ Parses an arbitrary string containing a context, transforms it
    to either a literal context or a :class:`common.nonliterals.Context`

    :param context: the context to parse, if the context is not a string it
                    is assumed to be a literal context and is returned as-is
    :param session: Current OpenERP session
    :type session: openerpweb.openerpweb.OpenERPSession
    """
    if not isinstance(context, basestring):
        return context
    try:
        return ast.literal_eval(context)
    except (ValueError, SyntaxError):
        # not a literal: ast.literal_eval raises SyntaxError (not only
        # ValueError) on expressions with free variables, e.g.
        # "{'default_partner_id': active_id}" -- defer their evaluation
        return common.nonliterals.Context(session, context)
|
2011-10-18 15:57:33 +00:00
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class ListView(View):
    _cp_path = "/web/listview"

    def process_colors(self, view, row, context):
        """ Evaluates the list view's ``colors`` attribute (a
        ``;``-separated list of ``color:condition`` pairs) against *row*
        and returns the color to apply to it.

        :param view: fields_view_get result, ``colors`` is read from
                     ``view['arch']['attrs']``
        :param dict row: record the conditions are evaluated against
        :param dict context: evaluation context, overridden by *row*
        :returns: the matched color, ``None`` if no condition matched, or
                  ``'maroon'`` when several conditions matched at once
        """
        colors = view['arch']['attrs'].get('colors')

        if not colors:
            return None

        # split each pair on the *first* colon only: the condition itself
        # may contain colons (dict literals, slices, ...)
        matched = [
            pair.split(':', 1)[0]
            for pair in colors.split(';')
            # NOTE: view archs come from installed modules, not end users,
            # hence the direct eval of the condition
            if eval(pair.split(':', 1)[1], dict(context, **row))
        ]

        if not matched:
            return None
        elif len(matched) == 1:
            return matched[0]
        # conflicting conditions: fall back on a fixed warning color
        return 'maroon'
|
|
|
|
|
2011-09-06 20:54:38 +00:00
|
|
|
class TreeView(View):
    _cp_path = "/web/treeview"

    @openerpweb.jsonrequest
    def action(self, req, model, id):
        """ Returns the ``tree_but_open`` actions bound to the given
        record via ir.values. """
        return load_actions_from_ir_values(
            req, 'action', 'tree_but_open', [(model, id)], False)
|
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class SearchView(View):
    _cp_path = "/web/searchview"

    @openerpweb.jsonrequest
    def load(self, req, model, view_id):
        """ Returns the requested search view for *model*. """
        return {
            'fields_view': self.fields_view_get(req, model, view_id, 'search')
        }

    @openerpweb.jsonrequest
    def fields_get(self, req, model):
        """ Returns the model's fields, with their ``domain`` and
        ``context`` attributes parsed into Python objects. """
        context = req.session.eval_context(req.context)
        fields = req.session.model(model).fields_get(False, context)
        for field in fields.values():
            # shouldn't convert the views too?
            if field.get('domain'):
                field["domain"] = parse_domain(field["domain"], req.session)
            if field.get('context'):
                field["context"] = parse_context(field["context"], req.session)
        return {'fields': fields}

    @openerpweb.jsonrequest
    def get_filters(self, req, model):
        """ Returns the custom (ir.filters) filters available on *model*,
        with their contexts and domains parsed/evaluated. Filters whose
        context or domain fails to parse are returned flagged as
        ``disabled`` instead of breaking the whole list. """
        logger = logging.getLogger(__name__ + '.SearchView.get_filters')
        filters = req.session.model("ir.filters").get_filters(model)
        for custom_filter in filters:
            try:
                parsed_context = parse_context(
                    custom_filter["context"], req.session)
                if isinstance(parsed_context, common.nonliterals.BaseContext):
                    custom_filter["context"] = \
                        req.session.eval_context(parsed_context)
                else:
                    custom_filter["context"] = parsed_context

                parsed_domain = parse_domain(
                    custom_filter["domain"], req.session)
                if isinstance(parsed_domain, common.nonliterals.BaseDomain):
                    custom_filter["domain"] = \
                        req.session.eval_domain(parsed_domain)
                else:
                    custom_filter["domain"] = parsed_domain
            except Exception:
                logger.exception("Failed to parse custom filter %s in %s",
                                 custom_filter['name'], model)
                custom_filter['disabled'] = True
                del custom_filter['context']
                del custom_filter['domain']
        return filters

    @openerpweb.jsonrequest
    def save_filter(self, req, model, name, context_to_save, domain):
        """ Creates (or replaces) a custom filter for the current user.

        :param str model: model the filter applies to
        :param str name: filter name
        :param context_to_save: context to store with the filter
        :param domain: domain to store with the filter
        :returns: result of ir.filters ``create_or_replace``
        """
        ctx = common.nonliterals.CompoundContext(context_to_save)
        ctx.session = req.session
        ctx = ctx.evaluate()

        dom = common.nonliterals.CompoundDomain(domain)
        dom.session = req.session
        dom = dom.evaluate()

        return req.session.model("ir.filters").create_or_replace({
            "context": ctx,
            "domain": dom,
            "model_id": model,
            "name": name,
            "user_id": req.session._uid,
        }, req.session.eval_context(req.context))

    @openerpweb.jsonrequest
    def add_to_dashboard(self, req, menu_id, action_id, context_to_save, domain, view_mode, name=''):
        """ Adds the given action (with the provided context/domain and
        view mode) as a new item of the user's dashboard, by customizing
        the board view reached through *menu_id*.

        :returns: the new ir.ui.view.custom id, or ``False`` when the menu
                  does not lead to an editable dashboard
        """
        ctx = common.nonliterals.CompoundContext(context_to_save)
        ctx.session = req.session
        ctx = ctx.evaluate()
        ctx['dashboard_merge_domains_contexts'] = False # TODO: replace this 6.1 workaround by attribute on <action/>

        dom = common.nonliterals.CompoundDomain(domain)
        dom.session = req.session
        dom = dom.evaluate()

        dashboard_action = load_actions_from_ir_values(
            req, 'action', 'tree_but_open', [('ir.ui.menu', menu_id)], False)
        if not dashboard_action:
            return False

        action = dashboard_action[0][2]
        # Maybe should check the content instead of model board.board ?
        if action['res_model'] != 'board.board' \
                or action['views'][0][1] != 'form':
            return False

        view_id = action['views'][0][0]
        board = req.session.model(action['res_model']).fields_view_get(
            view_id, 'form')
        if not (board and 'arch' in board):
            return False

        xml = ElementTree.fromstring(board['arch'])
        column = xml.find('./board/column')
        if column is None:
            return False

        new_action = ElementTree.Element('action', {
            'name' : str(action_id),
            'string' : name,
            'view_mode' : view_mode,
            'context' : str(ctx),
            'domain' : str(dom)
        })
        column.insert(0, new_action)
        arch = ElementTree.tostring(xml, 'utf-8')
        return req.session.model('ir.ui.view.custom').create({
            'user_id': req.session._uid,
            'ref_id': view_id,
            'arch': arch
        }, req.session.eval_context(req.context))
|
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
class Binary(openerpweb.Controller):
|
2011-09-05 11:03:09 +00:00
|
|
|
_cp_path = "/web/binary"
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2011-08-18 18:51:45 +00:00
|
|
|
def image(self, req, model, id, field, **kw):
|
|
|
|
Model = req.session.model(model)
|
|
|
|
context = req.session.eval_context(req.context)
|
2011-09-06 11:11:57 +00:00
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
|
|
|
if not id:
|
2011-10-05 15:57:40 +00:00
|
|
|
res = Model.default_get([field], context).get(field)
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
2011-10-05 15:57:40 +00:00
|
|
|
res = Model.read([int(id)], [field], context)[0].get(field)
|
2011-09-06 11:12:18 +00:00
|
|
|
image_data = base64.b64decode(res)
|
2011-09-06 11:12:26 +00:00
|
|
|
except (TypeError, xmlrpclib.Fault):
|
2011-09-06 11:11:57 +00:00
|
|
|
image_data = self.placeholder(req)
|
|
|
|
return req.make_response(image_data, [
|
|
|
|
('Content-Type', 'image/png'), ('Content-Length', len(image_data))])
|
2011-09-02 09:44:49 +00:00
|
|
|
def placeholder(self, req):
|
2011-10-05 15:57:40 +00:00
|
|
|
addons_path = openerpweb.addons_manifest['web']['addons_path']
|
|
|
|
return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', 'placeholder.png'), 'rb').read()
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2012-01-10 14:35:18 +00:00
|
|
|
def saveas(self, req, model, field, id=None, filename_field=None, **kw):
|
|
|
|
""" Download link for files stored as binary fields.
|
|
|
|
|
|
|
|
If the ``id`` parameter is omitted, fetches the default value for the
|
|
|
|
binary field (via ``default_get``), otherwise fetches the field for
|
|
|
|
that precise record.
|
|
|
|
|
|
|
|
:param req: OpenERP request
|
|
|
|
:type req: :class:`web.common.http.HttpRequest`
|
|
|
|
:param str model: name of the model to fetch the binary from
|
|
|
|
:param str field: binary field
|
|
|
|
:param str id: id of the record from which to fetch the binary
|
|
|
|
:param str filename_field: field holding the file's name, if any
|
|
|
|
:returns: :class:`werkzeug.wrappers.Response`
|
|
|
|
"""
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model(model)
|
|
|
|
context = req.session.eval_context(req.context)
|
2012-01-10 15:39:05 +00:00
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
2011-10-17 14:58:49 +00:00
|
|
|
if id:
|
2012-01-10 15:39:05 +00:00
|
|
|
res = Model.read([int(id)], fields, context)[0]
|
2011-10-17 14:58:49 +00:00
|
|
|
else:
|
2012-01-10 15:39:05 +00:00
|
|
|
res = Model.default_get(fields, context)
|
2011-10-14 11:09:42 +00:00
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
2011-05-23 14:52:19 +00:00
|
|
|
if not filecontent:
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.not_found()
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
2012-01-10 14:35:18 +00:00
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.make_response(filecontent,
|
|
|
|
[('Content-Type', 'application/octet-stream'),
|
2012-01-10 15:50:20 +00:00
|
|
|
('Content-Disposition', 'attachment; filename="%s"' % filename)])
|
2011-05-23 14:52:19 +00:00
|
|
|
|
2012-02-08 10:39:35 +00:00
|
|
|
@openerpweb.httprequest
|
|
|
|
def saveas_ajax(self, req, data, token):
|
|
|
|
jdata = simplejson.loads(data)
|
|
|
|
model = jdata['model']
|
|
|
|
field = jdata['field']
|
|
|
|
id = jdata.get('id', None)
|
|
|
|
filename_field = jdata.get('filename_field', None)
|
|
|
|
context = jdata.get('context', dict())
|
|
|
|
|
|
|
|
context = req.session.eval_context(context)
|
|
|
|
Model = req.session.model(model)
|
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
|
|
|
if id:
|
|
|
|
res = Model.read([int(id)], fields, context)[0]
|
|
|
|
else:
|
|
|
|
res = Model.default_get(fields, context)
|
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
|
|
|
if not filecontent:
|
|
|
|
raise ValueError("No content found for field '%s' on '%s:%s'" %
|
|
|
|
(field, model, id))
|
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
|
|
|
return req.make_response(filecontent,
|
|
|
|
headers=[('Content-Type', 'application/octet-stream'),
|
|
|
|
('Content-Disposition', 'attachment; filename="%s"' % filename)],
|
|
|
|
cookies={'fileToken': int(token)})
|
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload(self, req, callback, ufile):
|
2011-06-29 13:12:33 +00:00
|
|
|
# TODO: might be useful to have a configuration flag for max-length file uploads
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
|
|
|
out = """<script language="javascript" type="text/javascript">
|
|
|
|
var win = window.top.window,
|
|
|
|
callback = win[%s];
|
|
|
|
if (typeof(callback) === 'function') {
|
|
|
|
callback.apply(this, %s);
|
|
|
|
} else {
|
|
|
|
win.jQuery('#oe_notification', win.document).notify('create', {
|
|
|
|
title: "Ajax File Upload",
|
|
|
|
text: "Could not find callback"
|
|
|
|
});
|
|
|
|
}
|
|
|
|
</script>"""
|
2011-09-06 11:53:16 +00:00
|
|
|
data = ufile.read()
|
2011-12-15 10:29:10 +00:00
|
|
|
args = [len(data), ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
ufile.content_type, base64.b64encode(data)]
|
2011-05-23 14:52:19 +00:00
|
|
|
except Exception, e:
|
|
|
|
args = [False, e.message]
|
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2011-05-26 21:06:41 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload_attachment(self, req, callback, model, id, ufile):
|
2011-08-18 18:51:45 +00:00
|
|
|
context = req.session.eval_context(req.context)
|
|
|
|
Model = req.session.model('ir.attachment')
|
2011-05-26 21:06:41 +00:00
|
|
|
try:
|
|
|
|
out = """<script language="javascript" type="text/javascript">
|
|
|
|
var win = window.top.window,
|
|
|
|
callback = win[%s];
|
|
|
|
if (typeof(callback) === 'function') {
|
|
|
|
callback.call(this, %s);
|
|
|
|
}
|
|
|
|
</script>"""
|
|
|
|
attachment_id = Model.create({
|
|
|
|
'name': ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
'datas': base64.encodestring(ufile.read()),
|
2012-01-11 12:55:01 +00:00
|
|
|
'datas_fname': ufile.filename,
|
2011-05-26 21:06:41 +00:00
|
|
|
'res_model': model,
|
|
|
|
'res_id': int(id)
|
2011-06-17 16:08:49 +00:00
|
|
|
}, context)
|
2011-05-26 21:06:41 +00:00
|
|
|
args = {
|
|
|
|
'filename': ufile.filename,
|
|
|
|
'id': attachment_id
|
|
|
|
}
|
|
|
|
except Exception, e:
|
|
|
|
args = { 'error': e.message }
|
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
class Action(openerpweb.Controller):
    _cp_path = "/web/action"

    @openerpweb.jsonrequest
    def load(self, req, action_id, do_not_eval=False):
        """ Reads the action record for *action_id* (dispatching on its
        concrete type) and returns it cleaned up for the client.

        :returns: ``{'result': action}`` or ``{'result': False}`` when the
                  action does not exist
        """
        value = False
        context = req.session.eval_context(req.context)
        base_action = req.session.model('ir.actions.actions').read(
            [action_id], ['type'], context)
        if base_action:
            ctx = {}
            action_type = base_action[0]['type']
            if action_type == 'ir.actions.report.xml':
                # avoid fetching report binaries along with the action
                ctx.update({'bin_size': True})
            ctx.update(context)
            action = req.session.model(action_type).read(
                [action_id], False, ctx)
            if action:
                value = clean_action(req, action[0], do_not_eval)
        return {'result': value}

    @openerpweb.jsonrequest
    def run(self, req, action_id):
        """ Executes the server action *action_id* and returns its
        (cleaned) resulting action, if any. """
        result = req.session.model('ir.actions.server').run(
            [action_id], req.session.eval_context(req.context))
        return clean_action(req, result)
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2011-07-22 12:52:14 +00:00
|
|
|
class Export(View):
    _cp_path = "/web/export"

    @openerpweb.jsonrequest
    def formats(self, req):
        """ Returns all valid export formats

        :returns: for each export format, a pair of identifier and printable name
        :rtype: [(str, str)]
        """
        return sorted([
            controller.fmt
            for path, controller in openerpweb.controllers_path.iteritems()
            if path.startswith(self._cp_path)
            if hasattr(controller, 'fmt')
        ], key=operator.itemgetter("label"))

    def fields_get(self, req, model):
        """ Plain fields_get for *model*, in the request's context. """
        return req.session.model(model).fields_get(
            False, req.session.eval_context(req.context))

    @openerpweb.jsonrequest
    def get_fields(self, req, model, prefix='', parent_name= '',
                   import_compat=True, parent_field_type=None,
                   exclude=None):
        """ Returns one level of the exportable-fields tree for *model*,
        formatted for the export field-selector widget.

        :param str prefix: technical path of the parent field, if any
        :param str parent_name: human-readable path of the parent field
        :param bool import_compat: restrict to import-compatible fields
            (writable, no m2o sub-fields, no database ids)
        :param parent_field_type: type of the field being expanded
        :param exclude: field names to skip in import-compatible mode
        """
        if import_compat and parent_field_type == "many2one":
            # m2o sub-fields are not importable, only the xmlid is
            fields = {}
        else:
            fields = self.fields_get(req, model)

        if import_compat:
            fields.pop('id', None)
        else:
            fields['.id'] = fields.pop('id', {'string': 'ID'})

        fields_sequence = sorted(fields.iteritems(),
            key=lambda field: field[1].get('string', ''))

        records = []
        for field_name, field in fields_sequence:
            if import_compat:
                if exclude and field_name in exclude:
                    continue
                if field.get('readonly'):
                    # If none of the field's states unsets readonly, skip the field
                    if all(dict(attrs).get('readonly', True)
                           for attrs in field.get('states', {}).values()):
                        continue

            path = prefix + (prefix and '/'or '') + field_name
            label = parent_name + (parent_name and '/' or '') + field['string']
            record = {'id': path, 'string': label,
                      'value': path, 'children': False,
                      'field_type': field.get('type'),
                      'required': field.get('required'),
                      'relation_field': field.get('relation_field')}
            records.append(record)

            # expand relational fields up to two levels deep
            if len(label.split('/')) < 3 and 'relation' in field:
                ref = field.pop('relation')
                record['value'] += '/id'
                record['params'] = {'model': ref, 'prefix': path, 'name': label}

                if not import_compat or field['type'] == 'one2many':
                    # m2m field in import_compat is childless
                    record['children'] = True

        return records

    @openerpweb.jsonrequest
    def namelist(self, req, model, export_id):
        """ Returns ``{'name', 'label'}`` pairs for the fields of a saved
        export list (ir.exports). """
        # TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
        export = req.session.model("ir.exports").read([export_id])[0]
        export_fields_list = req.session.model("ir.exports.line").read(
            export['export_fields'])

        fields_data = self.fields_info(
            req, model, map(operator.itemgetter('name'), export_fields_list))

        return [
            {'name': field['name'], 'label': fields_data[field['name']]}
            for field in export_fields_list
        ]

    def fields_info(self, req, model, export_fields):
        """ Maps each path of *export_fields* to its human-readable label,
        fetching each related model's fields_get only once. """
        info = {}
        fields = self.fields_get(req, model)

        # To make fields retrieval more efficient, fetch all sub-fields of a
        # given field at the same time. Because the order in the export list is
        # arbitrary, this requires ordering all sub-fields of a given field
        # together so they can be fetched at the same time
        #
        # Works the following way:
        # * sort the list of fields to export, the default sorting order will
        #   put the field itself (if present, for xmlid) and all of its
        #   sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
        #   all of the subfields on the other. This way, we have the field (for
        #   the xmlid) with length 1, and all of the subfields with the same
        #   base but a length "flag" of 2
        # * if we have a normal field (length 1), just add it to the info
        #   mapping (with its string) as-is
        # * otherwise, recursively call fields_info via graft_subfields.
        #   all graft_subfields does is take the result of fields_info (on the
        #   field's model) and prepend the current base (current field), which
        #   rebuilds the whole sub-tree for the field
        #
        # result: because we're not fetching the fields_get for half the
        # database models, fetching a namelist with a dozen fields (including
        # relational data) falls from ~6s to ~300ms (on the leads model).
        # export lists with no sub-fields (e.g. import_compatible lists with
        # no o2m) are even more efficient (from the same 6s to ~170ms, as
        # there's a single fields_get to execute)
        for (base, length), subfields in itertools.groupby(
                sorted(export_fields),
                lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
            subfields = list(subfields)
            if length == 2:
                # subfields is a seq of $base/*rest, and not loaded yet
                info.update(self.graft_subfields(
                    req, fields[base]['relation'], base, fields[base]['string'],
                    subfields
                ))
            else:
                info[base] = fields[base]['string']

        return info

    def graft_subfields(self, req, model, prefix, prefix_string, fields):
        """ Yields the labels of *fields* (paths rooted at *prefix*) on the
        related *model*, re-prefixed with the parent path/label. """
        subpaths = [field.split('/', 1)[1] for field in fields]
        return (
            (prefix + '/' + k, prefix_string + '/' + v)
            for k, v in self.fields_info(req, model, subpaths).iteritems())

    #noinspection PyPropertyDefinition
    @property
    def content_type(self):
        """ Provides the format's content type """
        raise NotImplementedError()

    def filename(self, base):
        """ Creates a valid filename for the format (with extension) from the
        provided base name (exension-less)
        """
        raise NotImplementedError()

    def from_data(self, fields, rows):
        """ Conversion method from OpenERP's export data to whatever the
        current export class outputs

        :params list fields: a list of fields to export
        :params list rows: a list of records to export
        :returns:
        :rtype: bytes
        """
        raise NotImplementedError()

    @openerpweb.httprequest
    def index(self, req, data, token):
        """ Performs the export: reads the selected records' data and
        streams it back in the concrete subclass's format, with a
        ``fileToken`` cookie for download detection. """
        params = simplejson.loads(data)
        model, fields, ids, domain, import_compat = \
            operator.itemgetter('model', 'fields', 'ids', 'domain',
                                'import_compat')(params)

        context = req.session.eval_context(req.context)
        Model = req.session.model(model)
        # no explicit ids: export everything matching the domain
        ids = ids or Model.search(domain, 0, False, False, context)

        field_names = map(operator.itemgetter('name'), fields)
        export_rows = Model.export_data(ids, field_names, context).get('datas',[])

        if import_compat:
            columns_headers = field_names
        else:
            columns_headers = [val['label'].strip() for val in fields]

        return req.make_response(self.from_data(columns_headers, export_rows),
            headers=[('Content-Disposition', 'attachment; filename="%s"' % self.filename(model)),
                     ('Content-Type', self.content_type)],
            cookies={'fileToken': int(token)})
|
2011-08-30 13:06:32 +00:00
|
|
|
|
|
|
|
class CSVExport(Export):
    _cp_path = '/web/export/csv'
    fmt = {'tag': 'csv', 'label': 'CSV'}

    @property
    def content_type(self):
        """ Provides the format's content type """
        return 'text/csv;charset=utf8'

    def filename(self, base):
        """ Appends the .csv extension to *base*. """
        return base + '.csv'

    def from_data(self, fields, rows):
        """ Serializes *rows* to a fully-quoted, UTF-8 encoded CSV document
        with *fields* as the header row. Newlines/tabs in cell values are
        flattened to spaces; False becomes an empty cell. """
        buffer = StringIO()
        writer = csv.writer(buffer, quoting=csv.QUOTE_ALL)

        writer.writerow([name.encode('utf-8') for name in fields])

        for record in rows:
            encoded = []
            for cell in record:
                if isinstance(cell, basestring):
                    cell = cell.replace('\n',' ').replace('\t',' ')
                    try:
                        cell = cell.encode('utf-8')
                    except UnicodeError:
                        # already a non-decodable byte string: keep as-is
                        pass
                if cell is False:
                    cell = None
                encoded.append(cell)
            writer.writerow(encoded)

        buffer.seek(0)
        try:
            return buffer.read()
        finally:
            buffer.close()
|
|
|
|
|
|
|
|
class ExcelExport(Export):
    _cp_path = '/web/export/xls'
    fmt = {
        'tag': 'xls',
        'label': 'Excel',
        'error': None if xlwt else "XLWT required"
    }

    @property
    def content_type(self):
        """ Provides the format's content type """
        return 'application/vnd.ms-excel'

    def filename(self, base):
        """ Appends the .xls extension to *base*. """
        return base + '.xls'

    def from_data(self, fields, rows):
        """ Serializes *rows* to a single-sheet XLS workbook with *fields*
        as the header row. Carriage returns in cell values are flattened;
        False becomes an empty cell. """
        workbook = xlwt.Workbook()
        sheet = workbook.add_sheet('Sheet 1')

        for column, header in enumerate(fields):
            sheet.write(0, column, header)
            sheet.col(column).width = 8000 # around 220 pixels

        wrapped = xlwt.easyxf('align: wrap yes')

        for row_number, record in enumerate(rows):
            for column, value in enumerate(record):
                if isinstance(value, basestring):
                    value = re.sub("\r", " ", value)
                if value is False:
                    value = None
                # +1: row 0 holds the headers
                sheet.write(row_number + 1, column, value, wrapped)

        buffer = StringIO()
        workbook.save(buffer)
        buffer.seek(0)
        try:
            return buffer.read()
        finally:
            buffer.close()
|
2011-09-06 11:57:54 +00:00
|
|
|
|
|
|
|
class Reports(View):
    _cp_path = "/web/report"
    # seconds between two report_get state polls
    POLLING_DELAY = 0.25
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }

    @openerpweb.httprequest
    def index(self, req, action, token):
        """ Launches the report described by the JSON-encoded *action*,
        polls the report service until it is done, then streams the result
        back as an attachment with a ``fileToken`` cookie.

        NOTE(review): polls with no timeout -- a report stuck server-side
        blocks this request indefinitely.
        """
        action = simplejson.loads(action)

        report_srv = req.session.proxy("report")
        context = req.session.eval_context(
            common.nonliterals.CompoundContext(
                req.context or {}, action[ "context"]))

        report_data = {}
        report_ids = context["active_ids"]
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        if 'datas' in action:
            if 'ids' in action['datas']:
                # explicit ids in the action take precedence over active_ids
                report_ids = action['datas'].pop('ids')
            report_data.update(action['datas'])

        report_id = report_srv.report(
            req.session._db, req.session._uid, req.session._password,
            action["report_name"], report_ids,
            report_data, context)

        # poll until the report service flags the render as finished
        while True:
            report_struct = report_srv.report_get(
                req.session._db, req.session._uid, req.session._password, report_id)
            if report_struct["state"]:
                break
            time.sleep(self.POLLING_DELAY)

        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            report = zlib.decompress(report)
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'octet-stream')
        return req.make_response(report,
             headers=[
                 ('Content-Disposition', 'attachment; filename="%s.%s"' % (action['report_name'], report_struct['format'])),
                 ('Content-Type', report_mimetype),
                 ('Content-Length', len(report))],
             cookies={'fileToken': int(token)})
|
2011-09-06 11:57:54 +00:00
|
|
|
|
2011-07-27 06:43:25 +00:00
|
|
|
class Import(View):
    """Controller for CSV import: previewing parsed rows and running the
    actual import against a model (see ``detect_data`` / ``import_data``)."""
    _cp_path = "/web/import"
2011-08-03 12:32:42 +00:00
|
|
|
def fields_get(self, req, model):
|
|
|
|
Model = req.session.model(model)
|
|
|
|
fields = Model.fields_get(False, req.session.eval_context(req.context))
|
|
|
|
return fields
|
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2011-10-24 14:06:58 +00:00
|
|
|
def detect_data(self, req, csvfile, csvsep=',', csvdel='"', csvcode='utf-8', jsonp='callback'):
|
2011-08-03 12:32:42 +00:00
|
|
|
try:
|
2011-09-23 08:59:24 +00:00
|
|
|
data = list(csv.reader(
|
|
|
|
csvfile, quotechar=str(csvdel), delimiter=str(csvsep)))
|
|
|
|
except csv.Error, e:
|
2011-09-08 10:24:00 +00:00
|
|
|
csvfile.seek(0)
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 08:59:24 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {
|
|
|
|
'message': 'Error parsing CSV file: %s' % e,
|
|
|
|
# decodes each byte to a unicode character, which may or
|
|
|
|
# may not be printable, but decoding will succeed.
|
|
|
|
# Otherwise simplejson will try to decode the `str` using
|
|
|
|
# utf-8, which is very likely to blow up on characters out
|
|
|
|
# of the ascii range (in range [128, 256))
|
|
|
|
'preview': csvfile.read(200).decode('iso-8859-1')}}))
|
|
|
|
|
2011-09-23 09:23:46 +00:00
|
|
|
try:
|
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps(
|
|
|
|
{'records': data[:10]}, encoding=csvcode))
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps({
|
|
|
|
'message': u"Failed to decode CSV file using encoding %s, "
|
|
|
|
u"try switching to a different encoding" % csvcode
|
|
|
|
}))
|
2011-08-03 12:32:42 +00:00
|
|
|
|
2011-07-27 06:43:25 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-23 12:22:02 +00:00
|
|
|
def import_data(self, req, model, csvfile, csvsep, csvdel, csvcode, jsonp,
|
|
|
|
meta):
|
2011-09-09 10:53:07 +00:00
|
|
|
modle_obj = req.session.model(model)
|
2011-09-23 12:22:02 +00:00
|
|
|
skip, indices, fields = operator.itemgetter('skip', 'indices', 'fields')(
|
|
|
|
simplejson.loads(meta))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
error = None
|
2011-09-08 10:24:00 +00:00
|
|
|
if not (csvdel and len(csvdel) == 1):
|
2011-09-23 12:22:02 +00:00
|
|
|
error = u"The CSV delimiter must be a single character"
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if not indices and fields:
|
|
|
|
error = u"You must select at least one field to import"
|
2011-09-08 10:24:00 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if error:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message': error}}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
# skip ignored records
|
|
|
|
data_record = itertools.islice(
|
|
|
|
csv.reader(csvfile, quotechar=str(csvdel), delimiter=str(csvsep)),
|
|
|
|
skip, None)
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
# if only one index, itemgetter will return an atom rather than a tuple
|
|
|
|
if len(indices) == 1: mapper = lambda row: [row[indices[0]]]
|
|
|
|
else: mapper = operator.itemgetter(*indices)
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
data = None
|
|
|
|
error = None
|
|
|
|
try:
|
|
|
|
# decode each data row
|
|
|
|
data = [
|
|
|
|
[record.decode(csvcode) for record in row]
|
|
|
|
for row in itertools.imap(mapper, data_record)
|
|
|
|
# don't insert completely empty rows (can happen due to fields
|
|
|
|
# filtering in case of e.g. o2m content rows)
|
|
|
|
if any(row)
|
|
|
|
]
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
error = u"Failed to decode CSV file using encoding %s" % csvcode
|
|
|
|
except csv.Error, e:
|
|
|
|
error = u"Could not process CSV file: %s" % e
|
2011-07-27 06:43:25 +00:00
|
|
|
|
|
|
|
# If the file contains nothing,
|
2011-09-23 12:22:02 +00:00
|
|
|
if not data:
|
|
|
|
error = u"File to import is empty"
|
|
|
|
if error:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message': error}}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
|
|
|
try:
|
2011-09-23 12:22:02 +00:00
|
|
|
(code, record, message, _nope) = modle_obj.import_data(
|
|
|
|
fields, data, 'init', '', False,
|
|
|
|
req.session.eval_context(req.context))
|
2011-08-16 06:08:35 +00:00
|
|
|
except xmlrpclib.Fault, e:
|
2011-09-23 12:22:02 +00:00
|
|
|
error = {"message": u"%s, %s" % (e.faultCode, e.faultString)}
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps({'error':error}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if code != -1:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-12 12:59:00 +00:00
|
|
|
jsonp, simplejson.dumps({'success':True}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
msg = u"Error during import: %s\n\nTrying to import record %r" % (
|
|
|
|
message, record)
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message':msg}}))
|