2013-11-15 13:26:26 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2014-03-24 15:17:28 +00:00
|
|
|
import datetime
|
|
|
|
import hashlib
|
2013-12-03 11:46:57 +00:00
|
|
|
import logging
|
2014-06-27 10:46:56 +00:00
|
|
|
import os
|
2014-01-24 10:46:10 +00:00
|
|
|
import re
|
2013-11-25 16:59:58 +00:00
|
|
|
import traceback
|
2014-06-27 10:46:56 +00:00
|
|
|
|
2013-12-03 17:17:14 +00:00
|
|
|
import werkzeug
|
2013-11-15 15:48:40 +00:00
|
|
|
import werkzeug.routing
|
2014-07-06 14:44:26 +00:00
|
|
|
import werkzeug.utils
|
2013-11-27 17:04:32 +00:00
|
|
|
|
2013-11-15 13:26:26 +00:00
|
|
|
import openerp
|
|
|
|
from openerp.addons.base import ir
|
2014-01-27 11:40:34 +00:00
|
|
|
from openerp.addons.base.ir import ir_qweb
|
2014-06-24 18:55:26 +00:00
|
|
|
from openerp.addons.website.models.website import slug, url_for, _UNSLUG_RE
|
2013-11-25 16:59:58 +00:00
|
|
|
from openerp.http import request
|
2014-06-27 10:46:56 +00:00
|
|
|
from openerp.tools import config
|
2013-11-25 16:59:58 +00:00
|
|
|
from openerp.osv import orm
|
2013-11-15 13:26:26 +00:00
|
|
|
|
2013-12-03 11:46:57 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-01-24 10:46:10 +00:00
|
|
|
class RequestUID(object):
    """Placeholder standing in for a real user id during URL routing.

    Routing converters run before authentication has decided the request
    uid, so they stash the raw URL value, the regex match and the
    converter itself on one of these objects; ``_postprocess_args``
    later swaps it for the actual ``request.uid``.

    Any keyword argument given to the constructor becomes an attribute.
    """

    def __init__(self, **kw):
        # Expose every keyword argument as an instance attribute.
        for attr_name, attr_value in kw.items():
            setattr(self, attr_name, attr_value)
|
2013-11-15 13:26:26 +00:00
|
|
|
class ir_http(orm.AbstractModel):
    """Website-aware HTTP dispatching, extending the base ``ir.http``.

    Adds on top of the stock dispatcher:
    - the ``model`` and ``page`` routing converters,
    - a public-user authentication fallback,
    - language-prefixed URL handling with bounded rerouting,
    - GeoIP lookup cached in the session,
    - direct serving of URL-addressed attachments,
    - website-styled error pages for website requests.
    """
    _inherit = 'ir.http'

    # Maximum number of times reroute() may re-enter _dispatch() for one request.
    rerouting_limit = 10
    # Lazily-initialised GeoIP resolver shared at class level:
    # None = not attempted yet, False = tried and unavailable.
    geo_ip_resolver = None

    def _get_converters(self):
        """Register the website routing converters on top of the base ones."""
        return dict(
            super(ir_http, self)._get_converters(),
            model=ModelConverter,
            page=PageConverter,
        )

    def _auth_method_public(self):
        """Authenticate as the shared public user when there is no session uid."""
        # TODO: select user_id from matching website
        if not request.session.uid:
            # Resolve the xml id of the public user to its database id.
            request.uid = self.pool['ir.model.data'].xmlid_to_res_id(request.cr, openerp.SUPERUSER_ID, 'base.public_user')
        else:
            request.uid = request.session.uid

    def _dispatch(self):
        """Dispatch the request, handling website routing concerns first.

        Finds the handler, records website/multilang flags, resolves GeoIP
        once per session, authenticates, sets the request language and, for
        unmatched language-prefixed paths, reroutes to the unprefixed path.
        Falls through to the parent dispatcher for the actual handling.
        """
        # reroute() calls _dispatch() again; only the very first pass has no
        # 'website' attribute yet, and only then do we (re)set request.lang.
        first_pass = not hasattr(request, 'website')
        request.website = None
        func = None
        try:
            func, arguments = self._find_handler()
            request.website_enabled = func.routing.get('website', False)
        except werkzeug.exceptions.NotFound:
            # either we have a language prefixed route, either a real 404
            # in all cases, website processes them
            request.website_enabled = True

        request.website_multilang = request.website_enabled and func and func.routing.get('multilang', True)

        # Resolve GeoIP once per session; the result (possibly {}) is cached.
        if 'geoip' not in request.session:
            record = {}
            if self.geo_ip_resolver is None:
                try:
                    import GeoIP
                    # updated database can be downloaded on MaxMind website
                    # http://dev.maxmind.com/geoip/legacy/install/city/
                    geofile = config.get('geoip_database', '/usr/share/GeoIP/GeoLiteCity.dat')
                    if os.path.exists(geofile):
                        self.geo_ip_resolver = GeoIP.open(geofile, GeoIP.GEOIP_STANDARD)
                    else:
                        # Mark as unavailable so we don't retry on every request.
                        self.geo_ip_resolver = False
                        logger.warning('GeoIP database file %r does not exists', geofile)
                except ImportError:
                    self.geo_ip_resolver = False
            if self.geo_ip_resolver and request.httprequest.remote_addr:
                record = self.geo_ip_resolver.record_by_addr(request.httprequest.remote_addr) or {}
            request.session['geoip'] = record

        if request.website_enabled:
            if func:
                self._authenticate(func.routing['auth'])
            else:
                # No handler found: authenticate as public so the website
                # machinery (404 page, language rerouting) can run.
                self._auth_method_public()

            # Redirect helper that localizes the target URL (url_for).
            request.redirect = lambda url, code=302: werkzeug.utils.redirect(url_for(url), code)
            request.website = request.registry['website'].get_current_website(request.cr, request.uid, context=request.context)
            if first_pass:
                request.lang = request.website.default_lang_code
            request.context['lang'] = request.lang
            if not func:
                # Unmatched path: maybe it is just prefixed with a language code.
                path = request.httprequest.path.split('/')
                langs = [lg[0] for lg in request.website.get_languages()]
                if path[1] in langs:
                    request.lang = request.context['lang'] = path.pop(1)
                    path = '/'.join(path) or '/'
                    if request.lang == request.website.default_lang_code:
                        # If language is in the url and it is the default language, redirect
                        # to url without language so google doesn't see duplicate content
                        return request.redirect(path + '?' + request.httprequest.query_string)
                    return self.reroute(path)
        return super(ir_http, self)._dispatch()

    def reroute(self, path):
        """Re-dispatch the request as if it had been made to ``path``.

        Guards against loops (a path may be rerouted only once) and against
        runaway chains (``rerouting_limit``), then patches the WSGI environ
        and re-enters _dispatch().
        """
        if not hasattr(request, 'rerouting'):
            request.rerouting = [request.httprequest.path]
        if path in request.rerouting:
            raise Exception("Rerouting loop is forbidden")
        request.rerouting.append(path)
        if len(request.rerouting) > self.rerouting_limit:
            raise Exception("Rerouting limit exceeded")
        request.httprequest.environ['PATH_INFO'] = path
        # void werkzeug cached_property. TODO: find a proper way to do this
        for key in ('path', 'full_path', 'url', 'base_url'):
            request.httprequest.__dict__.pop(key, None)

        return self._dispatch()

    def _postprocess_args(self, arguments, rule):
        """Finalize converter-produced arguments after routing.

        Substitutes the real request.uid into records browsed with a
        RequestUID placeholder, 404s when the rule cannot rebuild a URL from
        the arguments, and for multilang GET/HEAD requests redirects to the
        canonical (rebuilt) URL when it differs from the requested one.
        """
        super(ir_http, self)._postprocess_args(arguments, rule)

        for key, val in arguments.items():
            # Replace uid placeholder by the current request.uid
            if isinstance(val, orm.BaseModel) and isinstance(val._uid, RequestUID):
                arguments[key] = val.sudo(request.uid)

        try:
            _, path = rule.build(arguments)
            assert path is not None
        except Exception, e:
            # Arguments don't fit the rule (e.g. slug points to a missing
            # record) -> render the website 404 page.
            return self._handle_exception(e, code=404)

        if getattr(request, 'website_multilang', False) and request.httprequest.method in ('GET', 'HEAD'):
            # Compare unquoted forms so percent-encoding differences don't
            # trigger spurious redirects.
            generated_path = werkzeug.url_unquote_plus(path)
            current_path = werkzeug.url_unquote_plus(request.httprequest.path)
            if generated_path != current_path:
                if request.lang != request.website.default_lang_code:
                    path = '/' + request.lang + path
                if request.httprequest.query_string:
                    path += '?' + request.httprequest.query_string
                return werkzeug.utils.redirect(path)

    def _serve_attachment(self):
        """Serve an ir.attachment whose ``url`` matches the request path.

        Returns a werkzeug Response (possibly 304 via conditional headers)
        when a matching binary attachment exists, or None otherwise.
        """
        domain = [('type', '=', 'binary'), ('url', '=', request.httprequest.path)]
        # Search as superuser: URL-addressed attachments are public assets.
        attach = self.pool['ir.attachment'].search_read(request.cr, openerp.SUPERUSER_ID, domain, ['__last_update', 'datas', 'mimetype'], context=request.context)
        if attach:
            wdate = attach[0]['__last_update']
            datas = attach[0]['datas']
            response = werkzeug.wrappers.Response()
            server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
            try:
                response.last_modified = datetime.datetime.strptime(wdate, server_format + '.%f')
            except ValueError:
                # just in case we have a timestamp without microseconds
                response.last_modified = datetime.datetime.strptime(wdate, server_format)

            # ETag over the (still base64-encoded) payload; make_conditional
            # turns the response into a 304 when the client cache is fresh.
            response.set_etag(hashlib.sha1(datas).hexdigest())
            response.make_conditional(request.httprequest)

            if response.status_code == 304:
                return response

            response.mimetype = attach[0]['mimetype'] or 'application/octet-stream'
            response.data = datas.decode('base64')
            return response

    def _handle_exception(self, exception, code=500):
        """Render website-styled error pages for website requests.

        Non-website requests keep the stock behaviour. For website requests
        the parent handler is consulted first; if it raises or returns an
        exception, a 'website.<code>' QWeb template is rendered (falling back
        to 'website.http_error').
        """
        # This is done first as the attachment path may
        # not match any HTTP controller, so the request
        # may not be website-enabled.
        attach = self._serve_attachment()
        if attach:
            return attach

        is_website_request = bool(getattr(request, 'website_enabled', False) and request.website)
        if not is_website_request:
            # Don't touch non website requests exception handling
            return super(ir_http, self)._handle_exception(exception)
        else:
            try:
                response = super(ir_http, self)._handle_exception(exception)
                if isinstance(response, Exception):
                    exception = response
                else:
                    # if parent excplicitely returns a plain response, then we don't touch it
                    return response
            except Exception, e:
                exception = e

            values = dict(
                exception=exception,
                # NOTE(review): format_exc's positional parameter is `limit`,
                # not an exception; passing `exception` here only works because
                # Python 2 allows the int-vs-object comparison — confirm intent.
                traceback=traceback.format_exc(exception),
            )
            code = getattr(exception, 'code', code)

            if isinstance(exception, openerp.exceptions.AccessError):
                code = 403

            if isinstance(exception, ir_qweb.QWebException):
                values.update(qweb_exception=exception)
                # An AccessError buried in a template rendering is still a 403.
                if isinstance(exception.qweb.get('cause'), openerp.exceptions.AccessError):
                    code = 403

            if isinstance(exception, werkzeug.exceptions.HTTPException) and code is None:
                # Hand-crafted HTTPException likely coming from abort(),
                # usually for a redirect response -> return it directly
                return exception

            if code == 500:
                logger.error("500 Internal Server Error:\n\n%s", values['traceback'])
                if 'qweb_exception' in values:
                    # Offer the broken (customized, non-page) views for reset
                    # on the error page.
                    view = request.registry.get("ir.ui.view")
                    views = view._views_get(request.cr, request.uid, exception.qweb['template'], request.context)
                    to_reset = [v for v in views if v.model_data_id.noupdate is True and not v.page]
                    values['views'] = to_reset
            elif code == 403:
                logger.warn("403 Forbidden:\n\n%s", values['traceback'])

            values.update(
                status_message=werkzeug.http.HTTP_STATUS_CODES[code],
                status_code=code,
            )

            # Authentication may have failed before a uid was set; fall back
            # to the public user so the error template can render.
            if not request.uid:
                self._auth_method_public()

            try:
                html = request.website._render('website.%s' % code, values)
            except Exception:
                # No dedicated template for this status code (or it failed).
                html = request.website._render('website.http_error', values)
            return werkzeug.wrappers.Response(html, status=code, content_type='text/html;charset=utf-8')
|
2013-11-15 13:26:26 +00:00
|
|
|
|
|
|
|
class ModelConverter(ir.ir_http.ModelConverter):
    """Routing converter mapping slugged URL segments to browse records.

    Segments look like ``my-record-name-42`` where the trailing integer is
    the record id (pattern: ``_UNSLUG_RE``).
    """

    def __init__(self, url_map, model=False, domain='[]'):
        # `domain` is a *string* evaluated lazily in generate() with the
        # rule's args as namespace, so routes can declare dynamic domains.
        super(ModelConverter, self).__init__(url_map, model)
        self.domain = domain
        self.regex = _UNSLUG_RE.pattern

    def to_url(self, value):
        """Serialize a browse record into its slugged URL segment."""
        return slug(value)

    def to_python(self, value):
        """Deserialize a URL segment into a browse record.

        The record is browsed with a RequestUID placeholder instead of a
        real uid (authentication has not run yet at converter time);
        ir_http._postprocess_args() later substitutes the actual request.uid.
        """
        m = re.match(self.regex, value)
        _uid = RequestUID(value=value, match=m, converter=self)
        record_id = int(m.group(2))
        if record_id < 0:
            # limited support for negative IDs due to our slug pattern, assume abs() if not found
            if not request.registry[self.model].exists(request.cr, _uid, [record_id]):
                record_id = abs(record_id)
        return request.registry[self.model].browse(
            request.cr, _uid, record_id, context=request.context)

    def generate(self, cr, uid, query=None, args=None, context=None):
        """Yield sitemap entries ``{'loc': (id, name)}`` for records matching
        the converter domain, optionally filtered by ``query`` on the record
        display name.
        """
        obj = request.registry[self.model]
        # NOTE(review): eval of the route-declared domain string — safe only
        # while domains come from trusted module code, never from user input.
        domain = eval( self.domain, (args or {}).copy())
        if query:
            domain.append((obj._rec_name, 'ilike', '%'+query+'%'))
        for record in obj.search_read(cr, uid, domain=domain, fields=['write_date',obj._rec_name], context=context):
            # Records without a display name cannot be slugged; skip them.
            if record.get(obj._rec_name, False):
                yield {'loc': (record['id'], record[obj._rec_name])}
|
2013-11-15 15:48:40 +00:00
|
|
|
|
|
|
|
class PageConverter(werkzeug.routing.PathConverter):
    """ Only point of this converter is to bundle pages enumeration logic """

    def generate(self, cr, uid, query=None, args=None, context=None):
        """Yield sitemap entries for website pages (ir.ui.view with page=True).

        :param cr: database cursor
        :param uid: user id used for the view search
        :param query: optional substring filter applied to the page xml id
        :param args: unused, kept for converter-API compatibility
                     (fixed: was a mutable ``{}`` default)
        :param context: openerp context dict
        :return: yields dicts with 'loc' and optional '__priority'/'__lastmod'
        """
        View = request.registry['ir.ui.view']
        views = View.search_read(cr, uid, [['page', '=', True]],
            fields=['xml_id', 'priority', 'write_date'], order='name', context=context)
        for view in views:
            # Strip the 'website.' module prefix from the xml id when present.
            xml_id = view['xml_id']
            xid = xml_id[8:] if xml_id.startswith('website.') else xml_id
            # the 'page/homepage' url is indexed as '/', avoid having the same page referenced twice
            # when we will have an url mapping mechanism, replace this by a rule: page/homepage --> /
            if xid == 'homepage':
                continue
            if query and query.lower() not in xid.lower():
                continue
            record = {'loc': xid}
            # 16 is the default view priority; only report non-default ones,
            # normalized to the sitemap's [0, 1] priority scale.
            if view['priority'] != 16:
                record['__priority'] = min(round(view['priority'] / 32.0, 1), 1)
            if view['write_date']:
                # sitemap lastmod wants the date part only (YYYY-MM-DD)
                record['__lastmod'] = view['write_date'][:10]
            yield record
|