Merge commit 'origin/master' into mdv-gpl3

bzr revid: p_christ@hol.gr-20081207195509-2tb8fb3d7qr2xigt
bzr revid: p_christ@hol.gr-20081210075901-97mt6chs3ljokpsx
This commit is contained in:
P. Christeas 2008-12-10 09:59:01 +02:00
commit 281c7c980a
67 changed files with 19499 additions and 12245 deletions

View File

@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: OpenERP
Version: 5.0.0-alpha
Version: 5.0.0-rc1
Author: Tiny.be
Author-email: fp at tiny be
Maintainer: Tiny.be

View File

@ -21,11 +21,13 @@
##############################################################################
import os, sys, imp
from os.path import join as opj
import itertools
from sets import Set
import osv
import tools
import tools.osutil
import pooler
@ -33,11 +35,10 @@ import netsvc
from osv import fields
import zipfile
import release
logger = netsvc.Logger()
opj = os.path.join
_ad = os.path.abspath(opj(tools.config['root_path'], 'addons')) # default addons path (base)
ad = os.path.abspath(tools.config['addons_path']) # alternate addons path
@ -137,10 +138,41 @@ def get_module_path(module):
if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)):
return opj(_ad, module)
logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon:%s:module not found' % (module,))
logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon %s: module not found' % (module,))
return False
raise IOError, 'Module not found : %s' % module
def get_module_filetree(module, dir='.'):
path = get_module_path(module)
if not path:
return False
dir = os.path.normpath(dir)
if dir == '.': dir = ''
if dir.startswith('..') or dir[0] == '/':
raise Exception('Cannot access file outside the module')
if not os.path.isdir(path):
# zipmodule
zip = zipfile.ZipFile(path + ".zip")
files = ['/'.join(f.split('/')[1:]) for f in zip.namelist()]
else:
files = tools.osutil.listdir(path, True)
tree = {}
for f in files:
if not f.startswith(dir):
continue
f = f[len(dir)+int(not dir.endswith('/')):]
lst = f.split(os.sep)
current = tree
while len(lst) != 1:
current = current.setdefault(lst.pop(0), {})
current[lst.pop(0)] = None
return tree
def get_module_resource(module, *args):
"""Return the full path of a resource of the given module.
@ -184,7 +216,7 @@ def create_graph(module_list, force=None):
try:
info = eval(tools.file_open(terp_file).read())
except:
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:eval file %s' % (module, terp_file))
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon %s: eval file %s' % (module, terp_file))
raise
if info.get('installable', True):
packages.append((module, info.get('depends', []), info))
@ -192,7 +224,7 @@ def create_graph(module_list, force=None):
dependencies = dict([(p, deps) for p, deps, data in packages])
current, later = Set([p for p, dep, data in packages]), Set()
while packages and current > later:
package, deps, datas = packages[0]
package, deps, data = packages[0]
# if all dependencies of 'package' are already in the graph, add 'package' in the graph
if reduce(lambda x,y: x and y in graph, deps, True):
@ -203,24 +235,24 @@ def create_graph(module_list, force=None):
current.remove(package)
graph.addNode(package, deps)
node = Node(package, graph)
node.datas = datas
node.data = data
for kind in ('init', 'demo', 'update'):
if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
setattr(node, kind, True)
else:
later.add(package)
packages.append((package, deps, datas))
packages.append((package, deps, data))
packages.pop(0)
for package in later:
unmet_deps = filter(lambda p: p not in graph, dependencies[package])
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:Unmet dependencies: %s' % (package, ', '.join(unmet_deps)))
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps)))
return graph
def init_module_objects(cr, module_name, obj_list):
pool = pooler.get_pool(cr.dbname)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:creating or updating database tables' % module_name)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: creating or updating database tables' % module_name)
for obj in obj_list:
if hasattr(obj, 'init'):
obj.init(cr)
@ -234,12 +266,11 @@ def register_class(m):
global loaded
if m in loaded:
return
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:registering classes' % m)
sys.stdout.flush()
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: registering classes' % m)
loaded.append(m)
mod_path = get_module_path(m)
if not os.path.isfile(mod_path+'.zip'):
imp.load_module(m, *imp.find_module(m))
imp.load_module(m, *imp.find_module(m, [ad, _ad]))
else:
import zipimport
try:
@ -248,14 +279,140 @@ def register_class(m):
except zipimport.ZipImportError:
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Couldn\'t find module %s' % m)
def register_classes():
return
module_list = get_modules()
for package in create_graph(module_list):
m = package.name
register_class(m)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:registering classes' % m)
sys.stdout.flush()
class MigrationManager(object):
"""
This class manage the migration of modules
Migrations files must be python files containing a "migrate(cr, installed_version)" function.
Theses files must respect a directory tree structure: A 'migrations' folder which containt a
folder by version. Version can be 'module' version or 'server.module' version (in this case,
the files will only be processed by this version of the server). Python file names must start
by 'pre' or 'post' and will be executed, respectively, before and after the module initialisation
Example:
<moduledir>
`-- migrations
|-- 1.0
| |-- pre-update_table_x.py
| |-- pre-update_table_y.py
| |-- post-clean-data.py
| `-- README.txt # not processed
|-- 5.0.1.1 # files in this folder will be executed only on a 5.0 server
| |-- pre-delete_table_z.py
| `-- post-clean-data.py
`-- foo.py # not processed
This similar structure is generated by the maintenance module with the migrations files get by
the maintenance contract
"""
def __init__(self, cr, graph):
self.cr = cr
self.graph = graph
self.migrations = {}
self._get_files()
def _get_files(self):
"""
import addons.base.maintenance.utils as maintenance_utils
maintenance_utils.update_migrations_files(self.cr)
#"""
for pkg in self.graph:
self.migrations[pkg.name] = {}
if not (hasattr(pkg, 'update') or pkg.state == 'to upgrade'):
continue
self.migrations[pkg.name]['module'] = get_module_filetree(pkg.name, 'migrations') or {}
self.migrations[pkg.name]['maintenance'] = get_module_filetree('base', 'maintenance/migrations/' + pkg.name) or {}
def migrate_module(self, pkg, stage):
assert stage in ('pre', 'post')
stageformat = {'pre': '[>%s]',
'post': '[%s>]',
}
if not (hasattr(pkg, 'update') or pkg.state == 'to upgrade'):
return
def convert_version(version):
if version.startswith(release.major_version) and version != release.major_version:
return version # the version number already containt the server version
return "%s.%s" % (release.major_version, version)
def _get_migration_versions(pkg):
def __get_dir(tree):
return [d for d in tree if tree[d] is not None]
versions = list(set(
__get_dir(self.migrations[pkg.name]['module']) +
__get_dir(self.migrations[pkg.name]['maintenance'])
))
versions.sort(key=lambda k: parse_version(convert_version(k)))
return versions
def _get_migration_files(pkg, version, stage):
""" return a list of tuple (module, file)
"""
m = self.migrations[pkg.name]
lst = []
mapping = {'module': {'module': pkg.name, 'rootdir': opj('migrations')},
'maintenance': {'module': 'base', 'rootdir': opj('maintenance', 'migrations', pkg.name)},
}
for x in mapping.keys():
if version in m[x]:
for f in m[x][version]:
if m[x][version][f] is not None:
continue
if not f.startswith(stage + '-'):
continue
lst.append((mapping[x]['module'], opj(mapping[x]['rootdir'], version, f)))
return lst
def mergedict(a,b):
a = a.copy()
a.update(b)
return a
from tools.parse_version import parse_version
parsed_installed_version = parse_version(pkg.latest_version)
current_version = parse_version(convert_version(pkg.data.get('version', '0')))
versions = _get_migration_versions(pkg)
for version in versions:
if parsed_installed_version < parse_version(convert_version(version)) <= current_version:
strfmt = {'addon': pkg.name,
'stage': stage,
'version': stageformat[stage] % version,
}
for modulename, pyfile in _get_migration_files(pkg, version, stage):
name, ext = os.path.splitext(os.path.basename(pyfile))
if ext.lower() != '.py':
continue
fp = tools.file_open(opj(modulename, pyfile))
mod = None
try:
mod = imp.load_source(name, pyfile, fp)
logger.notifyChannel('migration', netsvc.LOG_INFO, 'addon %(addon)s: Running migration %(version)s %(name)s"' % mergedict({'name': mod.__name__},strfmt))
mod.migrate(self.cr, pkg.latest_version)
except ImportError:
logger.notifyChannel('migration', netsvc.LOG_ERROR, 'addon %(addon)s: Unable to load %(stage)-migration file %(file)s' % mergedict({'file': opj(modulename,pyfile)}, strfmt))
raise
except AttributeError:
logger.notifyChannel('migration', netsvc.LOG_ERROR, 'addon %(addon)s: Each %(stage)-migration file must have a "migrate(cr, installed_version)" function' % strfmt)
except:
raise
fp.close()
del mod
def load_module_graph(cr, graph, status=None, **kwargs):
# **kwargs is passed directly to convert_xml_import
@ -265,67 +422,94 @@ def load_module_graph(cr, graph, status=None, **kwargs):
status = status.copy()
package_todo = []
statusi = 0
pool = pooler.get_pool(cr.dbname)
# update the graph with values from the database (if exist)
## First, we set the default values for each package in graph
additional_data = dict.fromkeys([p.name for p in graph], {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'latest_version': None})
## Then we get the values from the database
cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version'
' FROM ir_module_module'
' WHERE name in (%s)' % (','.join(['%s'] * len(graph))),
additional_data.keys()
)
## and we update the default values with values from the database
additional_data.update(dict([(x.pop('name'), x) for x in cr.dictfetchall()]))
for package in graph:
for k, v in additional_data[package.name].items():
setattr(package, k, v)
migrations = MigrationManager(cr, graph)
for package in graph:
status['progress'] = (float(statusi)+0.1)/len(graph)
m = package.name
mid = package.id
migrations.migrate_module(package, 'pre')
register_class(m)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s' % m)
sys.stdout.flush()
pool = pooler.get_pool(cr.dbname)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s' % m)
modules = pool.instanciate(m, cr)
cr.execute('select id from ir_module_module where name=%s', (m,))
mid = int(cr.rowcount and cr.fetchone()[0] or 0)
cr.execute('select state, demo from ir_module_module where id=%d', (mid,))
(package_state, package_demo) = (cr.rowcount and cr.fetchone()) or ('uninstalled', False)
idref = {}
status['progress'] = (float(statusi)+0.4)/len(graph)
if hasattr(package, 'init') or hasattr(package, 'update') or package_state in ('to install', 'to upgrade'):
if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'):
init_module_objects(cr, m, modules)
for kind in ('init', 'update'):
for filename in package.datas.get('%s_xml' % kind, []):
for filename in package.data.get('%s_xml' % kind, []):
mode = 'update'
if hasattr(package, 'init') or package_state=='to install':
if hasattr(package, 'init') or package.state=='to install':
mode = 'init'
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, filename))
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: loading %s' % (m, filename))
name, ext = os.path.splitext(filename)
fp = tools.file_open(opj(m, filename))
if ext == '.csv':
tools.convert_csv_import(cr, m, os.path.basename(filename), tools.file_open(opj(m, filename)).read(), idref, mode=mode)
tools.convert_csv_import(cr, m, os.path.basename(filename), fp.read(), idref, mode=mode)
elif ext == '.sql':
queries = tools.file_open(opj(m, filename)).read().split(';')
queries = fp.read().split(';')
for query in queries:
new_query = ' '.join(query.split())
if new_query:
cr.execute(new_query)
else:
tools.convert_xml_import(cr, m, tools.file_open(opj(m, filename)), idref, mode=mode, **kwargs)
if hasattr(package, 'demo') or (package_demo and package_state != 'installed'):
tools.convert_xml_import(cr, m, fp, idref, mode=mode, **kwargs)
fp.close()
if hasattr(package, 'demo') or (package.dbdemo and package.state != 'installed'):
status['progress'] = (float(statusi)+0.75)/len(graph)
for xml in package.datas.get('demo_xml', []):
for xml in package.data.get('demo_xml', []):
name, ext = os.path.splitext(xml)
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, xml))
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: loading %s' % (m, xml))
fp = tools.file_open(opj(m, xml))
if ext == '.csv':
tools.convert_csv_import(cr, m, os.path.basename(xml), tools.file_open(opj(m, xml)).read(), idref, noupdate=True)
tools.convert_csv_import(cr, m, os.path.basename(xml), fp.read(), idref, noupdate=True)
else:
tools.convert_xml_import(cr, m, tools.file_open(opj(m, xml)), idref, noupdate=True, **kwargs)
tools.convert_xml_import(cr, m, fp, idref, noupdate=True, **kwargs)
fp.close()
cr.execute('update ir_module_module set demo=%s where id=%d', (True, mid))
package_todo.append(package.name)
cr.execute("update ir_module_module set state='installed' where state in ('to upgrade', 'to install') and id=%d", (mid,))
ver = release.major_version + '.' + package.data.get('version', '1.0')
cr.execute("update ir_module_module set state='installed', latest_version=%s where id=%d", (ver, mid,))
cr.commit()
# Update translations for all installed languages
# Set new modules and dependencies
modobj = pool.get('ir.module.module')
# Update translations for all installed languages
if modobj:
modobj.update_translations(cr, 1, [mid], None)
cr.commit()
migrations.migrate_module(package, 'post')
statusi+=1
cr.execute("""select model,name from ir_model where id not in (select model_id from ir_model_access)""")
for (model,name) in cr.fetchall():
logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon:object %s (%s) has no access rules!' % (model,name))
logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon object %s (%s) has no access rules!' % (model,name))
pool = pooler.get_pool(cr.dbname)
cr.execute('select * from ir_model where state=%s', ('manual',))
@ -342,10 +526,23 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
force = []
if force_demo:
force.append('demo')
pool = pooler.get_pool(cr.dbname)
if update_module:
for module in tools.config['init']:
cr.execute('update ir_module_module set state=%s where state=%s and name=%s', ('to install', 'uninstalled', module))
cr.commit()
register_class('base')
pool.instanciate('base', cr)
modobj = pool.get('ir.module.module')
modobj.update_list(cr, 1)
mids = modobj.search(cr, 1, [('state','in',('installed','to install'))])
for m in modobj.browse(cr, 1, mids):
for dep in m.dependencies_id:
if dep.state=='uninstalled':
modobj.button_install(cr, 1, [m.id])
cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade','to remove')")
else:
cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade', 'to remove')")
@ -354,7 +551,7 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
report = tools.assertion_report()
load_module_graph(cr, graph, status, report=report)
if report.get_report():
logger.notifyChannel('init', netsvc.LOG_INFO, 'assert:%s' % report)
logger.notifyChannel('init', netsvc.LOG_INFO, 'assert: %s' % report)
for kind in ('init', 'demo', 'update'):
tools.config[kind]={}

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -23,7 +23,7 @@
import ir
import module
import res
import maintenance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -32,6 +32,7 @@
"base_menu.xml",
"security/base_security.xml",
"res/res_security.xml",
"maintenance/maintenance_security.xml",
],
"demo_xml" : [
"base_demo.xml",
@ -59,6 +60,7 @@
"res/partner/partner_data.xml",
"res/ir_property_view.xml",
"security/base_security.xml",
"maintenance/maintenance_view.xml",
"security/ir.model.access.csv",
],
"active": True,

View File

@ -27,6 +27,7 @@ CREATE TABLE ir_model (
id serial,
model varchar(64) DEFAULT ''::varchar NOT NULL,
name varchar(64),
state varchar(16),
info text,
primary key(id)
);
@ -196,7 +197,7 @@ create table wkf_activity
signal_send varchar(32) default null,
flow_start boolean default False,
flow_stop boolean default False,
action varchar(64) default null,
action text default null,
primary key(id)
);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -505,8 +505,8 @@
<form string="Attachments">
<group colspan="4" col="6">
<field name="name" select="1" />
<field name="create_uid" select="1"/>
<field name="create_date" select="1"/>
<field name="create_uid" select="2"/>
<field name="create_date" select="2"/>
</group>
<notebook colspan="4">
<page string="Attachment">
@ -517,7 +517,7 @@
</group>
<group col="2" colspan="2">
<separator string="Attached To" colspan="2"/>
<field name="res_model" select="1"/>
<field name="res_model" select="2"/>
<field name="res_id"/>
</group>
<separator string="Preview" colspan="4"/>
@ -1155,19 +1155,23 @@
<field name="trigger_name" select="2"/>
</page>
<page string="Email / SMS" attrs="{'invisible':[('state','=','python'),('state','=','dummy'),('state','=','trigger'), ('state','=','object_create'), ('state','=','object_write'), ('state','=','client_action'), ('state','=','other')]}">
<separator colspan="4" string="Email Configuration"/>
<field name="address" domain="[('model_id','=',model_id)]"/>
<field name="sms" colspan="4" attrs="{'readonly':[('state','=','python'), ('state','=','email'), ('state','=','dummy'),('state','=','trigger'), ('state','=','object_create'), ('state','=','object_write'), ('state','=','client_action'), ('state','=','other')]}"/>
<field name="message" select="2" colspan="4" attrs="{'readonly':[('state','=','python'), ('state','=','dummy'),('state','=','trigger'), ('state','=','object_create'), ('state','=','object_write'), ('state','=','sms'), ('state','=','client_action'), ('state','=','other')]}" />
<newline/>
<label colspan="4" string="Access all the fields related to the current object easily just by defining name of the attribute, i.e. [[partner_id.name]] for Invoice Object"/>
<page string="Action to Launch" attrs="{'invisible':[('state','!=','client_action')]}">
<field name="action_id" select="2" colspan="4"/>
</page>
<page string="Create / Write" attrs="{'invisible':[('state','=','python'),('state','=','dummy'),('state','=','trigger'), ('state','=','sms'), ('state','=','email'), ('state','=','client_action'), ('state','=','other')]}">
<page string="Email / SMS" attrs="{'invisible':[('state','!=','sms'),('state','!=','email')]}">
<separator colspan="4" string="Email Configuration"/>
<field name="address" domain="[('model_id','=',model_id)]"/>
<field name="sms" colspan="4" attrs="{'readonly':[('state','!=','sms')]}"/>
<field name="message" select="2" colspan="4" attrs="{'readonly':[('state','!=','email')]}" />
<newline/>
<label colspan="4" string="Access all the fields related to the current object using expression in double brackets, i.e. [[ object.partner_id.name ]]" align="0.0"/>
</page>
<page string="Create / Write" attrs="{'invisible':[('state','!=','object_create'),('state','!=','object_write')]}">
<separator colspan="4" string="Fields Mapping"/>
<field name="otype"/>
<field name="srcmodel_id" select="2" attrs="{'readonly':[('type','=','copy'),('state','=','write_create')]}"/>
<field name="srcmodel_id" select="2"/>
<field name="fields_lines" nolabel="1" select="2" colspan="4">
<tree string="Field Mappings" editable="top">
<field name="col1" domain="[('model_id','=',parent.srcmodel_id or parent.model_id)]"/>
@ -1180,6 +1184,7 @@
<field name="value" colsapan="4"/>
</form>
</field>
<label colspan="4" string="If you use a formula type, use a python expression using the variable 'object'." align="0.0"/>
</page>
<page string="Other Actions" attrs="{'invisible':[('state','!=','other')]}">
@ -1290,7 +1295,7 @@
<separator colspan="4" string=""/>
<label string="" colspan="2"/>
<group colspan="2" col="2">
<button icon="gtk-cancel" special="cancel" string="Skip Step"/>
<button icon="gtk-cancel" name="button_skip" string="Skip Step" type="object"/>
<button icon="gtk-go-forward" name="button_continue" string="Continue" type="object"/>
</group>
</group>

View File

@ -287,7 +287,7 @@ class ir_model_fields(osv.osv):
_inherit = 'ir.model.fields'
_rec_name = 'field_description'
_columns = {
'complete_name': fields.char('Complete Name', required=True, size=64, select=1),
'complete_name': fields.char('Complete Name', size=64, select=1),
}
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=80):
@ -371,6 +371,7 @@ class actions_server(osv.osv):
_columns = {
'name': fields.char('Action Name', required=True, size=64),
'state': fields.selection([
('client_action','Client Action'),
('python','Python Code'),
('dummy','Dummy'),
('trigger','Trigger'),
@ -378,12 +379,12 @@ class actions_server(osv.osv):
('sms','SMS'),
('object_create','Create Object'),
('object_write','Write Object'),
('client_action','Client Action'),
('other','Others Actions'),
], 'Action State', required=True, size=32, change_default=True),
], 'Action State', required=True, size=32),
'code': fields.text('Python Code'),
'sequence': fields.integer('Sequence'),
'model_id': fields.many2one('ir.model', 'Object', required=True),
'action_id': fields.many2one('ir.actions.actions', 'Client Action'),
'trigger_name': fields.char('Trigger Name', size=128),
'trigger_obj_id': fields.reference('Trigger On', selection=model_get, size=128),
'message': fields.text('Message', translate=True),
@ -438,26 +439,12 @@ class actions_server(osv.osv):
def merge_message(self, cr, uid, keystr, action, context):
logger = netsvc.Logger()
def merge(match):
obj_pool = self.pool.get(action.model_id.model)
id = context.get('active_id')
obj = obj_pool.browse(cr, uid, id)
return eval(match[2:-2], {'object':obj, 'context': context,'time':time})
field = match.group()
field = field.replace('[','')
field = field.replace(']','')
field = field.strip()
fields = field.split('.')
for field in fields:
try:
obj = getattr(obj, field)
except Exception,e :
logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (match.group()))
return str(obj)
com = re.compile('\[\[(.+?)\]\]')
com = re.compile('(\[\[.+?\]\])')
message = com.sub(merge, keystr)
return message
@ -471,6 +458,10 @@ class actions_server(osv.osv):
def run(self, cr, uid, ids, context={}):
logger = netsvc.Logger()
for action in self.browse(cr, uid, ids, context):
if action.state=='client_action':
if not action.action_id:
raise osv.except_osv(_('Error'), _("Please specify an action to launch !"))
return self.pool.get(action.action_id.type).read(cr, uid, action.action_id.id, context=context)
if action.state=='python':
localdict = {
'self': self.pool.get(action.model_id.model),
@ -490,9 +481,7 @@ class actions_server(osv.osv):
address = self.get_field_value(cr, uid, action, context)
if not address:
raise osv.except_osv(_('Error'), _("Please specify the Partner Email address !"))
body = self.merge_message(cr, uid, str(action.message), action, context)
if tools.email_send_attach(user, address, subject, body, debug=False) == True:
logger.notifyChannel('email', netsvc.LOG_INFO, 'Email successfully send to : %s' % (address))
else:
@ -537,8 +526,9 @@ class actions_server(osv.osv):
for exp in action.fields_lines:
euq = exp.value
if exp.type == 'equation':
expr = self.merge_message(cr, uid, euq, action, context)
expr = eval(expr)
obj_pool = self.pool.get(action.model_id.model)
obj = obj_pool.browse(cr, uid, context['active_id'], context=context)
expr = eval(euq, {'context':context, 'object': obj})
else:
expr = exp.value
res[exp.col1.name] = expr
@ -550,8 +540,9 @@ class actions_server(osv.osv):
for exp in action.fields_lines:
euq = exp.value
if exp.type == 'equation':
expr = self.merge_message(cr, uid, euq, action, context)
expr = eval(expr)
obj_pool = self.pool.get(action.model_id.model)
obj = obj_pool.browse(cr, uid, context['active_id'], context=context)
expr = eval(euq, {'context':context, 'object': obj})
else:
expr = exp.value
res[exp.col1.name] = expr
@ -591,7 +582,7 @@ class ir_actions_todo(osv.osv):
_name = 'ir.actions.todo'
_columns={
'name':fields.char('Name',size=64,required=True, select=True),
'note':fields.text('Text'),
'note':fields.text('Text', translate=True),
'start_date': fields.datetime('Start Date'),
'end_date': fields.datetime('End Date'),
'action_id':fields.many2one('ir.actions.act_window', 'Action', select=True,required=True, ondelete='cascade'),
@ -651,6 +642,21 @@ class ir_actions_configuration_wizard(osv.osv_memory):
'item_id':_get_action,
'name':_get_action_name,
}
def button_next(self,cr,uid,ids,context=None):
user_action=self.pool.get('res.users').browse(cr,uid,uid)
act_obj=self.pool.get(user_action.menu_id.type)
action_ids=act_obj.search(cr,uid,[('name','=',user_action.menu_id.name)])
action_open=act_obj.browse(cr,uid,action_ids)[0]
if context.get('menu',False):
return{
'view_type': action_open.view_type,
'view_id':action_open.view_id and [action_open.view_id.id] or False,
'res_model': action_open.res_model,
'type': action_open.type,
'domain':action_open.domain
}
return {'type':'ir.actions.act_window_close'}
def button_skip(self,cr,uid,ids,context=None):
item_obj = self.pool.get('ir.actions.todo')
item_id=self.read(cr,uid,ids)[0]['item_id']
@ -666,7 +672,7 @@ class ir_actions_configuration_wizard(osv.osv_memory):
'type': 'ir.actions.act_window',
'target':'new',
}
return {'type':'ir.actions.act_window_close'}
return self.button_next(cr, uid, ids, context)
def button_continue(self, cr, uid, ids, context=None):
item_obj = self.pool.get('ir.actions.todo')
@ -684,19 +690,7 @@ class ir_actions_configuration_wizard(osv.osv_memory):
'type': item.action_id.type,
'target':item.action_id.target,
}
user_action=self.pool.get('res.users').browse(cr,uid,uid)
act_obj=self.pool.get(user_action.menu_id.type)
action_ids=act_obj.search(cr,uid,[('name','=',user_action.menu_id.name)])
action_open=act_obj.browse(cr,uid,action_ids)[0]
if context.get('menu',False):
return{
'view_type': action_open.view_type,
'view_id':action_open.view_id and [action_open.view_id.id] or False,
'res_model': action_open.res_model,
'type': action_open.type,
'domain':action_open.domain
}
return {'type':'ir.actions.act_window_close' }
return self.button_next(cr, uid, ids, context)
ir_actions_configuration_wizard()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -52,12 +52,11 @@ class ir_attachment(osv.osv):
cache = {}
ima = self.pool.get('ir.model.access')
for m in models:
if m['res_model'] in cache:
if not cache[m['res_model']]:
ids.remove(m['id'])
continue
cache[m['res_model']] = ima.check(cr, uid, m['res_model'], 'read',
raise_exception=False)
if m['res_model'] not in cache:
cache[m['res_model']] = ima.check(cr, uid, m['res_model'], 'read',
raise_exception=False)
if not cache[m['res_model']]:
ids.remove(m['id'])
if count:
return len(ids)

View File

@ -27,6 +27,7 @@ from osv.orm import except_orm, browse_record
import time
import tools
from tools import config
import pooler
def _get_fields_type(self, cr, uid, context=None):
@ -74,7 +75,10 @@ class ir_model(osv.osv):
vals['state']='manual'
res = super(ir_model,self).create(cr, user, vals, context)
if vals.get('state','base')=='manual':
pooler.restart_pool(cr.dbname)
self.instanciate(cr, user, vals['model'], context)
self.pool.get(vals['model']).__init__(self.pool, cr)
self.pool.get(vals['model'])._auto_init(cr,{})
#pooler.restart_pool(cr.dbname)
return res
def instanciate(self, cr, user, model, context={}):
@ -83,10 +87,7 @@ class ir_model(osv.osv):
x_custom_model._name = model
x_custom_model._module = False
x_custom_model.createInstance(self.pool, '', cr)
if 'x_name' in x_custom_model._columns:
x_custom_model._rec_name = 'x_name'
else:
x_custom_model._rec_name = x_custom_model._columns.keys()[0]
x_custom_model._rec_name = 'x_name'
ir_model()
@ -561,18 +562,19 @@ class ir_model_data(osv.osv):
wf_service.trg_write(uid, model, id, cr)
cr.commit()
for (model,id) in self.unlink_mark.keys():
if self.pool.get(model):
logger = netsvc.Logger()
logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (id, model))
try:
self.pool.get(model).unlink(cr, uid, [id])
if self.unlink_mark[(model,id)]:
self.unlink(cr, uid, [self.unlink_mark[(model,id)]])
cr.execute('DELETE FROM ir_values WHERE value=%s', (model+','+str(id),))
cr.commit()
except:
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\tThere should be some relation that points to this resource\tYou should manually fix this and restart --update=module' % (id, model))
if not config.get('import_partial', False):
for (model,id) in self.unlink_mark.keys():
if self.pool.get(model):
logger = netsvc.Logger()
logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (id, model))
try:
self.pool.get(model).unlink(cr, uid, [id])
if self.unlink_mark[(model,id)]:
self.unlink(cr, uid, [self.unlink_mark[(model,id)]])
cr.execute('DELETE FROM ir_values WHERE value=%s', (model+','+str(id),))
cr.commit()
except:
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\tThere should be some relation that points to this resource\tYou should manually fix this and restart --update=module' % (id, model))
return True
ir_model_data()

View File

@ -166,14 +166,19 @@ class ir_values(osv.osv):
result = []
ok = True
result_ids = {}
while ok:
if not where_opt:
cr.execute('select id from ir_values where ' +\
cr.execute('select id,name,value,object,meta, key from ir_values where ' +\
' and '.join(where1)+' and user_id is null', where2)
else:
cr.execute('select id from ir_values where ' +\
cr.execute('select id,name,value,object,meta, key from ir_values where ' +\
' and '.join(where1+where_opt), where2)
result.extend([x[0] for x in cr.fetchall()])
for rec in cr.fetchall():
if rec[0] in result_ids:
continue
result.append(rec)
result_ids[rec[0]] = True
if len(where_opt):
where_opt.pop()
else:
@ -184,13 +189,6 @@ class ir_values(osv.osv):
if not result:
return []
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
cr.execute('select id,name,value,object,meta, key ' \
'from ir_values ' \
'where id in ('+','.join(map(str,result))+') ' \
'and (company_id is null or company_id = %d) '\
'ORDER BY user_id', (cid,))
result = cr.fetchall()
def _result_get(x, keys):
if x[1] in keys:
@ -199,18 +197,19 @@ class ir_values(osv.osv):
if x[3]:
model,id = x[2].split(',')
id = int(id)
datas = self.pool.get(model).read(cr, uid, id, False, context)
fields = self.pool.get(model).fields_get_keys(cr, uid)
pos = 0
while pos<len(fields):
if fields[pos] in ('report_sxw_content', 'report_rml_content',
'report_sxw', 'report_rml', 'report_sxw_content_data',
'report_rml_content_data'):
del fields[pos]
else:
pos+=1
datas = self.pool.get(model).read(cr, uid, [id], fields, context)[0]
if not datas:
#ir_del(cr, uid, x[0])
return False
def clean(x):
for key in ('report_sxw_content', 'report_rml_content',
'report_sxw', 'report_rml', 'report_sxw_content_data',
'report_rml_content_data'):
if key in x:
del x[key]
return x
datas = clean(datas)
else:
datas = pickle.loads(x[2])
if meta:

View File

@ -21,8 +21,8 @@
</field>
<label colspan="2" string=""/>
<group col="2" colspan="2">
<button special="cancel" string="Cancel"/>
<button name="menu_create" string="Create Menu" type="object"/>
<button special="cancel" string="Cancel" icon="gtk-cancel"/>
<button name="menu_create" string="Create Menu" type="object" icon="gtk-ok"/>
</group>
</form>
</field>

View File

@ -38,7 +38,7 @@ def graph_get(cr, graph, wkf_id, nested=False, workitem={}):
if n['subflow_id'] and nested:
cr.execute('select * from wkf where id=%d', (n['subflow_id'],))
wkfinfo = cr.dictfetchone()
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize='12', label = "Subflow: "+n['name']+'\\nOSV: '+wkfinfo['osv'])
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize='12', label = """\"Subflow: %s\\nOSV: %s\"""" % ( n['name'], wkfinfo['osv']) )
(s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem)
graph.add_subgraph(graph2)
actfrom[n['id']] = s2
@ -143,12 +143,9 @@ showpage'''
showpage'''
else:
inst_id = inst_id[0]
graph = pydot.Dot(fontsize='16', label="""\\\n\\nWorkflow: %s\\n OSV: %s""" % (wkfinfo['name'],wkfinfo['osv']))
graph.set('size', '10.7,7.3')
graph.set('center', '1')
graph.set('ratio', 'auto')
graph.set('rotate', '90')
graph.set('rankdir', 'LR')
graph = pydot.Dot(fontsize='16', label="""\\\n\\nWorkflow: %s\\n OSV: %s""" % (wkfinfo['name'],wkfinfo['osv']),
size='10.7, 7.3', center='1', ratio='auto', rotate='90', rankdir='LR'
)
graph_instance_get(cr, graph, inst_id, data.get('nested', False))
ps_string = graph.create(prog='dot', format='ps')
except Exception, e:

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -27,7 +27,7 @@ import netsvc
class workflow(osv.osv):
_name = "workflow"
_table = "wkf"
_log_access = False
# _log_access = False
_columns = {
'name': fields.char('Name', size=64, required=True),
'osv': fields.char('Resource Object', size=64, required=True,select=True),
@ -49,7 +49,7 @@ class workflow(osv.osv):
# scale = (vertical-distance, horizontal-distance, min-node-width(optional), min-node-height(optional), margin(default=20))
#
def graph_get(self, cr, uid, id, scale, context={}):
nodes= []
@ -67,24 +67,24 @@ class workflow(osv.osv):
else:
if not a.in_transitions:
no_ancester.append(a.id)
for t in a.out_transitions:
transitions.append((a.id, t.act_to.id))
tres[t.id] = (a.id, t.act_to.id)
g = graph(nodes, transitions, no_ancester)
g.process(start)
g.scale(*scale)
result = g.result_get()
results = {}
for node in nodes_name:
results[str(node[0])] = result[node[0]]
results[str(node[0])]['name'] = node[1]
return {'nodes': results, 'transitions': tres}
def create(self, cr, user, vals, context=None):
if not context:
@ -97,7 +97,7 @@ workflow()
class wkf_activity(osv.osv):
_name = "workflow.activity"
_table = "wkf_activity"
_log_access = False
# _log_access = False
_columns = {
'name': fields.char('Name', size=64, required=True),
'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
@ -123,7 +123,7 @@ wkf_activity()
class wkf_transition(osv.osv):
_table = "wkf_transition"
_name = "workflow.transition"
_log_access = False
# _log_access = False
_rec_name = 'signal'
_columns = {
'trigger_model': fields.char('Trigger Object', size=128),

View File

@ -0,0 +1,33 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2004-2008 Tiny SPRL (http://tiny.be) All Rights Reserved.
#
# $Id$
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
###############################################################################
import maintenance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -0,0 +1,126 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2004-2008 TINY SPRL. (http://tiny.be) All Rights Reserved.
#
# $Id$
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import osv, fields
import pooler
import time
import math
from tools import config
import xmlrpclib
class maintenance_contract(osv.osv):
    """Registered maintenance contract and its validation check.

    ``_test_maintenance`` contacts a remote OpenERP server over XML-RPC and
    asks its ``maintenance.maintenance`` model whether the locally installed
    modules are covered by this contract.
    """
    _name = "maintenance.contract"
    _description = "Maintenance Contract"
    _columns = {
        # All fields are readonly: records are meant to be filled in by the
        # validation machinery, not edited by hand.
        'name' : fields.char('Contract ID', size=256, required=True, readonly=True),
        'password' : fields.char('Password', size=64, invisible=True, required=True, readonly=True),
        'date_start' : fields.date('Starting Date', readonly=True),
        'date_stop' : fields.date('Ending Date', readonly=True),
        'modules': fields.text('Covered Modules')
    }
    _defaults = {
        # Default the (invisible) password to an empty string.
        'password' : lambda obj,cr,uid,context={} : '',
    }
    def _test_maintenance(self, cr, uid, ids, context):
        """Check contract ``ids[0]`` against a remote maintenance server.

        Collects every installed module (name + installed_version), logs in
        to the remote server over XML-RPC and calls
        ``maintenance.maintenance.check_contract`` there.  When the context
        carries an ``active_id``, an ``osv.except_osv`` popup is always
        raised, whatever the returned ``status`` ('ko', 'partial', or a
        success message otherwise); without ``active_id`` the raw RPC result
        dict is returned.

        NOTE(review): ``_`` (gettext) is used in the raises below but no
        translation import is visible in this file's header -- confirm it is
        provided elsewhere, otherwise these raises fail with a NameError.
        """
        # NOTE(review): hard-coded development coordinates and 'admin'
        # credentials -- presumably placeholders meant to become configurable.
        remote_db='trunk'
        remote_server='localhost'
        port=8069
        module_obj=self.pool.get('ir.module.module')
        contract_obj=self.pool.get('maintenance.contract')
        module_ids=module_obj.search(cr, uid, [('state','=','installed')])
        modules=module_obj.read(cr, uid, module_ids, ['name','installed_version'])
        # Rebinds contract_obj from the model object to the read() result of
        # the first selected contract record.
        contract_obj=contract_obj.read(cr, uid, ids[0])
        local_url = 'http://%s:%d/xmlrpc/common'%(remote_server,port)
        rpc = xmlrpclib.ServerProxy(local_url)
        ruid = rpc.login(remote_db, 'admin', 'admin')
        local_url = 'http://%s:%d/xmlrpc/object'%(remote_server,port)
        rrpc = xmlrpclib.ServerProxy(local_url)
        try:
            result=rrpc.execute(remote_db, ruid, 'admin', 'maintenance.maintenance', 'check_contract' , modules,contract_obj)
        except:
            # Any RPC failure is reported as the remote side lacking the
            # maintenance-editor module.
            raise osv.except_osv(_('Maintenance Error !'),('''Module Maintenance_Editor is not installed at server : %s Database : %s'''%(remote_server,remote_db)))
        if context.get('active_id',False):
            if result['status']=='ko':
                # No contract at all.
                raise osv.except_osv(_('Maintenance Error !'),('''Maintenance Contract
-----------------------------------------------------------
You have no valid maintenance contract! If you are using
Open ERP, it is highly suggested to take maintenance contract.
The maintenance program offers you:
* Migrations on new versions,
* Bugfix guarantee,
* Monthly announces of bugs,
* Security alerts,
* Access to the customer portal.
* Check the maintenance contract (www.openerp.com)'''))
            elif result['status']=='partial':
                # Contract exists but some installed modules are not covered;
                # the uncovered module list is interpolated into the message.
                raise osv.except_osv(_('Maintenance Error !'),('''Maintenance Contract
-----------------------------------------------------------
You have a maintenance contract, But you installed modules those
are not covered by your maintenance contract:
%s
It means we can not offer you the garantee of maintenance on
your whole installation.
The maintenance program includes:
* Migrations on new versions,
* Bugfix guarantee,
* Monthly announces of bugs,
* Security alerts,
* Access to the customer portal.
To include these modules in your maintenance contract, you should
extend your contract with the editor. We will review and validate
your installed modules.
* Extend your maintenance to the modules you used.
* Check your maintenance contract''') % ','.join(result['modules']))
            else:
                # Contract fully valid: still shown to the user as a popup.
                raise osv.except_osv(_('Valid Maintenance Contract !'),('''Your Maintenance Contract is up to date'''))
        return result
maintenance_contract()
class maintenance_contract_wizard(osv.osv_memory):
    """Transient wizard used to register a maintenance contract.

    The user types a contract ID and password in a popup form;
    ``validate_cb`` is the form's "Validate" button callback.
    """
    _name = 'maintenance.contract.wizard'
    _columns = {
        # Contract credentials typed by the user in the popup form.
        'name' : fields.char('Contract ID', size=256, required=True ),
        'password' : fields.char('Password', size=64, required=True),
    }
    def validate_cb(self, cr, uid, ids, context):
        """Button callback: validate the entered contract.

        Validation is not implemented yet; the callback just returns False.
        (Removed a stray debug ``print`` left over from development --
        writing to stdout is useless noise on a daemonized server.)
        """
        return False
maintenance_contract_wizard()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
    <data noupdate="0">
        <!-- Security group for users allowed to manage maintenance contracts. -->
        <record model="res.groups" id="group_maintenance_manager">
            <field name="name">Maintenance Manager</field>
        </record>
        <!-- Restrict the existing maintenance-contract menu entry to that group. -->
        <record model="ir.ui.menu" id="menu_maintenance_contract">
            <field eval="[(6,0,[ref('group_maintenance_manager')])]" name="groups_id"/>
        </record>
    </data>
</openerp>

View File

@ -0,0 +1,84 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
    <data>

        <!-- Tree view: list of the user's maintenance contracts. -->
        <record id="maintenance_contract_tree_view" model="ir.ui.view">
            <field name="name">maintenance.contract.tree</field>
            <field name="model">maintenance.contract</field>
            <field name="type">tree</field>
            <field name="arch" type="xml">
                <tree string="Maintenance Contract">
                    <field name="name"/>
                    <field name="date_start"/>
                    <field name="date_stop"/>
                </tree>
            </field>
        </record>

        <!-- Form view: read-only details of a single contract. -->
        <record id="maintenance_contract_form_view" model="ir.ui.view">
            <field name="name">maintenance.contract.form</field>
            <field name="model">maintenance.contract</field>
            <field name="type">form</field>
            <field name="arch" type="xml">
                <form string="Maintenance Contract">
                    <separator string="Information" colspan="4"/>
                    <field name="name" select="1" colspan="4"/>
                    <field name="date_start" select="1"/>
                    <field name="date_stop" select="1"/>
                    <separator string="Covered Modules" colspan="4"/>
                    <field name="modules" nolabel="1" colspan="4"/>
                </form>
            </field>
        </record>

        <!-- Window action and menu entries under Administration/Maintenance. -->
        <record id="action_maintenance_contract_form" model="ir.actions.act_window">
            <field name="name">Your Maintenance Contracts</field>
            <field name="type">ir.actions.act_window</field>
            <field name="res_model">maintenance.contract</field>
            <field name="view_type">form</field>
            <field name="view_mode">tree,form</field>
        </record>
        <menuitem
            name="Maintenance"
            id="maintenance"
            parent="base.menu_administration"/>
        <menuitem
            action="action_maintenance_contract_form"
            id="menu_maintenance_contract"
            parent="maintenance"/>

        <!-- Popup wizard: enter a contract ID/password and validate it
             (button target: maintenance.contract.wizard.validate_cb). -->
        <record id="maintenance_contract_add_wizard" model="ir.ui.view">
            <field name="name">maintenance.contract.add.wizard</field>
            <field name="model">maintenance.contract.wizard</field>
            <field name="type">form</field>
            <field name="arch" type="xml">
                <form string="Add Maintenance Contract">
                    <separator string="Add Maintenance Contract" colspan="2"/>
                    <newline />
                    <field name="name" />
                    <newline />
                    <field name="password" password="True" />
                    <newline />
                    <label string="" /><button name="validate_cb" type="object" string="Validate" />
                </form>
            </field>
        </record>
        <record id="action_maintenance_contract_add_wizard" model="ir.actions.act_window">
            <field name="name">Add Maintenance Contract</field>
            <field name="type">ir.actions.act_window</field>
            <field name="res_model">maintenance.contract.wizard</field>
            <field name="view_type">form</field>
            <field name="view_mode">form</field>
            <field name="target">new</field>
        </record>
        <menuitem
            action="action_maintenance_contract_add_wizard"
            id="menu_maintenance_contract_add"
            parent="maintenance" />

    </data>
</openerp>

View File

@ -35,111 +35,7 @@ import addons
import pooler
import netsvc
ver_regexp = re.compile("^(\\d+)((\\.\\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\\d*)*)(-r(\\d+))?$")
suffix_regexp = re.compile("^(alpha|beta|rc|pre|p)(\\d*)$")

def vercmp(ver1, ver2):
    """
    Compare two Gentoo-style version strings (taken from portage_versions.py).

    @param ver1: version to compare with
    @type ver1: string (example "1.2-r3")
    @param ver2: version to compare against
    @type ver2: string (example "2.1-r1")
    @rtype: None or number
    @return: positive if ver1 > ver2, negative if ver1 < ver2,
             0 if they are equal, None if either string is invalid
    """
    left = ver_regexp.match(ver1)
    right = ver_regexp.match(ver2)
    # Reject anything the version grammar does not recognise.
    if not (left and left.groups()):
        return None
    if not (right and right.groups()):
        return None

    # --- numeric components ("1.2.3") --------------------------------
    nums_l = [int(left.group(1))]
    nums_r = [int(right.group(1))]

    if len(left.group(2)) or len(right.group(2)):
        dots_l = left.group(2)[1:].split(".")
        dots_r = right.group(2)[1:].split(".")
        for idx in range(max(len(dots_l), len(dots_r))):
            # An implicit ".0" is valued -1 so that 1.0.0 > 1.0 without
            # making two textually different versions sort as equal.
            if len(dots_l) <= idx or len(dots_l[idx]) == 0:
                nums_l.append(-1)
                nums_r.append(int(dots_r[idx]))
            elif len(dots_r) <= idx or len(dots_r[idx]) == 0:
                nums_l.append(int(dots_l[idx]))
                nums_r.append(-1)
            elif dots_l[idx][0] != "0" and dots_r[idx][0] != "0":
                # Plain integers unless a leading zero forces the
                # fractional comparison below.
                nums_l.append(int(dots_l[idx]))
                nums_r.append(int(dots_r[idx]))
            else:
                # Leading zero: compare as fractions so 1.02 < 1.1.
                nums_l.append(float("0." + dots_l[idx]))
                nums_r.append(float("0." + dots_r[idx]))

    # Optional trailing letter ("1.2a"), compared by character code.
    if len(left.group(4)):
        nums_l.append(ord(left.group(4)))
    if len(right.group(4)):
        nums_r.append(ord(right.group(4)))

    for idx in range(max(len(nums_l), len(nums_r))):
        if len(nums_l) <= idx:
            return -1
        if len(nums_r) <= idx:
            return 1
        if nums_l[idx] != nums_r[idx]:
            return nums_l[idx] - nums_r[idx]

    # --- "_suffix" part (_alpha/_beta/_pre/_rc/_p) -------------------
    sufs_l = left.group(5).split("_")[1:]
    sufs_r = right.group(5).split("_")[1:]
    for idx in range(max(len(sufs_l), len(sufs_r))):
        # A missing suffix behaves like "_p-1", so that 1 < 1_p0.
        if len(sufs_l) <= idx:
            part_l = ("p", "-1")
        else:
            part_l = suffix_regexp.match(sufs_l[idx]).groups()
        if len(sufs_r) <= idx:
            part_r = ("p", "-1")
        else:
            part_r = suffix_regexp.match(sufs_r[idx]).groups()
        if part_l[0] != part_r[0]:
            # NOTE: suffix_value is a module-level rank table defined
            # elsewhere in this file.
            return suffix_value[part_l[0]] - suffix_value[part_r[0]]
        if part_l[1] != part_r[1]:
            # The captured digit group may be '' which int() rejects;
            # treat it as 0 in that case.
            try:
                num_l = int(part_l[1])
            except ValueError:
                num_l = 0
            try:
                num_r = int(part_r[1])
            except ValueError:
                num_r = 0
            if num_l - num_r:
                return num_l - num_r

    # --- trailing "-rN" revision -------------------------------------
    if left.group(9):
        rev_l = int(left.group(9))
    else:
        rev_l = 0
    if right.group(9):
        rev_r = int(right.group(9))
    else:
        rev_r = 0
    return rev_l - rev_r
from tools.parse_version import parse_version
class module_repository(osv.osv):
@ -158,7 +54,7 @@ class module_repository(osv.osv):
}
_defaults = {
'sequence': lambda *a: 5,
'filter': lambda *a: 'href="([a-zA-Z0-9_]+)-('+release.version.rsplit('.', 1)[0]+'.(\\d+)((\\.\\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\\d*)*)(-r(\\d+))?)(\.zip)"',
'filter': lambda *a: 'href="([a-zA-Z0-9_]+)-('+release.major_version+'.(\\d+)((\\.\\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\\d*)*)(-r(\\d+))?)(\.zip)"',
'active': lambda *a: 1,
}
_order = "sequence"
@ -196,7 +92,7 @@ class module(osv.osv):
data = f.read()
info = eval(data)
if 'version' in info:
info['version'] = release.version.rsplit('.', 1)[0] + '.' + info['version']
info['version'] = release.major_version + '.' + info['version']
f.close()
except:
return {}
@ -313,7 +209,7 @@ class module(osv.osv):
_('You try to remove a module that is installed or will be installed'))
return super(module, self).unlink(cr, uid, ids, context=context)
def state_update(self, cr, uid, ids, newstate, states_to_update, context={}, level=50):
def state_update(self, cr, uid, ids, newstate, states_to_update, context={}, level=100):
if level<1:
raise orm.except_orm(_('Error'), _('Recursion error in modules dependencies !'))
demo = False
@ -403,7 +299,7 @@ class module(osv.osv):
terp = self.get_module_info(mod_name)
if terp.get('installable', True) and mod.state == 'uninstallable':
self.write(cr, uid, id, {'state': 'uninstalled'})
if vercmp(terp.get('version', ''), mod.latest_version or '0') > 0:
if parse_version(terp.get('version', '')) > parse_version(mod.latest_version or ''):
self.write(cr, uid, id, {
'latest_version': terp.get('version'),
'url': ''})
@ -429,15 +325,14 @@ class module(osv.osv):
if not terp or not terp.get('installable', True):
continue
if not os.path.isfile( mod_path ):
import imp
# XXX must restrict to only addons paths
path = imp.find_module(mod_name)
imp.load_module(name, *path)
else:
import zipimport
zimp = zipimport.zipimporter(mod_path)
zimp.load_module(mod_name)
#if not os.path.isfile( mod_path ):
# import imp
# path = imp.find_module(mod_name, [addons.ad, addons._ad])
# imp.load_module(name, *path)
#else:
# import zipimport
# zimp = zipimport.zipimporter(mod_path)
# zimp.load_module(mod_name)
id = self.create(cr, uid, {
'name': mod_name,
'state': 'uninstalled',
@ -452,8 +347,8 @@ class module(osv.osv):
self._update_dependencies(cr, uid, id, terp.get('depends', []))
self._update_category(cr, uid, id, terp.get('category', 'Uncategorized'))
import socket
socket.setdefaulttimeout(10)
#import socket
#socket.setdefaulttimeout(10)
for repository in robj.browse(cr, uid, robj.search(cr, uid, [])):
try:
index_page = urllib.urlopen(repository.url).read()
@ -472,7 +367,7 @@ class module(osv.osv):
if version == 'x': # 'x' version was a mistake
version = '0'
if name in mod_sort:
if vercmp(version, mod_sort[name][0]) <= 0:
if parse_version(version) <= parse_version(mod_sort[name][0]):
continue
mod_sort[name] = [version, extension]
for name in mod_sort.keys():
@ -494,8 +389,7 @@ class module(osv.osv):
['latest_version']
if latest_version == 'x': # 'x' version was a mistake
latest_version = '0'
c = vercmp(version, latest_version)
if c > 0:
if parse_version(version) > parse_version(latest_version):
self.write(cr, uid, id,
{'latest_version': version, 'url': url})
res[0] += 1
@ -503,8 +397,7 @@ class module(osv.osv):
['published_version']
if published_version == 'x' or not published_version:
published_version = '0'
c = vercmp(version, published_version)
if c > 0:
if parse_version(version) > parse_version(published_version):
self.write(cr, uid, id,
{'published_version': version})
return res
@ -518,7 +411,7 @@ class module(osv.osv):
version = '0'
if match:
version = match.group(1)
if vercmp(mod.installed_version or '0', version) >= 0:
if parse_version(mod.installed_version or '0') >= parse_version(version):
continue
res.append(mod.url)
if not download:

View File

@ -334,7 +334,7 @@ class res_partner_bank_type(osv.osv):
_description='Bank Account Type'
_name = 'res.partner.bank.type'
_columns = {
'name': fields.char('Name', size=64, required=True),
'name': fields.char('Name', size=64, required=True, translate=True),
'code': fields.char('Code', size=64, required=True),
'field_ids': fields.one2many('res.partner.bank.type.field', 'bank_type_id', 'Type fields'),
}
@ -344,7 +344,7 @@ class res_partner_bank_type_fields(osv.osv):
_description='Bank type fields'
_name = 'res.partner.bank.type.field'
_columns = {
'name': fields.char('Field name', size=64, required=True),
'name': fields.char('Field name', size=64, required=True, translate=True),
'bank_type_id': fields.many2one('res.partner.bank.type', 'Bank type', required=True, ondelete='cascade'),
'required': fields.boolean('Required'),
'readonly': fields.boolean('Readonly'),

View File

@ -202,6 +202,16 @@
<field name="type">default</field>
<field name="partner_id" ref="res_partner_10"/>
</record>
<record id="res_partner_address_3000" model="res.partner.address">
<field name="city">Champs sur Marne</field>
<field name="name">Laith Jubair</field>
<field name="zip">77420</field>
<field model="res.country" name="country_id" search="[('name','=','France')]"/>
<field name="email">info@axelor.com</field>
<field name="phone">+33 1 64 61 04 01</field>
<field name="street">12 rue Albert Einstein</field>
<field name="partner_id" ref="res_partner_desertic_hispafuentes"/>
</record>
<record id="res_partner_address_3" model="res.partner.address">
<field name="city">Louvain-la-Neuve</field>
<field name="name">Thomas Passot</field>

View File

@ -508,7 +508,7 @@
<menuitem action="action_partner_category" id="menu_partner_category_main" parent="base.menu_partner_form" sequence="1"/>
<record id="action_partner_by_category" model="ir.actions.act_window">
<field name="name" eval="False"/>
<field name="name">Partner Categories</field>
<field name="res_model">res.partner</field>
<field name="view_type">form</field>
<field name="view_mode">tree,form</field>

View File

@ -92,7 +92,7 @@ def _tz_get(self,cr,uid, context={}):
class users(osv.osv):
_name = "res.users"
_log_access = False
#_log_access = False
_columns = {
'name': fields.char('Name', size=64, required=True, select=True),
'login': fields.char('Login', size=64, required=True),

View File

@ -22,6 +22,7 @@
"access_ir_module_module_dependency_group_system","ir_module_module_dependency group_system","model_ir_module_module_dependency","group_system",1,1,1,1
"access_ir_module_repository_group_system","ir_module_repository group_system","model_ir_module_repository","group_system",1,1,1,1
"access_ir_property_group_user","ir_property group_user","model_ir_property",,1,0,0,0
"access_ir_property_group_user_manager","ir_property group_manager","model_ir_property",base.group_partner_manager,1,1,1,1
"access_ir_report_custom_group_system","ir_report_custom group_system","model_ir_report_custom",,1,0,0,0
"access_ir_report_custom_fields_group_system","ir_report_custom_fields group_system","model_ir_report_custom_fields",,1,0,0,0
"access_ir_rule_group_user","ir_rule group_user","model_ir_rule",,1,0,0,0
@ -38,7 +39,8 @@
"access_ir_ui_view_custom_group_user","ir_ui_view_custom_group_user","model_ir_ui_view_custom",,1,0,0,0
"access_ir_ui_view_custom_group_system","ir_ui_view_custom_group_system","model_ir_ui_view_custom","group_system",1,1,1,1
"access_ir_ui_view_sc_group_user","ir_ui_view_sc group_user","model_ir_ui_view_sc",,1,1,1,1
"access_ir_values_group_erp_manager","ir_values group_erp_manager","model_ir_values",,1,1,1,1
"access_ir_values_group_erp_manager","ir_values group_erp_manager","model_ir_values",group_erp_manager,1,1,1,1
"access_ir_values_group_all","ir_values group_all","model_ir_values",,1,0,0,0
"access_wizard_ir_model_menu_create_group_system","wizard_ir_model_menu_create group_system","model_wizard_ir_model_menu_create","group_system",1,1,1,1
"access_wizard_ir_model_menu_create_line_group_system","wizard_ir_model_menu_create_line group_system","model_wizard_ir_model_menu_create_line","group_system",1,1,1,1
"access_wizard_module_lang_export_group_system","wizard_module_lang_export group_system","model_wizard_module_lang_export","group_system",1,1,1,1
@ -127,3 +129,4 @@
"access_res_config_view_all","res_config_view_all","model_res_config_view",,1,0,0,0
"access_res_bank_group_partner_manager","res_bank_group_partner_manager","model_res_bank","group_partner_manager",1,1,1,1
"access_res_bank_user","res_bank user","model_res_bank","group_user",1,0,0,0
"access_maintenance_group_user","maintenance_contract group_user","model_maintenance_contract","group_maintenance_manager",1,1,1,1

1 id name model_id:id group_id:id perm_read perm_write perm_create perm_unlink
22 access_ir_module_module_dependency_group_system ir_module_module_dependency group_system model_ir_module_module_dependency group_system 1 1 1 1
23 access_ir_module_repository_group_system ir_module_repository group_system model_ir_module_repository group_system 1 1 1 1
24 access_ir_property_group_user ir_property group_user model_ir_property 1 0 0 0
25 access_ir_property_group_user_manager ir_property group_manager model_ir_property base.group_partner_manager 1 1 1 1
26 access_ir_report_custom_group_system ir_report_custom group_system model_ir_report_custom 1 0 0 0
27 access_ir_report_custom_fields_group_system ir_report_custom_fields group_system model_ir_report_custom_fields 1 0 0 0
28 access_ir_rule_group_user ir_rule group_user model_ir_rule 1 0 0 0
39 access_ir_ui_view_custom_group_user ir_ui_view_custom_group_user model_ir_ui_view_custom 1 0 0 0
40 access_ir_ui_view_custom_group_system ir_ui_view_custom_group_system model_ir_ui_view_custom group_system 1 1 1 1
41 access_ir_ui_view_sc_group_user ir_ui_view_sc group_user model_ir_ui_view_sc 1 1 1 1
42 access_ir_values_group_erp_manager ir_values group_erp_manager model_ir_values group_erp_manager 1 1 1 1
43 access_ir_values_group_all ir_values group_all model_ir_values 1 0 0 0
44 access_wizard_ir_model_menu_create_group_system wizard_ir_model_menu_create group_system model_wizard_ir_model_menu_create group_system 1 1 1 1
45 access_wizard_ir_model_menu_create_line_group_system wizard_ir_model_menu_create_line group_system model_wizard_ir_model_menu_create_line group_system 1 1 1 1
46 access_wizard_module_lang_export_group_system wizard_module_lang_export group_system model_wizard_module_lang_export group_system 1 1 1 1
129 access_res_config_view_all res_config_view_all model_res_config_view 1 0 0 0
130 access_res_bank_group_partner_manager res_bank_group_partner_manager model_res_bank group_partner_manager 1 1 1 1
131 access_res_bank_user res_bank user model_res_bank group_user 1 0 0 0
132 access_maintenance_group_user maintenance_contract group_user model_maintenance_contract group_maintenance_manager 1 1 1 1

View File

@ -135,9 +135,9 @@
<rng:optional><rng:attribute name="id" /> </rng:optional>
<rng:attribute name="model" />
<rng:optional><rng:attribute name="forcecreate" /></rng:optional>
<rng:oneOrMore>
<rng:zeroOrMore>
<rng:ref name="field" />
</rng:oneOrMore>
</rng:zeroOrMore>
</rng:element>
</rng:define>

View File

@ -122,10 +122,15 @@ class Service(object):
class LocalService(Service):
def __init__(self, name):
self.__name = name
s = _service[name]
self._service = s
for m in s._method:
setattr(self, m, s._method[m])
try:
s = _service[name]
self._service = s
for m in s._method:
setattr(self, m, s._method[m])
except KeyError, keyError:
Logger().notifyChannel('module', LOG_ERROR, 'This service does not exists: %s' % (str(keyError),) )
raise
class ServiceUnavailable(Exception):
@ -154,6 +159,9 @@ def init_logger():
logf = config['logfile']
# test if the directories exist, else create them
try:
dirname = os.path.dirname(logf)
if not os.path.isdir(dirname):
res = os.makedirs(dirname)
handler = logging.handlers.TimedRotatingFileHandler(logf,'D',1,30)
except:
sys.stderr.write("ERROR: couldn't create the logfile directory\n")
@ -272,24 +280,19 @@ class GenericXMLRPCRequestHandler:
raise xmlrpclib.Fault(s, tb_s)
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler, SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
SimpleXMLRPCServer.SimpleXMLRPCServer):
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn, SimpleXMLRPCServer.SimpleXMLRPCServer):
def server_bind(self):
try:
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
except:
sys.stderr.write("ERROR: address already in use\n")
Logger().notifyChannel('init', LOG_ERROR, 'Address already in use')
sys.exit(1)
class HttpDaemon(threading.Thread):
def __init__(self, interface, port, secure=False):
@ -299,22 +302,19 @@ class HttpDaemon(threading.Thread):
self.secure = secure
if secure:
from ssl import SecureXMLRPCServer
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
SecureXMLRPCServer.SecureXMLRPCRequestHandler):
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
SecureXMLRPCServer.SecureXMLRPCServer):
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler, SecureXMLRPCServer.SecureXMLRPCRequestHandler):
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn, SecureXMLRPCServer.SecureXMLRPCServer):
def server_bind(self):
try:
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
except:
sys.stderr.write("ERROR: address already in use\n")
sys.exit(1)
self.server = SecureThreadedXMLRPCServer((interface, port),
SecureXMLRPCRequestHandler, 0)
else:

View File

@ -22,6 +22,7 @@
##############################################################################
from tools import flatten, reverse_enumerate
import fields
class expression(object):
@ -190,7 +191,7 @@ class expression(object):
dom = _rec_get(ids2, working_table, left)
self.__exp = self.__exp[:i] + dom + self.__exp[i+1:]
else:
if isinstance(right, basestring):
if isinstance(right, basestring): # and not isinstance(field, fields.related):
res_ids = field_obj.name_search(cr, uid, right, [], operator, limit=None)
right = map(lambda x: x[0], res_ids)
self.__exp[i] = (left, 'in', right)

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -160,7 +160,7 @@ class char(_column):
u_symb = symb
else:
u_symb = unicode(symb)
return u_symb.encode('utf8')[:self.size]
return u_symb[:self.size].encode('utf8')
class text(_column):
@ -597,6 +597,9 @@ class function(_column):
self._type = type
self._fnct_search = fnct_search
self.store = store
if store:
self._classic_read = True
self._classic_write = True
if type == 'float':
self._symbol_c = '%f'
self._symbol_f = lambda x: __builtin__.float(x or 0.0)
@ -636,60 +639,27 @@ class function(_column):
class related(function):
def _fnct_search(self, tobj, cr, uid, obj=None, name=None, context=None):
where_flag = 0
where = " where"
query = "select %s.id from %s" % (obj._table, obj._table)
relation_child = obj._name
relation = obj._name
for i in range(len(self._arg)):
field_detail = self._field_get(cr, uid, obj, relation, self._arg[i])
relation = field_detail[0]
if field_detail[1] in ('one2many', 'many2many'):
obj_child = obj.pool.get(field_detail[2][self._arg[i]]['relation'])
field_detail_child = obj_child.fields_get(cr, uid,)
fields_filter = dict(filter(lambda x: x[1].get('relation', False)
and x[1].get('relation') == relation_child
and x[1].get('type')=='many2one', field_detail_child.items()))
query += " inner join %s on %s.id = %s.%s" % (obj_child._table, obj._table, obj_child._table, fields_filter.keys()[0])
relation_child = relation
elif field_detail[1] in ('many2one'):
obj_child = obj.pool.get(field_detail[2][self._arg[i]]['relation'])
obj_child2 = obj.pool.get(relation_child)
if obj_child._name == obj_child2._name:
# select res_partner.id from res_partner where res_partner.parent_id in(select id from res_partner where res_partner.date >= '2008-10-01');
# where +=" %s.id = %s.%s in (select id from %s where %s.%s %s %s"%(obj_child._table,obj_child2._table,self._arg[i])
pass
else:
query += " inner join %s on %s.id = %s.%s" % (obj_child._table, obj_child._table, obj_child2._table, self._arg[i])
relation_child = field_detail[0]
if i == (len(self._arg)-1):
if obj_child._inherits:
obj_child_inherits = obj.pool.get(obj_child._inherits.keys()[0])
query += " inner join %s on %s.id = %s.%s" % (obj_child_inherits._table, obj_child_inherits._table, obj_child._table, obj_child._inherits.values()[0])
obj_child = obj_child_inherits
where += " %s.%s %s '%%%s%%' and" % (obj_child._table, obj_child._rec_name, context[0][1], context[0][2])
def _fnct_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context={}):
self._field_get2(cr, uid, obj, context)
i = len(self._arg)-1
sarg = name
while i>0:
if type(sarg) in [type([]), type( (1,) )]:
where = [(self._arg[i], 'in', sarg)]
else:
obj_child = obj.pool.get(relation_child)
if field_detail[1] in ('char'):
where += " %s.%s %s '%%%s%%' and" % (obj_child._table, self._arg[i], context[0][1], context[0][2])
if field_detail[1] in ('date'):
where += " %s.%s %s '%s' and" % (obj_child._table, self._arg[i], context[0][1], context[0][2])
if field_detail[1] in ['integer', 'long', 'float','integer_big']:
where += " %s.%s %s '%d' and" % (obj_child._table, self._arg[i], context[0][1], context[0][2])
if len(where)>10:
query += where.rstrip('and')
cr.execute(query)
ids = []
for id in cr.fetchall():
ids.append(id[0])
return [('id', 'in', ids)]
where = [(self._arg[i], '=', sarg)]
if domain:
where = map(lambda x: (self._arg[i],x[1], x[2]), domain)
domain = []
sarg = obj.pool.get(self._relations[i]['object']).search(cr, uid, where, context=context)
i -= 1
return [(self._arg[0], 'in', sarg)]
# def _fnct_write(self,obj,cr, uid, ids,values, field_name, args, context=None):
# raise 'Not Implemented Yet'
def _fnct_write(self,obj,cr, uid, ids,values, field_name, args, context=None):
raise 'Not Implemented Yet'
def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None):
self._field_get2(cr, uid, obj, context)
if not ids: return {}
relation = obj._name
res = {}
@ -698,33 +668,40 @@ class related(function):
t_data = data
relation = obj._name
for i in range(len(self.arg)):
field_detail = self._field_get(cr, uid, obj, relation, self.arg[i])
relation = field_detail[0]
field_detail = self._relations[i]
relation = field_detail['object']
if not t_data[self.arg[i]]:
t_data = False
break
if field_detail[1] in ('one2many', 'many2many'):
if field_detail['type'] in ('one2many', 'many2many'):
t_data = t_data[self.arg[i]][0]
else:
t_data = t_data[self.arg[i]]
if type(t_data) == type(objlst[0]):
res[data.id] = t_data.id
res[data.id] = (t_data.id,t_data.name)
else:
res[data.id] = t_data
return res
def __init__(self, *arg, **args):
self.arg = arg
self._relations = []
super(related, self).__init__(self._fnct_read, arg, fnct_inv_arg=arg, method=True, fnct_search=self._fnct_search, **args)
# TODO: call field_get on the object, not in the DB
def _field_get(self, cr, uid, obj, model_name, prop):
fields = obj.pool.get(model_name).fields_get(cr, uid,)
if fields.get(prop, False):
return(fields[prop].get('relation', False), fields[prop].get('type', False), fields)
else:
raise 'Field %s not exist in %s' % (prop, model_name)
def _field_get2(self, cr, uid, obj, context={}):
if self._relations:
return
obj_name = obj._name
for i in range(len(self._arg)):
f = obj.pool.get(obj_name).fields_get(cr, uid, [self._arg[i]], context=context)[self._arg[i]]
self._relations.append({
'object': obj_name,
'type': f['type']
})
if f.get('relation',False):
obj_name = f['relation']
self._relations[-1]['relation'] = f['relation']
# ---------------------------------------------------------
# Serialized fields

View File

@ -303,18 +303,24 @@ class orm_template(object):
_description = None
_inherits = {}
_table = None
_invalids=[]
_invalids = set()
def _field_create(self, cr, context={}):
cr.execute("SELECT id FROM ir_model_data WHERE name='%s'" % ('model_'+self._name.replace('.','_'),))
cr.execute("SELECT id FROM ir_model WHERE model='%s'" % self._name)
if not cr.rowcount:
cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
id = cr.fetchone()[0]
cr.execute("INSERT INTO ir_model (id,model, name, info) VALUES (%s, %s, %s, %s)", (id, self._name, self._description, self.__doc__))
if 'module' in context:
model_id = cr.fetchone()[0]
cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s,%s)", (model_id, self._name, self._description, self.__doc__, 'base'))
else:
model_id = cr.fetchone()[0]
if 'module' in context:
name_id = 'model_'+self._name.replace('.','_')
cr.execute('select * from ir_model_data where name=%s and res_id=%s', (name_id,model_id))
if not cr.rowcount:
cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
('model_'+self._name.replace('.','_'), context['module'], 'ir.model', id)
(name_id, context['module'], 'ir.model', model_id)
)
cr.commit()
cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
@ -322,9 +328,6 @@ class orm_template(object):
for rec in cr.dictfetchall():
cols[rec['name']] = rec
cr.execute("SELECT id FROM ir_model WHERE model='%s'" % self._name)
model_id = cr.fetchone()[0]
for (k, f) in self._columns.items():
vals = {
'model_id': model_id,
@ -656,7 +659,7 @@ class orm_template(object):
raise _('The read method is not implemented on this object !')
def get_invalid_fields(self,cr,uid):
return self._invalids.__str__()
return list(self._invalids)
def _validate(self, cr, uid, ids, context=None):
context = context or {}
@ -670,12 +673,12 @@ class orm_template(object):
error_msgs.append(
_("Error occured while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
)
self._invalids.extend(fields)
self._invalids.update(fields)
if error_msgs:
cr.rollback()
raise except_orm('ValidateError', '\n'.join(error_msgs))
else:
self._invalids=[]
self._invalids.clear()
def default_get(self, cr, uid, fields_list, context=None):
return {}
@ -694,6 +697,14 @@ class orm_template(object):
# returns the definition of each field in the object
# the optional fields parameter can limit the result to some fields
def fields_get_keys(self, cr, user, context=None, read_access=True):
if context is None:
context = {}
res = self._columns.keys()
for parent in self._inherits:
res.extend(self.pool.get(parent).fields_get_keys(cr, user, fields, context))
return res
def fields_get(self, cr, user, fields=None, context=None, read_access=True):
if context is None:
context = {}
@ -790,7 +801,9 @@ class orm_template(object):
for f in node.childNodes:
if f.nodeType == f.ELEMENT_NODE and f.localName in ('form', 'tree', 'graph'):
node.removeChild(f)
xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, context)
ctx = context.copy()
ctx['base_model_name'] = self._name
xarch, xfields = self.pool.get(relation).__view_look_dom_arch(cr, user, f, ctx)
views[str(f.localName)] = {
'arch': xarch,
'fields': xfields
@ -829,6 +842,8 @@ class orm_template(object):
if ('lang' in context) and not result:
if node.hasAttribute('string') and node.getAttribute('string'):
trans = tools.translate(cr, self._name, 'view', context['lang'], node.getAttribute('string').encode('utf8'))
if not trans and ('base_model_name' in context):
trans = tools.translate(cr, context['base_model_name'], 'view', context['lang'], node.getAttribute('string').encode('utf8'))
if trans:
node.setAttribute('string', trans.decode('utf8'))
if node.hasAttribute('sum') and node.getAttribute('sum'):
@ -839,11 +854,13 @@ class orm_template(object):
if childs:
for f in node.childNodes:
fields.update(self.__view_look_dom(cr, user, f, context))
if ('state' not in fields) and (('state' in self._columns) or ('state' in self._inherit_fields)):
fields['state'] = {}
return fields
def __view_look_dom_arch(self, cr, user, node, context=None):
if not context:
context = {}
fields_def = self.__view_look_dom(cr, user, node, context=context)
buttons = xpath.Evaluate('//button', node)
@ -1035,6 +1052,9 @@ class orm_template(object):
xml += "<newline/>"
xml += "</form>"
elif view_type == 'tree':
_rec_name = self._rec_name
if _rec_name not in self._columns:
_rec_name = self._columns.keys()[0]
xml = '''<?xml version="1.0" encoding="utf-8"?>''' \
'''<tree string="%s"><field name="%s"/></tree>''' \
% (self._description, self._rec_name)
@ -1110,6 +1130,47 @@ class orm_template(object):
def copy(self, cr, uid, id, default=None, context=None):
raise _('The copy method is not implemented on this object !')
def read_string(self, cr, uid, id, langs, fields=None, context=None):
if not context:
context = {}
res = {}
res2 = {}
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
for lang in langs:
res[lang] = {'code': lang}
for f in fields:
if f in self._columns:
res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
if res_trans:
res[lang][f] = res_trans
else:
res[lang][f] = self._columns[f].string
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), fields)
res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
for lang in res2:
if lang in res:
res[lang] = {'code': lang}
for f in res2[lang]:
res[lang][f] = res2[lang][f]
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
if not context:
context = {}
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
for lang in langs:
for field in vals:
if field in self._columns:
self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), vals)
if cols:
self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
return True
class orm_memory(orm_template):
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count']
@ -1379,6 +1440,7 @@ class orm(orm_template):
cr.execute("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" DROP NOT NULL" % (self._table, column['attname']))
# iterate on the "object columns"
todo_update_store = []
for k in self._columns:
if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
continue
@ -1420,20 +1482,9 @@ class orm(orm_template):
cr.execute("UPDATE \"%s\" SET \"%s\"=NULL" % (self._table, k))
else:
cr.execute("UPDATE \"%s\" SET \"%s\"='%s'" % (self._table, k, default))
if isinstance(f, fields.function):
cr.execute('select id from '+self._table)
ids_lst = map(lambda x: x[0], cr.fetchall())
while ids_lst:
iids = ids_lst[:40]
ids_lst = ids_lst[40:]
res = f.get(cr, self, iids, k, 1, {})
for key,val in res.items():
if f._multi:
val = val[k]
if (val<>False) or (type(val)<>bool):
cr.execute("UPDATE \"%s\" SET \"%s\"='%s' where id=%d"% (self._table, k, val, key))
#else:
# cr.execute("UPDATE \"%s\" SET \"%s\"=NULL where id=%d"% (self._table, k, key))
todo_update_store.append((f,k))
# and add constraints if needed
if isinstance(f, fields.many2one):
@ -1547,6 +1598,21 @@ class orm(orm_template):
cr.commit()
else:
print "ERROR"
for f,k in todo_update_store:
cr.execute('select id from '+self._table)
ids_lst = map(lambda x: x[0], cr.fetchall())
while ids_lst:
iids = ids_lst[:40]
ids_lst = ids_lst[40:]
res = f.get(cr, self, iids, k, 1, {})
for key,val in res.items():
if f._multi:
val = val[k]
if (val<>False) or (type(val)<>bool):
cr.execute("UPDATE \"%s\" SET \"%s\"='%s' where id=%d"% (self._table, k, val, key))
#else:
# cr.execute("UPDATE \"%s\" SET \"%s\"=NULL where id=%d"% (self._table, k, key))
else:
cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname='%s'" % self._table)
create = not bool(cr.fetchone())
@ -1573,29 +1639,33 @@ class orm(orm_template):
def __init__(self, cr):
super(orm, self).__init__(cr)
self._columns = self._columns.copy()
f = filter(lambda a: isinstance(self._columns[a], fields.function) and self._columns[a].store, self._columns)
if f:
list_store = []
tuple_store = ()
tuple_fn = ()
for store_field in f:
if not self._columns[store_field].store == True:
dict_store = self._columns[store_field].store
key = dict_store.keys()
list_data = []
for i in key:
tuple_store = self._name, store_field, self._columns[store_field]._fnct.__name__, tuple(dict_store[i][0]), dict_store[i][1], i
list_data.append(tuple_store)
#tuple_store=self._name,store_field,self._columns[store_field]._fnct.__name__,tuple(dict_store[key[0]][0]),dict_store[key[0]][1]
for l in list_data:
list_store = []
if l[5] in self.pool._store_function.keys():
self.pool._store_function[l[5]].append(l)
temp_list = list(set(self.pool._store_function[l[5]]))
self.pool._store_function[l[5]] = temp_list
else:
list_store.append(l)
self.pool._store_function[l[5]] = list_store
for store_field in self._columns:
f = self._columns[store_field]
if not isinstance(f, fields.function):
continue
if not f.store:
continue
if self._columns[store_field].store is True:
sm = {self._name:(lambda self,cr, uid, ids, c={}: ids, None)}
else:
sm = self._columns[store_field].store
for object, aa in sm.items():
if len(aa)==2:
(fnct,fields2)=aa
order = 1
elif len(aa)==3:
(fnct,fields2,order)=aa
else:
raise except_orm(_('Error'),
_('Invalid function definition %s in object %s !' % (store_field, self._name)))
self.pool._store_function.setdefault(object, [])
ok = True
for x,y,z,e,f in self.pool._store_function[object]:
if (x==self._name) and (y==store_field) and (e==fields2):
ok = False
if ok:
self.pool._store_function[object].append( (self._name, store_field, fnct, fields2, order))
self.pool._store_function[object].sort(lambda x,y: cmp(x[4],y[4]))
for (key, _, msg) in self._sql_constraints:
self.pool._sql_error[self._table+'_'+key] = msg
@ -1747,7 +1817,7 @@ class orm(orm_template):
if v == None:
r[key] = False
if isinstance(ids, (int, long)):
return result[0]
return result and result[0] or False
return result
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
@ -1924,15 +1994,10 @@ class orm(orm_template):
ids = [ids]
fn_list = []
if self._name in self.pool._store_function.keys():
list_store = self.pool._store_function[self._name]
fn_data = ()
id_change = []
for tuple_fn in list_store:
for id in ids:
id_change.append(self._store_get_ids(cr, uid, id, tuple_fn, context)[0])
fn_data = id_change, tuple_fn
fn_list.append(fn_data)
for fnct in self.pool._store_function.get(self._name, []):
ids2 = filter(None, fnct[2](self,cr, uid, ids, context))
if ids2:
fn_list.append( (fnct[0], fnct[1], ids2) )
delta = context.get('read_delta', False)
if delta and self._log_access:
@ -1979,10 +2044,11 @@ class orm(orm_template):
else:
cr.execute('delete from "'+self._table+'" ' \
'where id in ('+str_d+')', sub_ids)
if fn_list:
for ids, tuple_fn in fn_list:
self._store_set_values(cr, uid, ids, tuple_fn, id_change, context)
for object,field,ids in fn_list:
ids = self.pool.get(object).search(cr, uid, [('id','in', ids)], context=context)
if ids:
self.pool.get(object)._store_set_values(cr, uid, ids, field, context)
return True
#
@ -2193,22 +2259,17 @@ class orm(orm_template):
wf_service = netsvc.LocalService("workflow")
for id in ids:
wf_service.trg_write(user, self._name, id, cr)
self._update_function_stored(cr, user, ids, context=context)
if self._name in self.pool._store_function.keys():
list_store = self.pool._store_function[self._name]
for tuple_fn in list_store:
flag = False
if not tuple_fn[3]:
flag = True
for field in tuple_fn[3]:
if field in vals.keys():
flag = True
break
if flag:
id_change = self._store_get_ids(cr, user, ids[0], tuple_fn, context)
self._store_set_values(cr, user, ids[0], tuple_fn, id_change, context)
for fnct in self.pool._store_function.get(self._name, []):
ok = False
for key in vals.keys():
if (not fnct[3]) or (key in fnct[3]):
ok = True
if ok:
ids2 = fnct[2](self,cr, user, ids, context)
ids2 = filter(None, ids2)
if ids2:
self.pool.get(fnct[0])._store_set_values(cr, user, ids2, fnct[1], context)
return True
#
@ -2315,54 +2376,32 @@ class orm(orm_template):
wf_service = netsvc.LocalService("workflow")
wf_service.trg_create(user, self._name, id_new, cr)
self._update_function_stored(cr, user, [id_new], context=context)
if self._name in self.pool._store_function.keys():
list_store = self.pool._store_function[self._name]
for tuple_fn in list_store:
id_change = self._store_get_ids(cr, user, id_new, tuple_fn, context)
self._store_set_values(cr, user, id_new, tuple_fn, id_change, context)
for fnct in self.pool._store_function.get(self._name, []):
ids2 = fnct[2](self,cr, user, [id_new], context)
ids2 = filter(None, ids2)
if ids2:
self.pool.get(fnct[0])._store_set_values(cr, user, ids2, fnct[1], context)
return id_new
def _store_get_ids(self, cr, uid, ids, tuple_fn, context):
parent_id = getattr(self.pool.get(tuple_fn[0]), tuple_fn[4].func_name)(cr, uid, [ids])
return parent_id
def _store_set_values(self, cr, uid, ids, tuple_fn, parent_id, context):
name = tuple_fn[1]
table = tuple_fn[0]
def _store_set_values(self, cr, uid, ids, field, context):
args = {}
vals_tot = getattr(self.pool.get(table), tuple_fn[2])(cr, uid, parent_id, name, args, context)
write_dict = {}
for id in vals_tot.keys():
write_dict[name] = vals_tot[id]
self.pool.get(table).write(cr, uid, [id], write_dict)
return True
def _update_function_stored(self, cr, user, ids, context=None):
if not context:
context = {}
f = filter(lambda a: isinstance(self._columns[a], fields.function) \
and self._columns[a].store, self._columns)
if f:
result = self.read(cr, user, ids, fields=f, context=context)
for res in result:
upd0 = []
upd1 = []
for field in res:
if field not in f:
continue
value = res[field]
if self._columns[field]._type in ('many2one', 'one2one'):
try:
value = res[field][0]
except:
value = res[field]
upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
upd1.append(self._columns[field]._symbol_set[1](value))
upd1.append(res['id'])
cr.execute('update "' + self._table + '" set ' + \
string.join(upd0, ',') + ' where id = %d', upd1)
result = self._columns[field].get(cr, self, ids, field, uid, context=context)
for id,value in result.items():
upd0 = []
upd1 = []
if self._columns[field]._multi:
value = value[field]
if self._columns[field]._type in ('many2one', 'one2one'):
try:
value = value[0]
except:
pass
upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
upd1.append(self._columns[field]._symbol_set[1](value))
upd1.append(id)
cr.execute('update "' + self._table + '" set ' + \
string.join(upd0, ',') + ' where id = %d', upd1)
return True
#
@ -2546,47 +2585,6 @@ class orm(orm_template):
return new_id
def read_string(self, cr, uid, id, langs, fields=None, context=None):
if not context:
context = {}
res = {}
res2 = {}
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
for lang in langs:
res[lang] = {'code': lang}
for f in fields:
if f in self._columns:
res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
if res_trans:
res[lang][f] = res_trans
else:
res[lang][f] = self._columns[f].string
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), fields)
res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
for lang in res2:
if lang in res:
res[lang] = {'code': lang}
for f in res2[lang]:
res[lang][f] = res2[lang][f]
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
if not context:
context = {}
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
for lang in langs:
for field in vals:
if field in self._columns:
self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field])
for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), vals)
if cols:
self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
return True
def check_recursion(self, cr, uid, ids, parent=None):
if not parent:
parent = self._parent_name

View File

@ -90,16 +90,14 @@ class osv_pool(netsvc.Service):
except psycopg.IntegrityError, inst:
for key in self._sql_error.keys():
if key in inst[0]:
self.abortResponse(1, 'Constraint Error', 'warning',
self._sql_error[key])
self.abortResponse(1, 'Constraint Error', 'warning', self._sql_error[key])
self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
except Exception, e:
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = Logger()
logger.notifyChannel("web-services", LOG_ERROR,
'Exception in call: ' + tb_s)
for idx, s in enumerate(tb_s.split('\n')):
logger.notifyChannel("web-services", LOG_ERROR, '[%2d]: %s' % (idx, s,))
raise
def execute(self, db, uid, obj, method, *args, **kw):

View File

@ -22,7 +22,8 @@
##############################################################################
name = 'openerp-server'
version = '5.0.0-alpha'
version = '5.0.0-rc1'
major_version = version.rsplit('.', 1)[0]
description = 'OpenERP Server'
long_desc = '''\
OpenERP is a complete ERP and CRM. The main features are accounting (analytic
@ -33,7 +34,7 @@ customizable reports, and SOAP and XML-RPC interfaces.
'''
classifiers = """\
Development Status :: 5 - Production/Stable
License :: OSI Approved :: GNU General Public License Version 2 (GPL-2)
License :: OSI Approved :: GNU General Public License Version 3 (GPL-3)
Programming Language :: Python
"""
url = 'http://www.openerp.com'

View File

@ -121,7 +121,7 @@ class document(object):
#Pinky: Why not this ? eval(expr, browser) ?
# name = browser.name
# data_dict = browser._data[self.get_value(browser, 'id')]
return eval(expr)
return eval(expr, {}, {'obj': record})
def parse_node(self, node, parent, browser, datas=None):
# node is the node of the xml template to be parsed

View File

@ -67,6 +67,7 @@ class db(netsvc.Service):
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
time.sleep(0.2)
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
cr.close()
class DBInitialize(object):
@ -149,7 +150,7 @@ class db(netsvc.Service):
try:
cr.execute('DROP DATABASE ' + db_name)
except:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
logger.notifyChannel("web-services", netsvc.LOG_ERROR,
'DROP DB: %s failed' % (db_name,))
raise
else:
@ -163,11 +164,6 @@ class db(netsvc.Service):
security.check_super(password)
logger = netsvc.Logger()
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DUMP DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't dump database with password"
cmd = ['pg_dump', '--format=c']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
@ -182,7 +178,7 @@ class db(netsvc.Service):
data = stdout.read()
res = stdout.close()
if res:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
logger.notifyChannel("web-services", netsvc.LOG_ERROR,
'DUMP DB: %s failed\n%s' % (db_name, data))
raise Exception, "Couldn't dump database"
logger.notifyChannel("web-services", netsvc.LOG_INFO,
@ -194,15 +190,10 @@ class db(netsvc.Service):
logger = netsvc.Logger()
if self.db_exist(db_name):
logger.notifyChannel("web-service", netsvc.LOG_WARNING,
logger.notifyChannel("web-services", netsvc.LOG_WARNING,
'RESTORE DB: %s already exists' % (db_name,))
raise Exception, "Database already exists"
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'RESTORE DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't restore database with password"
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
@ -328,7 +319,7 @@ class common(netsvc.Service):
res = security.login(db, login, password)
logger = netsvc.Logger()
msg = res and 'successful login' or 'bad login or password'
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db.lower()))
logger.notifyChannel("web-services", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db.lower()))
return res or False
def about(self, extended=False):
@ -480,7 +471,7 @@ class report_spool(netsvc.Service):
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = netsvc.Logger()
logger.notifyChannel('web-service', netsvc.LOG_ERROR,
logger.notifyChannel('web-services', netsvc.LOG_ERROR,
'Exception: %s\n%s' % (str(exception), tb_s))
self._reports[id]['exception'] = exception
self._reports[id]['state'] = True

View File

@ -171,6 +171,9 @@ def amount_to_text_nl(number, currency):
#-------------------------------------------------------------
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
def add_amount_to_text_function(lang, func):
_translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented

View File

@ -92,7 +92,7 @@ def _eval_xml(self,node, pool, cr, uid, idref, context=None):
idref2['time'] = time
idref2['DateTime'] = DateTime
import release
idref2['version'] = release.version.rsplit('.', 1)[0]
idref2['version'] = release.major_version
idref2['ref'] = lambda x: self.id_get(cr, False, x)
if len(f_model):
idref2['obj'] = _obj(self.pool, cr, uid, f_model, context=context)
@ -287,7 +287,7 @@ form: module.record_id""" % (xml_id,)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, mode=self.mode)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if not rec.hasAttribute('menu') or eval(rec.getAttribute('menu')):
keyword = str(rec.getAttribute('keyword') or 'client_print_multi')
@ -327,7 +327,7 @@ form: module.record_id""" % (xml_id,)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.wizard", self.module, res, xml_id, mode=self.mode)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.wizard", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
# ir_set
if (not rec.hasAttribute('menu') or eval(rec.getAttribute('menu'))) and id:
@ -348,7 +348,7 @@ form: module.record_id""" % (xml_id,)
res = {'name': name, 'url': url, 'target':target}
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.url", self.module, res, xml_id, mode=self.mode)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
# ir_set
if (not rec.hasAttribute('menu') or eval(rec.getAttribute('menu'))) and id:
@ -419,7 +419,7 @@ form: module.record_id""" % (xml_id,)
if rec.hasAttribute('target'):
res['target'] = rec.getAttribute('target')
id = self.pool.get('ir.model.data')._update(cr, self.uid, 'ir.actions.act_window', self.module, res, xml_id, mode=self.mode)
id = self.pool.get('ir.model.data')._update(cr, self.uid, 'ir.actions.act_window', self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if src_model:
@ -567,7 +567,7 @@ form: module.record_id""" % (xml_id,)
xml_id = rec.getAttribute('id').encode('utf8')
self._test_xml_id(xml_id)
pid = self.pool.get('ir.model.data')._update(cr, self.uid, 'ir.ui.menu', self.module, values, xml_id, True, mode=self.mode, res_id=res and res[0] or False)
pid = self.pool.get('ir.model.data')._update(cr, self.uid, 'ir.ui.menu', self.module, values, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode, res_id=res and res[0] or False)
if rec_id and pid:
self.idref[rec_id] = int(pid)
@ -757,15 +757,17 @@ form: module.record_id""" % (xml_id,)
raise
return True
def __init__(self, cr, module, idref, mode, report=assertion_report(), noupdate = False):
def __init__(self, cr, module, idref, mode, report=None, noupdate=False):
self.logger = netsvc.Logger()
self.mode = mode
self.module = module
self.cr = cr
self.idref = idref
self.pool = pooler.get_pool(cr.dbname)
# self.pool = osv.osv.FakePool(module)
self.uid = 1
if report is None:
report = assertion_report()
self.assert_report = report
self.noupdate = noupdate
self._tags = {
@ -835,7 +837,7 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
#
# xml import/export
#
def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate = False, report=None):
def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate=False, report=None):
xmlstr = xmlfile.read()
xmlfile.seek(0)
relaxng_doc = etree.parse(file(os.path.join( config['root_path'], 'import_xml.rng' )))
@ -852,9 +854,7 @@ def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate =
if idref is None:
idref={}
if report is None:
report=assertion_report()
obj = xml_import(cr, module, idref, mode, report=report, noupdate = noupdate)
obj = xml_import(cr, module, idref, mode, report=report, noupdate=noupdate)
obj.parse(xmlstr)
del obj
return True

View File

@ -104,7 +104,7 @@ def init_db(cr):
category_id, state) \
values (%d, %s, %s, %s, %s, %s, %s, %d, %s)', (
id, info.get('author', ''),
release.version.rsplit('.', 1)[0] + '.' + info.get('version', ''),
release.major_version + '.' + info.get('version', ''),
info.get('website', ''), i, info.get('name', False),
info.get('description', ''), p_id, state))
dependencies = info.get('depends', [])
@ -252,23 +252,6 @@ def file_open(name, mode="r", subdir='addons', pathinfo=False):
raise IOError, 'File not found : '+str(name)
def oswalksymlinks(top, topdown=True, onerror=None):
"""
same as os.walk but follow symlinks
attention: all symlinks are walked before all normals directories
"""
for dirpath, dirnames, filenames in os.walk(top, topdown, onerror):
if topdown:
yield dirpath, dirnames, filenames
symlinks = filter(lambda dirname: os.path.islink(os.path.join(dirpath, dirname)), dirnames)
for s in symlinks:
for x in oswalksymlinks(os.path.join(dirpath, s), topdown, onerror):
yield x
if not topdown:
yield dirpath, dirnames, filenames
#----------------------------------------------------------
# iterables
#----------------------------------------------------------

63
bin/tools/osutil.py Normal file
View File

@ -0,0 +1,63 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Some functions related to the os and os.path module
"""
import os
from os.path import join as opj
def listdir(dir, recursive=False):
    """Return the names of the entries in *dir*.

    With ``recursive=True`` the whole tree below *dir* is walked and the
    returned names are paths relative to *dir* (e.g. ``sub/file.txt``).
    """
    dir = os.path.normpath(dir)
    if not recursive:
        return os.listdir(dir)

    collected = []
    prefix_len = len(dir) + 1  # strip "<dir>/" from every walked root
    for base, _subdirs, filenames in os.walk(dir):
        relative_base = base[prefix_len:]
        collected.extend(opj(relative_base, name) for name in filenames)
    return collected
def walksymlinks(top, topdown=True, onerror=None):
    """
    Same as os.walk but also recurse into symlinked directories.
    Attention: all symlinks are walked before all normal directories.
    """
    for dirpath, dirnames, filenames in os.walk(top, topdown, onerror):
        if topdown:
            yield dirpath, dirnames, filenames
        # os.walk does not follow directory symlinks, so recurse into
        # each of them explicitly.
        links = [name for name in dirnames
                 if os.path.islink(os.path.join(dirpath, name))]
        for link in links:
            for entry in walksymlinks(os.path.join(dirpath, link), topdown, onerror):
                yield entry
        if not topdown:
            yield dirpath, dirnames, filenames
if __name__ == '__main__':
    # Ad-hoc manual check: pretty-print a recursive listing of a sibling
    # directory (only meaningful when run from inside bin/tools).
    from pprint import pprint as pp
    pp(listdir('../report', True))

View File

@ -0,0 +1,96 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
## this functions are taken from the setuptools package (version 0.6c8)
## http://peak.telecommunity.com/DevCenter/PkgResources#parsing-utilities
import re
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part,part)
if not part or part=='.':
continue
if part[:1] in '0123456789':
yield part.zfill(8) # pad for numeric comparison
else:
yield '*'+part
yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            # '*'-tagged parts are alpha markers ('*final', '*final-',
            # '*a', '*c', ...); numeric parts never start with '*'.
            if part<'*final':   # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
if __name__ == '__main__':
    # Self-check (Python 2 script: uses the print statement and the reduce
    # builtin): the version strings below are listed in ascending release
    # order, so reducing their parsed keys with an asserting comparator
    # verifies that parse_version sorts them the same way.
    pvs = []
    for v in ('0', '4.2', '4.2.3.4', '5.0.0-alpha', '5.0.0-rc1', '5.0.0-rc1.1', '5.0.0'):
        pv = parse_version(v)
        print v, pv
        pvs.append(pv)

    def cmp(a, b):
        # Asserts strict ascending order, then carries the larger key on.
        assert(a < b)
        return b

    reduce(cmp, pvs)

View File

@ -190,13 +190,16 @@ class TinyPoFile(object):
plurial = len(modules) > 1 and 's' or ''
self.buffer.write("#. module%s: %s\n" % (plurial, ', '.join(modules)))
if "code" in map(lambda e: e[0], tnrs):
# only strings in python code are python formated
self.buffer.write("#, python-format\n")
code = False
for typy, name, res_id in tnrs:
self.buffer.write("#: %s:%s:%s\n" % (typy, name, res_id))
if typy == 'code':
code = True
if code:
# only strings in python code are python formated
self.buffer.write("#, python-format\n")
if not isinstance(trad, unicode):
trad = unicode(trad, 'utf8')
@ -373,10 +376,14 @@ def trans_generate(lang, modules, dbname=None):
# export fields
for field_name, field_def in result['fields'].iteritems():
res_name = name + ',' + field_name
if 'string' in field_def:
source = field_def['string']
res_name = name + ',' + field_name
push_translation(module, 'wizard_field', res_name, 0, source)
push_translation(module, 'wizard_field', res_name, 0, source.encode('utf8'))
if 'selection' in field_def:
for key, val in field_def['selection']:
push_translation(module, 'selection', res_name, 0, val.encode('utf8'))
# export arch
arch = result['arch']
@ -472,7 +479,7 @@ def trans_generate(lang, modules, dbname=None):
installed_modids = modobj.search(cr, uid, [('state', '=', 'installed')])
installed_modules = map(lambda m: m['name'], modobj.read(cr, uid, installed_modids, ['name']))
for root, dirs, files in tools.oswalksymlinks(tools.config['root_path']):
for root, dirs, files in tools.osutil.walksymlinks(tools.config['root_path']):
for fname in fnmatch.filter(files, '*.py'):
fabsolutepath = join(root, fname)
frelativepath = fabsolutepath[len(tools.config['root_path'])+1:]
@ -506,7 +513,7 @@ def trans_load(db_name, filename, lang, strict=False, verbose=True):
return r
except IOError:
if verbose:
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read translation file %s" % (filename,)) # FIXME translate message
return None
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None, verbose=True):
@ -627,7 +634,8 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
logger.notifyChannel("init", netsvc.LOG_INFO,
"translation file loaded succesfully")
except IOError:
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
filename = '[lang: %s][format: %s]' % (lang or 'new', fileformat)
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read translation file %s" % (filename,))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -116,6 +116,11 @@ class interface(netsvc.Service):
trans = translate(cr, self.wiz_name+','+state+','+field, 'wizard_field', lang)
if trans:
fields[field]['string'] = trans
if 'selection' in fields[field]:
trans = lambda x: translate(cr, self.wiz_name+','+state+','+field, 'selection', lang, x) or x
for idx, (key, val) in enumerate(fields[field]['selection']):
fields[field]['selection'][idx] = (key, trans(val))
# translate arch
if not isinstance(arch, UpdateableStr):

View File

@ -66,6 +66,7 @@ def _eval_expr(cr, ident, workitem, action):
ret=False
assert action, 'You used a NULL action in a workflow, use dummy node instead.'
for line in action.split('\n'):
line = line.replace(chr(13),'')
uid=ident[0]
model=ident[1]
ids=[ident[2]]

View File

@ -96,6 +96,12 @@ def _execute(cr, workitem, activity, ident, stack):
if activity['kind']=='dummy':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'complete', ident)
if activity['action_id']:
print 'ICI'
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
if res2:
stack.append(res2)
result=res2
elif activity['kind']=='function':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)

View File

@ -28,7 +28,6 @@
#
##############################################################################
__author__ = 'Cédric Krier, <ced@tinyerp.com>'
__version__ = '0.1.0'
import psycopg

View File

@ -35,6 +35,9 @@ import glob
from stat import ST_MODE
from distutils.core import setup, Command
from distutils.command.install import install
from distutils.command.build import build
from distutils.command.build_scripts import build_scripts
from distutils.command.install_scripts import install_scripts
from distutils.file_util import copy_file
@ -74,10 +77,9 @@ def check_modules():
def find_addons():
for (dp, dn, names) in os.walk(opj('bin', 'addons')):
for dirpath, dirnames, filenames in os.walk(dp):
if '__init__.py' in filenames:
modname = dirpath.replace(os.path.sep, '.')
yield modname.replace('bin', 'openerp-server', 1)
if '__init__.py' in names:
modname = dp.replace(os.path.sep, '.').replace('bin', 'openerp-server', 1)
yield modname
def data_files():
'''Build list of data files to be installed'''
@ -86,12 +88,10 @@ def data_files():
os.chdir('bin')
for (dp,dn,names) in os.walk('addons'):
files.append((dp, map(lambda x: opj('bin', dp, x), names)))
for (dp,dn,names) in os.walk('i18n'):
files.append((dp, map(lambda x: opj('bin', dp, x), names)))
os.chdir('..')
for (dp,dn,names) in os.walk('doc'):
files.append((dp, map(lambda x: opj(dp, x), names)))
files.append(('.', [('bin/import_xml.rng')]))
files.append(('.', [(opj('bin', 'import_xml.rng'))]))
else:
man_directory = opj('share', 'man')
files.append((opj(man_directory, 'man1'), ['man/openerp-server.1']))
@ -104,59 +104,37 @@ def data_files():
openerp_site_packages = opj('lib', 'python%s' % py_short_version, 'site-packages', 'openerp-server')
files.append((opj(openerp_site_packages, 'i18n'), glob.glob('bin/i18n/*')))
files.append((opj(openerp_site_packages, 'addons', 'custom'),
glob.glob('bin/addons/custom/*xml') + glob.glob('bin/addons/custom/*rml') + glob.glob('bin/addons/custom/*xsl')))
files.append((openerp_site_packages, [('bin/import_xml.rng')]))
for addon in find_addons():
add_path = addon.replace('.', os.path.sep).replace('openerp-server', 'bin', 1)
addon_path = opj('lib', 'python%s' % py_short_version, 'site-packages', add_path.replace('bin', 'openerp-server', 1))
pathfiles = [
(
addon_path,
glob.glob(opj(add_path, '*xml')) +
glob.glob(opj(add_path, '*csv')) +
glob.glob(opj(add_path, '*sql'))
),
(
opj(addon_path, 'data'),
glob.glob(opj(add_path, 'data', '*xml'))
),
(
opj(addon_path, 'report'),
glob.glob(opj(add_path, 'report', '*xml')) +
glob.glob(opj(add_path, 'report', '*rml')) +
glob.glob(opj(add_path, 'report', '*sxw')) +
glob.glob(opj(add_path, 'report', '*xsl'))
),
(
opj(addon_path, 'security'),
glob.glob(opj(add_path, 'security', '*csv')) +
glob.glob(opj(add_path, 'security', '*xml'))
),
(
opj(addon_path, 'rng'),
glob.glob(opj(add_path, 'rng', '*rng'))
)
]
pathfiles = []
for root, dirs, innerfiles in os.walk(add_path):
innerfiles = filter(lambda file: os.path.splitext(file)[1] not in ('.pyc', '.py', '.pyd', '.pyo'), innerfiles)
if innerfiles:
pathfiles.extend(((opj(addon_path, root.replace('bin/addons/', '')), map(lambda file: opj(root, file), innerfiles)),))
files.extend(pathfiles)
return files
check_modules()
# create startup script
start_script = \
"#!/bin/sh\n\
cd %s/lib/python%s/site-packages/openerp-server\n\
exec %s ./openerp-server.py $@\n" % (sys.prefix, py_short_version, sys.executable)
# write script
f = open('openerp-server', 'w')
f = file('openerp-server','w')
start_script = """#!/bin/sh\necho "OpenERP Setup - The content of this file is generated at the install stage" """
f.write(start_script)
f.close()
class openerp_server_install(install):
    # Custom distutils "install" command: regenerates the 'openerp-server'
    # launcher script with the final installation path (install_libbase is
    # only known at install time) before running the standard install.
    def run(self):
        # create startup script
        start_script = "#!/bin/sh\ncd %s\nexec %s ./openerp-server.py $@\n" % (opj(self.install_libbase, "openerp-server"), sys.executable)
        # write script
        f = open('openerp-server', 'w')
        f.write(start_script)
        f.close()
        install.run(self)
options = {
"py2exe": {
"compressed": 1,
@ -179,6 +157,9 @@ setup(name = name,
classifiers = filter(None, classifiers.split("\n")),
license = license,
data_files = data_files(),
cmdclass = {
'install' : openerp_server_install,
},
scripts = ['openerp-server'],
packages = ['openerp-server',
'openerp-server.addons',