bzr revid: olt@tinyerp.com-20090709083913-0jkpicud1kdq01um
Olivier Laurent 2009-07-09 10:39:13 +02:00
commit 9b8f222fd9
23 changed files with 314 additions and 221 deletions

View File

@ -68,7 +68,7 @@ class Graph(dict):
father.addChild(name)
else:
Node(name, self)
def update_from_db(self, cr):
# update the graph with values from the database (if they exist)
## First, we set the default values for each package in graph
@ -141,7 +141,7 @@ class Node(Singleton):
def __iter__(self):
return itertools.chain(iter(self.children), *map(iter, self.children))
def __str__(self):
return self._pprint()
@ -322,7 +322,7 @@ def upgrade_graph(graph, cr, module_list, force=None):
raise
if info.get('installable', True):
packages.append((module, info.get('depends', []), info))
dependencies = dict([(p, deps) for p, deps, data in packages])
current, later = set([p for p, dep, data in packages]), set()
@ -352,7 +352,7 @@ def upgrade_graph(graph, cr, module_list, force=None):
for package in later:
unmet_deps = filter(lambda p: p not in graph, dependencies[package])
logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps)))
result = len(graph) - len_graph
if result != len(module_list):
logger.notifyChannel('init', netsvc.LOG_WARNING, 'Not all modules have loaded.')
@ -513,7 +513,7 @@ class MigrationManager(object):
parsed_installed_version = parse_version(pkg.installed_version or '')
current_version = parse_version(convert_version(pkg.data.get('version', '0')))
versions = _get_migration_versions(pkg)
for version in versions:
@ -580,13 +580,12 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'):
init_module_objects(cr, package.name, modules)
cr.commit()
for package in graph:
status['progress'] = (float(statusi)+0.1) / len(graph)
m = package.name
mid = package.id
if modobj is None:
modobj = pool.get('ir.module.module')
@ -595,7 +594,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
idref = {}
status['progress'] = (float(statusi)+0.4) / len(graph)
mode = 'update'
if hasattr(package, 'init') or package.state == 'to install':
mode = 'init'
@ -633,7 +632,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
package_todo.append(package.name)
migrations.migrate_module(package, 'post')
if modobj:
ver = release.major_version + '.' + package.data.get('version', '1.0')
# Set new modules and dependencies
@ -642,21 +641,21 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
# Update translations for all installed languages
modobj.update_translations(cr, 1, [mid], None)
cr.commit()
package.state = 'installed'
for kind in ('init', 'demo', 'update'):
if hasattr(package, kind):
delattr(package, kind)
statusi += 1
cr.execute('select model from ir_model where state=%s', ('manual',))
for model in cr.dictfetchall():
pool.get('ir.model').instanciate(cr, 1, model['model'], {})
pool.get('ir.model.data')._process_end(cr, 1, package_todo)
cr.commit()
return has_updates
def load_modules(db, force_demo=False, status=None, update_module=False):
@ -664,6 +663,17 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
status = {}
cr = db.cursor()
if cr:
cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_module_module'")
if len(cr.fetchall())==0:
logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
tools.init_db(cr)
# cr.execute("update res_users set password=%s where id=%s",('admin',1))
# in that case, force --init=all
tools.config["init"]["all"] = 1
tools.config['update']['all'] = 1
if not tools.config['without_demo']:
tools.config["demo"]['all'] = 1
force = []
if force_demo:
force.append('demo')
@ -673,7 +683,7 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
# NOTE: Try to also load the modules that have been marked as uninstallable previously...
STATES_TO_LOAD = ['installed', 'to upgrade', 'uninstallable']
graph = create_graph(cr, ['base'], force)
has_updates = load_module_graph(cr, graph, status, perform_checks=(not update_module), report=report)
if update_module:
@ -695,9 +705,9 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
modobj.button_upgrade(cr, 1, ids)
cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base'))
STATES_TO_LOAD += ['to install']
loop_guardrail = 0
while True:
loop_guardrail += 1
@ -713,7 +723,7 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
if new_modules_in_graph == 0:
# nothing to load
break
logger.notifyChannel('init', netsvc.LOG_DEBUG, 'Updating graph with %d more modules' % (len(module_list)))
r = load_module_graph(cr, graph, status, report=report)
has_updates = has_updates or r
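The loading loop above keeps extending the module graph until nothing new appears; a minimal, self-contained sketch of that stop condition (with a hypothetical update_graph() helper and an assumed iteration cap, not the actual implementation) could look like this:

def update_graph():
    # hypothetical stand-in for upgrade_graph(): report how many modules were added
    return 0

loop_guardrail = 0
while True:
    loop_guardrail += 1
    if loop_guardrail > 100:   # assumed safety cap against circular dependencies
        raise Exception('Possible recursive module dependencies')
    if update_graph() == 0:    # nothing new was added to the graph
        break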

View File

@ -1063,7 +1063,7 @@
<field name="model">ir.model.access</field>
<field name="type">tree</field>
<field name="arch" type="xml">
<tree string="Access Controls">
<tree string="Access Controls" editable="bottom">
<field name="name"/>
<field name="model_id"/>
<field name="group_id"/>

View File

@ -377,7 +377,7 @@ class actions_server(osv.osv):
_sequence = 'ir_actions_id_seq'
_order = 'sequence'
_columns = {
'name': fields.char('Action Name', required=True, size=64, help="Easy to Refer action by name e.g. One Sales Order -> Many Invoices"),
'name': fields.char('Action Name', required=True, size=64, help="Easy to Refer action by name e.g. One Sales Order -> Many Invoices", translate=True),
'condition' : fields.char('Condition', size=256, required=True, help="Condition that is to be tested before action is executed, e.g. object.list_price > object.cost_price"),
'state': fields.selection([
('client_action','Client Action'),

View File

@ -71,6 +71,11 @@ class ir_model(osv.osv):
pooler.restart_pool(cr.dbname)
return res
def write(self, cr, user, ids, vals, context=None):
if context:
del context['__last_update']
return super(ir_model,self).write(cr, user, ids, vals, context)
def create(self, cr, user, vals, context=None):
if context and context.get('manual',False):
vals['state']='manual'
@ -408,6 +413,9 @@ class ir_model_data(osv.osv):
'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'noupdate': lambda *a: False
}
_sql_constraints = [
('module_name_uniq', 'unique(name, module)', 'You can not have multiple records with the same id for the same module'),
]
def __init__(self, pool, cr):
osv.osv.__init__(self, pool, cr)
@ -415,11 +423,13 @@ class ir_model_data(osv.osv):
self.doinit = True
self.unlink_mark = {}
def _get_id(self,cr, uid, module, xml_id):
@tools.cache()
def _get_id(self, cr, uid, module, xml_id):
ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
assert len(ids)==1, '%d reference(s) to %s.%s. You should have one and only one !' % (len(ids), module, xml_id)
if not ids:
raise Exception('No references to %s.%s' % (module, xml_id))
# the SQL constraint ensures we have only one result
return ids[0]
_get_id = tools.cache(skiparg=2)(_get_id)
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
if not xml_id:
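For readers unfamiliar with tools.cache, here is a rough, simplified sketch of the memoisation pattern applied to _get_id, assuming skiparg=2 means the first two positional arguments (self and cr) are excluded from the cache key:

def simple_cache(skiparg=2):
    # simplified stand-in for tools.cache: results are keyed on the arguments
    # that follow the first `skiparg` positional ones (here: self and cr)
    def decorator(fn):
        memo = {}
        def wrapper(*args):
            key = args[skiparg:]
            if key not in memo:
                memo[key] = fn(*args)
            return memo[key]
        return wrapper
    return decorator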

View File

@ -31,7 +31,7 @@ class ir_rule_group(osv.osv):
_columns = {
'name': fields.char('Name', size=128, select=1),
'model_id': fields.many2one('ir.model', 'Object',select=1, required=True),
'global': fields.boolean('Global', select=1, help="Make the rule global, otherwise it needs to be put on a group or user"),
'global': fields.boolean('Global', select=1, help="Make the rule global, otherwise it needs to be put on a group"),
'rules': fields.one2many('ir.rule', 'rule_group', 'Tests', help="The rule is satisfied if at least one test is True"),
'groups': fields.many2many('res.groups', 'group_rule_group_rel', 'rule_group_id', 'group_id', 'Groups'),
'users': fields.many2many('res.users', 'user_rule_group_rel', 'rule_group_id', 'user_id', 'Users'),

View File

@ -94,7 +94,7 @@ class ir_translation(osv.osv):
translations[res_id] = value
return translations
def _set_ids(self, cr, uid, name, tt, lang, ids, value):
def _set_ids(self, cr, uid, name, tt, lang, ids, value, src=None):
# clear the caches
tr = self._get_ids(cr, uid, name, tt, lang, ids)
for res_id in tr:
@ -117,6 +117,7 @@ class ir_translation(osv.osv):
'name':name,
'res_id':id,
'value':value,
'src':src,
})
return len(ids)

View File

@ -480,7 +480,7 @@ class module(osv.osv):
if not mod.description:
logger.notifyChannel("init", netsvc.LOG_WARNING, 'module %s: description is empty !' % (mod.name,))
if not mod.certificate:
if not mod.certificate or not mod.certificate.isdigit():
logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: no quality certificate' % (mod.name,))
else:
val = long(mod.certificate[2:]) % 97 == 29
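The quality-certificate test above boils down to a modulo-97 checksum; restated as a hedged, standalone helper (hypothetical name, same arithmetic as shown):

def certificate_looks_valid(certificate):
    # the certificate must be a digit string whose value, with the first two
    # characters dropped, leaves a remainder of 29 when divided by 97
    if not certificate or not certificate.isdigit():
        return False
    return long(certificate[2:]) % 97 == 29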

View File

@ -272,6 +272,7 @@
<rng:zeroOrMore>
<rng:choice>
<rng:ref name="form"/>
<rng:ref name="data"/>
<rng:ref name="tree"/>
<rng:ref name="field"/>
<rng:ref name="label"/>

View File

@ -24,7 +24,6 @@
#
##############################################################################
import SimpleXMLRPCServer
import SocketServer
import logging
@ -95,7 +94,7 @@ def init_logger():
logger = logging.getLogger()
# create a format for log messages and dates
formatter = logging.Formatter('[%(asctime)s] %(levelname)s:%(name)s:%(message)s')
logging_to_stdout = False
if tools.config['syslog']:
# SysLog Handler
@ -117,7 +116,7 @@ def init_logger():
handler = logging.handlers.TimedRotatingFileHandler(logf,'D',1,30)
except Exception, ex:
sys.stderr.write("ERROR: couldn't create the logfile directory. Logging to the standard output.\n")
handler = logging.StreamHandler(sys.stdout)
handler = logging.StreamHandler(sys.stdout)
logging_to_stdout = True
else:
# Normal Handler on standard output
@ -156,7 +155,10 @@ def init_logger():
class Logger(object):
def notifyChannel(self, name, level, msg):
from service.web_services import common
log = logging.getLogger(tools.ustr(name))
if level == LOG_DEBUG_RPC and not hasattr(log, level):
@ -168,6 +170,9 @@ class Logger(object):
if isinstance(msg, Exception):
msg = tools.exception_to_unicode(msg)
if level in (LOG_ERROR,LOG_CRITICAL):
msg = common().get_server_environment() + msg
result = tools.ustr(msg).strip().split('\n')
if len(result)>1:
for idx, s in enumerate(result):
@ -197,7 +202,7 @@ class Agent(object):
for timer in self._timers[db]:
if not timer.isAlive():
self._timers[db].remove(timer)
@classmethod
def cancel(cls, db_name):
"""Cancel all timers for a given database. If None passed, all timers are cancelled"""
@ -205,7 +210,7 @@ class Agent(object):
if db_name is None or db == db_name:
for timer in cls._timers[db]:
timer.cancel()
@classmethod
def quit(cls):
cls.cancel(None)

View File

@ -129,7 +129,7 @@ if tools.config["translate_out"]:
tools.trans_export(tools.config["language"], tools.config["translate_modules"], buf, fileformat)
buf.close()
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written succesfully')
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written successfully')
sys.exit(0)
if tools.config["translate_in"]:

View File

@ -201,18 +201,30 @@ class expression(object):
if field.translate:
if operator in ('like', 'ilike', 'not like', 'not ilike'):
right = '%%%s%%' % right
operator = operator == '=like' and 'like' or operator
query1 = '( SELECT res_id' \
' FROM ir_translation' \
' WHERE name = %s' \
' AND lang = %s' \
' AND type = %s' \
' AND value ' + operator + ' %s' \
' AND type = %s'
instr = ' %s'
# Covering 'in' and 'not in' operators with operand placeholders (%s,%s), etc.
if operator in ['in','not in']:
instr = ','.join(['%s'] * len(right))
query1 += ' AND value ' + operator + ' ' +" (" + instr + ")" \
') UNION (' \
' SELECT id' \
' FROM "' + working_table._table + '"' \
' WHERE "' + left + '" ' + operator + ' %s' \
')'
' WHERE "' + left + '" ' + operator + ' ' +" (" + instr + "))"
else:
query1 += ' AND value ' + operator + instr + \
') UNION (' \
' SELECT id' \
' FROM "' + working_table._table + '"' \
' WHERE "' + left + '" ' + operator + instr + ")"
query2 = [working_table._name + ',' + left,
context.get('lang', False) or 'en_US',
'model',
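To make the placeholder handling above concrete, here is a small illustration (made-up values) of how the marker string is built for 'in' / 'not in' operators before being spliced into the query:

right = [7, 8, 9]                         # example right operand for an 'in' search
instr = ','.join(['%s'] * len(right))     # -> '%s,%s,%s'
clause = ' AND value in (' + instr + ')'  # the values themselves are passed separately to cr.execute()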

View File

@ -827,6 +827,14 @@ class property(function):
int(prop.value.split(',')[1])) or False
obj = obj.pool.get(self._obj)
existing_ids = obj.search(cr, uid, [('id','in',res.values())])
deleted_ids = []
for res_id in res.values():
if res_id and (res_id not in existing_ids):
if res_id not in deleted_ids:
cr.execute('DELETE FROM ir_property WHERE value=%s', ((obj._name+','+str(res_id)),))
deleted_ids.append(res_id)
names = dict(obj.name_get(cr, uid, filter(None, res.values()), context))
for r in res.keys():
if res[r] and res[r] in names:
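The intent of the block added above is to drop properties that point at records which no longer exist; a simplified, data-only sketch of that filtering (hypothetical ids):

res = {1: 10, 2: 11, 3: False}    # property id -> referenced record id
existing_ids = [10]               # ids still present in the target model
dangling = [r for r in set(res.values()) if r and r not in existing_ids]
# each dangling id would trigger: DELETE FROM ir_property WHERE value = '<model>,<id>'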

View File

@ -433,6 +433,16 @@ class orm_template(object):
return browse_null()
def __export_row(self, cr, uid, row, fields, context=None):
def check_type(type,r):
if type == 'float':
return 0.0
elif type == 'integer':
return 0
elif type == 'char':
return ''
return r
lines = []
data = map(lambda x: '', range(len(fields)))
done = []
@ -444,6 +454,12 @@ class orm_template(object):
while i < len(f):
r = r[f[i]]
if not r:
if f[i] in self._columns:
r = check_type(self._columns[f[i]]._type,r)
elif f[i] in self._inherit_fields:
r = check_type(self._inherit_fields[f[i]][2]._type,r)
data[fpos] = tools.ustr(r)
break
if isinstance(r, (browse_record_list, list)):
first = True
@ -553,12 +569,16 @@ class orm_template(object):
sel = fields_def[field[len(prefix)]]['selection'](self,
cr, uid, context)
for key, val in sel:
if str(key) == line[i]:
if line[i] in [str(key),str(val)]: # Accepting either the key or the value for a selection field
res = key
break
if line[i] and not res:
logger.notifyChannel("import", netsvc.LOG_WARNING,
"key '%s' not found in selection field '%s'" % \
(line[i], field[len(prefix)]))
warning += "Key/value '"+ str(line[i]) +"' not found in selection field '"+str(field[len(prefix)])+"'"
elif fields_def[field[len(prefix)]]['type']=='many2one':
res = False
if line[i]:
@ -652,10 +672,22 @@ class orm_template(object):
process_liness(self, datas, [], fields_def)
if warning:
cr.rollback()
return (-1, res, warning, '')
id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
current_module, res, xml_id=data_id, mode=mode,
noupdate=noupdate)
return (-1, res, 'Line ' + str(counter) +' : ' + warning, '')
try:
id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
current_module, res, xml_id=data_id, mode=mode,
noupdate=noupdate)
except Exception, e:
import psycopg2
if isinstance(e,psycopg2.IntegrityError):
msg= 'Insertion Failed!'
for key in self.pool._sql_error.keys():
if key in e[0]:
msg = self.pool._sql_error[key]
break
return (-1, res,'Line ' + str(counter) +' : ' + msg,'' )
for lang in translate:
context2 = context.copy()
context2['lang'] = lang
@ -1621,7 +1653,9 @@ class orm(orm_template):
('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
]
if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
# !!! Avoid reduction of varchar field !!!
if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size < f.size:
# if f_pg_type == 'varchar' and f._type == 'char' and f_pg_size != f.size:
logger.notifyChannel('orm', netsvc.LOG_INFO, "column '%s' in table '%s' changed size" % (k, self._table))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" VARCHAR(%d)' % (self._table, k, f.size))
@ -2277,11 +2311,12 @@ class orm(orm_template):
else:
cr.execute('update "'+self._table+'" set '+string.join(upd0, ',')+' ' \
'where id in ('+ids_str+')', upd1)
if totranslate:
for f in direct:
if self._columns[f].translate:
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f])
src_trans = self.pool.get(self._name).read(cr,user,ids,[f])
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans[0][f])
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
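The new import error handling maps low-level database errors to the messages registered in pool._sql_error; a minimal sketch of that lookup (hypothetical helper, assuming the registry is keyed by a substring of the error text):

import psycopg2

def friendly_import_error(pool, exc, default='Insertion Failed!'):
    # return a registered human-readable message for an IntegrityError,
    # falling back to a generic one; anything else is re-raised
    if not isinstance(exc, psycopg2.IntegrityError):
        raise exc
    for key, message in pool._sql_error.items():
        if key in exc.args[0]:
            return message
    return default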

View File

@ -554,6 +554,7 @@ class report_custom(report_int):
abscissa.update(fields_bar[idx])
idx0 += 1
abscissa = map(lambda x : [x, None], abscissa)
abscissa.sort()
ar.x_coord = category_coord.T(abscissa,0)
ar.draw(can)

View File

@ -4,6 +4,7 @@ import re
rml_parents = ['tr','story','section']
html_parents = ['tr','body','div']
sxw_parents = ['{http://openoffice.org/2000/table}table-row','{http://openoffice.org/2000/office}body','{http://openoffice.org/2000/text}section']
odt_parents = ['{urn:oasis:names:tc:opendocument:xmlns:office:1.0}body','{urn:oasis:names:tc:opendocument:xmlns:table:1.0}table-row','{urn:oasis:names:tc:opendocument:xmlns:text:1.0}section']
class report(object):
def preprocess_rml(self, root_node,type='pdf'):
@ -37,7 +38,9 @@ class report(object):
if len(txt.group(4)) > 1:
return " "
match = rml_parents
if type in ['odt','sxw']:
if type == 'odt':
match = odt_parents
if type == 'sxw':
match = sxw_parents
if type =='html2html':
match = html_parents

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -21,12 +21,11 @@
##############################################################################
import os,types
from xml.dom import minidom
from lxml import etree
import netsvc
import tools
import print_fnc
import copy
from osv.orm import browse_null, browse_record
import pooler
@ -35,7 +34,7 @@ class InheritDict(dict):
def __init__(self, parent=None):
self.parent = parent
def __getitem__(self, name):
if name in self:
return super(InheritDict, self).__getitem__(name)
@ -51,36 +50,32 @@ def tounicode(val):
elif isinstance(val, unicode):
unicode_val = val
else:
unicode_val = unicode(val)
unicode_val = unicode(val)
return unicode_val
class document(object):
def __init__(self, cr, uid, datas, func=False):
# create a new document
self.cr = cr
self.cr = cr
self.pool = pooler.get_pool(cr.dbname)
self.doc = minidom.Document()
self.func = func or {}
self.datas = datas
self.uid = uid
self.bin_datas = {}
def node_attrs_get(self, node):
attrs = {}
nattr = node.attributes
for i in range(nattr.length):
attr = nattr.item(i)
attrs[attr.localName] = attr.nodeValue
# attrs[attr.name] = attr.nodeValue
return attrs
if len(node.attrib):
return node.attrib
return {}
def get_value(self, browser, field_path):
fields = field_path.split('.')
if not len(fields):
return ''
value = browser
for f in fields:
if isinstance(value, list):
if len(value)==0:
@ -90,12 +85,12 @@ class document(object):
return ''
else:
value = value[f]
if isinstance(value, browse_null) or (type(value)==bool and not value):
return ''
else:
else:
return value
def get_value2(self, browser, field_path):
value = self.get_value(browser, field_path)
if isinstance(value, browse_record):
@ -104,10 +99,10 @@ class document(object):
return False
else:
return value
def eval(self, record, expr):
#TODO: support remote variables (eg address.title) in expr
# how to do that: parse the string, find dots, replace those dotted variables by temporary
# how to do that: parse the string, find dots, replace those dotted variables by temporary
# "simple ones", fetch the value of those variables and add them (temporarily) to the _data
# dictionary passed to eval
@ -120,13 +115,6 @@ class document(object):
return eval(expr, {}, {'obj': record})
def parse_node(self, node, parent, browser, datas=None):
# node is the node of the xml template to be parsed
# parent = the parent node in the xml data tree we are creating
if node.nodeType == node.ELEMENT_NODE:
# convert the attributes of the node to a dictionary
attrs = self.node_attrs_get(node)
if 'type' in attrs:
if attrs['type']=='field':
@ -134,28 +122,26 @@ class document(object):
#TODO: test this
if value == '' and 'default' in attrs:
value = attrs['default']
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_txt = self.doc.createTextNode(tounicode(value))
el.appendChild(el_txt)
el = etree.Element(node.tag)
parent.append(el)
el.text = tounicode(value)
#TODO: test this
for key, value in attrs.iteritems():
if key not in ('type', 'name', 'default'):
el.setAttribute(key, value)
el.set(key, value)
elif attrs['type']=='attachment':
if isinstance(browser, list):
model = browser[0]._table_name
else:
else:
model = browser._table_name
value = self.get_value(browser, attrs['name'])
service = netsvc.LocalService("object_proxy")
ids = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'search', [('res_model','=',model),('res_id','=',int(value))])
datas = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'read', ids)
if len(datas):
# if there are several, pick first
datas = datas[0]
@ -169,37 +155,31 @@ class document(object):
fp.write(dt)
i = str(len(self.bin_datas))
self.bin_datas[i] = fp
el = self.doc.createElement(node.localName)
parent.appendChild(el)
# node content is the length of the image
el_txt = self.doc.createTextNode(i)
el.appendChild(el_txt)
el = etree.Element(node.tag)
el.text = i
parent.append(el)
elif attrs['type']=='data':
#TODO: test this
el = self.doc.createElement(node.localName)
parent.appendChild(el)
txt = self.datas.get('form', {}).get(attrs['name'], '')
el_txt = self.doc.createTextNode(tounicode(txt))
el.appendChild(el_txt)
el = etree.Element(node.tag)
el.text = txt
parent.append(el)
elif attrs['type']=='function':
el = self.doc.createElement(node.localName)
parent.appendChild(el)
if attrs['name'] in self.func:
txt = self.func[attrs['name']](node)
else:
txt = print_fnc.print_fnc(attrs['name'], node)
el_txt = self.doc.createTextNode(txt)
el.appendChild(el_txt)
el = etree.Element(node.tag)
el.text = txt
parent.append(el)
elif attrs['type']=='eval':
el = self.doc.createElement(node.localName)
parent.appendChild(el)
value = self.eval(browser, attrs['expr'])
el_txt = self.doc.createTextNode(str(value))
el.appendChild(el_txt)
el = etree.Element(node.tag)
el.text = str(value)
parent.append(el)
elif attrs['type']=='fields':
fields = attrs['name'].split(',')
@ -216,26 +196,24 @@ class document(object):
keys.reverse()
v_list = [vals[k] for k in keys]
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
for v in v_list:
el = etree.Element(node.tag)
parent.append(el)
for el_cld in node:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
elif attrs['type']=='call':
if len(attrs['args']):
#TODO: test this
#TODO: test this
# fetches the values of the variables whose names were passed in the args attribute
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
else:
args = []
# get the object
if attrs.has_key('model'):
obj = self.pool.get(attrs['model'])
else:
else:
if isinstance(browser, list):
obj = browser[0]._table
else:
@ -244,38 +222,28 @@ class document(object):
# get the ids
if attrs.has_key('ids'):
ids = self.eval(browser, attrs['ids'])
else:
else:
if isinstance(browser, list):
ids = [b.id for b in browser]
ids = [b.id for b in browser]
else:
ids = [browser.id]
# call the method itself
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
def parse_result_tree(node, parent, datas):
if node.nodeType == node.ELEMENT_NODE:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el = etree.Element(node.tag)
parent.append(el)
atr = self.node_attrs_get(node)
if 'value' in atr:
if not isinstance(datas[atr['value']], (str, unicode)):
txt = self.doc.createTextNode(str(datas[atr['value']]))
txt = str(datas[atr['value']])
else:
# txt = self.doc.createTextNode(datas[atr['value']].decode('utf-8'))
txt = self.doc.createTextNode(datas[atr['value']])
el.appendChild(txt)
txt = datas[atr['value']]
el.append(txt)
else:
el_cld = node.firstChild
while el_cld:
for el_cld in node:
parse_result_tree(el_cld, el, datas)
el_cld = el_cld.nextSibling
elif node.nodeType==node.TEXT_NODE:
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
if not isinstance(newdatas, list):
newdatas = [newdatas]
for newdata in newdatas:
@ -283,50 +251,38 @@ class document(object):
elif attrs['type']=='zoom':
value = self.get_value(browser, attrs['name'])
if value:
if not isinstance(value, list):
v_list = [value]
else:
v_list = value
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
el = etree.Element(node.tag)
parent.append(el)
for el_cld in node:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
else:
# if there is no "type" attribute in the node, copy it to the xml data and parse its children
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, browser)
el_cld = el_cld.nextSibling
elif node.nodeType==node.TEXT_NODE:
# if it's a text node, copy it to the xml data
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
for el_cld in node:
self.parse_node(el_cld, parent, browser)
def xml_get(self):
return self.doc.toxml('utf-8')
#return self.doc.toxml('utf-8')
return etree.tostring(self.doc,encoding="utf-8",xml_declaration=True)
def parse_tree(self, ids, model, context=None):
if not context:
context={}
browser = self.pool.get(model).browse(self.cr, self.uid, ids, context)
self.parse_node(self.dom.documentElement, self.doc, browser)
self.parse_node(self.dom, self.doc, browser)
def parse_string(self, xml, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(xml)
self.dom = etree.XML(xml)
# create the xml data from the xml template
self.parse_tree(ids, model, context)
@ -334,9 +290,8 @@ class document(object):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(tools.file_open(filename).read())
# create the xml data from the xml template
self.dom = etree.XML(tools.file_open(filename).read())
self.doc = etree.Element(self.dom.tag)
self.parse_tree(ids, model, context)
def close(self):

View File

@ -26,7 +26,6 @@ import tools
from report import render
from lxml import etree
from xml.dom import minidom
import libxml2
import libxslt
@ -38,24 +37,19 @@ class report_printscreen_list(report_int):
def _parse_node(self, root_node):
result = []
for node in root_node.getchildren():
for node in root_node:
if node.tag == 'field':
#attrsa = node.attributes
attrsa = node.attrib
print "typppppp",type(attrsa),dir(attrsa)
attrs = {}
if not attrsa is None:
for key,val in attrsa.items():
for key,val in attrsa.items():
attrs[key] = val
#for i in range(attrsa.length):
# attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
result.append(attrs['name'])
else:
result.extend(self._parse_node(node))
return result
def _parse_string(self, view):
#dom = minidom.parseString(view)
dom = etree.XML(view)
return self._parse_node(dom)
@ -85,16 +79,14 @@ class report_printscreen_list(report_int):
pageSize=[297.0,210.0]
impl = minidom.getDOMImplementation()
new_doc = impl.createDocument(None, "report", None)
new_doc = etree.Element("report")
config = etree.Element("config")
# build header
config = new_doc.createElement("config")
def _append_node(name, text):
n = new_doc.createElement(name)
t = new_doc.createTextNode(text)
n.appendChild(t)
config.appendChild(n)
n = etree.Element(name)
n.text = text
config.append(n)
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
@ -119,20 +111,17 @@ class report_printscreen_list(report_int):
s = fields[fields_order[pos]].get('size', 56) / 28 + 1
l[pos] = strmax * s / t
_append_node('tableSize', ','.join(map(str,l)) )
new_doc.childNodes[0].appendChild(config)
header = new_doc.createElement("header")
new_doc.append(config)
header=etree.Element("header")
for f in fields_order:
field = new_doc.createElement("field")
field_txt = new_doc.createTextNode(str(fields[f]['string']))
field.appendChild(field_txt)
header.appendChild(field)
new_doc.childNodes[0].appendChild(header)
lines = new_doc.createElement("lines")
field = etree.Element("field")
field.text = fields[f]['string'] or ''
header.append(field)
new_doc.append(header)
lines = etree.Element("lines")
for line in results:
node_line = new_doc.createElement("row")
node_line = etree.Element("row")
for f in fields_order:
if fields[f]['type']=='many2one' and line[f]:
line[f] = line[f][1]
@ -141,16 +130,15 @@ class report_printscreen_list(report_int):
if fields[f]['type'] == 'float':
precision=(('digits' in fields[f]) and fields[f]['digits'][1]) or 2
line[f]=round(line[f],precision)
col = new_doc.createElement("col")
col.setAttribute('tree','no')
col = etree.Element("col")
col.set('tree','no')
if line[f] != None:
txt = new_doc.createTextNode(str(line[f] or ''))
col.text = tools.ustr(line[f] or '')
else:
txt = new_doc.createTextNode('/')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
new_doc.childNodes[0].appendChild(lines)
col.text = '/'
node_line.append(col)
lines.append(node_line)
new_doc.append(node_line)
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
style = libxslt.parseStylesheetDoc(styledoc)

View File

@ -24,28 +24,26 @@ from report.render.rml2pdf import utils
from lxml import etree
import copy
class odt2odt(object):
def __init__(self, odt, localcontext):
self.localcontext = localcontext
self.etree = odt
self._node = None
def render(self):
def process_text(node,new_node):
if new_node.tag in ['story','tr','section']:
new_node.attrib.clear()
for child in utils._child_get(node, self):
new_child = copy.deepcopy(child)
new_child.text = utils._process_text(self, child.text)
new_node.append(new_child)
if len(child):
for n in new_child:
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
new_child.remove(n)
process_text(child, new_child)
else:
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
self._node = copy.deepcopy(self.etree)
for n in self._node:
self._node.remove(n)
@ -53,16 +51,6 @@ class odt2odt(object):
return self._node
def parseNode(node, localcontext = {}):
body = node.getchildren()[-1]
elements = body.findall(localcontext['name_space']["text"]+"p")
for pe in elements:
e = pe.findall(localcontext['name_space']["text"]+"drop-down")
for de in e:
pp=de.getparent()
for cnd in de.getchildren():
if cnd.text:
pe.append(cnd)
pp.remove(de)
r = odt2odt(node, localcontext)
return r.render()

View File

@ -236,16 +236,19 @@ class _rml_canvas(object):
def _drawString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
self.canvas.drawString(text=text, **v)
def _drawCenteredString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
self.canvas.drawCentredString(text=text, **v)
def _drawRightString(self, node):
v = utils.attr_get(node, ['x','y'])
text=self._textual(node, **v)
text = utils.xml2str(text)
self.canvas.drawRightString(text=text, **v)
def _rect(self, node):
@ -570,7 +573,7 @@ class _rml_flowable(object):
from reportlab.graphics.barcode import usps
except Exception, e:
return None
args = utils.attr_get(node, [], {'ratio':'float','xdim':'unit','height':'unit','checksum':'bool','quiet':'bool'})
args = utils.attr_get(node, [], {'ratio':'float','xdim':'unit','height':'unit','checksum':'int','quiet':'int','width':'unit','stop':'bool','bearers':'int','barWidth':'float','barHeight':'float'})
codes = {
'codabar': lambda x: common.Codabar(x, **args),
'code11': lambda x: common.Code11(x, **args),

View File

@ -47,6 +47,9 @@ _regex = re.compile('\[\[(.+?)\]\]')
def str2xml(s):
return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
def xml2str(s):
return s.replace('&amp;','&').replace('&lt;','<').replace('&gt;','>')
def _child_get(node, self=None, tagname=None):
for n in node:
if self and self.localcontext and n.get('rml_loop', False):
@ -159,6 +162,8 @@ def attr_get(node, attrs, dict={}):
res[key] = int(node.get(key))
elif dict[key]=='unit':
res[key] = unit_get(node.get(key))
elif dict[key] == 'float' :
res[key] = float(node.get(key))
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
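A quick sanity check of the new xml2str helper: it is intended to undo str2xml for the three escaped entities (in str2xml the ampersand must be replaced first so it is not double-escaped). Restating the two helpers from above:

def str2xml(s):
    return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')

def xml2str(s):
    return s.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>')

assert xml2str(str2xml('a < b & b > c')) == 'a < b & b > c'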

View File

@ -305,7 +305,7 @@ class rml_parse(object):
head_dom = etree.XML(rml_head)
for tag in head_dom.getchildren():
found = rml_dom.find('.//'+tag.tag)
if found:
if found and len(found):
if tag.get('position'):
found.append(tag)
else :
@ -464,10 +464,47 @@ class report_sxw(report_rml, preprocess.report):
meta = etree.tostring(rml_dom_meta)
rml_dom = etree.XML(rml)
body = rml_dom.getchildren()[-1]
elements = []
key1 = rml_parser.localcontext['name_space']["text"]+"p"
key2 = rml_parser.localcontext['name_space']["text"]+"drop-down"
for n in rml_dom.iterdescendants():
if n.tag == key1:
elements.append(n)
if report_type == 'odt':
for pe in elements:
e = pe.findall(key2)
for de in e:
pp=de.getparent()
if de.text or de.tail:
pe.text = de.text or de.tail
for cnd in de.getchildren():
if cnd.text or cnd.tail:
if pe.text:
pe.text += cnd.text or cnd.tail
else:
pe.text = cnd.text or cnd.tail
pp.remove(de)
else:
for pe in elements:
e = pe.findall(key2)
for de in e:
pp = de.getparent()
if de.text or de.tail:
pe.text = de.text or de.tail
for cnd in de:
text = cnd.get("{http://openoffice.org/2000/text}value",False)
if text:
if pe.text and text.startswith('[['):
pe.text += text
elif text.startswith('[['):
pe.text = text
if de.getparent():
pp.remove(de)
rml_dom = self.preprocess_rml(rml_dom,report_type)
create_doc = self.generators[report_type]
odt = etree.tostring(create_doc(rml_dom, rml_parser.localcontext))
sxw_z = zipfile.ZipFile(sxw_io, mode='a')
sxw_z.writestr('content.xml', "<?xml version='1.0' encoding='UTF-8'?>" + \
odt)

View File

@ -1,7 +1,7 @@
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
@ -20,7 +20,7 @@
#
##############################################################################
import base64
import base64
import logging
import os
import security
@ -279,7 +279,7 @@ class db(netsvc.Service):
fs = os.path.join(tools.config['root_path'], 'filestore')
if os.path.exists(os.path.join(fs, old_name)):
os.rename(os.path.join(fs, old_name), os.path.join(fs, new_name))
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'RENAME DB: %s -> %s' % (old_name, new_name))
finally:
@ -347,7 +347,7 @@ class db(netsvc.Service):
try:
l.notifyChannel('migration', netsvc.LOG_INFO, 'migrate database %s' % (db,))
tools.config['update']['base'] = True
pooler.restart_pool(db, force_demo=False, update_module=True)
pooler.restart_pool(db, force_demo=False, update_module=True)
except except_orm, inst:
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
@ -373,6 +373,7 @@ class common(netsvc.Service):
self.exportMethod(self.timezone_get)
self.exportMethod(self.get_available_updates)
self.exportMethod(self.get_migration_scripts)
self.exportMethod(self.get_server_environment)
def ir_set(self, db, uid, password, keys, args, name, value, replace=True, isobject=False):
security.check(db, uid, password)
@ -437,36 +438,36 @@ GNU Public Licence.
def timezone_get(self, db, login, password):
return time.tzname[0]
def get_available_updates(self, password, contract_id, contract_password):
security.check_super(password)
import tools.maintenance as tm
try:
rc = tm.remote_contract(contract_id, contract_password)
if not rc.id:
raise tm.RemoteContractException('This contract does not exist or is not active')
raise tm.RemoteContractException('This contract does not exist or is not active')
return rc.get_available_updates(rc.id, addons.get_modules_with_version())
except tm.RemoteContractException, e:
self.abortResponse(1, 'Migration Error', 'warning', str(e))
def get_migration_scripts(self, password, contract_id, contract_password):
security.check_super(password)
l = netsvc.Logger()
import tools.maintenance as tm
try:
try:
rc = tm.remote_contract(contract_id, contract_password)
if not rc.id:
raise tm.RemoteContractException('This contract does not exist or is not active')
raise tm.RemoteContractException('This contract does not exist or is not active')
if rc.status != 'full':
raise tm.RemoteContractException('Can not get updates for a partial contract')
l.notifyChannel('migration', netsvc.LOG_INFO, 'starting migration with contract %s' % (rc.name,))
zips = rc.retrieve_updates(rc.id, addons.get_modules_with_version())
from shutil import rmtree, copytree, copy
backup_directory = os.path.join(tools.config['root_path'], 'backup', time.strftime('%Y-%m-%d-%H-%M'))
@ -524,7 +525,33 @@ GNU Public Licence.
tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))
l.notifyChannel('migration', netsvc.LOG_ERROR, tb_s)
raise
def get_server_environment(self):
try:
revno = os.popen('bzr revno').read()
rev_log = ''
cnt = 0
for line in os.popen('bzr log -r %s'%(int(revno))).readlines():
if line.find(':')!=-1:
if not cnt == 4:
rev_log += '\t' + line
cnt += 1
else:
break
except Exception,e:
rev_log = 'Exception: %s\n' % (str(e))
os_lang = os.environ.get('LANG', '').split('.')[0]
environment = '\nEnvironment_Information : \n' \
'PlatForm : %s\n' \
'Operating System : %s\n' \
'Operating System Version : %s\n' \
'Operating System Locale : %s\n'\
'Python Version : %s\n'\
'OpenERP-Server Version : %s\n'\
'Last revision Details: \n%s' \
%(sys.platform,os.name,str(sys.version.split('\n')[1]),os_lang,str(sys.version[0:5]),release.version,rev_log)
return environment
common()
class objects_proxy(netsvc.Service):
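As the netsvc change earlier shows, this environment report is prepended to error and critical log messages; a rough standalone sketch of that flow (simplified, not the actual implementation):

import os
import sys

def build_environment_report():
    # condensed version of get_server_environment(): platform and interpreter details
    os_lang = os.environ.get('LANG', '').split('.')[0]
    return 'Platform: %s\nOS: %s\nLocale: %s\nPython: %s\n' % (
        sys.platform, os.name, os_lang, sys.version.split()[0])

def log_error(msg):
    # errors carry the environment report so bug reports are easier to read
    print build_environment_report() + msg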

View File

@ -256,7 +256,11 @@ form: module.record_id""" % (xml_id,)
if len(d_search):
ids = self.pool.get(d_model).search(cr,self.uid,eval(d_search))
if len(d_id):
ids.append(self.id_get(cr, d_model, d_id))
try:
ids.append(self.id_get(cr, d_model, d_id))
except:
# d_id cannot be found; it doesn't matter in this case
pass
if len(ids):
self.pool.get(d_model).unlink(cr, self.uid, ids)
self.pool.get('ir.model.data')._unlink(cr, self.uid, d_model, ids, direct=True)