convert tabs to 4 spaces
bzr revid: christophe@tinyerp.com-20080722142436-143iu4ryy47w3av0
This commit is contained in:
parent
c5c7c2ef07
commit
2dd9723b8b
|
@ -45,310 +45,310 @@ logger = netsvc.Logger()
|
||||||
|
|
||||||
opj = os.path.join
|
opj = os.path.join
|
||||||
|
|
||||||
_ad = os.path.abspath(opj(tools.config['root_path'], 'addons')) # default addons path (base)
|
_ad = os.path.abspath(opj(tools.config['root_path'], 'addons')) # default addons path (base)
|
||||||
ad = os.path.abspath(tools.config['addons_path']) # alternate addons path
|
ad = os.path.abspath(tools.config['addons_path']) # alternate addons path
|
||||||
|
|
||||||
sys.path.insert(1, _ad)
|
sys.path.insert(1, _ad)
|
||||||
if ad != _ad:
|
if ad != _ad:
|
||||||
sys.path.insert(1, ad)
|
sys.path.insert(1, ad)
|
||||||
|
|
||||||
class Graph(dict):
|
class Graph(dict):
|
||||||
|
|
||||||
def addNode(self, name, deps):
|
def addNode(self, name, deps):
|
||||||
max_depth, father = 0, None
|
max_depth, father = 0, None
|
||||||
for n in [Node(x, self) for x in deps]:
|
for n in [Node(x, self) for x in deps]:
|
||||||
if n.depth >= max_depth:
|
if n.depth >= max_depth:
|
||||||
father = n
|
father = n
|
||||||
max_depth = n.depth
|
max_depth = n.depth
|
||||||
if father:
|
if father:
|
||||||
father.addChild(name)
|
father.addChild(name)
|
||||||
else:
|
else:
|
||||||
Node(name, self)
|
Node(name, self)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
level = 0
|
level = 0
|
||||||
done = Set(self.keys())
|
done = Set(self.keys())
|
||||||
while done:
|
while done:
|
||||||
level_modules = [(name, module) for name, module in self.items() if module.depth==level]
|
level_modules = [(name, module) for name, module in self.items() if module.depth==level]
|
||||||
for name, module in level_modules:
|
for name, module in level_modules:
|
||||||
done.remove(name)
|
done.remove(name)
|
||||||
yield module
|
yield module
|
||||||
level += 1
|
level += 1
|
||||||
|
|
||||||
class Singleton(object):
|
class Singleton(object):
|
||||||
|
|
||||||
def __new__(cls, name, graph):
|
def __new__(cls, name, graph):
|
||||||
if name in graph:
|
if name in graph:
|
||||||
inst = graph[name]
|
inst = graph[name]
|
||||||
else:
|
else:
|
||||||
inst = object.__new__(cls)
|
inst = object.__new__(cls)
|
||||||
inst.name = name
|
inst.name = name
|
||||||
graph[name] = inst
|
graph[name] = inst
|
||||||
return inst
|
return inst
|
||||||
|
|
||||||
class Node(Singleton):
|
class Node(Singleton):
|
||||||
|
|
||||||
def __init__(self, name, graph):
|
def __init__(self, name, graph):
|
||||||
self.graph = graph
|
self.graph = graph
|
||||||
if not hasattr(self, 'childs'):
|
if not hasattr(self, 'childs'):
|
||||||
self.childs = []
|
self.childs = []
|
||||||
if not hasattr(self, 'depth'):
|
if not hasattr(self, 'depth'):
|
||||||
self.depth = 0
|
self.depth = 0
|
||||||
|
|
||||||
def addChild(self, name):
|
def addChild(self, name):
|
||||||
node = Node(name, self.graph)
|
node = Node(name, self.graph)
|
||||||
node.depth = self.depth + 1
|
node.depth = self.depth + 1
|
||||||
if node not in self.childs:
|
if node not in self.childs:
|
||||||
self.childs.append(node)
|
self.childs.append(node)
|
||||||
for attr in ('init', 'update', 'demo'):
|
for attr in ('init', 'update', 'demo'):
|
||||||
if hasattr(self, attr):
|
if hasattr(self, attr):
|
||||||
setattr(node, attr, True)
|
setattr(node, attr, True)
|
||||||
self.childs.sort(lambda x,y: cmp(x.name, y.name))
|
self.childs.sort(lambda x,y: cmp(x.name, y.name))
|
||||||
|
|
||||||
def hasChild(self, name):
|
def hasChild(self, name):
|
||||||
return Node(name, self.graph) in self.childs or \
|
return Node(name, self.graph) in self.childs or \
|
||||||
bool([c for c in self.childs if c.hasChild(name)])
|
bool([c for c in self.childs if c.hasChild(name)])
|
||||||
|
|
||||||
def __setattr__(self, name, value):
|
def __setattr__(self, name, value):
|
||||||
super(Singleton, self).__setattr__(name, value)
|
super(Singleton, self).__setattr__(name, value)
|
||||||
if name in ('init', 'update', 'demo'):
|
if name in ('init', 'update', 'demo'):
|
||||||
tools.config[name][self.name] = 1
|
tools.config[name][self.name] = 1
|
||||||
for child in self.childs:
|
for child in self.childs:
|
||||||
setattr(child, name, value)
|
setattr(child, name, value)
|
||||||
if name == 'depth':
|
if name == 'depth':
|
||||||
for child in self.childs:
|
for child in self.childs:
|
||||||
setattr(child, name, value + 1)
|
setattr(child, name, value + 1)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return itertools.chain(iter(self.childs), *map(iter, self.childs))
|
return itertools.chain(iter(self.childs), *map(iter, self.childs))
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self._pprint()
|
return self._pprint()
|
||||||
|
|
||||||
def _pprint(self, depth=0):
|
def _pprint(self, depth=0):
|
||||||
s = '%s\n' % self.name
|
s = '%s\n' % self.name
|
||||||
for c in self.childs:
|
for c in self.childs:
|
||||||
s += '%s`-> %s' % (' ' * depth, c._pprint(depth+1))
|
s += '%s`-> %s' % (' ' * depth, c._pprint(depth+1))
|
||||||
return s
|
return s
|
||||||
|
|
||||||
def get_module_path(module):
|
def get_module_path(module):
|
||||||
"""Return the path of the given module.
|
"""Return the path of the given module.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if os.path.exists(opj(ad, module)) or os.path.exists(opj(ad, '%s.zip' % module)):
|
if os.path.exists(opj(ad, module)) or os.path.exists(opj(ad, '%s.zip' % module)):
|
||||||
return opj(ad, module)
|
return opj(ad, module)
|
||||||
|
|
||||||
if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)):
|
if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)):
|
||||||
return opj(_ad, module)
|
return opj(_ad, module)
|
||||||
|
|
||||||
raise IOError, 'Module not found : %s' % module
|
raise IOError, 'Module not found : %s' % module
|
||||||
|
|
||||||
def get_module_resource(module, *args):
|
def get_module_resource(module, *args):
|
||||||
"""Return the full path of a resource of the given module.
|
"""Return the full path of a resource of the given module.
|
||||||
|
|
||||||
@param module: the module
|
@param module: the module
|
||||||
@param args: the resource path components
|
@param args: the resource path components
|
||||||
|
|
||||||
@return: absolute path to the resource
|
@return: absolute path to the resource
|
||||||
"""
|
"""
|
||||||
return opj(get_module_path(module), *args)
|
return opj(get_module_path(module), *args)
|
||||||
|
|
||||||
def get_modules():
|
def get_modules():
|
||||||
"""Returns the list of module names
|
"""Returns the list of module names
|
||||||
"""
|
"""
|
||||||
|
|
||||||
module_list = os.listdir(ad)
|
module_list = os.listdir(ad)
|
||||||
module_names = [os.path.basename(m) for m in module_list]
|
module_names = [os.path.basename(m) for m in module_list]
|
||||||
module_list += [m for m in os.listdir(_ad) if m not in module_names]
|
module_list += [m for m in os.listdir(_ad) if m not in module_names]
|
||||||
|
|
||||||
return module_list
|
return module_list
|
||||||
|
|
||||||
def create_graph(module_list, force=None):
|
def create_graph(module_list, force=None):
|
||||||
if not force:
|
if not force:
|
||||||
force=[]
|
force=[]
|
||||||
graph = Graph()
|
graph = Graph()
|
||||||
packages = []
|
packages = []
|
||||||
|
|
||||||
for module in module_list:
|
for module in module_list:
|
||||||
if module[-4:]=='.zip':
|
if module[-4:]=='.zip':
|
||||||
module = module[:-4]
|
module = module[:-4]
|
||||||
mod_path = get_module_path(module)
|
mod_path = get_module_path(module)
|
||||||
terp_file = get_module_resource(module, '__terp__.py')
|
terp_file = get_module_resource(module, '__terp__.py')
|
||||||
if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path):
|
if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path):
|
||||||
try:
|
try:
|
||||||
info = eval(tools.file_open(terp_file).read())
|
info = eval(tools.file_open(terp_file).read())
|
||||||
except:
|
except:
|
||||||
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:eval file %s' % (module, terp_file))
|
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:eval file %s' % (module, terp_file))
|
||||||
raise
|
raise
|
||||||
if info.get('installable', True):
|
if info.get('installable', True):
|
||||||
packages.append((module, info.get('depends', []), info))
|
packages.append((module, info.get('depends', []), info))
|
||||||
|
|
||||||
current,later = Set([p for p, dep, data in packages]), Set()
|
current,later = Set([p for p, dep, data in packages]), Set()
|
||||||
while packages and current > later:
|
while packages and current > later:
|
||||||
package, deps, datas = packages[0]
|
package, deps, datas = packages[0]
|
||||||
|
|
||||||
# if all dependencies of 'package' are already in the graph, add 'package' in the graph
|
# if all dependencies of 'package' are already in the graph, add 'package' in the graph
|
||||||
if reduce(lambda x,y: x and y in graph, deps, True):
|
if reduce(lambda x,y: x and y in graph, deps, True):
|
||||||
if not package in current:
|
if not package in current:
|
||||||
packages.pop(0)
|
packages.pop(0)
|
||||||
continue
|
continue
|
||||||
later.clear()
|
later.clear()
|
||||||
current.remove(package)
|
current.remove(package)
|
||||||
graph.addNode(package, deps)
|
graph.addNode(package, deps)
|
||||||
node = Node(package, graph)
|
node = Node(package, graph)
|
||||||
node.datas = datas
|
node.datas = datas
|
||||||
for kind in ('init', 'demo', 'update'):
|
for kind in ('init', 'demo', 'update'):
|
||||||
if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
|
if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
|
||||||
setattr(node, kind, True)
|
setattr(node, kind, True)
|
||||||
else:
|
else:
|
||||||
later.add(package)
|
later.add(package)
|
||||||
packages.append((package, deps, datas))
|
packages.append((package, deps, datas))
|
||||||
packages.pop(0)
|
packages.pop(0)
|
||||||
|
|
||||||
for package in later:
|
for package in later:
|
||||||
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:Unmet dependency' % package)
|
logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:Unmet dependency' % package)
|
||||||
|
|
||||||
return graph
|
return graph
|
||||||
|
|
||||||
def init_module_objects(cr, module_name, obj_list):
|
def init_module_objects(cr, module_name, obj_list):
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:creating or updating database tables' % module_name)
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:creating or updating database tables' % module_name)
|
||||||
for obj in obj_list:
|
for obj in obj_list:
|
||||||
if hasattr(obj, 'init'):
|
if hasattr(obj, 'init'):
|
||||||
obj.init(cr)
|
obj.init(cr)
|
||||||
obj._auto_init(cr, {'module': module_name})
|
obj._auto_init(cr, {'module': module_name})
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
def load_module_graph(cr, graph, status=None, **kwargs):
|
def load_module_graph(cr, graph, status=None, **kwargs):
|
||||||
# **kwargs is passed directly to convert_xml_import
|
# **kwargs is passed directly to convert_xml_import
|
||||||
if not status:
|
if not status:
|
||||||
status={}
|
status={}
|
||||||
|
|
||||||
status = status.copy()
|
status = status.copy()
|
||||||
package_todo = []
|
package_todo = []
|
||||||
statusi = 0
|
statusi = 0
|
||||||
for package in graph:
|
for package in graph:
|
||||||
status['progress'] = (float(statusi)+0.1)/len(graph)
|
status['progress'] = (float(statusi)+0.1)/len(graph)
|
||||||
m = package.name
|
m = package.name
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s' % m)
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s' % m)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
modules = pool.instanciate(m, cr)
|
modules = pool.instanciate(m, cr)
|
||||||
cr.execute('select state, demo from ir_module_module where name=%s', (m,))
|
cr.execute('select state, demo from ir_module_module where name=%s', (m,))
|
||||||
(package_state, package_demo) = (cr.rowcount and cr.fetchone()) or ('uninstalled', False)
|
(package_state, package_demo) = (cr.rowcount and cr.fetchone()) or ('uninstalled', False)
|
||||||
idref = {}
|
idref = {}
|
||||||
status['progress'] = (float(statusi)+0.4)/len(graph)
|
status['progress'] = (float(statusi)+0.4)/len(graph)
|
||||||
if hasattr(package, 'init') or hasattr(package, 'update') or package_state in ('to install', 'to upgrade'):
|
if hasattr(package, 'init') or hasattr(package, 'update') or package_state in ('to install', 'to upgrade'):
|
||||||
init_module_objects(cr, m, modules)
|
init_module_objects(cr, m, modules)
|
||||||
for kind in ('init', 'update'):
|
for kind in ('init', 'update'):
|
||||||
for filename in package.datas.get('%s_xml' % kind, []):
|
for filename in package.datas.get('%s_xml' % kind, []):
|
||||||
mode = 'update'
|
mode = 'update'
|
||||||
if hasattr(package, 'init') or package_state=='to install':
|
if hasattr(package, 'init') or package_state=='to install':
|
||||||
mode = 'init'
|
mode = 'init'
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, filename))
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, filename))
|
||||||
name, ext = os.path.splitext(filename)
|
name, ext = os.path.splitext(filename)
|
||||||
if ext == '.csv':
|
if ext == '.csv':
|
||||||
tools.convert_csv_import(cr, m, os.path.basename(filename), tools.file_open(opj(m, filename)).read(), idref, mode=mode)
|
tools.convert_csv_import(cr, m, os.path.basename(filename), tools.file_open(opj(m, filename)).read(), idref, mode=mode)
|
||||||
elif ext == '.sql':
|
elif ext == '.sql':
|
||||||
queries = tools.file_open(opj(m, filename)).read().split(';')
|
queries = tools.file_open(opj(m, filename)).read().split(';')
|
||||||
for query in queries:
|
for query in queries:
|
||||||
new_query = ' '.join(query.split())
|
new_query = ' '.join(query.split())
|
||||||
if new_query:
|
if new_query:
|
||||||
cr.execute(new_query)
|
cr.execute(new_query)
|
||||||
else:
|
else:
|
||||||
tools.convert_xml_import(cr, m, tools.file_open(opj(m, filename)), idref, mode=mode, **kwargs)
|
tools.convert_xml_import(cr, m, tools.file_open(opj(m, filename)), idref, mode=mode, **kwargs)
|
||||||
if hasattr(package, 'demo') or (package_demo and package_state != 'installed'):
|
if hasattr(package, 'demo') or (package_demo and package_state != 'installed'):
|
||||||
status['progress'] = (float(statusi)+0.75)/len(graph)
|
status['progress'] = (float(statusi)+0.75)/len(graph)
|
||||||
for xml in package.datas.get('demo_xml', []):
|
for xml in package.datas.get('demo_xml', []):
|
||||||
name, ext = os.path.splitext(xml)
|
name, ext = os.path.splitext(xml)
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, xml))
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, xml))
|
||||||
if ext == '.csv':
|
if ext == '.csv':
|
||||||
tools.convert_csv_import(cr, m, os.path.basename(xml), tools.file_open(opj(m, xml)).read(), idref, noupdate=True)
|
tools.convert_csv_import(cr, m, os.path.basename(xml), tools.file_open(opj(m, xml)).read(), idref, noupdate=True)
|
||||||
else:
|
else:
|
||||||
tools.convert_xml_import(cr, m, tools.file_open(opj(m, xml)), idref, noupdate=True, **kwargs)
|
tools.convert_xml_import(cr, m, tools.file_open(opj(m, xml)), idref, noupdate=True, **kwargs)
|
||||||
cr.execute('update ir_module_module set demo=%s where name=%s', (True, package.name))
|
cr.execute('update ir_module_module set demo=%s where name=%s', (True, package.name))
|
||||||
package_todo.append(package.name)
|
package_todo.append(package.name)
|
||||||
cr.execute("update ir_module_module set state='installed' where state in ('to upgrade', 'to install') and name=%s", (package.name,))
|
cr.execute("update ir_module_module set state='installed' where state in ('to upgrade', 'to install') and name=%s", (package.name,))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
statusi+=1
|
statusi+=1
|
||||||
|
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
cr.execute('select * from ir_model where state=%s', ('manual',))
|
cr.execute('select * from ir_model where state=%s', ('manual',))
|
||||||
for model in cr.dictfetchall():
|
for model in cr.dictfetchall():
|
||||||
pool.get('ir.model').instanciate(cr, 1, model['model'], {})
|
pool.get('ir.model').instanciate(cr, 1, model['model'], {})
|
||||||
|
|
||||||
pool.get('ir.model.data')._process_end(cr, 1, package_todo)
|
pool.get('ir.model.data')._process_end(cr, 1, package_todo)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
def register_classes():
|
def register_classes():
|
||||||
module_list = get_modules()
|
module_list = get_modules()
|
||||||
for package in create_graph(module_list):
|
for package in create_graph(module_list):
|
||||||
m = package.name
|
m = package.name
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:registering classes' % m)
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:registering classes' % m)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
mod_path = get_module_path(m)
|
mod_path = get_module_path(m)
|
||||||
if not os.path.isfile(mod_path+'.zip'):
|
if not os.path.isfile(mod_path+'.zip'):
|
||||||
# XXX must restrict to only addons paths
|
# XXX must restrict to only addons paths
|
||||||
imp.load_module(m, *imp.find_module(m))
|
imp.load_module(m, *imp.find_module(m))
|
||||||
else:
|
else:
|
||||||
import zipimport
|
import zipimport
|
||||||
try:
|
try:
|
||||||
zimp = zipimport.zipimporter(mod_path+'.zip')
|
zimp = zipimport.zipimporter(mod_path+'.zip')
|
||||||
zimp.load_module(m)
|
zimp.load_module(m)
|
||||||
except zipimport.ZipImportError:
|
except zipimport.ZipImportError:
|
||||||
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Couldn\'t find module %s' % m)
|
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Couldn\'t find module %s' % m)
|
||||||
|
|
||||||
def load_modules(db, force_demo=False, status=None, update_module=False):
|
def load_modules(db, force_demo=False, status=None, update_module=False):
|
||||||
if not status:
|
if not status:
|
||||||
status={}
|
status={}
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
force = []
|
force = []
|
||||||
if force_demo:
|
if force_demo:
|
||||||
force.append('demo')
|
force.append('demo')
|
||||||
if update_module:
|
if update_module:
|
||||||
cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade','to remove')")
|
cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade','to remove')")
|
||||||
else:
|
else:
|
||||||
cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade', 'to remove')")
|
cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade', 'to remove')")
|
||||||
module_list = [name for (name,) in cr.fetchall()]
|
module_list = [name for (name,) in cr.fetchall()]
|
||||||
graph = create_graph(module_list, force)
|
graph = create_graph(module_list, force)
|
||||||
report = tools.assertion_report()
|
report = tools.assertion_report()
|
||||||
load_module_graph(cr, graph, status, report=report)
|
load_module_graph(cr, graph, status, report=report)
|
||||||
if report.get_report():
|
if report.get_report():
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'assert:%s' % report)
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'assert:%s' % report)
|
||||||
|
|
||||||
for kind in ('init', 'demo', 'update'):
|
for kind in ('init', 'demo', 'update'):
|
||||||
tools.config[kind]={}
|
tools.config[kind]={}
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
if update_module:
|
if update_module:
|
||||||
cr.execute("select id,name from ir_module_module where state in ('to remove')")
|
cr.execute("select id,name from ir_module_module where state in ('to remove')")
|
||||||
for mod_id, mod_name in cr.fetchall():
|
for mod_id, mod_name in cr.fetchall():
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
cr.execute('select model,res_id from ir_model_data where not noupdate and module=%s order by id desc', (mod_name,))
|
cr.execute('select model,res_id from ir_model_data where not noupdate and module=%s order by id desc', (mod_name,))
|
||||||
for rmod,rid in cr.fetchall():
|
for rmod,rid in cr.fetchall():
|
||||||
#
|
#
|
||||||
# TO BE Improved:
|
# TO BE Improved:
|
||||||
# I can not use the class_pool has _table could be defined in __init__
|
# I can not use the class_pool has _table could be defined in __init__
|
||||||
# and I can not use the pool has the module could not be loaded in the pool
|
# and I can not use the pool has the module could not be loaded in the pool
|
||||||
#
|
#
|
||||||
uid = 1
|
uid = 1
|
||||||
pool.get(rmod).unlink(cr, uid, [rid])
|
pool.get(rmod).unlink(cr, uid, [rid])
|
||||||
cr.commit()
|
cr.commit()
|
||||||
#
|
#
|
||||||
# TODO: remove menu without actions of childs
|
# TODO: remove menu without actions of childs
|
||||||
#
|
#
|
||||||
cr.execute('''delete from
|
cr.execute('''delete from
|
||||||
ir_ui_menu
|
ir_ui_menu
|
||||||
where
|
where
|
||||||
(id not in (select parent_id from ir_ui_menu where parent_id is not null))
|
(id not in (select parent_id from ir_ui_menu where parent_id is not null))
|
||||||
and
|
and
|
||||||
(id not in (select res_id from ir_values where model='ir.ui.menu'))
|
(id not in (select res_id from ir_values where model='ir.ui.menu'))
|
||||||
and
|
and
|
||||||
(id not in (select res_id from ir_model_data where model='ir.ui.menu'))''')
|
(id not in (select res_id from ir_model_data where model='ir.ui.menu'))''')
|
||||||
|
|
||||||
cr.execute("update ir_module_module set state=%s where state in ('to remove')", ('uninstalled', ))
|
cr.execute("update ir_module_module set state=%s where state in ('to remove')", ('uninstalled', ))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
pooler.restart_pool(cr.dbname)
|
pooler.restart_pool(cr.dbname)
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
||||||
|
|
|
@ -26,44 +26,44 @@
|
||||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||||
###############################################################################
|
###############################################################################
|
||||||
{
|
{
|
||||||
"name" : "Base",
|
"name" : "Base",
|
||||||
"version" : "1.0",
|
"version" : "1.0",
|
||||||
"author" : "Tiny",
|
"author" : "Tiny",
|
||||||
"website" : "http://tinyerp.com",
|
"website" : "http://tinyerp.com",
|
||||||
"category" : "Generic Modules/Base",
|
"category" : "Generic Modules/Base",
|
||||||
"description": "The kernel of Tiny ERP, needed for all installation.",
|
"description": "The kernel of Tiny ERP, needed for all installation.",
|
||||||
"depends" : [],
|
"depends" : [],
|
||||||
"init_xml" : [
|
"init_xml" : [
|
||||||
"base_data.xml",
|
"base_data.xml",
|
||||||
"base_menu.xml",
|
"base_menu.xml",
|
||||||
],
|
],
|
||||||
"demo_xml" : [
|
"demo_xml" : [
|
||||||
"base_demo.xml",
|
"base_demo.xml",
|
||||||
"res/partner/partner_demo.xml",
|
"res/partner/partner_demo.xml",
|
||||||
"res/partner/crm_demo.xml",
|
"res/partner/crm_demo.xml",
|
||||||
],
|
],
|
||||||
"update_xml" : [
|
"update_xml" : [
|
||||||
"base_update.xml",
|
"base_update.xml",
|
||||||
"ir/wizard/wizard_menu_view.xml",
|
"ir/wizard/wizard_menu_view.xml",
|
||||||
"ir/ir.xml",
|
"ir/ir.xml",
|
||||||
"ir/workflow/workflow_view.xml",
|
"ir/workflow/workflow_view.xml",
|
||||||
"module/module_data.xml",
|
"module/module_data.xml",
|
||||||
"module/module_wizard.xml",
|
"module/module_wizard.xml",
|
||||||
"module/module_view.xml",
|
"module/module_view.xml",
|
||||||
"module/module_report.xml",
|
"module/module_report.xml",
|
||||||
"res/res_request_view.xml",
|
"res/res_request_view.xml",
|
||||||
"res/res_lang_view.xml",
|
"res/res_lang_view.xml",
|
||||||
"res/partner/partner_report.xml",
|
"res/partner/partner_report.xml",
|
||||||
"res/partner/partner_view.xml",
|
"res/partner/partner_view.xml",
|
||||||
"res/partner/partner_wizard.xml",
|
"res/partner/partner_wizard.xml",
|
||||||
"res/bank_view.xml",
|
"res/bank_view.xml",
|
||||||
"res/country_view.xml",
|
"res/country_view.xml",
|
||||||
"res/res_currency_view.xml",
|
"res/res_currency_view.xml",
|
||||||
"res/partner/crm_view.xml",
|
"res/partner/crm_view.xml",
|
||||||
"res/partner/partner_data.xml",
|
"res/partner/partner_data.xml",
|
||||||
"res/ir_property_view.xml",
|
"res/ir_property_view.xml",
|
||||||
"base_security.xml",
|
"base_security.xml",
|
||||||
],
|
],
|
||||||
"active": True,
|
"active": True,
|
||||||
"installable": True,
|
"installable": True,
|
||||||
}
|
}
|
||||||
|
|
|
@ -32,263 +32,263 @@ import tools
|
||||||
import time
|
import time
|
||||||
|
|
||||||
class actions(osv.osv):
|
class actions(osv.osv):
|
||||||
_name = 'ir.actions.actions'
|
_name = 'ir.actions.actions'
|
||||||
_table = 'ir_actions'
|
_table = 'ir_actions'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Action Name', required=True, size=64),
|
'name': fields.char('Action Name', required=True, size=64),
|
||||||
'type': fields.char('Action Type', required=True, size=32),
|
'type': fields.char('Action Type', required=True, size=32),
|
||||||
'usage': fields.char('Action Usage', size=32),
|
'usage': fields.char('Action Usage', size=32),
|
||||||
'parent_id': fields.many2one('ir.actions.server', 'Parent Action'),
|
'parent_id': fields.many2one('ir.actions.server', 'Parent Action'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'usage': lambda *a: False,
|
'usage': lambda *a: False,
|
||||||
}
|
}
|
||||||
actions()
|
actions()
|
||||||
|
|
||||||
class report_custom(osv.osv):
|
class report_custom(osv.osv):
|
||||||
_name = 'ir.actions.report.custom'
|
_name = 'ir.actions.report.custom'
|
||||||
_table = 'ir_act_report_custom'
|
_table = 'ir_act_report_custom'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Report Name', size=64, required=True, translate=True),
|
'name': fields.char('Report Name', size=64, required=True, translate=True),
|
||||||
'type': fields.char('Report Type', size=32, required=True),
|
'type': fields.char('Report Type', size=32, required=True),
|
||||||
'model':fields.char('Model', size=64, required=True),
|
'model':fields.char('Model', size=64, required=True),
|
||||||
'report_id': fields.integer('Report Ref.', required=True),
|
'report_id': fields.integer('Report Ref.', required=True),
|
||||||
'usage': fields.char('Action Usage', size=32),
|
'usage': fields.char('Action Usage', size=32),
|
||||||
'multi': fields.boolean('On multiple doc.', help="If set to true, the action will not be displayed on the right toolbar of a form views.")
|
'multi': fields.boolean('On multiple doc.', help="If set to true, the action will not be displayed on the right toolbar of a form views.")
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'multi': lambda *a: False,
|
'multi': lambda *a: False,
|
||||||
'type': lambda *a: 'ir.actions.report.custom',
|
'type': lambda *a: 'ir.actions.report.custom',
|
||||||
}
|
}
|
||||||
report_custom()
|
report_custom()
|
||||||
|
|
||||||
class report_xml(osv.osv):
|
class report_xml(osv.osv):
|
||||||
|
|
||||||
def _report_content(self, cursor, user, ids, name, arg, context=None):
|
def _report_content(self, cursor, user, ids, name, arg, context=None):
|
||||||
res = {}
|
res = {}
|
||||||
for report in self.browse(cursor, user, ids, context=context):
|
for report in self.browse(cursor, user, ids, context=context):
|
||||||
data = report[name + '_data']
|
data = report[name + '_data']
|
||||||
if not data and report[name[:-8]]:
|
if not data and report[name[:-8]]:
|
||||||
try:
|
try:
|
||||||
fp = tools.file_open(report[name[:-8]], mode='rb')
|
fp = tools.file_open(report[name[:-8]], mode='rb')
|
||||||
data = fp.read()
|
data = fp.read()
|
||||||
except:
|
except:
|
||||||
data = False
|
data = False
|
||||||
res[report.id] = data
|
res[report.id] = data
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _report_content_inv(self, cursor, user, id, name, value, arg, context=None):
|
def _report_content_inv(self, cursor, user, id, name, value, arg, context=None):
|
||||||
self.write(cursor, user, id, {name+'_data': value}, context=context)
|
self.write(cursor, user, id, {name+'_data': value}, context=context)
|
||||||
|
|
||||||
def _report_sxw(self, cursor, user, ids, name, arg, context=None):
|
def _report_sxw(self, cursor, user, ids, name, arg, context=None):
|
||||||
res = {}
|
res = {}
|
||||||
for report in self.browse(cursor, user, ids, context=context):
|
for report in self.browse(cursor, user, ids, context=context):
|
||||||
if report.report_rml:
|
if report.report_rml:
|
||||||
res[report.id] = report.report_rml.replace('.rml', '.sxw')
|
res[report.id] = report.report_rml.replace('.rml', '.sxw')
|
||||||
else:
|
else:
|
||||||
res[report.id] = False
|
res[report.id] = False
|
||||||
return res
|
return res
|
||||||
|
|
||||||
_name = 'ir.actions.report.xml'
|
_name = 'ir.actions.report.xml'
|
||||||
_table = 'ir_act_report_xml'
|
_table = 'ir_act_report_xml'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True, translate=True),
|
'name': fields.char('Name', size=64, required=True, translate=True),
|
||||||
'type': fields.char('Report Type', size=32, required=True),
|
'type': fields.char('Report Type', size=32, required=True),
|
||||||
'model': fields.char('Model', size=64, required=True),
|
'model': fields.char('Model', size=64, required=True),
|
||||||
'report_name': fields.char('Internal Name', size=64, required=True),
|
'report_name': fields.char('Internal Name', size=64, required=True),
|
||||||
'report_xsl': fields.char('XSL path', size=256),
|
'report_xsl': fields.char('XSL path', size=256),
|
||||||
'report_xml': fields.char('XML path', size=256),
|
'report_xml': fields.char('XML path', size=256),
|
||||||
'report_rml': fields.char('RML path', size=256,
|
'report_rml': fields.char('RML path', size=256,
|
||||||
help="The .rml path of the file or NULL if the content is in report_rml_content"),
|
help="The .rml path of the file or NULL if the content is in report_rml_content"),
|
||||||
'report_sxw': fields.function(_report_sxw, method=True, type='char',
|
'report_sxw': fields.function(_report_sxw, method=True, type='char',
|
||||||
string='SXW path'),
|
string='SXW path'),
|
||||||
'report_sxw_content_data': fields.binary('SXW content'),
|
'report_sxw_content_data': fields.binary('SXW content'),
|
||||||
'report_rml_content_data': fields.binary('RML content'),
|
'report_rml_content_data': fields.binary('RML content'),
|
||||||
'report_sxw_content': fields.function(_report_content,
|
'report_sxw_content': fields.function(_report_content,
|
||||||
fnct_inv=_report_content_inv, method=True,
|
fnct_inv=_report_content_inv, method=True,
|
||||||
type='binary', string='SXW content',),
|
type='binary', string='SXW content',),
|
||||||
'report_rml_content': fields.function(_report_content,
|
'report_rml_content': fields.function(_report_content,
|
||||||
fnct_inv=_report_content_inv, method=True,
|
fnct_inv=_report_content_inv, method=True,
|
||||||
type='binary', string='RML content'),
|
type='binary', string='RML content'),
|
||||||
'auto': fields.boolean('Automatic XSL:RML', required=True),
|
'auto': fields.boolean('Automatic XSL:RML', required=True),
|
||||||
'usage': fields.char('Action Usage', size=32),
|
'usage': fields.char('Action Usage', size=32),
|
||||||
'header': fields.boolean('Add RML header',
|
'header': fields.boolean('Add RML header',
|
||||||
help="Add or not the coporate RML header"),
|
help="Add or not the coporate RML header"),
|
||||||
'multi': fields.boolean('On multiple doc.',
|
'multi': fields.boolean('On multiple doc.',
|
||||||
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
|
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
|
||||||
'report_type': fields.selection([
|
'report_type': fields.selection([
|
||||||
('pdf', 'pdf'),
|
('pdf', 'pdf'),
|
||||||
('html', 'html'),
|
('html', 'html'),
|
||||||
('raw', 'raw'),
|
('raw', 'raw'),
|
||||||
('sxw', 'sxw'),
|
('sxw', 'sxw'),
|
||||||
], string='Type', required=True),
|
], string='Type', required=True),
|
||||||
'groups_id': fields.many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', 'Groups')
|
'groups_id': fields.many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', 'Groups')
|
||||||
|
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'type': lambda *a: 'ir.actions.report.xml',
|
'type': lambda *a: 'ir.actions.report.xml',
|
||||||
'multi': lambda *a: False,
|
'multi': lambda *a: False,
|
||||||
'auto': lambda *a: True,
|
'auto': lambda *a: True,
|
||||||
'header': lambda *a: True,
|
'header': lambda *a: True,
|
||||||
'report_sxw_content': lambda *a: False,
|
'report_sxw_content': lambda *a: False,
|
||||||
'report_type': lambda *a: 'pdf',
|
'report_type': lambda *a: 'pdf',
|
||||||
}
|
}
|
||||||
|
|
||||||
report_xml()
|
report_xml()
|
||||||
|
|
||||||
class act_window(osv.osv):
|
class act_window(osv.osv):
|
||||||
_name = 'ir.actions.act_window'
|
_name = 'ir.actions.act_window'
|
||||||
_table = 'ir_act_window'
|
_table = 'ir_act_window'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
|
|
||||||
def _views_get_fnc(self, cr, uid, ids, name, arg, context={}):
|
def _views_get_fnc(self, cr, uid, ids, name, arg, context={}):
|
||||||
res={}
|
res={}
|
||||||
for act in self.browse(cr, uid, ids):
|
for act in self.browse(cr, uid, ids):
|
||||||
res[act.id]=[(view.view_id.id, view.view_mode) for view in act.view_ids]
|
res[act.id]=[(view.view_id.id, view.view_mode) for view in act.view_ids]
|
||||||
if (not act.view_ids):
|
if (not act.view_ids):
|
||||||
modes = act.view_mode.split(',')
|
modes = act.view_mode.split(',')
|
||||||
find = False
|
find = False
|
||||||
if act.view_id.id:
|
if act.view_id.id:
|
||||||
res[act.id].append((act.view_id.id, act.view_id.type))
|
res[act.id].append((act.view_id.id, act.view_id.type))
|
||||||
for t in modes:
|
for t in modes:
|
||||||
if act.view_id and (t == act.view_id.type) and not find:
|
if act.view_id and (t == act.view_id.type) and not find:
|
||||||
find = True
|
find = True
|
||||||
continue
|
continue
|
||||||
res[act.id].append((False, t))
|
res[act.id].append((False, t))
|
||||||
|
|
||||||
if 'calendar' not in modes:
|
if 'calendar' not in modes:
|
||||||
mobj = self.pool.get(act.res_model)
|
mobj = self.pool.get(act.res_model)
|
||||||
if mobj._date_name in mobj._columns:
|
if mobj._date_name in mobj._columns:
|
||||||
res[act.id].append((False, 'calendar'))
|
res[act.id].append((False, 'calendar'))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Action Name', size=64, translate=True),
|
'name': fields.char('Action Name', size=64, translate=True),
|
||||||
'type': fields.char('Action Type', size=32, required=True),
|
'type': fields.char('Action Type', size=32, required=True),
|
||||||
'view_id': fields.many2one('ir.ui.view', 'View Ref.', ondelete='cascade'),
|
'view_id': fields.many2one('ir.ui.view', 'View Ref.', ondelete='cascade'),
|
||||||
'domain': fields.char('Domain Value', size=250),
|
'domain': fields.char('Domain Value', size=250),
|
||||||
'context': fields.char('Context Value', size=250),
|
'context': fields.char('Context Value', size=250),
|
||||||
'res_model': fields.char('Model', size=64),
|
'res_model': fields.char('Model', size=64),
|
||||||
'src_model': fields.char('Source model', size=64),
|
'src_model': fields.char('Source model', size=64),
|
||||||
'target': fields.selection([('current','Current Window'),('new','New Window')], 'Target Window'),
|
'target': fields.selection([('current','Current Window'),('new','New Window')], 'Target Window'),
|
||||||
'view_type': fields.selection((('tree','Tree'),('form','Form')),string='Type of view'),
|
'view_type': fields.selection((('tree','Tree'),('form','Form')),string='Type of view'),
|
||||||
'view_mode': fields.char('Mode of view', size=250),
|
'view_mode': fields.char('Mode of view', size=250),
|
||||||
'usage': fields.char('Action Usage', size=32),
|
'usage': fields.char('Action Usage', size=32),
|
||||||
'view_ids': fields.one2many('ir.actions.act_window.view', 'act_window_id', 'Views'),
|
'view_ids': fields.one2many('ir.actions.act_window.view', 'act_window_id', 'Views'),
|
||||||
'views': fields.function(_views_get_fnc, method=True, type='binary', string='Views'),
|
'views': fields.function(_views_get_fnc, method=True, type='binary', string='Views'),
|
||||||
'limit': fields.integer('Limit', help='Default limit for the list view'),
|
'limit': fields.integer('Limit', help='Default limit for the list view'),
|
||||||
'auto_refresh': fields.integer('Auto-Refresh',
|
'auto_refresh': fields.integer('Auto-Refresh',
|
||||||
help='Add an auto-refresh on the view'),
|
help='Add an auto-refresh on the view'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'type': lambda *a: 'ir.actions.act_window',
|
'type': lambda *a: 'ir.actions.act_window',
|
||||||
'view_type': lambda *a: 'form',
|
'view_type': lambda *a: 'form',
|
||||||
'view_mode': lambda *a: 'tree,form',
|
'view_mode': lambda *a: 'tree,form',
|
||||||
'context': lambda *a: '{}',
|
'context': lambda *a: '{}',
|
||||||
'limit': lambda *a: 80,
|
'limit': lambda *a: 80,
|
||||||
'target': lambda *a: 'current',
|
'target': lambda *a: 'current',
|
||||||
'auto_refresh': lambda *a: 0,
|
'auto_refresh': lambda *a: 0,
|
||||||
}
|
}
|
||||||
act_window()
|
act_window()
|
||||||
|
|
||||||
class act_window_view(osv.osv):
|
class act_window_view(osv.osv):
|
||||||
_name = 'ir.actions.act_window.view'
|
_name = 'ir.actions.act_window.view'
|
||||||
_table = 'ir_act_window_view'
|
_table = 'ir_act_window_view'
|
||||||
_rec_name = 'view_id'
|
_rec_name = 'view_id'
|
||||||
_columns = {
|
_columns = {
|
||||||
'sequence': fields.integer('Sequence'),
|
'sequence': fields.integer('Sequence'),
|
||||||
'view_id': fields.many2one('ir.ui.view', 'View'),
|
'view_id': fields.many2one('ir.ui.view', 'View'),
|
||||||
'view_mode': fields.selection((
|
'view_mode': fields.selection((
|
||||||
('tree', 'Tree'),
|
('tree', 'Tree'),
|
||||||
('form', 'Form'),
|
('form', 'Form'),
|
||||||
('graph', 'Graph'),
|
('graph', 'Graph'),
|
||||||
('calendar', 'Calendar')), string='Type of view', required=True),
|
('calendar', 'Calendar')), string='Type of view', required=True),
|
||||||
'act_window_id': fields.many2one('ir.actions.act_window', 'Action', ondelete='cascade'),
|
'act_window_id': fields.many2one('ir.actions.act_window', 'Action', ondelete='cascade'),
|
||||||
'multi': fields.boolean('On multiple doc.',
|
'multi': fields.boolean('On multiple doc.',
|
||||||
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
|
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'multi': lambda *a: False,
|
'multi': lambda *a: False,
|
||||||
}
|
}
|
||||||
_order = 'sequence'
|
_order = 'sequence'
|
||||||
act_window_view()
|
act_window_view()
|
||||||
|
|
||||||
class act_wizard(osv.osv):
|
class act_wizard(osv.osv):
|
||||||
_name = 'ir.actions.wizard'
|
_name = 'ir.actions.wizard'
|
||||||
_table = 'ir_act_wizard'
|
_table = 'ir_act_wizard'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Wizard info', size=64, required=True, translate=True),
|
'name': fields.char('Wizard info', size=64, required=True, translate=True),
|
||||||
'type': fields.char('Action type', size=32, required=True),
|
'type': fields.char('Action type', size=32, required=True),
|
||||||
'wiz_name': fields.char('Wizard name', size=64, required=True),
|
'wiz_name': fields.char('Wizard name', size=64, required=True),
|
||||||
'multi': fields.boolean('Action on multiple doc.', help="If set to true, the wizard will not be displayed on the right toolbar of a form views."),
|
'multi': fields.boolean('Action on multiple doc.', help="If set to true, the wizard will not be displayed on the right toolbar of a form views."),
|
||||||
'groups_id': fields.many2many('res.groups', 'res_groups_wizard_rel', 'uid', 'gid', 'Groups')
|
'groups_id': fields.many2many('res.groups', 'res_groups_wizard_rel', 'uid', 'gid', 'Groups')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'type': lambda *a: 'ir.actions.wizard',
|
'type': lambda *a: 'ir.actions.wizard',
|
||||||
'multi': lambda *a: False,
|
'multi': lambda *a: False,
|
||||||
}
|
}
|
||||||
act_wizard()
|
act_wizard()
|
||||||
|
|
||||||
class act_url(osv.osv):
|
class act_url(osv.osv):
|
||||||
_name = 'ir.actions.url'
|
_name = 'ir.actions.url'
|
||||||
_table = 'ir_act_url'
|
_table = 'ir_act_url'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Action Name', size=64, translate=True),
|
'name': fields.char('Action Name', size=64, translate=True),
|
||||||
'type': fields.char('Action Type', size=32, required=True),
|
'type': fields.char('Action Type', size=32, required=True),
|
||||||
'url': fields.text('Action Url',required=True),
|
'url': fields.text('Action Url',required=True),
|
||||||
'target': fields.selection((
|
'target': fields.selection((
|
||||||
('new', 'New Window'),
|
('new', 'New Window'),
|
||||||
('self', 'This Window')),
|
('self', 'This Window')),
|
||||||
'Action Target', required=True
|
'Action Target', required=True
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'type': lambda *a: 'ir.actions.act_url',
|
'type': lambda *a: 'ir.actions.act_url',
|
||||||
'target': lambda *a: 'new'
|
'target': lambda *a: 'new'
|
||||||
}
|
}
|
||||||
act_url()
|
act_url()
|
||||||
|
|
||||||
#
|
#
|
||||||
# Actions that are run on the server side
|
# Actions that are run on the server side
|
||||||
#
|
#
|
||||||
class actions_server(osv.osv):
|
class actions_server(osv.osv):
|
||||||
_name = 'ir.actions.server'
|
_name = 'ir.actions.server'
|
||||||
_table = 'ir_act_server'
|
_table = 'ir_act_server'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Action Name', required=True, size=64),
|
'name': fields.char('Action Name', required=True, size=64),
|
||||||
'state': fields.selection([
|
'state': fields.selection([
|
||||||
('python','Python Code'),
|
('python','Python Code'),
|
||||||
('dummy','Dummy'),
|
('dummy','Dummy'),
|
||||||
('trigger','Trigger'),
|
('trigger','Trigger'),
|
||||||
('email','Email'),
|
('email','Email'),
|
||||||
('sms','SMS'),
|
('sms','SMS'),
|
||||||
('object_create','Create Object'),
|
('object_create','Create Object'),
|
||||||
('object_write','Write Object'),
|
('object_write','Write Object'),
|
||||||
('client_action','Client Action'),
|
('client_action','Client Action'),
|
||||||
('other','Others Actions'),
|
('other','Others Actions'),
|
||||||
], 'Action State', required=True, size=32),
|
], 'Action State', required=True, size=32),
|
||||||
'code': fields.text('Python Code'),
|
'code': fields.text('Python Code'),
|
||||||
'sequence': fields.integer('Sequence'),
|
'sequence': fields.integer('Sequence'),
|
||||||
'model_id': fields.many2one('ir.model', 'Model', required=True),
|
'model_id': fields.many2one('ir.model', 'Model', required=True),
|
||||||
'trigger_name': fields.char('Trigger Name', size=128),
|
'trigger_name': fields.char('Trigger Name', size=128),
|
||||||
'trigger_object': fields.char('Trigger Object', size=128),
|
'trigger_object': fields.char('Trigger Object', size=128),
|
||||||
'trigger_object_id': fields.char('Trigger Object ID', size=128),
|
'trigger_object_id': fields.char('Trigger Object ID', size=128),
|
||||||
'message': fields.text('Message', translate=True),
|
'message': fields.text('Message', translate=True),
|
||||||
'address': fields.char('Email Address', size=128),
|
'address': fields.char('Email Address', size=128),
|
||||||
'child_ids': fields.one2many('ir.actions.actions', 'parent_id', 'Others Actions'),
|
'child_ids': fields.one2many('ir.actions.actions', 'parent_id', 'Others Actions'),
|
||||||
'usage': fields.char('Action Usage', size=32),
|
'usage': fields.char('Action Usage', size=32),
|
||||||
'type': fields.char('Report Type', size=32, required=True),
|
'type': fields.char('Report Type', size=32, required=True),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'state': lambda *a: 'dummy',
|
'state': lambda *a: 'dummy',
|
||||||
'type': lambda *a: 'ir.actions.server',
|
'type': lambda *a: 'ir.actions.server',
|
||||||
'sequence': lambda *a: 0,
|
'sequence': lambda *a: 0,
|
||||||
'code': lambda *a: """# You can use the following variables
|
'code': lambda *a: """# You can use the following variables
|
||||||
# - object
|
# - object
|
||||||
# - object2
|
# - object2
|
||||||
# - time
|
# - time
|
||||||
|
@ -297,43 +297,43 @@ class actions_server(osv.osv):
|
||||||
# - ids
|
# - ids
|
||||||
# If you plan to return an action, assign: action = {...}
|
# If you plan to return an action, assign: action = {...}
|
||||||
"""
|
"""
|
||||||
}
|
}
|
||||||
#
|
#
|
||||||
# Context should contains:
|
# Context should contains:
|
||||||
# ids : original ids
|
# ids : original ids
|
||||||
# id : current id of the object
|
# id : current id of the object
|
||||||
# OUT:
|
# OUT:
|
||||||
# False : Finnished correctly
|
# False : Finnished correctly
|
||||||
# ACTION_ID : Action to launch
|
# ACTION_ID : Action to launch
|
||||||
def run(self, cr, uid, ids, context={}):
|
def run(self, cr, uid, ids, context={}):
|
||||||
for action in self.browse(cr, uid, ids, context):
|
for action in self.browse(cr, uid, ids, context):
|
||||||
if action.state=='python':
|
if action.state=='python':
|
||||||
localdict = {
|
localdict = {
|
||||||
'self': self.pool.get(action.model_id.model),
|
'self': self.pool.get(action.model_id.model),
|
||||||
'context': context,
|
'context': context,
|
||||||
'time': time,
|
'time': time,
|
||||||
'ids': ids,
|
'ids': ids,
|
||||||
'cr': cr,
|
'cr': cr,
|
||||||
'uid': uid
|
'uid': uid
|
||||||
}
|
}
|
||||||
print action.code
|
print action.code
|
||||||
exec action.code in localdict
|
exec action.code in localdict
|
||||||
print localdict.keys()
|
print localdict.keys()
|
||||||
if 'action' in localdict:
|
if 'action' in localdict:
|
||||||
return localdict['action']
|
return localdict['action']
|
||||||
return False
|
return False
|
||||||
actions_server()
|
actions_server()
|
||||||
|
|
||||||
class act_window_close(osv.osv):
|
class act_window_close(osv.osv):
|
||||||
_name = 'ir.actions.act_window_close'
|
_name = 'ir.actions.act_window_close'
|
||||||
_table = 'ir_actions'
|
_table = 'ir_actions'
|
||||||
_sequence = 'ir_actions_id_seq'
|
_sequence = 'ir_actions_id_seq'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Action Name', size=64, translate=True),
|
'name': fields.char('Action Name', size=64, translate=True),
|
||||||
'type': fields.char('Action Type', size=32, required=True),
|
'type': fields.char('Action Type', size=32, required=True),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'type': lambda *a: 'ir.actions.act_window_close',
|
'type': lambda *a: 'ir.actions.act_window_close',
|
||||||
}
|
}
|
||||||
act_window_close()
|
act_window_close()
|
||||||
|
|
||||||
|
|
|
@ -30,16 +30,16 @@
|
||||||
from osv import fields,osv
|
from osv import fields,osv
|
||||||
|
|
||||||
class ir_attachment(osv.osv):
|
class ir_attachment(osv.osv):
|
||||||
_name = 'ir.attachment'
|
_name = 'ir.attachment'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Attachment Name',size=64, required=True),
|
'name': fields.char('Attachment Name',size=64, required=True),
|
||||||
'datas': fields.binary('Data'),
|
'datas': fields.binary('Data'),
|
||||||
'datas_fname': fields.char('Data Filename',size=64),
|
'datas_fname': fields.char('Data Filename',size=64),
|
||||||
'description': fields.text('Description'),
|
'description': fields.text('Description'),
|
||||||
# Not required due to the document module !
|
# Not required due to the document module !
|
||||||
'res_model': fields.char('Resource Model',size=64, readonly=True),
|
'res_model': fields.char('Resource Model',size=64, readonly=True),
|
||||||
'res_id': fields.integer('Resource ID', readonly=True),
|
'res_id': fields.integer('Resource ID', readonly=True),
|
||||||
'link': fields.char('Link', size=256)
|
'link': fields.char('Link', size=256)
|
||||||
}
|
}
|
||||||
ir_attachment()
|
ir_attachment()
|
||||||
|
|
||||||
|
|
|
@ -30,12 +30,12 @@
|
||||||
from osv import fields,osv
|
from osv import fields,osv
|
||||||
|
|
||||||
class board(osv.osv):
|
class board(osv.osv):
|
||||||
_name = 'ir.board'
|
_name = 'ir.board'
|
||||||
def create(self, cr, user, vals, context={}):
|
def create(self, cr, user, vals, context={}):
|
||||||
return False
|
return False
|
||||||
def copy(self, cr, uid, id, default=None, context={}):
|
def copy(self, cr, uid, id, default=None, context={}):
|
||||||
return False
|
return False
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Board', size=64),
|
'name': fields.char('Board', size=64),
|
||||||
}
|
}
|
||||||
board()
|
board()
|
||||||
|
|
|
@ -28,13 +28,13 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
#
|
#
|
||||||
# SPEC: Execute "model.function(*eval(args))" periodically
|
# SPEC: Execute "model.function(*eval(args))" periodically
|
||||||
# date : date to execute the job or NULL if directly
|
# date : date to execute the job or NULL if directly
|
||||||
# delete_after: delete the ir.cron entry after execution
|
# delete_after: delete the ir.cron entry after execution
|
||||||
# interval_* : period
|
# interval_* : period
|
||||||
# max_repeat : number of execution or NULL if endlessly
|
# max_repeat : number of execution or NULL if endlessly
|
||||||
#
|
#
|
||||||
# TODO:
|
# TODO:
|
||||||
# Error treatment: exception, request, ... -> send request to uid
|
# Error treatment: exception, request, ... -> send request to uid
|
||||||
#
|
#
|
||||||
|
|
||||||
from mx import DateTime
|
from mx import DateTime
|
||||||
|
@ -47,85 +47,85 @@ from osv import fields,osv
|
||||||
next_wait = 60
|
next_wait = 60
|
||||||
|
|
||||||
_intervalTypes = {
|
_intervalTypes = {
|
||||||
'work_days': lambda interval: DateTime.RelativeDateTime(days=interval),
|
'work_days': lambda interval: DateTime.RelativeDateTime(days=interval),
|
||||||
'days': lambda interval: DateTime.RelativeDateTime(days=interval),
|
'days': lambda interval: DateTime.RelativeDateTime(days=interval),
|
||||||
'hours': lambda interval: DateTime.RelativeDateTime(hours=interval),
|
'hours': lambda interval: DateTime.RelativeDateTime(hours=interval),
|
||||||
'weeks': lambda interval: DateTime.RelativeDateTime(days=7*interval),
|
'weeks': lambda interval: DateTime.RelativeDateTime(days=7*interval),
|
||||||
'months': lambda interval: DateTime.RelativeDateTime(months=interval),
|
'months': lambda interval: DateTime.RelativeDateTime(months=interval),
|
||||||
'minutes': lambda interval: DateTime.RelativeDateTime(minutes=interval),
|
'minutes': lambda interval: DateTime.RelativeDateTime(minutes=interval),
|
||||||
}
|
}
|
||||||
|
|
||||||
class ir_cron(osv.osv, netsvc.Agent):
|
class ir_cron(osv.osv, netsvc.Agent):
|
||||||
_name = "ir.cron"
|
_name = "ir.cron"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=60, required=True),
|
'name': fields.char('Name', size=60, required=True),
|
||||||
'user_id': fields.many2one('res.users', 'User', required=True),
|
'user_id': fields.many2one('res.users', 'User', required=True),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'interval_number': fields.integer('Interval Number'),
|
'interval_number': fields.integer('Interval Number'),
|
||||||
'interval_type': fields.selection( [('minutes', 'Minutes'),
|
'interval_type': fields.selection( [('minutes', 'Minutes'),
|
||||||
('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'),
|
('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'),
|
||||||
'numbercall': fields.integer('Number of calls', help='Number of time the function is called,\na negative number indicates that the function will always be called'),
|
'numbercall': fields.integer('Number of calls', help='Number of time the function is called,\na negative number indicates that the function will always be called'),
|
||||||
'doall' : fields.boolean('Repeat missed'),
|
'doall' : fields.boolean('Repeat missed'),
|
||||||
'nextcall' : fields.datetime('Next call date', required=True),
|
'nextcall' : fields.datetime('Next call date', required=True),
|
||||||
'model': fields.char('Model', size=64),
|
'model': fields.char('Model', size=64),
|
||||||
'function': fields.char('Function', size=64),
|
'function': fields.char('Function', size=64),
|
||||||
'args': fields.text('Arguments'),
|
'args': fields.text('Arguments'),
|
||||||
'priority': fields.integer('Priority', help='0=Very Urgent\n10=Not urgent')
|
'priority': fields.integer('Priority', help='0=Very Urgent\n10=Not urgent')
|
||||||
}
|
}
|
||||||
|
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
'priority' : lambda *a: 5,
|
'priority' : lambda *a: 5,
|
||||||
'user_id' : lambda obj,cr,uid,context: uid,
|
'user_id' : lambda obj,cr,uid,context: uid,
|
||||||
'interval_number' : lambda *a: 1,
|
'interval_number' : lambda *a: 1,
|
||||||
'interval_type' : lambda *a: 'months',
|
'interval_type' : lambda *a: 'months',
|
||||||
'numbercall' : lambda *a: 1,
|
'numbercall' : lambda *a: 1,
|
||||||
'active' : lambda *a: 1,
|
'active' : lambda *a: 1,
|
||||||
'doall' : lambda *a: 1
|
'doall' : lambda *a: 1
|
||||||
}
|
}
|
||||||
|
|
||||||
def _callback(self, cr, uid, model, func, args):
|
def _callback(self, cr, uid, model, func, args):
|
||||||
args = (args or []) and eval(args)
|
args = (args or []) and eval(args)
|
||||||
m=self.pool.get(model)
|
m=self.pool.get(model)
|
||||||
if m and hasattr(m, func):
|
if m and hasattr(m, func):
|
||||||
f = getattr(m, func)
|
f = getattr(m, func)
|
||||||
f(cr, uid, *args)
|
f(cr, uid, *args)
|
||||||
|
|
||||||
def _poolJobs(self, db_name, check=False):
|
def _poolJobs(self, db_name, check=False):
|
||||||
now = DateTime.now()
|
now = DateTime.now()
|
||||||
#FIXME: multidb. Solution: a l'instanciation d'une nouvelle connection bd (ds pooler) fo que j'instancie
|
#FIXME: multidb. Solution: a l'instanciation d'une nouvelle connection bd (ds pooler) fo que j'instancie
|
||||||
# un nouveau pooljob avec comme parametre la bd
|
# un nouveau pooljob avec comme parametre la bd
|
||||||
try:
|
try:
|
||||||
cr = pooler.get_db(db_name).cursor()
|
cr = pooler.get_db(db_name).cursor()
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
|
cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
|
||||||
for job in cr.dictfetchall():
|
for job in cr.dictfetchall():
|
||||||
nextcall = DateTime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
|
nextcall = DateTime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
|
||||||
numbercall = job['numbercall']
|
numbercall = job['numbercall']
|
||||||
|
|
||||||
ok = False
|
ok = False
|
||||||
while nextcall<now and numbercall:
|
while nextcall<now and numbercall:
|
||||||
if numbercall > 0:
|
if numbercall > 0:
|
||||||
numbercall -= 1
|
numbercall -= 1
|
||||||
if not ok or job['doall']:
|
if not ok or job['doall']:
|
||||||
self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
|
self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
|
||||||
if numbercall:
|
if numbercall:
|
||||||
nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
|
nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
|
||||||
ok = True
|
ok = True
|
||||||
addsql=''
|
addsql=''
|
||||||
if not numbercall:
|
if not numbercall:
|
||||||
addsql = ', active=False'
|
addsql = ', active=False'
|
||||||
cr.execute("update ir_cron set nextcall=%s, numbercall=%d"+addsql+" where id=%d", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
|
cr.execute("update ir_cron set nextcall=%s, numbercall=%d"+addsql+" where id=%d", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
#
|
#
|
||||||
# Can be improved to do at the min(min(nextcalls), time()+next_wait)
|
# Can be improved to do at the min(min(nextcalls), time()+next_wait)
|
||||||
# But is this an improvement ?
|
# But is this an improvement ?
|
||||||
#
|
#
|
||||||
if not check:
|
if not check:
|
||||||
self.setAlarm(self._poolJobs, int(time.time())+next_wait, [db_name])
|
self.setAlarm(self._poolJobs, int(time.time())+next_wait, [db_name])
|
||||||
ir_cron()
|
ir_cron()
|
||||||
|
|
|
@ -30,26 +30,26 @@
|
||||||
from osv import fields,osv
|
from osv import fields,osv
|
||||||
|
|
||||||
class ir_default(osv.osv):
|
class ir_default(osv.osv):
|
||||||
_name = 'ir.default'
|
_name = 'ir.default'
|
||||||
_columns = {
|
_columns = {
|
||||||
'field_tbl': fields.char('Model',size=64),
|
'field_tbl': fields.char('Model',size=64),
|
||||||
'field_name': fields.char('Model field',size=64),
|
'field_name': fields.char('Model field',size=64),
|
||||||
'value': fields.char('Default Value',size=64),
|
'value': fields.char('Default Value',size=64),
|
||||||
'uid': fields.many2one('res.users', 'Users'),
|
'uid': fields.many2one('res.users', 'Users'),
|
||||||
'page': fields.char('View',size=64),
|
'page': fields.char('View',size=64),
|
||||||
'ref_table': fields.char('Table Ref.',size=64),
|
'ref_table': fields.char('Table Ref.',size=64),
|
||||||
'ref_id': fields.integer('ID Ref.',size=64),
|
'ref_id': fields.integer('ID Ref.',size=64),
|
||||||
'company_id': fields.many2one('res.company','Company')
|
'company_id': fields.many2one('res.company','Company')
|
||||||
}
|
}
|
||||||
|
|
||||||
def _get_company_id(self, cr, uid, context={}):
|
def _get_company_id(self, cr, uid, context={}):
|
||||||
res = self.pool.get('res.users').read(cr, uid, [uid], ['company_id'], context=context)
|
res = self.pool.get('res.users').read(cr, uid, [uid], ['company_id'], context=context)
|
||||||
if res and res[0]['company_id']:
|
if res and res[0]['company_id']:
|
||||||
return res[0]['company_id'][0]
|
return res[0]['company_id'][0]
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'company_id': _get_company_id,
|
'company_id': _get_company_id,
|
||||||
}
|
}
|
||||||
ir_default()
|
ir_default()
|
||||||
|
|
||||||
|
|
|
@ -31,20 +31,20 @@ from osv import fields,osv
|
||||||
|
|
||||||
|
|
||||||
class ir_exports(osv.osv):
|
class ir_exports(osv.osv):
|
||||||
_name = "ir.exports"
|
_name = "ir.exports"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Export name', size=128),
|
'name': fields.char('Export name', size=128),
|
||||||
'resource': fields.char('Resource', size=128),
|
'resource': fields.char('Resource', size=128),
|
||||||
'export_fields': fields.one2many('ir.exports.line', 'export_id',
|
'export_fields': fields.one2many('ir.exports.line', 'export_id',
|
||||||
'Export Id'),
|
'Export Id'),
|
||||||
}
|
}
|
||||||
ir_exports()
|
ir_exports()
|
||||||
|
|
||||||
|
|
||||||
class ir_exports_line(osv.osv):
|
class ir_exports_line(osv.osv):
|
||||||
_name = 'ir.exports.line'
|
_name = 'ir.exports.line'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Field name', size=64),
|
'name': fields.char('Field name', size=64),
|
||||||
'export_id': fields.many2one('ir.exports', 'Exportation', select=True, ondelete='cascade'),
|
'export_id': fields.many2one('ir.exports', 'Exportation', select=True, ondelete='cascade'),
|
||||||
}
|
}
|
||||||
ir_exports_line()
|
ir_exports_line()
|
||||||
|
|
|
@ -37,395 +37,395 @@ import tools
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
def _get_fields_type(self, cr, uid, context=None):
|
def _get_fields_type(self, cr, uid, context=None):
|
||||||
cr.execute('select distinct ttype,ttype from ir_model_fields')
|
cr.execute('select distinct ttype,ttype from ir_model_fields')
|
||||||
return cr.fetchall()
|
return cr.fetchall()
|
||||||
|
|
||||||
class ir_model(osv.osv):
|
class ir_model(osv.osv):
|
||||||
_name = 'ir.model'
|
_name = 'ir.model'
|
||||||
_description = "Objects"
|
_description = "Objects"
|
||||||
_rec_name = 'name'
|
_rec_name = 'name'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Model Name', size=64, translate=True, required=True),
|
'name': fields.char('Model Name', size=64, translate=True, required=True),
|
||||||
'model': fields.char('Object Name', size=64, required=True, search=1),
|
'model': fields.char('Object Name', size=64, required=True, search=1),
|
||||||
'info': fields.text('Information'),
|
'info': fields.text('Information'),
|
||||||
'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True),
|
'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True),
|
||||||
'state': fields.selection([('manual','Custom Object'),('base','Base Field')],'Manualy Created',readonly=1),
|
'state': fields.selection([('manual','Custom Object'),('base','Base Field')],'Manualy Created',readonly=1),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'model': lambda *a: 'x_',
|
'model': lambda *a: 'x_',
|
||||||
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
|
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
|
||||||
}
|
}
|
||||||
|
|
||||||
def _check_model_name(self, cr, uid, ids):
|
def _check_model_name(self, cr, uid, ids):
|
||||||
for model in self.browse(cr, uid, ids):
|
for model in self.browse(cr, uid, ids):
|
||||||
if model.state=='manual':
|
if model.state=='manual':
|
||||||
if not model.model.startswith('x_'):
|
if not model.model.startswith('x_'):
|
||||||
return False
|
return False
|
||||||
if not re.match('^[a-z_A-Z0-9]+$',model.model):
|
if not re.match('^[a-z_A-Z0-9]+$',model.model):
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
_constraints = [
|
_constraints = [
|
||||||
(_check_model_name, 'The model name must start with x_ and not contain any special character !', ['model']),
|
(_check_model_name, 'The model name must start with x_ and not contain any special character !', ['model']),
|
||||||
]
|
]
|
||||||
def unlink(self, cr, user, ids, context=None):
|
def unlink(self, cr, user, ids, context=None):
|
||||||
for model in self.browse(cr, user, ids, context):
|
for model in self.browse(cr, user, ids, context):
|
||||||
if model.state <> 'manual':
|
if model.state <> 'manual':
|
||||||
raise except_orm(_('Error'), _("You can not remove the model '%s' !") %(field.name,))
|
raise except_orm(_('Error'), _("You can not remove the model '%s' !") %(field.name,))
|
||||||
res = super(ir_model, self).unlink(cr, user, ids, context)
|
res = super(ir_model, self).unlink(cr, user, ids, context)
|
||||||
pooler.restart_pool(cr.dbname)
|
pooler.restart_pool(cr.dbname)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def create(self, cr, user, vals, context=None):
|
def create(self, cr, user, vals, context=None):
|
||||||
if context and context.get('manual',False):
|
if context and context.get('manual',False):
|
||||||
vals['state']='manual'
|
vals['state']='manual'
|
||||||
res = super(ir_model,self).create(cr, user, vals, context)
|
res = super(ir_model,self).create(cr, user, vals, context)
|
||||||
if vals.get('state','base')=='manual':
|
if vals.get('state','base')=='manual':
|
||||||
pooler.restart_pool(cr.dbname)
|
pooler.restart_pool(cr.dbname)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def instanciate(self, cr, user, model, context={}):
|
def instanciate(self, cr, user, model, context={}):
|
||||||
class x_custom_model(osv.osv):
|
class x_custom_model(osv.osv):
|
||||||
pass
|
pass
|
||||||
x_custom_model._name = model
|
x_custom_model._name = model
|
||||||
x_custom_model._module = False
|
x_custom_model._module = False
|
||||||
x_custom_model.createInstance(self.pool, '', cr)
|
x_custom_model.createInstance(self.pool, '', cr)
|
||||||
if 'x_name' in x_custom_model._columns:
|
if 'x_name' in x_custom_model._columns:
|
||||||
x_custom_model._rec_name = 'x_name'
|
x_custom_model._rec_name = 'x_name'
|
||||||
else:
|
else:
|
||||||
x_custom_model._rec_name = x_custom_model._columns.keys()[0]
|
x_custom_model._rec_name = x_custom_model._columns.keys()[0]
|
||||||
ir_model()
|
ir_model()
|
||||||
|
|
||||||
class ir_model_fields(osv.osv):
|
class ir_model_fields(osv.osv):
|
||||||
_name = 'ir.model.fields'
|
_name = 'ir.model.fields'
|
||||||
_description = "Fields"
|
_description = "Fields"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', required=True, size=64, select=1),
|
'name': fields.char('Name', required=True, size=64, select=1),
|
||||||
'model': fields.char('Object Name', size=64, required=True),
|
'model': fields.char('Object Name', size=64, required=True),
|
||||||
'relation': fields.char('Model Relation', size=64),
|
'relation': fields.char('Model Relation', size=64),
|
||||||
'model_id': fields.many2one('ir.model', 'Model id', required=True, select=True, ondelete='cascade'),
|
'model_id': fields.many2one('ir.model', 'Model id', required=True, select=True, ondelete='cascade'),
|
||||||
'field_description': fields.char('Field Label', required=True, size=256),
|
'field_description': fields.char('Field Label', required=True, size=256),
|
||||||
'relate': fields.boolean('Click and Relate'),
|
'relate': fields.boolean('Click and Relate'),
|
||||||
|
|
||||||
'ttype': fields.selection(_get_fields_type, 'Field Type',size=64, required=True),
|
'ttype': fields.selection(_get_fields_type, 'Field Type',size=64, required=True),
|
||||||
'selection': fields.char('Field Selection',size=128),
|
'selection': fields.char('Field Selection',size=128),
|
||||||
'required': fields.boolean('Required'),
|
'required': fields.boolean('Required'),
|
||||||
'readonly': fields.boolean('Readonly'),
|
'readonly': fields.boolean('Readonly'),
|
||||||
'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search')],'Searchable', required=True),
|
'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search')],'Searchable', required=True),
|
||||||
'translate': fields.boolean('Translate'),
|
'translate': fields.boolean('Translate'),
|
||||||
'size': fields.integer('Size'),
|
'size': fields.integer('Size'),
|
||||||
'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Manualy Created'),
|
'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Manualy Created'),
|
||||||
'on_delete': fields.selection([('cascade','Cascade'),('set null','Set NULL')], 'On delete', help='On delete property for many2one fields'),
|
'on_delete': fields.selection([('cascade','Cascade'),('set null','Set NULL')], 'On delete', help='On delete property for many2one fields'),
|
||||||
'domain': fields.char('Domain', size=256),
|
'domain': fields.char('Domain', size=256),
|
||||||
|
|
||||||
'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
|
'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
|
||||||
'group_name': fields.char('Group Name', size=128),
|
'group_name': fields.char('Group Name', size=128),
|
||||||
'view_load': fields.boolean('View Auto-Load'),
|
'view_load': fields.boolean('View Auto-Load'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'relate': lambda *a: 0,
|
'relate': lambda *a: 0,
|
||||||
'view_load': lambda *a: 0,
|
'view_load': lambda *a: 0,
|
||||||
'selection': lambda *a: "[]",
|
'selection': lambda *a: "[]",
|
||||||
'domain': lambda *a: "[]",
|
'domain': lambda *a: "[]",
|
||||||
'name': lambda *a: 'x_',
|
'name': lambda *a: 'x_',
|
||||||
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
|
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
|
||||||
'on_delete': lambda *a: 'set null',
|
'on_delete': lambda *a: 'set null',
|
||||||
'select_level': lambda *a: '0',
|
'select_level': lambda *a: '0',
|
||||||
'size': lambda *a: 64,
|
'size': lambda *a: 64,
|
||||||
'field_description': lambda *a: '',
|
'field_description': lambda *a: '',
|
||||||
}
|
}
|
||||||
_order = "id"
|
_order = "id"
|
||||||
def unlink(self, cr, user, ids, context=None):
|
def unlink(self, cr, user, ids, context=None):
|
||||||
for field in self.browse(cr, user, ids, context):
|
for field in self.browse(cr, user, ids, context):
|
||||||
if field.state <> 'manual':
|
if field.state <> 'manual':
|
||||||
raise except_orm(_('Error'), _("You can not remove the field '%s' !") %(field.name,))
|
raise except_orm(_('Error'), _("You can not remove the field '%s' !") %(field.name,))
|
||||||
#
|
#
|
||||||
# MAY BE ADD A ALTER TABLE DROP ?
|
# MAY BE ADD A ALTER TABLE DROP ?
|
||||||
#
|
#
|
||||||
return super(ir_model_fields, self).unlink(cr, user, ids, context)
|
return super(ir_model_fields, self).unlink(cr, user, ids, context)
|
||||||
|
|
||||||
def create(self, cr, user, vals, context=None):
|
def create(self, cr, user, vals, context=None):
|
||||||
if 'model_id' in vals:
|
if 'model_id' in vals:
|
||||||
model_data=self.pool.get('ir.model').read(cr,user,vals['model_id'])
|
model_data=self.pool.get('ir.model').read(cr,user,vals['model_id'])
|
||||||
vals['model']=model_data['model']
|
vals['model']=model_data['model']
|
||||||
if context and context.get('manual',False):
|
if context and context.get('manual',False):
|
||||||
vals['state']='manual'
|
vals['state']='manual'
|
||||||
res = super(ir_model_fields,self).create(cr, user, vals, context)
|
res = super(ir_model_fields,self).create(cr, user, vals, context)
|
||||||
if vals.get('state','base')=='manual':
|
if vals.get('state','base')=='manual':
|
||||||
if not vals['name'].startswith('x_'):
|
if not vals['name'].startswith('x_'):
|
||||||
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
|
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
|
||||||
if self.pool.get(vals['model']):
|
if self.pool.get(vals['model']):
|
||||||
self.pool.get(vals['model']).__init__(self.pool, cr)
|
self.pool.get(vals['model']).__init__(self.pool, cr)
|
||||||
self.pool.get(vals['model'])._auto_init(cr,{})
|
self.pool.get(vals['model'])._auto_init(cr,{})
|
||||||
return res
|
return res
|
||||||
ir_model_fields()
|
ir_model_fields()
|
||||||
|
|
||||||
class ir_model_access(osv.osv):
|
class ir_model_access(osv.osv):
|
||||||
_name = 'ir.model.access'
|
_name = 'ir.model.access'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'model_id': fields.many2one('ir.model', 'Model', required=True),
|
'model_id': fields.many2one('ir.model', 'Model', required=True),
|
||||||
'group_id': fields.many2one('res.groups', 'Group'),
|
'group_id': fields.many2one('res.groups', 'Group'),
|
||||||
'perm_read': fields.boolean('Read Access'),
|
'perm_read': fields.boolean('Read Access'),
|
||||||
'perm_write': fields.boolean('Write Access'),
|
'perm_write': fields.boolean('Write Access'),
|
||||||
'perm_create': fields.boolean('Create Access'),
|
'perm_create': fields.boolean('Create Access'),
|
||||||
'perm_unlink': fields.boolean('Delete Permission'),
|
'perm_unlink': fields.boolean('Delete Permission'),
|
||||||
}
|
}
|
||||||
|
|
||||||
def check_groups(self, cr, uid, group):
|
def check_groups(self, cr, uid, group):
|
||||||
res = False
|
res = False
|
||||||
grouparr = group.split('.')
|
grouparr = group.split('.')
|
||||||
if grouparr:
|
if grouparr:
|
||||||
cr.execute("select * from res_groups_users_rel where uid=" + str(uid) + " and gid in(select res_id from ir_model_data where module='%s' and name='%s')" % (grouparr[0], grouparr[1]))
|
cr.execute("select * from res_groups_users_rel where uid=" + str(uid) + " and gid in(select res_id from ir_model_data where module='%s' and name='%s')" % (grouparr[0], grouparr[1]))
|
||||||
r = cr.fetchall()
|
r = cr.fetchall()
|
||||||
if not r:
|
if not r:
|
||||||
res = False
|
res = False
|
||||||
else:
|
else:
|
||||||
res = True
|
res = True
|
||||||
else:
|
else:
|
||||||
res = False
|
res = False
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def check(self, cr, uid, model_name, mode='read',raise_exception=True):
|
def check(self, cr, uid, model_name, mode='read',raise_exception=True):
|
||||||
assert mode in ['read','write','create','unlink'], 'Invalid access mode for security'
|
assert mode in ['read','write','create','unlink'], 'Invalid access mode for security'
|
||||||
if uid == 1:
|
if uid == 1:
|
||||||
return True # TODO: check security: don't allow xml-rpc request with uid == 1
|
return True # TODO: check security: don't allow xml-rpc request with uid == 1
|
||||||
|
|
||||||
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
|
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
|
||||||
'FROM ir_model_access a '
|
'FROM ir_model_access a '
|
||||||
'JOIN ir_model m '
|
'JOIN ir_model m '
|
||||||
'ON (a.model_id=m.id) '
|
'ON (a.model_id=m.id) '
|
||||||
'JOIN res_groups_users_rel gu '
|
'JOIN res_groups_users_rel gu '
|
||||||
'ON (gu.gid = a.group_id) '
|
'ON (gu.gid = a.group_id) '
|
||||||
'WHERE m.model = %s AND gu.uid = %s', (model_name, uid,))
|
'WHERE m.model = %s AND gu.uid = %s', (model_name, uid,))
|
||||||
r = cr.fetchall()
|
r = cr.fetchall()
|
||||||
if r[0][0] == None:
|
if r[0][0] == None:
|
||||||
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
|
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
|
||||||
'FROM ir_model_access a '
|
'FROM ir_model_access a '
|
||||||
'JOIN ir_model m '
|
'JOIN ir_model m '
|
||||||
'ON (a.model_id = m.id) '
|
'ON (a.model_id = m.id) '
|
||||||
'WHERE a.group_id IS NULL AND m.model = %s', (model_name,))
|
'WHERE a.group_id IS NULL AND m.model = %s', (model_name,))
|
||||||
r= cr.fetchall()
|
r= cr.fetchall()
|
||||||
if r[0][0] == None:
|
if r[0][0] == None:
|
||||||
return False # by default, the user had no access
|
return False # by default, the user had no access
|
||||||
|
|
||||||
if not r[0][0]:
|
if not r[0][0]:
|
||||||
if raise_exception:
|
if raise_exception:
|
||||||
msgs = {
|
msgs = {
|
||||||
'read': _('You can not read this document! (%s)'),
|
'read': _('You can not read this document! (%s)'),
|
||||||
'write': _('You can not write in this document! (%s)'),
|
'write': _('You can not write in this document! (%s)'),
|
||||||
'create': _('You can not create this kind of document! (%s)'),
|
'create': _('You can not create this kind of document! (%s)'),
|
||||||
'unlink': _('You can not delete this document! (%s)'),
|
'unlink': _('You can not delete this document! (%s)'),
|
||||||
}
|
}
|
||||||
# due to the assert at the begin of the function, we will never have a KeyError
|
# due to the assert at the begin of the function, we will never have a KeyError
|
||||||
raise except_orm(_('AccessError'), msgs[mode] % model_name )
|
raise except_orm(_('AccessError'), msgs[mode] % model_name )
|
||||||
return r[0][0]
|
return r[0][0]
|
||||||
|
|
||||||
check = tools.cache()(check)
|
check = tools.cache()(check)
|
||||||
|
|
||||||
#
|
#
|
||||||
# Methods to clean the cache on the Check Method.
|
# Methods to clean the cache on the Check Method.
|
||||||
#
|
#
|
||||||
def write(self, cr, uid, *args, **argv):
|
def write(self, cr, uid, *args, **argv):
|
||||||
res = super(ir_model_access, self).write(cr, uid, *args, **argv)
|
res = super(ir_model_access, self).write(cr, uid, *args, **argv)
|
||||||
self.check()
|
self.check()
|
||||||
return res
|
return res
|
||||||
def create(self, cr, uid, *args, **argv):
|
def create(self, cr, uid, *args, **argv):
|
||||||
res = super(ir_model_access, self).create(cr, uid, *args, **argv)
|
res = super(ir_model_access, self).create(cr, uid, *args, **argv)
|
||||||
self.check()
|
self.check()
|
||||||
return res
|
return res
|
||||||
def unlink(self, cr, uid, *args, **argv):
|
def unlink(self, cr, uid, *args, **argv):
|
||||||
res = super(ir_model_access, self).unlink(cr, uid, *args, **argv)
|
res = super(ir_model_access, self).unlink(cr, uid, *args, **argv)
|
||||||
self.check()
|
self.check()
|
||||||
return res
|
return res
|
||||||
ir_model_access()
|
ir_model_access()
|
||||||
|
|
||||||
class ir_model_data(osv.osv):
|
class ir_model_data(osv.osv):
|
||||||
_name = 'ir.model.data'
|
_name = 'ir.model.data'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('XML Identifier', required=True, size=64),
|
'name': fields.char('XML Identifier', required=True, size=64),
|
||||||
'model': fields.char('Model', required=True, size=64),
|
'model': fields.char('Model', required=True, size=64),
|
||||||
'module': fields.char('Module', required=True, size=64),
|
'module': fields.char('Module', required=True, size=64),
|
||||||
'res_id': fields.integer('Resource ID'),
|
'res_id': fields.integer('Resource ID'),
|
||||||
'noupdate': fields.boolean('Non Updatable'),
|
'noupdate': fields.boolean('Non Updatable'),
|
||||||
'date_update': fields.datetime('Update Date'),
|
'date_update': fields.datetime('Update Date'),
|
||||||
'date_init': fields.datetime('Init Date')
|
'date_init': fields.datetime('Init Date')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
'noupdate': lambda *a: False
|
'noupdate': lambda *a: False
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, pool, cr):
|
def __init__(self, pool, cr):
|
||||||
osv.osv.__init__(self, pool, cr)
|
osv.osv.__init__(self, pool, cr)
|
||||||
self.loads = {}
|
self.loads = {}
|
||||||
self.doinit = True
|
self.doinit = True
|
||||||
self.unlink_mark = {}
|
self.unlink_mark = {}
|
||||||
|
|
||||||
def _get_id(self,cr, uid, module, xml_id):
|
def _get_id(self,cr, uid, module, xml_id):
|
||||||
ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
|
ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
|
||||||
assert len(ids)==1, '%d reference(s) to %s. You should have only one !' % (len(ids),xml_id)
|
assert len(ids)==1, '%d reference(s) to %s. You should have only one !' % (len(ids),xml_id)
|
||||||
return ids[0]
|
return ids[0]
|
||||||
_get_id = tools.cache()(_get_id)
|
_get_id = tools.cache()(_get_id)
|
||||||
|
|
||||||
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
|
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
|
||||||
if not xml_id:
|
if not xml_id:
|
||||||
return False
|
return False
|
||||||
try:
|
try:
|
||||||
id = self.read(cr, uid, [self._get_id(cr, uid, module, xml_id)], ['res_id'])[0]['res_id']
|
id = self.read(cr, uid, [self._get_id(cr, uid, module, xml_id)], ['res_id'])[0]['res_id']
|
||||||
self.loads[(module,xml_id)] = (model,id)
|
self.loads[(module,xml_id)] = (model,id)
|
||||||
except:
|
except:
|
||||||
id = False
|
id = False
|
||||||
return id
|
return id
|
||||||
|
|
||||||
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False):
|
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False):
|
||||||
warning = True
|
warning = True
|
||||||
model_obj = self.pool.get(model)
|
model_obj = self.pool.get(model)
|
||||||
context = {}
|
context = {}
|
||||||
if xml_id and ('.' in xml_id):
|
if xml_id and ('.' in xml_id):
|
||||||
assert len(xml_id.split('.'))==2, _('"%s" contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id') % (xml_id)
|
assert len(xml_id.split('.'))==2, _('"%s" contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id') % (xml_id)
|
||||||
warning = False
|
warning = False
|
||||||
module, xml_id = xml_id.split('.')
|
module, xml_id = xml_id.split('.')
|
||||||
if (not xml_id) and (not self.doinit):
|
if (not xml_id) and (not self.doinit):
|
||||||
return False
|
return False
|
||||||
action_id = False
|
action_id = False
|
||||||
if xml_id:
|
if xml_id:
|
||||||
cr.execute('select id,res_id from ir_model_data where module=%s and name=%s', (module,xml_id))
|
cr.execute('select id,res_id from ir_model_data where module=%s and name=%s', (module,xml_id))
|
||||||
results = cr.fetchall()
|
results = cr.fetchall()
|
||||||
for action_id2,res_id2 in results:
|
for action_id2,res_id2 in results:
|
||||||
cr.execute('select id from '+self.pool.get(model)._table+' where id=%d', (res_id2,))
|
cr.execute('select id from '+self.pool.get(model)._table+' where id=%d', (res_id2,))
|
||||||
result3 = cr.fetchone()
|
result3 = cr.fetchone()
|
||||||
if not result3:
|
if not result3:
|
||||||
cr.execute('delete from ir_model_data where id=%d', (action_id2,))
|
cr.execute('delete from ir_model_data where id=%d', (action_id2,))
|
||||||
else:
|
else:
|
||||||
res_id,action_id = res_id2,action_id2
|
res_id,action_id = res_id2,action_id2
|
||||||
|
|
||||||
if action_id and res_id:
|
if action_id and res_id:
|
||||||
model_obj.write(cr, uid, [res_id], values)
|
model_obj.write(cr, uid, [res_id], values)
|
||||||
self.write(cr, uid, [action_id], {
|
self.write(cr, uid, [action_id], {
|
||||||
'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
|
'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
})
|
})
|
||||||
elif res_id:
|
elif res_id:
|
||||||
model_obj.write(cr, uid, [res_id], values)
|
model_obj.write(cr, uid, [res_id], values)
|
||||||
if xml_id:
|
if xml_id:
|
||||||
self.create(cr, uid, {
|
self.create(cr, uid, {
|
||||||
'name': xml_id,
|
'name': xml_id,
|
||||||
'model': model,
|
'model': model,
|
||||||
'module':module,
|
'module':module,
|
||||||
'res_id':res_id,
|
'res_id':res_id,
|
||||||
'noupdate': noupdate,
|
'noupdate': noupdate,
|
||||||
})
|
})
|
||||||
if model_obj._inherits:
|
if model_obj._inherits:
|
||||||
for table in model_obj._inherits:
|
for table in model_obj._inherits:
|
||||||
inherit_id = model_obj.browse(cr, uid,
|
inherit_id = model_obj.browse(cr, uid,
|
||||||
res_id)[model_obj._inherits[table]]
|
res_id)[model_obj._inherits[table]]
|
||||||
self.create(cr, uid, {
|
self.create(cr, uid, {
|
||||||
'name': xml_id + '_' + table.replace('.', '_'),
|
'name': xml_id + '_' + table.replace('.', '_'),
|
||||||
'model': table,
|
'model': table,
|
||||||
'module': module,
|
'module': module,
|
||||||
'res_id': inherit_id,
|
'res_id': inherit_id,
|
||||||
'noupdate': noupdate,
|
'noupdate': noupdate,
|
||||||
})
|
})
|
||||||
else:
|
else:
|
||||||
if mode=='init' or (mode=='update' and xml_id):
|
if mode=='init' or (mode=='update' and xml_id):
|
||||||
res_id = model_obj.create(cr, uid, values)
|
res_id = model_obj.create(cr, uid, values)
|
||||||
if xml_id:
|
if xml_id:
|
||||||
self.create(cr, uid, {
|
self.create(cr, uid, {
|
||||||
'name': xml_id,
|
'name': xml_id,
|
||||||
'model': model,
|
'model': model,
|
||||||
'module': module,
|
'module': module,
|
||||||
'res_id': res_id,
|
'res_id': res_id,
|
||||||
'noupdate': noupdate
|
'noupdate': noupdate
|
||||||
})
|
})
|
||||||
if model_obj._inherits:
|
if model_obj._inherits:
|
||||||
for table in model_obj._inherits:
|
for table in model_obj._inherits:
|
||||||
inherit_id = model_obj.browse(cr, uid,
|
inherit_id = model_obj.browse(cr, uid,
|
||||||
res_id)[model_obj._inherits[table]]
|
res_id)[model_obj._inherits[table]]
|
||||||
self.create(cr, uid, {
|
self.create(cr, uid, {
|
||||||
'name': xml_id + '_' + table.replace('.', '_'),
|
'name': xml_id + '_' + table.replace('.', '_'),
|
||||||
'model': table,
|
'model': table,
|
||||||
'module': module,
|
'module': module,
|
||||||
'res_id': inherit_id,
|
'res_id': inherit_id,
|
||||||
'noupdate': noupdate,
|
'noupdate': noupdate,
|
||||||
})
|
})
|
||||||
if xml_id:
|
if xml_id:
|
||||||
if res_id:
|
if res_id:
|
||||||
self.loads[(module, xml_id)] = (model, res_id)
|
self.loads[(module, xml_id)] = (model, res_id)
|
||||||
if model_obj._inherits:
|
if model_obj._inherits:
|
||||||
for table in model_obj._inherits:
|
for table in model_obj._inherits:
|
||||||
inherit_field = model_obj._inherits[table]
|
inherit_field = model_obj._inherits[table]
|
||||||
inherit_id = model_obj.read(cr, uid, res_id,
|
inherit_id = model_obj.read(cr, uid, res_id,
|
||||||
[inherit_field])[inherit_field]
|
[inherit_field])[inherit_field]
|
||||||
self.loads[(module, xml_id + '_' + \
|
self.loads[(module, xml_id + '_' + \
|
||||||
table.replace('.', '_'))] = (table, inherit_id)
|
table.replace('.', '_'))] = (table, inherit_id)
|
||||||
return res_id
|
return res_id
|
||||||
|
|
||||||
def _unlink(self, cr, uid, model, ids, direct=False):
|
def _unlink(self, cr, uid, model, ids, direct=False):
|
||||||
#self.pool.get(model).unlink(cr, uid, ids)
|
#self.pool.get(model).unlink(cr, uid, ids)
|
||||||
for id in ids:
|
for id in ids:
|
||||||
self.unlink_mark[(model, id)]=False
|
self.unlink_mark[(model, id)]=False
|
||||||
cr.execute('delete from ir_model_data where res_id=%d and model=\'%s\'', (id,model))
|
cr.execute('delete from ir_model_data where res_id=%d and model=\'%s\'', (id,model))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False):
|
def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False):
|
||||||
obj = self.pool.get('ir.values')
|
obj = self.pool.get('ir.values')
|
||||||
if type(models[0])==type([]) or type(models[0])==type(()):
|
if type(models[0])==type([]) or type(models[0])==type(()):
|
||||||
model,res_id = models[0]
|
model,res_id = models[0]
|
||||||
else:
|
else:
|
||||||
res_id=None
|
res_id=None
|
||||||
model = models[0]
|
model = models[0]
|
||||||
|
|
||||||
if res_id:
|
if res_id:
|
||||||
where = ' and res_id=%d' % (res_id,)
|
where = ' and res_id=%d' % (res_id,)
|
||||||
else:
|
else:
|
||||||
where = ' and (res_id is null)'
|
where = ' and (res_id is null)'
|
||||||
|
|
||||||
if key2:
|
if key2:
|
||||||
where += ' and key2=\'%s\'' % (key2,)
|
where += ' and key2=\'%s\'' % (key2,)
|
||||||
else:
|
else:
|
||||||
where += ' and (key2 is null)'
|
where += ' and (key2 is null)'
|
||||||
|
|
||||||
cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name))
|
cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name))
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
if not res:
|
if not res:
|
||||||
res = ir.ir_set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
|
res = ir.ir_set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
|
||||||
elif xml_id:
|
elif xml_id:
|
||||||
cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name))
|
cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _process_end(self, cr, uid, modules):
|
def _process_end(self, cr, uid, modules):
|
||||||
if not modules:
|
if not modules:
|
||||||
return True
|
return True
|
||||||
module_str = ["'%s'" % m for m in modules]
|
module_str = ["'%s'" % m for m in modules]
|
||||||
cr.execute('select id,name,model,res_id,module from ir_model_data where module in ('+','.join(module_str)+') and not noupdate')
|
cr.execute('select id,name,model,res_id,module from ir_model_data where module in ('+','.join(module_str)+') and not noupdate')
|
||||||
wkf_todo = []
|
wkf_todo = []
|
||||||
for (id, name, model, res_id,module) in cr.fetchall():
|
for (id, name, model, res_id,module) in cr.fetchall():
|
||||||
if (module,name) not in self.loads:
|
if (module,name) not in self.loads:
|
||||||
self.unlink_mark[(model,res_id)] = id
|
self.unlink_mark[(model,res_id)] = id
|
||||||
if model=='workflow.activity':
|
if model=='workflow.activity':
|
||||||
cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%d)', (res_id,))
|
cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%d)', (res_id,))
|
||||||
wkf_todo.extend(cr.fetchall())
|
wkf_todo.extend(cr.fetchall())
|
||||||
cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%d where act_to=%d", (res_id,res_id))
|
cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%d where act_to=%d", (res_id,res_id))
|
||||||
cr.execute("delete from wkf_transition where act_to=%d", (res_id,))
|
cr.execute("delete from wkf_transition where act_to=%d", (res_id,))
|
||||||
|
|
||||||
for model,id in wkf_todo:
|
for model,id in wkf_todo:
|
||||||
wf_service = netsvc.LocalService("workflow")
|
wf_service = netsvc.LocalService("workflow")
|
||||||
wf_service.trg_write(uid, model, id, cr)
|
wf_service.trg_write(uid, model, id, cr)
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
for (model,id) in self.unlink_mark.keys():
|
for (model,id) in self.unlink_mark.keys():
|
||||||
if self.pool.get(model):
|
if self.pool.get(model):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (id, model))
|
logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (id, model))
|
||||||
try:
|
try:
|
||||||
self.pool.get(model).unlink(cr, uid, [id])
|
self.pool.get(model).unlink(cr, uid, [id])
|
||||||
if self.unlink_mark[(model,id)]:
|
if self.unlink_mark[(model,id)]:
|
||||||
self.unlink(cr, uid, [self.unlink_mark[(model,id)]])
|
self.unlink(cr, uid, [self.unlink_mark[(model,id)]])
|
||||||
cr.execute('DELETE FROM ir_values WHERE value=%s', (model+','+str(id),))
|
cr.execute('DELETE FROM ir_values WHERE value=%s', (model+','+str(id),))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
except:
|
except:
|
||||||
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\tThere should be some relation that points to this resource\tYou should manually fix this and restart --update=module' % (id, model))
|
logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\tThere should be some relation that points to this resource\tYou should manually fix this and restart --update=module' % (id, model))
|
||||||
return True
|
return True
|
||||||
ir_model_data()
|
ir_model_data()
|
||||||
|
|
||||||
|
|
|
@ -34,176 +34,176 @@ import report.custom
|
||||||
from tools.translate import _
|
from tools.translate import _
|
||||||
|
|
||||||
class report_custom(osv.osv):
|
class report_custom(osv.osv):
|
||||||
_name = 'ir.report.custom'
|
_name = 'ir.report.custom'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Report Name', size=64, required=True, translate=True),
|
'name': fields.char('Report Name', size=64, required=True, translate=True),
|
||||||
'model_id': fields.many2one('ir.model','Model', required=True, change_default=True),
|
'model_id': fields.many2one('ir.model','Model', required=True, change_default=True),
|
||||||
'type': fields.selection([('table','Tabular'),('pie','Pie Chart'),('bar','Bar Chart'),('line','Line Plot')], "Report Type", size=64, required='True'),
|
'type': fields.selection([('table','Tabular'),('pie','Pie Chart'),('bar','Bar Chart'),('line','Line Plot')], "Report Type", size=64, required='True'),
|
||||||
'title': fields.char("Report title", size=64, required='True', translate=True),
|
'title': fields.char("Report title", size=64, required='True', translate=True),
|
||||||
'print_format': fields.selection((('A4','a4'),('A5','a5')), 'Print format', required=True),
|
'print_format': fields.selection((('A4','a4'),('A5','a5')), 'Print format', required=True),
|
||||||
'print_orientation': fields.selection((('landscape','Landscape'),('portrait','Portrait')), 'Print orientation', required=True, size=16),
|
'print_orientation': fields.selection((('landscape','Landscape'),('portrait','Portrait')), 'Print orientation', required=True, size=16),
|
||||||
'repeat_header': fields.boolean('Repeat Header'),
|
'repeat_header': fields.boolean('Repeat Header'),
|
||||||
'footer': fields.char('Report Footer', size=64, required=True),
|
'footer': fields.char('Report Footer', size=64, required=True),
|
||||||
'sortby': fields.char('Sorted By', size=64),
|
'sortby': fields.char('Sorted By', size=64),
|
||||||
'fields_child0': fields.one2many('ir.report.custom.fields', 'report_id','Fields', required=True),
|
'fields_child0': fields.one2many('ir.report.custom.fields', 'report_id','Fields', required=True),
|
||||||
'field_parent': fields.many2one('ir.model.fields','Child Field'),
|
'field_parent': fields.many2one('ir.model.fields','Child Field'),
|
||||||
'state': fields.selection([('unsubscribed','Unsubscribed'),('subscribed','Subscribed')], 'State', size=64),
|
'state': fields.selection([('unsubscribed','Unsubscribed'),('subscribed','Subscribed')], 'State', size=64),
|
||||||
'frequency': fields.selection([('Y','Yearly'),('M','Monthly'),('D','Daily')], 'Frequency', size=64),
|
'frequency': fields.selection([('Y','Yearly'),('M','Monthly'),('D','Daily')], 'Frequency', size=64),
|
||||||
'limitt': fields.char('Limit', size=9),
|
'limitt': fields.char('Limit', size=9),
|
||||||
'menu_id': fields.many2one('ir.ui.menu', 'Menu')
|
'menu_id': fields.many2one('ir.ui.menu', 'Menu')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'print_format': lambda *a: 'A4',
|
'print_format': lambda *a: 'A4',
|
||||||
'print_orientation': lambda *a: 'portrait',
|
'print_orientation': lambda *a: 'portrait',
|
||||||
'state': lambda *a: 'unsubscribed',
|
'state': lambda *a: 'unsubscribed',
|
||||||
'type': lambda *a: 'table',
|
'type': lambda *a: 'table',
|
||||||
'footer': lambda *a: 'Generated by Tiny ERP'
|
'footer': lambda *a: 'Generated by Tiny ERP'
|
||||||
}
|
}
|
||||||
|
|
||||||
def onchange_model_id(self, cr, uid, ids, model_id):
|
def onchange_model_id(self, cr, uid, ids, model_id):
|
||||||
if not(model_id):
|
if not(model_id):
|
||||||
return {}
|
return {}
|
||||||
return {'domain': {'field_parent': [('model_id','=',model_id)]}}
|
return {'domain': {'field_parent': [('model_id','=',model_id)]}}
|
||||||
|
|
||||||
def unsubscribe(self, cr, uid, ids, context={}):
|
def unsubscribe(self, cr, uid, ids, context={}):
|
||||||
#TODO: should delete the ir.actions.report.custom for these reports and do an ir_del
|
#TODO: should delete the ir.actions.report.custom for these reports and do an ir_del
|
||||||
self.write(cr, uid, ids, {'state':'unsubscribed'})
|
self.write(cr, uid, ids, {'state':'unsubscribed'})
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def subscribe(self, cr, uid, ids, context={}):
|
def subscribe(self, cr, uid, ids, context={}):
|
||||||
for report in self.browse(cr, uid, ids):
|
for report in self.browse(cr, uid, ids):
|
||||||
report.fields_child0.sort(lambda x,y : x.sequence - y.sequence)
|
report.fields_child0.sort(lambda x,y : x.sequence - y.sequence)
|
||||||
|
|
||||||
# required on field0 does not seem to work( cause we use o2m_l ?)
|
# required on field0 does not seem to work( cause we use o2m_l ?)
|
||||||
if not report.fields_child0:
|
if not report.fields_child0:
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Enter at least one field !'))
|
raise osv.except_osv(_('Invalid operation'), _('Enter at least one field !'))
|
||||||
|
|
||||||
if report.type in ['pie', 'bar', 'line'] and report.field_parent:
|
if report.type in ['pie', 'bar', 'line'] and report.field_parent:
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Tree can only be used in tabular reports'))
|
raise osv.except_osv(_('Invalid operation'), _('Tree can only be used in tabular reports'))
|
||||||
|
|
||||||
# Otherwise it won't build a good tree. See level.pop in custom.py.
|
# Otherwise it won't build a good tree. See level.pop in custom.py.
|
||||||
if report.type == 'table' and report.field_parent and report.fields_child0 and not report.fields_child0[0].groupby:
|
if report.type == 'table' and report.field_parent and report.fields_child0 and not report.fields_child0[0].groupby:
|
||||||
raise osv.except_osv('Invalid operation :', 'When creating tree (field child) report, data must be group by the first field')
|
raise osv.except_osv('Invalid operation :', 'When creating tree (field child) report, data must be group by the first field')
|
||||||
|
|
||||||
if report.type == 'pie':
|
if report.type == 'pie':
|
||||||
if len(report.fields_child0) != 2:
|
if len(report.fields_child0) != 2:
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Pie charts need exactly two fields'))
|
raise osv.except_osv(_('Invalid operation'), _('Pie charts need exactly two fields'))
|
||||||
else:
|
else:
|
||||||
c_f = {}
|
c_f = {}
|
||||||
for i in range(2):
|
for i in range(2):
|
||||||
c_f[i] = []
|
c_f[i] = []
|
||||||
tmp = report.fields_child0[i]
|
tmp = report.fields_child0[i]
|
||||||
for j in range(3):
|
for j in range(3):
|
||||||
c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)
|
c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)
|
||||||
if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[1])):
|
if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[1])):
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Second field should be figures'))
|
raise osv.except_osv(_('Invalid operation'), _('Second field should be figures'))
|
||||||
|
|
||||||
if report.type == 'bar':
|
if report.type == 'bar':
|
||||||
if len(report.fields_child0) < 2:
|
if len(report.fields_child0) < 2:
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Bar charts need at least two fields'))
|
raise osv.except_osv(_('Invalid operation'), _('Bar charts need at least two fields'))
|
||||||
else:
|
else:
|
||||||
c_f = {}
|
c_f = {}
|
||||||
for i in range(len(report.fields_child0)):
|
for i in range(len(report.fields_child0)):
|
||||||
c_f[i] = []
|
c_f[i] = []
|
||||||
tmp = report.fields_child0[i]
|
tmp = report.fields_child0[i]
|
||||||
for j in range(3):
|
for j in range(3):
|
||||||
c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)
|
c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)
|
||||||
|
|
||||||
if i == 0:
|
if i == 0:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[i])):
|
if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[i])):
|
||||||
raise osv.except_osv(_('Invalid operation'), _('Field %d should be a figure') %(i,))
|
raise osv.except_osv(_('Invalid operation'), _('Field %d should be a figure') %(i,))
|
||||||
|
|
||||||
if report.state=='subscribed':
|
if report.state=='subscribed':
|
||||||
continue
|
continue
|
||||||
|
|
||||||
name = report.name
|
name = report.name
|
||||||
model = report.model_id.model
|
model = report.model_id.model
|
||||||
|
|
||||||
action_def = {'report_id':report.id, 'type':'ir.actions.report.custom', 'model':model, 'name':name}
|
action_def = {'report_id':report.id, 'type':'ir.actions.report.custom', 'model':model, 'name':name}
|
||||||
id = self.pool.get('ir.actions.report.custom').create(cr, uid, action_def)
|
id = self.pool.get('ir.actions.report.custom').create(cr, uid, action_def)
|
||||||
m_id = report.menu_id.id
|
m_id = report.menu_id.id
|
||||||
action = "ir.actions.report.custom,%d" % (id,)
|
action = "ir.actions.report.custom,%d" % (id,)
|
||||||
if not report.menu_id:
|
if not report.menu_id:
|
||||||
ir.ir_set(cr, uid, 'action', 'client_print_multi', name, [(model, False)], action, False, True)
|
ir.ir_set(cr, uid, 'action', 'client_print_multi', name, [(model, False)], action, False, True)
|
||||||
else:
|
else:
|
||||||
ir.ir_set(cr, uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(m_id))], action, False, True)
|
ir.ir_set(cr, uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(m_id))], action, False, True)
|
||||||
|
|
||||||
self.write(cr, uid, [report.id], {'state':'subscribed'}, context)
|
self.write(cr, uid, [report.id], {'state':'subscribed'}, context)
|
||||||
return True
|
return True
|
||||||
report_custom()
|
report_custom()
|
||||||
|
|
||||||
|
|
||||||
class report_custom_fields(osv.osv):
|
class report_custom_fields(osv.osv):
|
||||||
_name = 'ir.report.custom.fields'
|
_name = 'ir.report.custom.fields'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'report_id': fields.many2one('ir.report.custom', 'Report Ref', select=True),
|
'report_id': fields.many2one('ir.report.custom', 'Report Ref', select=True),
|
||||||
'field_child0': fields.many2one('ir.model.fields', 'field child0', required=True),
|
'field_child0': fields.many2one('ir.model.fields', 'field child0', required=True),
|
||||||
'fc0_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
'fc0_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
||||||
'fc0_condition': fields.char('Condition', size=64),
|
'fc0_condition': fields.char('Condition', size=64),
|
||||||
'fc0_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
'fc0_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
||||||
'field_child1': fields.many2one('ir.model.fields', 'field child1'),
|
'field_child1': fields.many2one('ir.model.fields', 'field child1'),
|
||||||
'fc1_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
'fc1_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
||||||
'fc1_condition': fields.char('condition', size=64),
|
'fc1_condition': fields.char('condition', size=64),
|
||||||
'fc1_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
'fc1_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
||||||
'field_child2': fields.many2one('ir.model.fields', 'field child2'),
|
'field_child2': fields.many2one('ir.model.fields', 'field child2'),
|
||||||
'fc2_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
'fc2_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
||||||
'fc2_condition': fields.char('condition', size=64),
|
'fc2_condition': fields.char('condition', size=64),
|
||||||
'fc2_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
'fc2_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
||||||
'field_child3': fields.many2one('ir.model.fields', 'field child3'),
|
'field_child3': fields.many2one('ir.model.fields', 'field child3'),
|
||||||
'fc3_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
'fc3_operande': fields.many2one('ir.model.fields', 'Constraint'),
|
||||||
'fc3_condition': fields.char('condition', size=64),
|
'fc3_condition': fields.char('condition', size=64),
|
||||||
'fc3_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
'fc3_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
|
||||||
'alignment': fields.selection((('left','left'),('right','right'),('center','center')), 'Alignment', required=True),
|
'alignment': fields.selection((('left','left'),('right','right'),('center','center')), 'Alignment', required=True),
|
||||||
'sequence': fields.integer('Sequence', required=True),
|
'sequence': fields.integer('Sequence', required=True),
|
||||||
'width': fields.integer('Fixed Width'),
|
'width': fields.integer('Fixed Width'),
|
||||||
'operation': fields.selection((('none', 'None'),('calc_sum','Calculate Sum'),('calc_avg','Calculate Average'),('calc_count','Calculate Count'),('calc_max', 'Get Max'))),
|
'operation': fields.selection((('none', 'None'),('calc_sum','Calculate Sum'),('calc_avg','Calculate Average'),('calc_count','Calculate Count'),('calc_max', 'Get Max'))),
|
||||||
'groupby' : fields.boolean('Group by'),
|
'groupby' : fields.boolean('Group by'),
|
||||||
'bgcolor': fields.char('Background Color', size=64),
|
'bgcolor': fields.char('Background Color', size=64),
|
||||||
'fontcolor': fields.char('Font color', size=64),
|
'fontcolor': fields.char('Font color', size=64),
|
||||||
'cumulate': fields.boolean('Cumulate')
|
'cumulate': fields.boolean('Cumulate')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'alignment': lambda *a: 'left',
|
'alignment': lambda *a: 'left',
|
||||||
'bgcolor': lambda *a: 'white',
|
'bgcolor': lambda *a: 'white',
|
||||||
'fontcolor': lambda *a: 'black',
|
'fontcolor': lambda *a: 'black',
|
||||||
'operation': lambda *a: 'none',
|
'operation': lambda *a: 'none',
|
||||||
}
|
}
|
||||||
_order = "sequence"
|
_order = "sequence"
|
||||||
|
|
||||||
def onchange_any_field_child(self, cr, uid, ids, field_id, level):
|
def onchange_any_field_child(self, cr, uid, ids, field_id, level):
|
||||||
if not(field_id):
|
if not(field_id):
|
||||||
return {}
|
return {}
|
||||||
next_level_field_name = 'field_child%d' % (level+1)
|
next_level_field_name = 'field_child%d' % (level+1)
|
||||||
next_level_operande = 'fc%d_operande' % (level+1)
|
next_level_operande = 'fc%d_operande' % (level+1)
|
||||||
field = self.pool.get('ir.model.fields').browse(cr, uid, [field_id])[0]
|
field = self.pool.get('ir.model.fields').browse(cr, uid, [field_id])[0]
|
||||||
res = self.pool.get(field.model).fields_get(cr, uid, field.name)
|
res = self.pool.get(field.model).fields_get(cr, uid, field.name)
|
||||||
if res[field.name].has_key('relation'):
|
if res[field.name].has_key('relation'):
|
||||||
cr.execute('select id from ir_model where model=%s', (res[field.name]['relation'],))
|
cr.execute('select id from ir_model where model=%s', (res[field.name]['relation'],))
|
||||||
(id,) = cr.fetchone() or (False,)
|
(id,) = cr.fetchone() or (False,)
|
||||||
if id:
|
if id:
|
||||||
return {
|
return {
|
||||||
'domain': {
|
'domain': {
|
||||||
next_level_field_name: [('model_id', '=', id)],
|
next_level_field_name: [('model_id', '=', id)],
|
||||||
next_level_operande: [('model_id', '=', id)]
|
next_level_operande: [('model_id', '=', id)]
|
||||||
},
|
},
|
||||||
'required': {
|
'required': {
|
||||||
next_level_field_name: True
|
next_level_field_name: True
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
print _("Warning: using a relation field which uses an unknown object") #TODO use the logger
|
print _("Warning: using a relation field which uses an unknown object") #TODO use the logger
|
||||||
return {'required': {next_level_field_name: True}}
|
return {'required': {next_level_field_name: True}}
|
||||||
else:
|
else:
|
||||||
return {'domain': {next_level_field_name: []}}
|
return {'domain': {next_level_field_name: []}}
|
||||||
|
|
||||||
def get_field_child_onchange_method(level):
|
def get_field_child_onchange_method(level):
|
||||||
return lambda self, cr, uid, ids, field_id: self.onchange_any_field_child(cr, uid, ids, field_id, level)
|
return lambda self, cr, uid, ids, field_id: self.onchange_any_field_child(cr, uid, ids, field_id, level)
|
||||||
|
|
||||||
onchange_field_child0 = get_field_child_onchange_method(0)
|
onchange_field_child0 = get_field_child_onchange_method(0)
|
||||||
onchange_field_child1 = get_field_child_onchange_method(1)
|
onchange_field_child1 = get_field_child_onchange_method(1)
|
||||||
onchange_field_child2 = get_field_child_onchange_method(2)
|
onchange_field_child2 = get_field_child_onchange_method(2)
|
||||||
report_custom_fields()
|
report_custom_fields()
|
||||||
|
|
||||||
|
|
|
@ -33,208 +33,208 @@ import tools
|
||||||
|
|
||||||
|
|
||||||
class ir_rule_group(osv.osv):
|
class ir_rule_group(osv.osv):
|
||||||
_name = 'ir.rule.group'
|
_name = 'ir.rule.group'
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=128, select=1),
|
'name': fields.char('Name', size=128, select=1),
|
||||||
'model_id': fields.many2one('ir.model', 'Model',select=1, required=True),
|
'model_id': fields.many2one('ir.model', 'Model',select=1, required=True),
|
||||||
'global': fields.boolean('Global', select=1, help="Make the rule global or it needs to be put on a group or user"),
|
'global': fields.boolean('Global', select=1, help="Make the rule global or it needs to be put on a group or user"),
|
||||||
'rules': fields.one2many('ir.rule', 'rule_group', 'Tests', help="The rule is satisfied if at least one test is True"),
|
'rules': fields.one2many('ir.rule', 'rule_group', 'Tests', help="The rule is satisfied if at least one test is True"),
|
||||||
'groups': fields.many2many('res.groups', 'group_rule_group_rel', 'rule_group_id', 'group_id', 'Groups'),
|
'groups': fields.many2many('res.groups', 'group_rule_group_rel', 'rule_group_id', 'group_id', 'Groups'),
|
||||||
'users': fields.many2many('res.users', 'user_rule_group_rel', 'rule_group_id', 'user_id', 'Users'),
|
'users': fields.many2many('res.users', 'user_rule_group_rel', 'rule_group_id', 'user_id', 'Users'),
|
||||||
}
|
}
|
||||||
|
|
||||||
_order = 'model_id, global DESC'
|
_order = 'model_id, global DESC'
|
||||||
|
|
||||||
_defaults={
|
_defaults={
|
||||||
'global': lambda *a: True,
|
'global': lambda *a: True,
|
||||||
}
|
}
|
||||||
|
|
||||||
def unlink(self, cr, uid, ids, context=None):
|
def unlink(self, cr, uid, ids, context=None):
|
||||||
res = super(ir_rule_group, self).unlink(cr, uid, ids, context=context)
|
res = super(ir_rule_group, self).unlink(cr, uid, ids, context=context)
|
||||||
# Restart the cache on the domain_get method of ir.rule
|
# Restart the cache on the domain_get method of ir.rule
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def create(self, cr, user, vals, context=None):
|
def create(self, cr, user, vals, context=None):
|
||||||
res = super(ir_rule_group, self).create(cr, user, vals, context=context)
|
res = super(ir_rule_group, self).create(cr, user, vals, context=context)
|
||||||
# Restart the cache on the domain_get method of ir.rule
|
# Restart the cache on the domain_get method of ir.rule
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def write(self, cr, uid, ids, vals, context=None):
|
def write(self, cr, uid, ids, vals, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
res = super(ir_rule_group, self).write(cr, uid, ids, vals, context=context)
|
res = super(ir_rule_group, self).write(cr, uid, ids, vals, context=context)
|
||||||
# Restart the cache on the domain_get method of ir.rule
|
# Restart the cache on the domain_get method of ir.rule
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
ir_rule_group()
|
ir_rule_group()
|
||||||
|
|
||||||
|
|
||||||
class ir_rule(osv.osv):
|
class ir_rule(osv.osv):
|
||||||
_name = 'ir.rule'
|
_name = 'ir.rule'
|
||||||
_rec_name = 'field_id'
|
_rec_name = 'field_id'
|
||||||
|
|
||||||
def _operand(self,cr,uid,context):
|
def _operand(self,cr,uid,context):
|
||||||
|
|
||||||
def get(object, level=3, recur=None, root_tech='', root=''):
|
def get(object, level=3, recur=None, root_tech='', root=''):
|
||||||
res = []
|
res = []
|
||||||
if not recur:
|
if not recur:
|
||||||
recur = []
|
recur = []
|
||||||
fields = self.pool.get(object).fields_get(cr,uid)
|
fields = self.pool.get(object).fields_get(cr,uid)
|
||||||
key = fields.keys()
|
key = fields.keys()
|
||||||
key.sort()
|
key.sort()
|
||||||
for k in key:
|
for k in key:
|
||||||
|
|
||||||
if fields[k]['type'] in ('many2one'):
|
if fields[k]['type'] in ('many2one'):
|
||||||
res.append((root_tech+'.'+k+'.id',
|
res.append((root_tech+'.'+k+'.id',
|
||||||
root+'/'+fields[k]['string']))
|
root+'/'+fields[k]['string']))
|
||||||
|
|
||||||
elif fields[k]['type'] in ('many2many', 'one2many'):
|
elif fields[k]['type'] in ('many2many', 'one2many'):
|
||||||
res.append(('\',\'.join(map(lambda x: str(x.id), '+root_tech+'.'+k+'))',
|
res.append(('\',\'.join(map(lambda x: str(x.id), '+root_tech+'.'+k+'))',
|
||||||
root+'/'+fields[k]['string']))
|
root+'/'+fields[k]['string']))
|
||||||
|
|
||||||
else:
|
else:
|
||||||
res.append((root_tech+'.'+k,
|
res.append((root_tech+'.'+k,
|
||||||
root+'/'+fields[k]['string']))
|
root+'/'+fields[k]['string']))
|
||||||
|
|
||||||
if (fields[k]['type'] in recur) and (level>0):
|
if (fields[k]['type'] in recur) and (level>0):
|
||||||
res.extend(get(fields[k]['relation'], level-1,
|
res.extend(get(fields[k]['relation'], level-1,
|
||||||
recur, root_tech+'.'+k, root+'/'+fields[k]['string']))
|
recur, root_tech+'.'+k, root+'/'+fields[k]['string']))
|
||||||
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
res = [("False", "False"), ("True", "True"), ("user.id", "User")]
|
res = [("False", "False"), ("True", "True"), ("user.id", "User")]
|
||||||
res += get('res.users', level=1,
|
res += get('res.users', level=1,
|
||||||
recur=['many2one'], root_tech='user', root='User')
|
recur=['many2one'], root_tech='user', root='User')
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _domain_force_get(self, cr, uid, ids, field_name, arg, context={}):
|
def _domain_force_get(self, cr, uid, ids, field_name, arg, context={}):
|
||||||
res = {}
|
res = {}
|
||||||
for rule in self.browse(cr, uid, ids, context):
|
for rule in self.browse(cr, uid, ids, context):
|
||||||
if rule.domain_force:
|
if rule.domain_force:
|
||||||
res[rule.id] = eval(rule.domain_force, {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
res[rule.id] = eval(rule.domain_force, {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
||||||
'time':time})
|
'time':time})
|
||||||
else:
|
else:
|
||||||
if rule.operator in ('in', 'child_of'):
|
if rule.operator in ('in', 'child_of'):
|
||||||
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
|
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
|
||||||
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
||||||
'time':time})
|
'time':time})
|
||||||
else:
|
else:
|
||||||
dom = eval("[('%s', '%s', %s)]" % (rule.field_id.name, rule.operator,
|
dom = eval("[('%s', '%s', %s)]" % (rule.field_id.name, rule.operator,
|
||||||
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
|
||||||
'time':time})
|
'time':time})
|
||||||
res[rule.id] = dom
|
res[rule.id] = dom
|
||||||
return res
|
return res
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'field_id': fields.many2one('ir.model.fields', 'Field',domain= "[('model_id','=', parent.model_id)]", select=1, required=True),
|
'field_id': fields.many2one('ir.model.fields', 'Field',domain= "[('model_id','=', parent.model_id)]", select=1, required=True),
|
||||||
'operator':fields.selection((('=', '='), ('<>', '<>'), ('<=', '<='), ('>=', '>='), ('in', 'in'), ('child_of', 'child_of')), 'Operator', required=True),
|
'operator':fields.selection((('=', '='), ('<>', '<>'), ('<=', '<='), ('>=', '>='), ('in', 'in'), ('child_of', 'child_of')), 'Operator', required=True),
|
||||||
'operand':fields.selection(_operand,'Operand', size=64, required=True),
|
'operand':fields.selection(_operand,'Operand', size=64, required=True),
|
||||||
'rule_group': fields.many2one('ir.rule.group', 'Group', select=2, required=True, ondelete="cascade"),
|
'rule_group': fields.many2one('ir.rule.group', 'Group', select=2, required=True, ondelete="cascade"),
|
||||||
'domain_force': fields.char('Force Domain', size=250),
|
'domain_force': fields.char('Force Domain', size=250),
|
||||||
'domain': fields.function(_domain_force_get, method=True, string='Domain', type='char', size=250)
|
'domain': fields.function(_domain_force_get, method=True, string='Domain', type='char', size=250)
|
||||||
}
|
}
|
||||||
|
|
||||||
def onchange_all(self, cr, uid, ids, field_id, operator, operand):
|
def onchange_all(self, cr, uid, ids, field_id, operator, operand):
|
||||||
if not (field_id or operator or operand):
|
if not (field_id or operator or operand):
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def domain_get(self, cr, uid, model_name):
|
def domain_get(self, cr, uid, model_name):
|
||||||
# root user above constraint
|
# root user above constraint
|
||||||
if uid == 1:
|
if uid == 1:
|
||||||
return '', []
|
return '', []
|
||||||
|
|
||||||
cr.execute("""SELECT r.id FROM
|
cr.execute("""SELECT r.id FROM
|
||||||
ir_rule r
|
ir_rule r
|
||||||
JOIN (ir_rule_group g
|
JOIN (ir_rule_group g
|
||||||
JOIN ir_model m ON (g.model_id = m.id))
|
JOIN ir_model m ON (g.model_id = m.id))
|
||||||
ON (g.id = r.rule_group)
|
ON (g.id = r.rule_group)
|
||||||
WHERE m.model = %s
|
WHERE m.model = %s
|
||||||
AND (g.id IN (SELECT rule_group_id FROM group_rule_group_rel g_rel
|
AND (g.id IN (SELECT rule_group_id FROM group_rule_group_rel g_rel
|
||||||
JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid)
|
JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid)
|
||||||
WHERE u_rel.uid = %d) OR g.global)""", (model_name, uid))
|
WHERE u_rel.uid = %d) OR g.global)""", (model_name, uid))
|
||||||
ids = map(lambda x:x[0], cr.fetchall())
|
ids = map(lambda x:x[0], cr.fetchall())
|
||||||
if not ids:
|
if not ids:
|
||||||
return '', []
|
return '', []
|
||||||
obj = self.pool.get(model_name)
|
obj = self.pool.get(model_name)
|
||||||
add = []
|
add = []
|
||||||
add_str = []
|
add_str = []
|
||||||
sub = []
|
sub = []
|
||||||
sub_str = []
|
sub_str = []
|
||||||
clause={}
|
clause={}
|
||||||
clause_global={}
|
clause_global={}
|
||||||
for rule in self.browse(cr, uid, ids):
|
for rule in self.browse(cr, uid, ids):
|
||||||
dom = rule.domain
|
dom = rule.domain
|
||||||
if rule.rule_group['global']:
|
if rule.rule_group['global']:
|
||||||
clause_global.setdefault(rule.rule_group.id, [])
|
clause_global.setdefault(rule.rule_group.id, [])
|
||||||
clause_global[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
|
clause_global[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
|
||||||
else:
|
else:
|
||||||
clause.setdefault(rule.rule_group.id, [])
|
clause.setdefault(rule.rule_group.id, [])
|
||||||
clause[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
|
clause[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
|
||||||
|
|
||||||
def _query(clause, test):
|
def _query(clause, test):
|
||||||
query = ''
|
query = ''
|
||||||
val = []
|
val = []
|
||||||
for g in clause.values():
|
for g in clause.values():
|
||||||
if not g:
|
if not g:
|
||||||
continue
|
continue
|
||||||
if len(query):
|
if len(query):
|
||||||
query += ' '+test+' '
|
query += ' '+test+' '
|
||||||
query += '('
|
query += '('
|
||||||
first = True
|
first = True
|
||||||
for c in g:
|
for c in g:
|
||||||
if not first:
|
if not first:
|
||||||
query += ' AND '
|
query += ' AND '
|
||||||
first = False
|
first = False
|
||||||
query += '('
|
query += '('
|
||||||
first2 = True
|
first2 = True
|
||||||
for clause in c[0]:
|
for clause in c[0]:
|
||||||
if not first2:
|
if not first2:
|
||||||
query += ' AND '
|
query += ' AND '
|
||||||
first2 = False
|
first2 = False
|
||||||
query += clause
|
query += clause
|
||||||
query += ')'
|
query += ')'
|
||||||
val += c[1]
|
val += c[1]
|
||||||
query += ')'
|
query += ')'
|
||||||
return query, val
|
return query, val
|
||||||
|
|
||||||
query, val = _query(clause, 'OR')
|
query, val = _query(clause, 'OR')
|
||||||
query_global, val_global = _query(clause_global, 'OR')
|
query_global, val_global = _query(clause_global, 'OR')
|
||||||
if query_global:
|
if query_global:
|
||||||
if query:
|
if query:
|
||||||
query = '('+query+') OR '+query_global
|
query = '('+query+') OR '+query_global
|
||||||
val.extend(val_global)
|
val.extend(val_global)
|
||||||
else:
|
else:
|
||||||
query = query_global
|
query = query_global
|
||||||
val = val_global
|
val = val_global
|
||||||
|
|
||||||
|
|
||||||
if query:
|
if query:
|
||||||
query = '('+query+')'
|
query = '('+query+')'
|
||||||
return query, val
|
return query, val
|
||||||
domain_get = tools.cache()(domain_get)
|
domain_get = tools.cache()(domain_get)
|
||||||
|
|
||||||
def unlink(self, cr, uid, ids, context=None):
|
def unlink(self, cr, uid, ids, context=None):
|
||||||
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
|
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
|
||||||
# Restart the cache on the domain_get method of ir.rule
|
# Restart the cache on the domain_get method of ir.rule
|
||||||
self.domain_get()
|
self.domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def create(self, cr, user, vals, context=None):
|
def create(self, cr, user, vals, context=None):
|
||||||
res = super(ir_rule, self).create(cr, user, vals, context=context)
|
res = super(ir_rule, self).create(cr, user, vals, context=context)
|
||||||
# Restart the cache on the domain_get method of ir.rule
|
# Restart the cache on the domain_get method of ir.rule
|
||||||
self.domain_get()
|
self.domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def write(self, cr, uid, ids, vals, context=None):
|
def write(self, cr, uid, ids, vals, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
|
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
|
||||||
# Restart the cache on the domain_get method
|
# Restart the cache on the domain_get method
|
||||||
self.domain_get()
|
self.domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
ir_rule()
|
ir_rule()
|
||||||
|
|
|
@ -31,52 +31,52 @@ import time
|
||||||
from osv import fields,osv
|
from osv import fields,osv
|
||||||
|
|
||||||
class ir_sequence_type(osv.osv):
|
class ir_sequence_type(osv.osv):
|
||||||
_name = 'ir.sequence.type'
|
_name = 'ir.sequence.type'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Sequence Name',size=64, required=True),
|
'name': fields.char('Sequence Name',size=64, required=True),
|
||||||
'code': fields.char('Sequence Code',size=32, required=True),
|
'code': fields.char('Sequence Code',size=32, required=True),
|
||||||
}
|
}
|
||||||
ir_sequence_type()
|
ir_sequence_type()
|
||||||
|
|
||||||
def _code_get(self, cr, uid, context={}):
|
def _code_get(self, cr, uid, context={}):
|
||||||
cr.execute('select code, name from ir_sequence_type')
|
cr.execute('select code, name from ir_sequence_type')
|
||||||
return cr.fetchall()
|
return cr.fetchall()
|
||||||
|
|
||||||
class ir_sequence(osv.osv):
|
class ir_sequence(osv.osv):
|
||||||
_name = 'ir.sequence'
|
_name = 'ir.sequence'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Sequence Name',size=64, required=True),
|
'name': fields.char('Sequence Name',size=64, required=True),
|
||||||
'code': fields.selection(_code_get, 'Sequence Code',size=64, required=True),
|
'code': fields.selection(_code_get, 'Sequence Code',size=64, required=True),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'prefix': fields.char('Prefix',size=64),
|
'prefix': fields.char('Prefix',size=64),
|
||||||
'suffix': fields.char('Suffix',size=64),
|
'suffix': fields.char('Suffix',size=64),
|
||||||
'number_next': fields.integer('Next Number', required=True),
|
'number_next': fields.integer('Next Number', required=True),
|
||||||
'number_increment': fields.integer('Increment Number', required=True),
|
'number_increment': fields.integer('Increment Number', required=True),
|
||||||
'padding' : fields.integer('Number padding', required=True),
|
'padding' : fields.integer('Number padding', required=True),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: True,
|
'active': lambda *a: True,
|
||||||
'number_increment': lambda *a: 1,
|
'number_increment': lambda *a: 1,
|
||||||
'number_next': lambda *a: 1,
|
'number_next': lambda *a: 1,
|
||||||
'padding' : lambda *a : 0,
|
'padding' : lambda *a : 0,
|
||||||
}
|
}
|
||||||
|
|
||||||
def _process(self, s):
|
def _process(self, s):
|
||||||
return (s or '') % {'year':time.strftime('%Y'), 'month': time.strftime('%m'), 'day':time.strftime('%d')}
|
return (s or '') % {'year':time.strftime('%Y'), 'month': time.strftime('%m'), 'day':time.strftime('%d')}
|
||||||
|
|
||||||
def get_id(self, cr, uid, sequence_id, test='id=%d'):
|
def get_id(self, cr, uid, sequence_id, test='id=%d'):
|
||||||
cr.execute('lock table ir_sequence')
|
cr.execute('lock table ir_sequence')
|
||||||
cr.execute('select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where '+test+' and active=True', (sequence_id,))
|
cr.execute('select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where '+test+' and active=True', (sequence_id,))
|
||||||
res = cr.dictfetchone()
|
res = cr.dictfetchone()
|
||||||
if res:
|
if res:
|
||||||
cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%d and active=True', (res['id'],))
|
cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%d and active=True', (res['id'],))
|
||||||
if res['number_next']:
|
if res['number_next']:
|
||||||
return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix'])
|
return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix'])
|
||||||
else:
|
else:
|
||||||
return self._process(res['prefix']) + self._process(res['suffix'])
|
return self._process(res['prefix']) + self._process(res['suffix'])
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def get(self, cr, uid, code):
|
def get(self, cr, uid, code):
|
||||||
return self.get_id(cr, uid, code, test='code=%s')
|
return self.get_id(cr, uid, code, test='code=%s')
|
||||||
ir_sequence()
|
ir_sequence()
|
||||||
|
|
||||||
|
|
|
@ -32,131 +32,131 @@ from osv.osv import Cacheable
|
||||||
import tools
|
import tools
|
||||||
|
|
||||||
# Kinds of translatable terms; used as the selection list for the
# 'type' column of ir.translation below.
TRANSLATION_TYPE = [
    ('field', 'Field'),
    ('model', 'Model'),
    ('rml', 'RML'),
    ('selection', 'Selection'),
    ('view', 'View'),
    ('wizard_button', 'Wizard Button'),
    ('wizard_field', 'Wizard Field'),
    ('wizard_view', 'Wizard View'),
    ('xsl', 'XSL'),
    ('help', 'Help'),
    ('code', 'Code'),
    ('constraint', 'Constraint'),
]
class ir_translation(osv.osv, Cacheable):
    """Translation storage with an in-memory cache (Cacheable mixin).

    Cache keys are (lang, type, name, res_id-or-source) 4-tuples; any
    create/write/unlink invalidates the whole cache (see below).
    """
    _name = "ir.translation"
    _log_access = False

    def _get_language(self, cr, uid, context):
        """Selection values for 'lang': translatable res.lang records first,
        then any scanned language not already listed."""
        lang_obj = self.pool.get('res.lang')
        lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)],
                context=context)
        langs = lang_obj.browse(cr, uid, lang_ids, context=context)
        res = [(lang.code, lang.name) for lang in langs]
        for lang_dict in tools.scan_languages():
            if lang_dict not in res:
                res.append(lang_dict)
        return res

    _columns = {
        'name': fields.char('Field Name', size=128, required=True),
        'res_id': fields.integer('Resource ID'),
        'lang': fields.selection(_get_language, string='Language', size=5),
        'type': fields.selection(TRANSLATION_TYPE, string='Type', size=16),
        'src': fields.text('Source'),
        'value': fields.text('Translation Value'),
    }
    _sql = """
        create index ir_translation_ltn on ir_translation (lang,type,name);
        create index ir_translation_res_id on ir_translation (res_id);
    """

    def _get_ids(self, cr, uid, name, tt, lang, ids):
        """Return {res_id: value} for translations of *name* / type *tt*.

        Cache-first; all missing ids are fetched in a single query and
        cached, with False cached for ids that have no translation.
        """
        translations, to_fetch = {}, []
        for id in ids:
            # BUGFIX: the lookup key must match the (lang, tt, name, id) key
            # written by self.add() below — the old 3-tuple (lang, name, id)
            # key never produced a cache hit.
            trans = self.get((lang, tt, name, id))
            if trans is not None:
                translations[id] = trans
            else:
                to_fetch.append(id)
        if to_fetch:
            # to_fetch holds integers from our own ids argument, joined into
            # the IN clause; lang/type/name go through bound parameters.
            cr.execute('select res_id,value ' \
                    'from ir_translation ' \
                    'where lang=%s ' \
                    'and type=%s ' \
                    'and name=%s ' \
                    'and res_id in ('+','.join(map(str, to_fetch))+')',
                    (lang, tt, name))
            for res_id, value in cr.fetchall():
                self.add((lang, tt, name, res_id), value)
                translations[res_id] = value
        # Negative-cache the ids that had no row so the next call skips them.
        for res_id in ids:
            if res_id not in translations:
                self.add((lang, tt, name, res_id), False)
                translations[res_id] = False
        return translations

    def _set_ids(self, cr, uid, name, tt, lang, ids, value):
        """Replace the translations of *ids* with *value*; returns len(ids)."""
        # Delete-then-recreate keeps exactly one row per (lang, tt, name, id).
        cr.execute('delete from ir_translation ' \
                'where lang=%s ' \
                'and type=%s ' \
                'and name=%s ' \
                'and res_id in ('+','.join(map(str, ids))+')',
                (lang, tt, name))
        for id in ids:
            self.create(cr, uid, {
                'lang': lang,
                'type': tt,
                'name': name,
                'res_id': id,
                'value': value,
                })
        return len(ids)

    def _get_source(self, cr, uid, name, tt, lang, source=None):
        """Return the translation of *source* (or of *name* when no source),
        '' when none exists; results, including misses, are cached."""
        trans = self.get((lang, tt, name, source))
        if trans is not None:
            return trans

        if source:
            cr.execute('select value ' \
                    'from ir_translation ' \
                    'where lang=%s ' \
                    'and type=%s ' \
                    'and name=%s ' \
                    'and src=%s',
                    (lang, tt, str(name), source))
        else:
            cr.execute('select value ' \
                    'from ir_translation ' \
                    'where lang=%s ' \
                    'and type=%s ' \
                    'and name=%s',
                    (lang, tt, str(name)))
        res = cr.fetchone()

        trad = res and res[0] or ''
        self.add((lang, tt, name, source), trad)
        return trad

    def unlink(self, cursor, user, ids, context=None):
        # Any deletion may invalidate cached entries: drop the whole cache.
        self.clear()
        # BUGFIX: 'cusor' typo raised NameError on every unlink.
        return super(ir_translation, self).unlink(cursor, user, ids,
                context=context)

    def create(self, cursor, user, vals, context=None):
        # New rows may shadow negative-cached misses: drop the whole cache.
        self.clear()
        return super(ir_translation, self).create(cursor, user, vals,
                context=context)

    def write(self, cursor, user, ids, vals, context=None):
        # Updated values may differ from cached ones: drop the whole cache.
        self.clear()
        return super(ir_translation, self).write(cursor, user, ids, vals,
                context=context)

ir_translation()
@ -32,12 +32,12 @@ from osv.orm import browse_null, browse_record
|
||||||
import re
|
import re
|
||||||
|
|
||||||
def one_in(setA, setB):
    """Check the presence of an element of setA in setB
    """
    return any(elem in setB for elem in setA)
icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
|
icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
|
||||||
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
|
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
|
||||||
|
@ -68,143 +68,143 @@ icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_
|
||||||
])
|
])
|
||||||
|
|
||||||
class many2many_unique(fields.many2many):
    """many2many variant that drops 'link' (4, id) commands whose pair is
    already present in the relation table, avoiding duplicate rows."""

    def set(self, cr, obj, id, name, values, user=None, context=None):
        if not values:
            return
        # Filter a copy so we can remove entries while iterating the original.
        val = values[:]
        for act in values:
            if act[0] == 4:
                # FIX: the old backslash line-continuation ran *inside* the
                # string literal, embedding source indentation in the SQL.
                # Adjacent literals produce the same (whitespace-normalized)
                # statement.  Table/column names come from the field
                # definition, not user input; ids go through parameters.
                cr.execute('SELECT * FROM ' + self._rel + ' '
                        'WHERE ' + self._id1 + '=%d AND ' + self._id2 + '=%d',
                        (id, act[1]))
                if cr.fetchall():
                    val.remove(act)
        return super(many2many_unique, self).set(cr, obj, id, name, val, user=user,
                context=context)
class ir_ui_menu(osv.osv):
    """Menu items, filtered by the user's groups, with an ir.values-backed
    'action' pseudo-field triggered on tree_but_open."""
    _name = 'ir.ui.menu'

    def search(self, cr, uid, args, offset=0, limit=2000, order=None,
            context=None, count=False):
        """Standard search, restricted to menus visible to *uid*.

        The superuser (uid 1) sees everything; other users only see menus
        with no groups, or with at least one group in common with theirs.
        """
        if context is None:
            context = {}
        ids = osv.orm.orm.search(self, cr, uid, args, offset, limit, order,
                context=context)
        if uid == 1:
            return ids
        user_groups = self.pool.get('res.users').read(cr, uid, [uid])[0]['groups_id']
        result = []
        for menu in self.browse(cr, uid, ids):
            if not len(menu.groups_id):
                # Menus without groups are visible to everybody.
                result.append(menu.id)
                continue
            for g in menu.groups_id:
                if g.id in user_groups:
                    result.append(menu.id)
                    break
        return result

    def _get_full_name(self, cr, uid, ids, name, args, context):
        """fields.function getter for 'complete_name'."""
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = self._get_one_full_name(m)
        return res

    def _get_one_full_name(self, menu, level=6):
        """Return the slash-separated ancestor path of *menu*, truncated to
        '...' beyond *level* ancestors."""
        if level <= 0:
            return '...'
        if menu.parent_id:
            parent_path = self._get_one_full_name(menu.parent_id, level - 1) + "/"
        else:
            parent_path = ''
        return parent_path + menu.name

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a menu, appending or incrementing a '(n)' counter on the
        name, and duplicate the ir.values entries attached to the original."""
        ir_values_obj = self.pool.get('ir.values')
        res = super(ir_ui_menu, self).copy(cr, uid, id, context=context)
        datas = self.read(cr, uid, [res], ['name'])[0]
        # Raw string for the regex (the old '\(' literal relied on Python
        # passing unknown escapes through).
        rex = re.compile(r'\([0-9]+\)')
        concat = rex.findall(datas['name'])
        if concat:
            # concat[0] looks like '(3)'; parse the digits with int() —
            # same result as the former eval(), without evaluating data.
            next_num = int(concat[0][1:-1]) + 1
            datas['name'] = rex.sub(('(%d)' % next_num), datas['name'])
        else:
            datas['name'] = datas['name'] + '(1)'
        self.write(cr, uid, [res], {'name': datas['name']})
        ids = ir_values_obj.search(cr, uid, [
            ('model', '=', 'ir.ui.menu'),
            ('res_id', '=', id),
            ])
        for iv in ir_values_obj.browse(cr, uid, ids):
            ir_values_obj.copy(cr, uid, iv.id,
                    default={'res_id': res}, context=context)
        return res

    def _action(self, cursor, user, ids, name, arg, context=None):
        """fields.function getter for 'action': the tree_but_open ir.values
        entry of each menu, or False when none is bound."""
        res = {}
        values_obj = self.pool.get('ir.values')
        value_ids = values_obj.search(cursor, user, [
            ('model', '=', self._name), ('key', '=', 'action'),
            ('key2', '=', 'tree_but_open'), ('res_id', 'in', ids)],
            context=context)
        values_action = {}
        for value in values_obj.browse(cursor, user, value_ids,
                context=context):
            values_action[value.res_id] = value.value
        for menu_id in ids:
            res[menu_id] = values_action.get(menu_id, False)
        return res

    def _action_inv(self, cursor, user, menu_id, name, value, arg, context=None):
        """fields.function setter for 'action': update the existing
        tree_but_open ir.values entry or create one."""
        if context is None:
            context = {}
        ctx = context.copy()
        # read_delta is a read-time flag; it must not leak into writes.
        if 'read_delta' in ctx:
            del ctx['read_delta']
        values_obj = self.pool.get('ir.values')
        values_ids = values_obj.search(cursor, user, [
            ('model', '=', self._name), ('key', '=', 'action'),
            ('key2', '=', 'tree_but_open'), ('res_id', '=', menu_id)],
            context=context)
        if values_ids:
            values_obj.write(cursor, user, values_ids[0], {'value': value},
                    context=ctx)
        else:
            values_obj.create(cursor, user, {
                'name': 'Menuitem',
                'model': self._name,
                'value': value,
                'object': True,
                'key': 'action',
                'key2': 'tree_but_open',
                'res_id': menu_id,
                }, context=ctx)

    _columns = {
        'name': fields.char('Menu', size=64, required=True, translate=True),
        'sequence': fields.integer('Sequence'),
        'child_id': fields.one2many('ir.ui.menu', 'parent_id', 'Child ids'),
        'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True),
        'groups_id': many2many_unique('res.groups', 'ir_ui_menu_group_rel',
            'menu_id', 'gid', 'Groups'),
        'complete_name': fields.function(_get_full_name, method=True,
            string='Complete Name', type='char', size=128),
        'icon': fields.selection(icons, 'Icon', size=64),
        'action': fields.function(_action, fnct_inv=_action_inv,
            method=True, type='reference', string='Action',
            selection=[
                ('ir.actions.report.custom', 'ir.actions.report.custom'),
                ('ir.actions.report.xml', 'ir.actions.report.xml'),
                ('ir.actions.act_window', 'ir.actions.act_window'),
                ('ir.actions.wizard', 'ir.actions.wizard'),
                ('ir.actions.url', 'ir.actions.act_url'),
            ]),
    }
    _defaults = {
        'icon': lambda *a: 'STOCK_OPEN',
        'sequence': lambda *a: 10
    }
    _order = "sequence,id"

ir_ui_menu()
|
|
|
@ -34,89 +34,89 @@ import netsvc
|
||||||
import os
|
import os
|
||||||
|
|
||||||
def _check_xml(self, cr, uid, ids, context={}):
    """Constraint: validate each view's 'arch' against the RelaxNG schema
    for its view type (base/rng/<type>.rng).

    Returns True when every view validates, False at the first failure.
    NOTE(review): mutable default for *context*; harmless here (unused
    beyond pass-through) but worth normalizing to None file-wide.
    """
    for view in self.browse(cr, uid, ids, context):
        eview = etree.fromstring(view.arch)
        frng = tools.file_open(os.path.join('base', 'rng', view.type + '.rng'))
        relaxng = etree.RelaxNG(file=frng)
        # FIX: close the schema file handle (it leaked on every call).
        frng.close()
        if not relaxng.validate(eview):
            logger = netsvc.Logger()
            logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view do not fit the required schema !')
            logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error)
            # FIX: route the offending arch through the logger instead of a
            # leftover debug print to stdout.
            logger.notifyChannel('init', netsvc.LOG_ERROR, view.arch)
            return False
    return True
class view(osv.osv):
    # UI view definition (form/tree/graph/calendar); the 'arch' text holds
    # the XML architecture, validated by the RelaxNG constraint below.
    _name = 'ir.ui.view'
    _columns = {
        'name': fields.char('View Name',size=64, required=True),
        'model': fields.char('Model', size=64, required=True),
        # Lowest priority wins when several views exist for a model (_order).
        'priority': fields.integer('Priority', required=True),
        'type': fields.selection((
            ('tree','Tree'),
            ('form','Form'),
            ('graph', 'Graph'),
            ('calendar', 'Calendar')), 'View Type', required=True),
        'arch': fields.text('View Architecture', required=True),
        'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
        'field_parent': fields.char('Childs Field',size=64),
    }
    _defaults = {
        # Minimal tree skeleton (typo 'Unknwown' is in the shipped default
        # and is preserved — it is runtime data, not a comment).
        'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
        'priority': lambda *a: 16
    }
    _order = "priority"
    _constraints = [
        # Module-level _check_xml validates 'arch' against base/rng/<type>.rng
        (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
    ]

view()
#class UserView(osv.osv):
|
#class UserView(osv.osv):
|
||||||
# _name = 'ir.ui.view.user'
|
# _name = 'ir.ui.view.user'
|
||||||
# _columns = {
|
# _columns = {
|
||||||
# 'name': fields.char('View Name',size=64, required=True),
|
# 'name': fields.char('View Name',size=64, required=True),
|
||||||
# 'model': fields.char('Model', size=64, required=True),
|
# 'model': fields.char('Model', size=64, required=True),
|
||||||
# 'priority': fields.integer('Priority', required=True),
|
# 'priority': fields.integer('Priority', required=True),
|
||||||
# 'type': fields.selection((
|
# 'type': fields.selection((
|
||||||
# ('tree','Tree'),
|
# ('tree','Tree'),
|
||||||
# ('form','Form'),
|
# ('form','Form'),
|
||||||
# ('graph', 'Graph'),
|
# ('graph', 'Graph'),
|
||||||
# ('calendar', 'Calendar')), 'View Type', required=True),
|
# ('calendar', 'Calendar')), 'View Type', required=True),
|
||||||
# 'arch': fields.text('View Architecture', required=True),
|
# 'arch': fields.text('View Architecture', required=True),
|
||||||
# 'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
|
# 'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
|
||||||
# 'field_parent': fields.char('Childs Field',size=64),
|
# 'field_parent': fields.char('Childs Field',size=64),
|
||||||
# 'user_id': fields.many2one('res.users', 'User'),
|
# 'user_id': fields.many2one('res.users', 'User'),
|
||||||
# 'ref_id': fields.many2one('ir.ui.view', 'Inherited View'),
|
# 'ref_id': fields.many2one('ir.ui.view', 'Inherited View'),
|
||||||
# }
|
# }
|
||||||
# _defaults = {
|
# _defaults = {
|
||||||
# 'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
|
# 'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
|
||||||
# 'priority': lambda *a: 16
|
# 'priority': lambda *a: 16
|
||||||
# }
|
# }
|
||||||
# _order = "priority"
|
# _order = "priority"
|
||||||
# _constraints = [
|
# _constraints = [
|
||||||
# (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
|
# (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
|
||||||
# ]
|
# ]
|
||||||
#
|
#
|
||||||
#UserView()
|
#UserView()
|
||||||
|
|
||||||
class view_sc(osv.osv):
    # Per-user sidebar shortcuts pointing at ir.values entries.
    _name = 'ir.ui.view_sc'
    _columns = {
        'name': fields.char('Shortcut Name', size=64, required=True),
        'res_id': fields.many2one('ir.values','Resource Ref.', ondelete='cascade'),
        'sequence': fields.integer('Sequence'),
        'user_id': fields.many2one('res.users', 'User Ref.', required=True, ondelete='cascade'),
        'resource': fields.char('Resource Name', size=64, required=True)
    }

    def get_sc(self, cr, uid, user_id, model='ir.ui.menu', context={}):
        """Return *user_id*'s shortcuts for *model*, read with the
        'res_id' and 'name' fields.

        NOTE(review): mutable default for *context* — shared across calls;
        harmless here (only passed through, never mutated) but worth fixing.
        """
        ids = self.search(cr, uid, [('user_id','=',user_id),('resource','=',model)], context=context)
        return self.read(cr, uid, ids, ['res_id','name'], context=context)

    _order = 'sequence'
    _defaults = {
        'resource': lambda *a: 'ir.ui.menu',
        # Default owner is the calling user.
        'user_id': lambda obj, cr, uid, context: uid,
    }

view_sc()
|
|
@ -32,221 +32,221 @@ import pickle
|
||||||
from tools.translate import _
|
from tools.translate import _
|
||||||
|
|
||||||
class ir_values(osv.osv):
|
class ir_values(osv.osv):
|
||||||
_name = 'ir.values'
|
_name = 'ir.values'
|
||||||
|
|
||||||
def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
|
def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
|
||||||
res = {}
|
res = {}
|
||||||
for report in self.browse(cursor, user, ids, context=context):
|
for report in self.browse(cursor, user, ids, context=context):
|
||||||
value = report[name[:-9]]
|
value = report[name[:-9]]
|
||||||
if not report.object and value:
|
if not report.object and value:
|
||||||
try:
|
try:
|
||||||
value = str(pickle.loads(value))
|
value = str(pickle.loads(value))
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
res[report.id] = value
|
res[report.id] = value
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _value_pickle(self, cursor, user, id, name, value, arg, context=None):
|
def _value_pickle(self, cursor, user, id, name, value, arg, context=None):
|
||||||
if context is None:
|
if context is None:
|
||||||
context = {}
|
context = {}
|
||||||
ctx = context.copy()
|
ctx = context.copy()
|
||||||
if 'read_delta' in ctx:
|
if 'read_delta' in ctx:
|
||||||
del ctx['read_delta']
|
del ctx['read_delta']
|
||||||
if not self.browse(cursor, user, id, context=context).object:
|
if not self.browse(cursor, user, id, context=context).object:
|
||||||
value = pickle.dumps(eval(value))
|
value = pickle.dumps(eval(value))
|
||||||
self.write(cursor, user, id, {name[:-9]: value}, context=ctx)
|
self.write(cursor, user, id, {name[:-9]: value}, context=ctx)
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=128),
|
'name': fields.char('Name', size=128),
|
||||||
'model': fields.char('Model', size=128),
|
'model': fields.char('Model', size=128),
|
||||||
'value': fields.text('Value'),
|
'value': fields.text('Value'),
|
||||||
'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
|
'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
|
||||||
method=True, type='text', string='Value'),
|
method=True, type='text', string='Value'),
|
||||||
'object': fields.boolean('Is Object'),
|
'object': fields.boolean('Is Object'),
|
||||||
'key': fields.char('Type', size=128),
|
'key': fields.char('Type', size=128),
|
||||||
'key2': fields.char('Value', size=256),
|
'key2': fields.char('Value', size=256),
|
||||||
'meta': fields.text('Meta Datas'),
|
'meta': fields.text('Meta Datas'),
|
||||||
'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
|
'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
|
||||||
method=True, type='text', string='Meta Datas'),
|
method=True, type='text', string='Meta Datas'),
|
||||||
'res_id': fields.integer('Resource ID'),
|
'res_id': fields.integer('Resource ID'),
|
||||||
'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
|
'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
|
||||||
'company_id': fields.many2one('res.company', 'Company')
|
'company_id': fields.many2one('res.company', 'Company')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'key': lambda *a: 'action',
|
'key': lambda *a: 'action',
|
||||||
'key2': lambda *a: 'tree_but_open',
|
'key2': lambda *a: 'tree_but_open',
|
||||||
'company_id': lambda *a: False
|
'company_id': lambda *a: False
|
||||||
}
|
}
|
||||||
|
|
||||||
def _auto_init(self, cr, context={}):
|
def _auto_init(self, cr, context={}):
|
||||||
super(ir_values, self)._auto_init(cr, context)
|
super(ir_values, self)._auto_init(cr, context)
|
||||||
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_index\'')
|
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_index\'')
|
||||||
if not cr.fetchone():
|
if not cr.fetchone():
|
||||||
cr.execute('CREATE INDEX ir_values_key_model_key2_index ON ir_values (key, model, key2)')
|
cr.execute('CREATE INDEX ir_values_key_model_key2_index ON ir_values (key, model, key2)')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
|
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
|
||||||
if type(value)==type(u''):
|
if type(value)==type(u''):
|
||||||
value = value.encode('utf8')
|
value = value.encode('utf8')
|
||||||
if not isobject:
|
if not isobject:
|
||||||
value = pickle.dumps(value)
|
value = pickle.dumps(value)
|
||||||
if meta:
|
if meta:
|
||||||
meta = pickle.dumps(meta)
|
meta = pickle.dumps(meta)
|
||||||
ids_res = []
|
ids_res = []
|
||||||
for model in models:
|
for model in models:
|
||||||
if type(model)==type([]) or type(model)==type(()):
|
if type(model)==type([]) or type(model)==type(()):
|
||||||
model,res_id = model
|
model,res_id = model
|
||||||
else:
|
else:
|
||||||
res_id=False
|
res_id=False
|
||||||
if replace:
|
if replace:
|
||||||
if key in ('meta', 'default'):
|
if key in ('meta', 'default'):
|
||||||
ids = self.search(cr, uid, [
|
ids = self.search(cr, uid, [
|
||||||
('key', '=', key),
|
('key', '=', key),
|
||||||
('key2', '=', key2),
|
('key2', '=', key2),
|
||||||
('name', '=', name),
|
('name', '=', name),
|
||||||
('model', '=', model),
|
('model', '=', model),
|
||||||
('res_id', '=', res_id),
|
('res_id', '=', res_id),
|
||||||
('user_id', '=', preserve_user and uid)
|
('user_id', '=', preserve_user and uid)
|
||||||
])
|
])
|
||||||
else:
|
else:
|
||||||
ids = self.search(cr, uid, [
|
ids = self.search(cr, uid, [
|
||||||
('key', '=', key),
|
('key', '=', key),
|
||||||
('key2', '=', key2),
|
('key2', '=', key2),
|
||||||
('value', '=', value),
|
('value', '=', value),
|
||||||
('model', '=', model),
|
('model', '=', model),
|
||||||
('res_id', '=', res_id),
|
('res_id', '=', res_id),
|
||||||
('user_id', '=', preserve_user and uid)
|
('user_id', '=', preserve_user and uid)
|
||||||
])
|
])
|
||||||
self.unlink(cr, uid, ids)
|
self.unlink(cr, uid, ids)
|
||||||
vals = {
|
vals = {
|
||||||
'name': name,
|
'name': name,
|
||||||
'value': value,
|
'value': value,
|
||||||
'model': model,
|
'model': model,
|
||||||
'object': isobject,
|
'object': isobject,
|
||||||
'key': key,
|
'key': key,
|
||||||
'key2': key2 and key2[:200],
|
'key2': key2 and key2[:200],
|
||||||
'meta': meta,
|
'meta': meta,
|
||||||
'user_id': preserve_user and uid,
|
'user_id': preserve_user and uid,
|
||||||
}
|
}
|
||||||
if company:
|
if company:
|
||||||
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
|
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
|
||||||
vals['company_id']=cid
|
vals['company_id']=cid
|
||||||
if res_id:
|
if res_id:
|
||||||
vals['res_id']= res_id
|
vals['res_id']= res_id
|
||||||
ids_res.append(self.create(cr, uid, vals))
|
ids_res.append(self.create(cr, uid, vals))
|
||||||
return ids_res
|
return ids_res
|
||||||
|
|
||||||
def get(self, cr, uid, key, key2, models, meta=False, context={}, res_id_req=False, without_user=True, key2_req=True):
|
def get(self, cr, uid, key, key2, models, meta=False, context={}, res_id_req=False, without_user=True, key2_req=True):
|
||||||
result = []
|
result = []
|
||||||
for m in models:
|
for m in models:
|
||||||
if type(m)==type([]) or type(m)==type(()):
|
if type(m)==type([]) or type(m)==type(()):
|
||||||
m,res_id = m
|
m,res_id = m
|
||||||
else:
|
else:
|
||||||
res_id=False
|
res_id=False
|
||||||
|
|
||||||
where1 = ['key=%s','model=%s']
|
where1 = ['key=%s','model=%s']
|
||||||
where2 = [key,str(m)]
|
where2 = [key,str(m)]
|
||||||
where_opt = []
|
where_opt = []
|
||||||
if key2:
|
if key2:
|
||||||
where1.append('key2=%s')
|
where1.append('key2=%s')
|
||||||
where2.append(key2[:200])
|
where2.append(key2[:200])
|
||||||
else:
|
else:
|
||||||
dest = where1
|
dest = where1
|
||||||
if not key2_req or meta:
|
if not key2_req or meta:
|
||||||
dest=where_opt
|
dest=where_opt
|
||||||
dest.append('key2 is null')
|
dest.append('key2 is null')
|
||||||
|
|
||||||
if res_id_req and (models[-1][0]==m):
|
if res_id_req and (models[-1][0]==m):
|
||||||
if res_id:
|
if res_id:
|
||||||
where1.append('res_id=%d' % (res_id,))
|
where1.append('res_id=%d' % (res_id,))
|
||||||
else:
|
else:
|
||||||
where1.append('(res_id is NULL)')
|
where1.append('(res_id is NULL)')
|
||||||
elif res_id:
|
elif res_id:
|
||||||
if (models[-1][0]==m):
|
if (models[-1][0]==m):
|
||||||
where1.append('(res_id=%d or (res_id is null))' % (res_id,))
|
where1.append('(res_id=%d or (res_id is null))' % (res_id,))
|
||||||
where_opt.append('res_id=%d' % (res_id,))
|
where_opt.append('res_id=%d' % (res_id,))
|
||||||
else:
|
else:
|
||||||
where1.append('res_id=%d' % (res_id,))
|
where1.append('res_id=%d' % (res_id,))
|
||||||
|
|
||||||
# if not without_user:
|
# if not without_user:
|
||||||
where_opt.append('user_id=%d' % (uid,))
|
where_opt.append('user_id=%d' % (uid,))
|
||||||
|
|
||||||
|
|
||||||
result = []
|
result = []
|
||||||
ok = True
|
ok = True
|
||||||
while ok:
|
while ok:
|
||||||
if not where_opt:
|
if not where_opt:
|
||||||
cr.execute('select id from ir_values where ' +\
|
cr.execute('select id from ir_values where ' +\
|
||||||
' and '.join(where1)+' and user_id is null', where2)
|
' and '.join(where1)+' and user_id is null', where2)
|
||||||
else:
|
else:
|
||||||
cr.execute('select id from ir_values where ' +\
|
cr.execute('select id from ir_values where ' +\
|
||||||
' and '.join(where1+where_opt), where2)
|
' and '.join(where1+where_opt), where2)
|
||||||
result.extend([x[0] for x in cr.fetchall()])
|
result.extend([x[0] for x in cr.fetchall()])
|
||||||
if len(where_opt):
|
if len(where_opt):
|
||||||
where_opt.pop()
|
where_opt.pop()
|
||||||
else:
|
else:
|
||||||
ok = False
|
ok = False
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
break
|
break
|
||||||
|
|
||||||
if not result:
|
if not result:
|
||||||
return []
|
return []
|
||||||
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
|
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
|
||||||
cr.execute('select id,name,value,object,meta, key ' \
|
cr.execute('select id,name,value,object,meta, key ' \
|
||||||
'from ir_values ' \
|
'from ir_values ' \
|
||||||
'where id in ('+','.join(map(str,result))+') ' \
|
'where id in ('+','.join(map(str,result))+') ' \
|
||||||
'and (company_id is null or company_id = %d) '\
|
'and (company_id is null or company_id = %d) '\
|
||||||
'ORDER BY user_id', (cid,))
|
'ORDER BY user_id', (cid,))
|
||||||
result = cr.fetchall()
|
result = cr.fetchall()
|
||||||
|
|
||||||
def _result_get(x, keys):
|
def _result_get(x, keys):
|
||||||
if x[1] in keys:
|
if x[1] in keys:
|
||||||
return False
|
return False
|
||||||
keys.append(x[1])
|
keys.append(x[1])
|
||||||
if x[3]:
|
if x[3]:
|
||||||
model,id = x[2].split(',')
|
model,id = x[2].split(',')
|
||||||
try:
|
try:
|
||||||
id = int(id)
|
id = int(id)
|
||||||
datas = self.pool.get(model).read(cr, uid, [id], False, context)
|
datas = self.pool.get(model).read(cr, uid, [id], False, context)
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
if not len(datas):
|
if not len(datas):
|
||||||
#ir_del(cr, uid, x[0])
|
#ir_del(cr, uid, x[0])
|
||||||
return False
|
return False
|
||||||
def clean(x):
|
def clean(x):
|
||||||
for key in ('report_sxw_content', 'report_rml_content',
|
for key in ('report_sxw_content', 'report_rml_content',
|
||||||
'report_sxw', 'report_rml', 'report_sxw_content_data',
|
'report_sxw', 'report_rml', 'report_sxw_content_data',
|
||||||
'report_rml_content_data'):
|
'report_rml_content_data'):
|
||||||
if key in x:
|
if key in x:
|
||||||
del x[key]
|
del x[key]
|
||||||
return x
|
return x
|
||||||
datas = clean(datas[0])
|
datas = clean(datas[0])
|
||||||
else:
|
else:
|
||||||
datas = pickle.loads(x[2])
|
datas = pickle.loads(x[2])
|
||||||
if meta:
|
if meta:
|
||||||
meta2 = pickle.loads(x[4])
|
meta2 = pickle.loads(x[4])
|
||||||
return (x[0],x[1],datas,meta2)
|
return (x[0],x[1],datas,meta2)
|
||||||
return (x[0],x[1],datas)
|
return (x[0],x[1],datas)
|
||||||
keys = []
|
keys = []
|
||||||
res = filter(bool, map(lambda x: _result_get(x, keys), list(result)))
|
res = filter(bool, map(lambda x: _result_get(x, keys), list(result)))
|
||||||
res2 = res[:]
|
res2 = res[:]
|
||||||
for r in res:
|
for r in res:
|
||||||
if type(r) == type([]):
|
if type(r) == type([]):
|
||||||
if r[2]['type'] == 'ir.actions.report.xml' or r[2]['type'] == 'ir.actions.report.xml':
|
if r[2]['type'] == 'ir.actions.report.xml' or r[2]['type'] == 'ir.actions.report.xml':
|
||||||
print
|
print
|
||||||
if r[2].has_key('groups_id'):
|
if r[2].has_key('groups_id'):
|
||||||
groups = r[2]['groups_id']
|
groups = r[2]['groups_id']
|
||||||
if len(groups) > 0:
|
if len(groups) > 0:
|
||||||
group_ids = ','.join([ str(x) for x in r[2]['groups_id']])
|
group_ids = ','.join([ str(x) for x in r[2]['groups_id']])
|
||||||
cr.execute("select count(*) from res_groups_users_rel where gid in (%s) and uid='%s'" % (group_ids, uid))
|
cr.execute("select count(*) from res_groups_users_rel where gid in (%s) and uid='%s'" % (group_ids, uid))
|
||||||
gr_ids = cr.fetchall()
|
gr_ids = cr.fetchall()
|
||||||
if not gr_ids[0][0] > 0:
|
if not gr_ids[0][0] > 0:
|
||||||
res2.remove(r)
|
res2.remove(r)
|
||||||
else:
|
else:
|
||||||
#raise osv.except_osv('Error !','You have not permission to perform operation !!!')
|
#raise osv.except_osv('Error !','You have not permission to perform operation !!!')
|
||||||
res2.remove(r)
|
res2.remove(r)
|
||||||
return res2
|
return res2
|
||||||
ir_values()
|
ir_values()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -30,57 +30,57 @@
|
||||||
from osv import fields,osv
|
from osv import fields,osv
|
||||||
|
|
||||||
class wizard_model_menu(osv.osv_memory):
|
class wizard_model_menu(osv.osv_memory):
|
||||||
_name = 'wizard.ir.model.menu.create'
|
_name = 'wizard.ir.model.menu.create'
|
||||||
_columns = {
|
_columns = {
|
||||||
'model_id': fields.many2one('ir.model','Model', required=True),
|
'model_id': fields.many2one('ir.model','Model', required=True),
|
||||||
'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
|
'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
|
||||||
'name': fields.char('Menu Name', size=64, required=True),
|
'name': fields.char('Menu Name', size=64, required=True),
|
||||||
'view_ids': fields.one2many('wizard.ir.model.menu.create.line', 'wizard_id', 'Views'),
|
'view_ids': fields.one2many('wizard.ir.model.menu.create.line', 'wizard_id', 'Views'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'model_id': lambda self,cr,uid,ctx: ctx.get('model_id', False)
|
'model_id': lambda self,cr,uid,ctx: ctx.get('model_id', False)
|
||||||
}
|
}
|
||||||
def menu_create(self, cr, uid, ids, context={}):
|
def menu_create(self, cr, uid, ids, context={}):
|
||||||
for menu in self.browse(cr, uid, ids, context):
|
for menu in self.browse(cr, uid, ids, context):
|
||||||
view_mode = []
|
view_mode = []
|
||||||
views = []
|
views = []
|
||||||
for view in menu.view_ids:
|
for view in menu.view_ids:
|
||||||
view_mode.append(view.view_type)
|
view_mode.append(view.view_type)
|
||||||
views.append( (0,0,{
|
views.append( (0,0,{
|
||||||
'view_id': view.view_id and view.view_id.id or False,
|
'view_id': view.view_id and view.view_id.id or False,
|
||||||
'view_mode': view.view_type,
|
'view_mode': view.view_type,
|
||||||
'sequence': view.sequence
|
'sequence': view.sequence
|
||||||
}))
|
}))
|
||||||
val = {
|
val = {
|
||||||
'name': menu.name,
|
'name': menu.name,
|
||||||
'res_model': menu.model_id.model,
|
'res_model': menu.model_id.model,
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
'view_mode': ','.join(view_mode)
|
'view_mode': ','.join(view_mode)
|
||||||
}
|
}
|
||||||
if views:
|
if views:
|
||||||
val['view_ids'] = views
|
val['view_ids'] = views
|
||||||
else:
|
else:
|
||||||
val['view_mode'] = 'tree,form'
|
val['view_mode'] = 'tree,form'
|
||||||
action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val)
|
action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val)
|
||||||
self.pool.get('ir.ui.menu').create(cr, uid, {
|
self.pool.get('ir.ui.menu').create(cr, uid, {
|
||||||
'name': menu.name,
|
'name': menu.name,
|
||||||
'parent_id': menu.menu_id.id,
|
'parent_id': menu.menu_id.id,
|
||||||
'action': 'ir.actions.act_window,%d' % (action_id,),
|
'action': 'ir.actions.act_window,%d' % (action_id,),
|
||||||
'icon': 'STOCK_INDENT'
|
'icon': 'STOCK_INDENT'
|
||||||
}, context)
|
}, context)
|
||||||
return {'type':'ir.actions.act_window_close'}
|
return {'type':'ir.actions.act_window_close'}
|
||||||
wizard_model_menu()
|
wizard_model_menu()
|
||||||
|
|
||||||
class wizard_model_menu_line(osv.osv_memory):
|
class wizard_model_menu_line(osv.osv_memory):
|
||||||
_name = 'wizard.ir.model.menu.create.line'
|
_name = 'wizard.ir.model.menu.create.line'
|
||||||
_columns = {
|
_columns = {
|
||||||
'wizard_id': fields.many2one('wizard.ir.model.menu.create','Wizard'),
|
'wizard_id': fields.many2one('wizard.ir.model.menu.create','Wizard'),
|
||||||
'sequence': fields.integer('Sequence'),
|
'sequence': fields.integer('Sequence'),
|
||||||
'view_type': fields.selection([('tree','Tree'),('form','Form'),('graph','Graph'),('calendar','Calendar')],'View Type',required=True),
|
'view_type': fields.selection([('tree','Tree'),('form','Form'),('graph','Graph'),('calendar','Calendar')],'View Type',required=True),
|
||||||
'view_id': fields.many2one('ir.ui.view', 'View'),
|
'view_id': fields.many2one('ir.ui.view', 'View'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'view_type': lambda self,cr,uid,ctx: 'tree'
|
'view_type': lambda self,cr,uid,ctx: 'tree'
|
||||||
}
|
}
|
||||||
wizard_model_menu_line()
|
wizard_model_menu_line()
|
||||||
|
|
||||||
|
|
|
@ -34,175 +34,175 @@ import report,pooler,tools
|
||||||
|
|
||||||
|
|
||||||
def graph_get(cr, graph, wkf_id, nested=False, workitem={}):
|
def graph_get(cr, graph, wkf_id, nested=False, workitem={}):
|
||||||
import pydot
|
import pydot
|
||||||
cr.execute('select * from wkf_activity where wkf_id=%d', (wkf_id,))
|
cr.execute('select * from wkf_activity where wkf_id=%d', (wkf_id,))
|
||||||
nodes = cr.dictfetchall()
|
nodes = cr.dictfetchall()
|
||||||
activities = {}
|
activities = {}
|
||||||
actfrom = {}
|
actfrom = {}
|
||||||
actto = {}
|
actto = {}
|
||||||
for n in nodes:
|
for n in nodes:
|
||||||
activities[n['id']] = n
|
activities[n['id']] = n
|
||||||
if n['subflow_id'] and nested:
|
if n['subflow_id'] and nested:
|
||||||
cr.execute('select * from wkf where id=%d', (n['subflow_id'],))
|
cr.execute('select * from wkf where id=%d', (n['subflow_id'],))
|
||||||
wkfinfo = cr.dictfetchone()
|
wkfinfo = cr.dictfetchone()
|
||||||
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize=12, label = "Subflow: "+n['name']+'\\nOSV: '+wkfinfo['osv'])
|
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize=12, label = "Subflow: "+n['name']+'\\nOSV: '+wkfinfo['osv'])
|
||||||
(s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem)
|
(s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem)
|
||||||
graph.add_subgraph(graph2)
|
graph.add_subgraph(graph2)
|
||||||
actfrom[n['id']] = s2
|
actfrom[n['id']] = s2
|
||||||
actto[n['id']] = s1
|
actto[n['id']] = s1
|
||||||
else:
|
else:
|
||||||
args = {}
|
args = {}
|
||||||
if n['flow_start'] or n['flow_stop']:
|
if n['flow_start'] or n['flow_stop']:
|
||||||
args['style']='filled'
|
args['style']='filled'
|
||||||
args['color']='lightgrey'
|
args['color']='lightgrey'
|
||||||
args['label']=n['name']
|
args['label']=n['name']
|
||||||
if n['subflow_id']:
|
if n['subflow_id']:
|
||||||
args['shape'] = 'box'
|
args['shape'] = 'box'
|
||||||
if n['id'] in workitem:
|
if n['id'] in workitem:
|
||||||
args['label']+='\\nx '+str(workitem[n['id']])
|
args['label']+='\\nx '+str(workitem[n['id']])
|
||||||
args['color'] = "red"
|
args['color'] = "red"
|
||||||
graph.add_node(pydot.Node(n['id'], **args))
|
graph.add_node(pydot.Node(n['id'], **args))
|
||||||
actfrom[n['id']] = (n['id'],{})
|
actfrom[n['id']] = (n['id'],{})
|
||||||
actto[n['id']] = (n['id'],{})
|
actto[n['id']] = (n['id'],{})
|
||||||
cr.execute('select * from wkf_transition where act_from in ('+','.join(map(lambda x: str(x['id']),nodes))+')')
|
cr.execute('select * from wkf_transition where act_from in ('+','.join(map(lambda x: str(x['id']),nodes))+')')
|
||||||
transitions = cr.dictfetchall()
|
transitions = cr.dictfetchall()
|
||||||
for t in transitions:
|
for t in transitions:
|
||||||
args = {}
|
args = {}
|
||||||
args['label'] = str(t['condition']).replace(' or ', '\\nor ').replace(' and ', '\\nand ')
|
args['label'] = str(t['condition']).replace(' or ', '\\nor ').replace(' and ', '\\nand ')
|
||||||
if t['signal']:
|
if t['signal']:
|
||||||
args['label'] += '\\n'+str(t['signal'])
|
args['label'] += '\\n'+str(t['signal'])
|
||||||
args['style'] = 'bold'
|
args['style'] = 'bold'
|
||||||
|
|
||||||
if activities[t['act_from']]['split_mode']=='AND':
|
if activities[t['act_from']]['split_mode']=='AND':
|
||||||
args['arrowtail']='box'
|
args['arrowtail']='box'
|
||||||
elif str(activities[t['act_from']]['split_mode'])=='OR ':
|
elif str(activities[t['act_from']]['split_mode'])=='OR ':
|
||||||
args['arrowtail']='inv'
|
args['arrowtail']='inv'
|
||||||
|
|
||||||
if activities[t['act_to']]['join_mode']=='AND':
|
if activities[t['act_to']]['join_mode']=='AND':
|
||||||
args['arrowhead']='crow'
|
args['arrowhead']='crow'
|
||||||
|
|
||||||
activity_from = actfrom[t['act_from']][1].get(t['signal'], actfrom[t['act_from']][0])
|
activity_from = actfrom[t['act_from']][1].get(t['signal'], actfrom[t['act_from']][0])
|
||||||
activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0])
|
activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0])
|
||||||
graph.add_edge(pydot.Edge( activity_from ,activity_to, fontsize=10, **args))
|
graph.add_edge(pydot.Edge( activity_from ,activity_to, fontsize=10, **args))
|
||||||
nodes = cr.dictfetchall()
|
nodes = cr.dictfetchall()
|
||||||
cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%d limit 1', (wkf_id,))
|
cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%d limit 1', (wkf_id,))
|
||||||
start = cr.fetchone()[0]
|
start = cr.fetchone()[0]
|
||||||
cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%d", (wkf_id,))
|
cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%d", (wkf_id,))
|
||||||
stop = cr.fetchall()
|
stop = cr.fetchall()
|
||||||
stop = (stop[0][1], dict(stop))
|
stop = (stop[0][1], dict(stop))
|
||||||
return ((start,{}),stop)
|
return ((start,{}),stop)
|
||||||
|
|
||||||
|
|
||||||
def graph_instance_get(cr, graph, inst_id, nested=False):
|
def graph_instance_get(cr, graph, inst_id, nested=False):
|
||||||
workitems = {}
|
workitems = {}
|
||||||
cr.execute('select * from wkf_instance where id=%d', (inst_id,))
|
cr.execute('select * from wkf_instance where id=%d', (inst_id,))
|
||||||
inst = cr.dictfetchone()
|
inst = cr.dictfetchone()
|
||||||
|
|
||||||
def workitem_get(instance):
|
def workitem_get(instance):
|
||||||
cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%d group by act_id', (instance,))
|
cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%d group by act_id', (instance,))
|
||||||
workitems = dict(cr.fetchall())
|
workitems = dict(cr.fetchall())
|
||||||
|
|
||||||
cr.execute('select subflow_id from wkf_workitem where inst_id=%d', (instance,))
|
cr.execute('select subflow_id from wkf_workitem where inst_id=%d', (instance,))
|
||||||
for (subflow_id,) in cr.fetchall():
|
for (subflow_id,) in cr.fetchall():
|
||||||
workitems.update(workitem_get(subflow_id))
|
workitems.update(workitem_get(subflow_id))
|
||||||
return workitems
|
return workitems
|
||||||
graph_get(cr, graph, inst['wkf_id'], nested, workitem_get(inst_id))
|
graph_get(cr, graph, inst['wkf_id'], nested, workitem_get(inst_id))
|
||||||
|
|
||||||
#
|
#
|
||||||
# TODO: pas clean: concurrent !!!
|
# TODO: pas clean: concurrent !!!
|
||||||
#
|
#
|
||||||
|
|
||||||
class report_graph_instance(object):
|
class report_graph_instance(object):
|
||||||
def __init__(self, cr, uid, ids, data):
|
def __init__(self, cr, uid, ids, data):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
try:
|
try:
|
||||||
import pydot
|
import pydot
|
||||||
except Exception,e:
|
except Exception,e:
|
||||||
logger.notifyChannel('workflow', netsvc.LOG_WARNING,
|
logger.notifyChannel('workflow', netsvc.LOG_WARNING,
|
||||||
'Import Error for pydot, you will not be able to render workflows\n'
|
'Import Error for pydot, you will not be able to render workflows\n'
|
||||||
'Consider Installing PyDot or dependencies: http://dkbza.org/pydot.html')
|
'Consider Installing PyDot or dependencies: http://dkbza.org/pydot.html')
|
||||||
raise e
|
raise e
|
||||||
self.done = False
|
self.done = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
cr.execute('select * from wkf where osv=%s limit 1',
|
cr.execute('select * from wkf where osv=%s limit 1',
|
||||||
(data['model'],))
|
(data['model'],))
|
||||||
wkfinfo = cr.dictfetchone()
|
wkfinfo = cr.dictfetchone()
|
||||||
if not wkfinfo:
|
if not wkfinfo:
|
||||||
ps_string = '''%PS-Adobe-3.0
|
ps_string = '''%PS-Adobe-3.0
|
||||||
/inch {72 mul} def
|
/inch {72 mul} def
|
||||||
/Times-Roman findfont 50 scalefont setfont
|
/Times-Roman findfont 50 scalefont setfont
|
||||||
1.5 inch 15 inch moveto
|
1.5 inch 15 inch moveto
|
||||||
(No workflow defined) show
|
(No workflow defined) show
|
||||||
showpage'''
|
showpage'''
|
||||||
else:
|
else:
|
||||||
cr.execute('SELECT id FROM wkf_instance \
|
cr.execute('SELECT id FROM wkf_instance \
|
||||||
WHERE res_id=%d AND wkf_id=%d \
|
WHERE res_id=%d AND wkf_id=%d \
|
||||||
ORDER BY state LIMIT 1',
|
ORDER BY state LIMIT 1',
|
||||||
(data['id'], wkfinfo['id']))
|
(data['id'], wkfinfo['id']))
|
||||||
inst_id = cr.fetchone()
|
inst_id = cr.fetchone()
|
||||||
if not inst_id:
|
if not inst_id:
|
||||||
ps_string = '''%PS-Adobe-3.0
|
ps_string = '''%PS-Adobe-3.0
|
||||||
/inch {72 mul} def
|
/inch {72 mul} def
|
||||||
/Times-Roman findfont 50 scalefont setfont
|
/Times-Roman findfont 50 scalefont setfont
|
||||||
1.5 inch 15 inch moveto
|
1.5 inch 15 inch moveto
|
||||||
(No workflow instance defined) show
|
(No workflow instance defined) show
|
||||||
showpage'''
|
showpage'''
|
||||||
else:
|
else:
|
||||||
inst_id = inst_id[0]
|
inst_id = inst_id[0]
|
||||||
graph = pydot.Dot(fontsize=16, label="\\n\\nWorkflow: %s\\n OSV: %s" % (wkfinfo['name'],wkfinfo['osv']))
|
graph = pydot.Dot(fontsize=16, label="\\n\\nWorkflow: %s\\n OSV: %s" % (wkfinfo['name'],wkfinfo['osv']))
|
||||||
graph.set('size', '10.7,7.3')
|
graph.set('size', '10.7,7.3')
|
||||||
graph.set('center', '1')
|
graph.set('center', '1')
|
||||||
graph.set('ratio', 'auto')
|
graph.set('ratio', 'auto')
|
||||||
graph.set('rotate', '90')
|
graph.set('rotate', '90')
|
||||||
graph.set('rankdir', 'LR')
|
graph.set('rankdir', 'LR')
|
||||||
graph_instance_get(cr, graph, inst_id, data.get('nested', False))
|
graph_instance_get(cr, graph, inst_id, data.get('nested', False))
|
||||||
ps_string = graph.create_ps(prog='dot')
|
ps_string = graph.create_ps(prog='dot')
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
import traceback, sys
|
import traceback, sys
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
logger.notifyChannel('workflow', netsvc.LOG_ERROR, 'Exception in call: ' + tb_s)
|
logger.notifyChannel('workflow', netsvc.LOG_ERROR, 'Exception in call: ' + tb_s)
|
||||||
# string is in PS, like the success message would have been
|
# string is in PS, like the success message would have been
|
||||||
ps_string = '''%PS-Adobe-3.0
|
ps_string = '''%PS-Adobe-3.0
|
||||||
/inch {72 mul} def
|
/inch {72 mul} def
|
||||||
/Times-Roman findfont 50 scalefont setfont
|
/Times-Roman findfont 50 scalefont setfont
|
||||||
1.5 inch 15 inch moveto
|
1.5 inch 15 inch moveto
|
||||||
(No workflow available) show
|
(No workflow available) show
|
||||||
showpage'''
|
showpage'''
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
prog = 'ps2pdf.bat'
|
prog = 'ps2pdf.bat'
|
||||||
else:
|
else:
|
||||||
prog = 'ps2pdf'
|
prog = 'ps2pdf'
|
||||||
args = (prog, '-', '-')
|
args = (prog, '-', '-')
|
||||||
input, output = tools.exec_command_pipe(*args)
|
input, output = tools.exec_command_pipe(*args)
|
||||||
input.write(ps_string)
|
input.write(ps_string)
|
||||||
input.close()
|
input.close()
|
||||||
self.result = output.read()
|
self.result = output.read()
|
||||||
output.close()
|
output.close()
|
||||||
self.done = True
|
self.done = True
|
||||||
|
|
||||||
def is_done(self):
|
def is_done(self):
|
||||||
return self.done
|
return self.done
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
if self.done:
|
if self.done:
|
||||||
return self.result
|
return self.result
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
class report_graph(report.interface.report_int):
|
class report_graph(report.interface.report_int):
|
||||||
def __init__(self, name, table):
|
def __init__(self, name, table):
|
||||||
report.interface.report_int.__init__(self, name)
|
report.interface.report_int.__init__(self, name)
|
||||||
self.table = table
|
self.table = table
|
||||||
|
|
||||||
def result(self):
|
def result(self):
|
||||||
if self.obj.is_done():
|
if self.obj.is_done():
|
||||||
return (True, self.obj.get(), 'pdf')
|
return (True, self.obj.get(), 'pdf')
|
||||||
else:
|
else:
|
||||||
return (False, False, False)
|
return (False, False, False)
|
||||||
|
|
||||||
def create(self, cr, uid, ids, data, context={}):
|
def create(self, cr, uid, ids, data, context={}):
|
||||||
self.obj = report_graph_instance(cr, uid, ids, data)
|
self.obj = report_graph_instance(cr, uid, ids, data)
|
||||||
return (self.obj.get(), 'pdf')
|
return (self.obj.get(), 'pdf')
|
||||||
|
|
||||||
report_graph('report.workflow.instance.graph', 'ir.workflow')
|
report_graph('report.workflow.instance.graph', 'ir.workflow')
|
||||||
|
|
|
@ -14,339 +14,339 @@ import glob
|
||||||
import pydot
|
import pydot
|
||||||
|
|
||||||
from pyparsing import __version__ as pyparsing_version
|
from pyparsing import __version__ as pyparsing_version
|
||||||
from pyparsing import Literal, CaselessLiteral, Word, \
|
from pyparsing import Literal, CaselessLiteral, Word, \
|
||||||
Upcase, OneOrMore, ZeroOrMore, Forward, NotAny, \
|
Upcase, OneOrMore, ZeroOrMore, Forward, NotAny, \
|
||||||
delimitedList, oneOf, Group, Optional, Combine, \
|
delimitedList, oneOf, Group, Optional, Combine, \
|
||||||
alphas, nums, restOfLine, cStyleComment, nums, \
|
alphas, nums, restOfLine, cStyleComment, nums, \
|
||||||
alphanums, printables, empty, quotedString, \
|
alphanums, printables, empty, quotedString, \
|
||||||
ParseException, ParseResults, CharsNotIn, _noncomma
|
ParseException, ParseResults, CharsNotIn, _noncomma
|
||||||
|
|
||||||
|
|
||||||
class P_AttrList:
|
class P_AttrList:
|
||||||
def __init__(self, toks):
|
def __init__(self, toks):
|
||||||
self.attrs = {}
|
self.attrs = {}
|
||||||
i = 0
|
i = 0
|
||||||
while i < len(toks):
|
while i < len(toks):
|
||||||
attrname = toks[i]
|
attrname = toks[i]
|
||||||
attrvalue = toks[i+1]
|
attrvalue = toks[i+1]
|
||||||
self.attrs[attrname] = attrvalue
|
self.attrs[attrname] = attrvalue
|
||||||
i += 2
|
i += 2
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "%s(%r)" % (self.__class__.__name__, self.attrs)
|
return "%s(%r)" % (self.__class__.__name__, self.attrs)
|
||||||
|
|
||||||
|
|
||||||
class DefaultStatement(P_AttrList):
|
class DefaultStatement(P_AttrList):
|
||||||
def __init__(self, default_type, attrs):
|
def __init__(self, default_type, attrs):
|
||||||
self.default_type = default_type
|
self.default_type = default_type
|
||||||
self.attrs = attrs
|
self.attrs = attrs
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "%s(%s, %r)" % \
|
return "%s(%s, %r)" % \
|
||||||
(self.__class__.__name__, self.default_type, self.attrs)
|
(self.__class__.__name__, self.default_type, self.attrs)
|
||||||
|
|
||||||
|
|
||||||
def push_top_graph_stmt(str, loc, toks):
|
def push_top_graph_stmt(str, loc, toks):
|
||||||
attrs = {}
|
attrs = {}
|
||||||
g = None
|
g = None
|
||||||
|
|
||||||
for element in toks:
|
for element in toks:
|
||||||
if isinstance(element, ParseResults) or \
|
if isinstance(element, ParseResults) or \
|
||||||
isinstance(element, tuple) or \
|
isinstance(element, tuple) or \
|
||||||
isinstance(element, list):
|
isinstance(element, list):
|
||||||
|
|
||||||
element = element[0]
|
element = element[0]
|
||||||
|
|
||||||
if element == 'strict':
|
if element == 'strict':
|
||||||
attrs['strict'] = True
|
attrs['strict'] = True
|
||||||
elif element in ['graph', 'digraph']:
|
elif element in ['graph', 'digraph']:
|
||||||
attrs['graph_type'] = element
|
attrs['graph_type'] = element
|
||||||
elif type(element) == type(''):
|
elif type(element) == type(''):
|
||||||
attrs['graph_name'] = element
|
attrs['graph_name'] = element
|
||||||
elif isinstance(element, pydot.Graph):
|
elif isinstance(element, pydot.Graph):
|
||||||
g = pydot.Graph(**attrs)
|
g = pydot.Graph(**attrs)
|
||||||
g.__dict__.update(element.__dict__)
|
g.__dict__.update(element.__dict__)
|
||||||
for e in g.get_edge_list():
|
for e in g.get_edge_list():
|
||||||
e.parent_graph = g
|
e.parent_graph = g
|
||||||
for e in g.get_node_list():
|
for e in g.get_node_list():
|
||||||
e.parent_graph = g
|
e.parent_graph = g
|
||||||
for e in g.get_subgraph_list():
|
for e in g.get_subgraph_list():
|
||||||
e.set_graph_parent(g)
|
e.set_graph_parent(g)
|
||||||
|
|
||||||
elif isinstance(element, P_AttrList):
|
elif isinstance(element, P_AttrList):
|
||||||
attrs.update(element.attrs)
|
attrs.update(element.attrs)
|
||||||
else:
|
else:
|
||||||
raise ValueError, "Unknown element statement: %r " % element
|
raise ValueError, "Unknown element statement: %r " % element
|
||||||
|
|
||||||
if g is not None:
|
if g is not None:
|
||||||
g.__dict__.update(attrs)
|
g.__dict__.update(attrs)
|
||||||
return g
|
return g
|
||||||
|
|
||||||
|
|
||||||
def add_defaults(element, defaults):
|
def add_defaults(element, defaults):
|
||||||
d = element.__dict__
|
d = element.__dict__
|
||||||
for key, value in defaults.items():
|
for key, value in defaults.items():
|
||||||
if not d.get(key):
|
if not d.get(key):
|
||||||
d[key] = value
|
d[key] = value
|
||||||
|
|
||||||
|
|
||||||
def add_elements(g, toks, defaults_graph=None, defaults_node=None, defaults_edge=None):
|
def add_elements(g, toks, defaults_graph=None, defaults_node=None, defaults_edge=None):
|
||||||
|
|
||||||
if defaults_graph is None:
|
if defaults_graph is None:
|
||||||
defaults_graph = {}
|
defaults_graph = {}
|
||||||
if defaults_node is None:
|
if defaults_node is None:
|
||||||
defaults_node = {}
|
defaults_node = {}
|
||||||
if defaults_edge is None:
|
if defaults_edge is None:
|
||||||
defaults_edge = {}
|
defaults_edge = {}
|
||||||
|
|
||||||
for element in toks:
|
for element in toks:
|
||||||
if isinstance(element, pydot.Graph):
|
if isinstance(element, pydot.Graph):
|
||||||
add_defaults(element, defaults_graph)
|
add_defaults(element, defaults_graph)
|
||||||
g.add_subgraph(element)
|
g.add_subgraph(element)
|
||||||
elif isinstance(element, pydot.Node):
|
elif isinstance(element, pydot.Node):
|
||||||
add_defaults(element, defaults_node)
|
add_defaults(element, defaults_node)
|
||||||
g.add_node(element)
|
g.add_node(element)
|
||||||
elif isinstance(element, pydot.Edge):
|
elif isinstance(element, pydot.Edge):
|
||||||
add_defaults(element, defaults_edge)
|
add_defaults(element, defaults_edge)
|
||||||
g.add_edge(element)
|
g.add_edge(element)
|
||||||
elif isinstance(element, ParseResults):
|
elif isinstance(element, ParseResults):
|
||||||
for e in element:
|
for e in element:
|
||||||
add_elements(g, [e], defaults_graph, defaults_node, defaults_edge)
|
add_elements(g, [e], defaults_graph, defaults_node, defaults_edge)
|
||||||
elif isinstance(element, DefaultStatement):
|
elif isinstance(element, DefaultStatement):
|
||||||
if element.default_type == 'graph':
|
if element.default_type == 'graph':
|
||||||
default_graph_attrs = pydot.Node('graph')
|
default_graph_attrs = pydot.Node('graph')
|
||||||
default_graph_attrs.__dict__.update(element.attrs)
|
default_graph_attrs.__dict__.update(element.attrs)
|
||||||
g.add_node(default_graph_attrs)
|
g.add_node(default_graph_attrs)
|
||||||
# defaults_graph.update(element.attrs)
|
# defaults_graph.update(element.attrs)
|
||||||
# g.__dict__.update(element.attrs)
|
# g.__dict__.update(element.attrs)
|
||||||
elif element.default_type == 'node':
|
elif element.default_type == 'node':
|
||||||
default_node_attrs = pydot.Node('node')
|
default_node_attrs = pydot.Node('node')
|
||||||
default_node_attrs.__dict__.update(element.attrs)
|
default_node_attrs.__dict__.update(element.attrs)
|
||||||
g.add_node(default_node_attrs)
|
g.add_node(default_node_attrs)
|
||||||
#defaults_node.update(element.attrs)
|
#defaults_node.update(element.attrs)
|
||||||
elif element.default_type == 'edge':
|
elif element.default_type == 'edge':
|
||||||
default_edge_attrs = pydot.Node('edge')
|
default_edge_attrs = pydot.Node('edge')
|
||||||
default_edge_attrs.__dict__.update(element.attrs)
|
default_edge_attrs.__dict__.update(element.attrs)
|
||||||
g.add_node(default_edge_attrs)
|
g.add_node(default_edge_attrs)
|
||||||
#defaults_edge.update(element.attrs)
|
#defaults_edge.update(element.attrs)
|
||||||
else:
|
else:
|
||||||
raise ValueError, "Unknown DefaultStatement: %s " % element.default_type
|
raise ValueError, "Unknown DefaultStatement: %s " % element.default_type
|
||||||
elif isinstance(element, P_AttrList):
|
elif isinstance(element, P_AttrList):
|
||||||
g.__dict__.update(element.attrs)
|
g.__dict__.update(element.attrs)
|
||||||
else:
|
else:
|
||||||
raise ValueError, "Unknown element statement: %r " % element
|
raise ValueError, "Unknown element statement: %r " % element
|
||||||
|
|
||||||
|
|
||||||
def push_graph_stmt(str, loc, toks):
|
def push_graph_stmt(str, loc, toks):
|
||||||
g = pydot.Subgraph()
|
g = pydot.Subgraph()
|
||||||
add_elements(g, toks)
|
add_elements(g, toks)
|
||||||
return g
|
return g
|
||||||
|
|
||||||
|
|
||||||
def push_subgraph_stmt(str, loc, toks):
|
def push_subgraph_stmt(str, loc, toks):
|
||||||
for e in toks:
|
for e in toks:
|
||||||
if len(e)==3:
|
if len(e)==3:
|
||||||
g = e[2]
|
g = e[2]
|
||||||
g.set_name(e[1])
|
g.set_name(e[1])
|
||||||
|
|
||||||
return g
|
return g
|
||||||
|
|
||||||
|
|
||||||
def push_default_stmt(str, loc, toks):
|
def push_default_stmt(str, loc, toks):
|
||||||
# The pydot class instances should be marked as
|
# The pydot class instances should be marked as
|
||||||
# default statements to be inherited by actual
|
# default statements to be inherited by actual
|
||||||
# graphs, nodes and edges.
|
# graphs, nodes and edges.
|
||||||
# print "push_default_stmt", toks
|
# print "push_default_stmt", toks
|
||||||
default_type = toks[0][0]
|
default_type = toks[0][0]
|
||||||
if len(toks) > 1:
|
if len(toks) > 1:
|
||||||
attrs = toks[1].attrs
|
attrs = toks[1].attrs
|
||||||
else:
|
else:
|
||||||
attrs = {}
|
attrs = {}
|
||||||
|
|
||||||
if default_type in ['graph', 'node', 'edge']:
|
if default_type in ['graph', 'node', 'edge']:
|
||||||
return DefaultStatement(default_type, attrs)
|
return DefaultStatement(default_type, attrs)
|
||||||
else:
|
else:
|
||||||
raise ValueError, "Unknown default statement: %r " % toks
|
raise ValueError, "Unknown default statement: %r " % toks
|
||||||
|
|
||||||
|
|
||||||
def push_attr_list(str, loc, toks):
|
def push_attr_list(str, loc, toks):
|
||||||
p = P_AttrList(toks)
|
p = P_AttrList(toks)
|
||||||
return p
|
return p
|
||||||
|
|
||||||
|
|
||||||
def get_port(node):
|
def get_port(node):
|
||||||
|
|
||||||
if len(node)>1:
|
if len(node)>1:
|
||||||
if isinstance(node[1], ParseResults):
|
if isinstance(node[1], ParseResults):
|
||||||
if len(node[1][0])==2:
|
if len(node[1][0])==2:
|
||||||
if node[1][0][0]==':':
|
if node[1][0][0]==':':
|
||||||
return node[1][0][1]
|
return node[1][0][1]
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def push_edge_stmt(str, loc, toks):
|
def push_edge_stmt(str, loc, toks):
|
||||||
|
|
||||||
tok_attrs = [a for a in toks if isinstance(a, P_AttrList)]
|
tok_attrs = [a for a in toks if isinstance(a, P_AttrList)]
|
||||||
attrs = {}
|
attrs = {}
|
||||||
for a in tok_attrs:
|
for a in tok_attrs:
|
||||||
attrs.update(a.attrs)
|
attrs.update(a.attrs)
|
||||||
|
|
||||||
n_prev = toks[0]
|
n_prev = toks[0]
|
||||||
e = []
|
e = []
|
||||||
for n_next in tuple(toks)[2::2]:
|
for n_next in tuple(toks)[2::2]:
|
||||||
port = get_port(n_prev)
|
port = get_port(n_prev)
|
||||||
if port is not None:
|
if port is not None:
|
||||||
n_prev_port = ':'+port
|
n_prev_port = ':'+port
|
||||||
else:
|
else:
|
||||||
n_prev_port = ''
|
n_prev_port = ''
|
||||||
|
|
||||||
port = get_port(n_next)
|
port = get_port(n_next)
|
||||||
if port is not None:
|
if port is not None:
|
||||||
n_next_port = ':'+port
|
n_next_port = ':'+port
|
||||||
else:
|
else:
|
||||||
n_next_port = ''
|
n_next_port = ''
|
||||||
|
|
||||||
e.append(pydot.Edge(n_prev[0]+n_prev_port, n_next[0]+n_next_port, **attrs))
|
e.append(pydot.Edge(n_prev[0]+n_prev_port, n_next[0]+n_next_port, **attrs))
|
||||||
n_prev = n_next
|
n_prev = n_next
|
||||||
return e
|
return e
|
||||||
|
|
||||||
|
|
||||||
def push_node_stmt(str, loc, toks):
|
def push_node_stmt(str, loc, toks):
|
||||||
|
|
||||||
if len(toks) == 2:
|
if len(toks) == 2:
|
||||||
attrs = toks[1].attrs
|
attrs = toks[1].attrs
|
||||||
else:
|
else:
|
||||||
attrs = {}
|
attrs = {}
|
||||||
|
|
||||||
node_name = toks[0]
|
node_name = toks[0]
|
||||||
if isinstance(node_name, list) or isinstance(node_name, tuple):
|
if isinstance(node_name, list) or isinstance(node_name, tuple):
|
||||||
if len(node_name)>0:
|
if len(node_name)>0:
|
||||||
node_name = node_name[0]
|
node_name = node_name[0]
|
||||||
|
|
||||||
n = pydot.Node(node_name, **attrs)
|
n = pydot.Node(node_name, **attrs)
|
||||||
return n
|
return n
|
||||||
|
|
||||||
|
|
||||||
def strip_quotes( s, l, t ):
|
def strip_quotes( s, l, t ):
|
||||||
return [ t[0].strip('"') ]
|
return [ t[0].strip('"') ]
|
||||||
|
|
||||||
|
|
||||||
graphparser = None
|
graphparser = None
|
||||||
def GRAPH_DEF():
|
def GRAPH_DEF():
|
||||||
global graphparser
|
global graphparser
|
||||||
|
|
||||||
if not graphparser:
|
if not graphparser:
|
||||||
# punctuation
|
# punctuation
|
||||||
colon = Literal(":")
|
colon = Literal(":")
|
||||||
lbrace = Literal("{")
|
lbrace = Literal("{")
|
||||||
rbrace = Literal("}")
|
rbrace = Literal("}")
|
||||||
lbrack = Literal("[")
|
lbrack = Literal("[")
|
||||||
rbrack = Literal("]")
|
rbrack = Literal("]")
|
||||||
lparen = Literal("(")
|
lparen = Literal("(")
|
||||||
rparen = Literal(")")
|
rparen = Literal(")")
|
||||||
equals = Literal("=")
|
equals = Literal("=")
|
||||||
comma = Literal(",")
|
comma = Literal(",")
|
||||||
dot = Literal(".")
|
dot = Literal(".")
|
||||||
slash = Literal("/")
|
slash = Literal("/")
|
||||||
bslash = Literal("\\")
|
bslash = Literal("\\")
|
||||||
star = Literal("*")
|
star = Literal("*")
|
||||||
semi = Literal(";")
|
semi = Literal(";")
|
||||||
at = Literal("@")
|
at = Literal("@")
|
||||||
minus = Literal("-")
|
minus = Literal("-")
|
||||||
|
|
||||||
# keywords
|
# keywords
|
||||||
strict_ = Literal("strict")
|
strict_ = Literal("strict")
|
||||||
graph_ = Literal("graph")
|
graph_ = Literal("graph")
|
||||||
digraph_ = Literal("digraph")
|
digraph_ = Literal("digraph")
|
||||||
subgraph_ = Literal("subgraph")
|
subgraph_ = Literal("subgraph")
|
||||||
node_ = Literal("node")
|
node_ = Literal("node")
|
||||||
edge_ = Literal("edge")
|
edge_ = Literal("edge")
|
||||||
|
|
||||||
identifier = Word(alphanums + "_" ).setName("identifier")
|
identifier = Word(alphanums + "_" ).setName("identifier")
|
||||||
|
|
||||||
double_quote = Literal('"')
|
double_quote = Literal('"')
|
||||||
double_quoted_string = \
|
double_quoted_string = \
|
||||||
Combine( double_quote + ZeroOrMore(CharsNotIn('"')) + double_quote )
|
Combine( double_quote + ZeroOrMore(CharsNotIn('"')) + double_quote )
|
||||||
|
|
||||||
alphastring_ = OneOrMore(CharsNotIn(_noncomma))
|
alphastring_ = OneOrMore(CharsNotIn(_noncomma))
|
||||||
|
|
||||||
ID = (identifier | double_quoted_string.setParseAction(strip_quotes) |\
|
ID = (identifier | double_quoted_string.setParseAction(strip_quotes) |\
|
||||||
alphastring_).setName("ID")
|
alphastring_).setName("ID")
|
||||||
|
|
||||||
html_text = Combine(Literal("<<") + OneOrMore(CharsNotIn(",]")))
|
html_text = Combine(Literal("<<") + OneOrMore(CharsNotIn(",]")))
|
||||||
|
|
||||||
float_number = Combine(Optional(minus) + \
|
float_number = Combine(Optional(minus) + \
|
||||||
OneOrMore(Word(nums + "."))).setName("float_number")
|
OneOrMore(Word(nums + "."))).setName("float_number")
|
||||||
|
|
||||||
righthand_id = (float_number | ID | html_text).setName("righthand_id")
|
righthand_id = (float_number | ID | html_text).setName("righthand_id")
|
||||||
|
|
||||||
port_angle = (at + ID).setName("port_angle")
|
port_angle = (at + ID).setName("port_angle")
|
||||||
|
|
||||||
port_location = (Group(colon + ID) | \
|
port_location = (Group(colon + ID) | \
|
||||||
Group(colon + lparen + ID + comma + ID + rparen)).setName("port_location")
|
Group(colon + lparen + ID + comma + ID + rparen)).setName("port_location")
|
||||||
|
|
||||||
port = (Group(port_location + Optional(port_angle)) | \
|
port = (Group(port_location + Optional(port_angle)) | \
|
||||||
Group(port_angle + Optional(port_location))).setName("port")
|
Group(port_angle + Optional(port_location))).setName("port")
|
||||||
|
|
||||||
node_id = (ID + Optional(port))
|
node_id = (ID + Optional(port))
|
||||||
a_list = OneOrMore(ID + Optional(equals.suppress() + righthand_id) + \
|
a_list = OneOrMore(ID + Optional(equals.suppress() + righthand_id) + \
|
||||||
Optional(comma.suppress())).setName("a_list")
|
Optional(comma.suppress())).setName("a_list")
|
||||||
|
|
||||||
attr_list = OneOrMore(lbrack.suppress() + Optional(a_list) + \
|
attr_list = OneOrMore(lbrack.suppress() + Optional(a_list) + \
|
||||||
rbrack.suppress()).setName("attr_list")
|
rbrack.suppress()).setName("attr_list")
|
||||||
|
|
||||||
attr_stmt = (Group(graph_ | node_ | edge_) + attr_list).setName("attr_stmt")
|
attr_stmt = (Group(graph_ | node_ | edge_) + attr_list).setName("attr_stmt")
|
||||||
|
|
||||||
edgeop = (Literal("--") | Literal("->")).setName("edgeop")
|
edgeop = (Literal("--") | Literal("->")).setName("edgeop")
|
||||||
|
|
||||||
stmt_list = Forward()
|
stmt_list = Forward()
|
||||||
graph_stmt = Group(lbrace.suppress() + Optional(stmt_list) + \
|
graph_stmt = Group(lbrace.suppress() + Optional(stmt_list) + \
|
||||||
rbrace.suppress()).setName("graph_stmt")
|
rbrace.suppress()).setName("graph_stmt")
|
||||||
|
|
||||||
subgraph = (Group(Optional(subgraph_ + Optional(ID)) + graph_stmt) | \
|
subgraph = (Group(Optional(subgraph_ + Optional(ID)) + graph_stmt) | \
|
||||||
Group(subgraph_ + ID)).setName("subgraph")
|
Group(subgraph_ + ID)).setName("subgraph")
|
||||||
|
|
||||||
edgeRHS = OneOrMore(edgeop + Group(node_id | subgraph))
|
edgeRHS = OneOrMore(edgeop + Group(node_id | subgraph))
|
||||||
|
|
||||||
edge_stmt = Group(node_id | subgraph) + edgeRHS + Optional(attr_list)
|
edge_stmt = Group(node_id | subgraph) + edgeRHS + Optional(attr_list)
|
||||||
|
|
||||||
node_stmt = (node_id + Optional(attr_list) + semi.suppress()).setName("node_stmt")
|
node_stmt = (node_id + Optional(attr_list) + semi.suppress()).setName("node_stmt")
|
||||||
|
|
||||||
assignment = (ID + equals.suppress() + righthand_id).setName("assignment")
|
assignment = (ID + equals.suppress() + righthand_id).setName("assignment")
|
||||||
stmt = (assignment | edge_stmt | attr_stmt | node_stmt | subgraph).setName("stmt")
|
stmt = (assignment | edge_stmt | attr_stmt | node_stmt | subgraph).setName("stmt")
|
||||||
stmt_list << OneOrMore(stmt + Optional(semi.suppress()))
|
stmt_list << OneOrMore(stmt + Optional(semi.suppress()))
|
||||||
|
|
||||||
graphparser = (Optional(strict_) + Group((graph_ | digraph_)) + \
|
graphparser = (Optional(strict_) + Group((graph_ | digraph_)) + \
|
||||||
Optional(ID) + graph_stmt).setResultsName("graph")
|
Optional(ID) + graph_stmt).setResultsName("graph")
|
||||||
|
|
||||||
singleLineComment = "//" + restOfLine
|
singleLineComment = "//" + restOfLine
|
||||||
graphparser.ignore(singleLineComment)
|
graphparser.ignore(singleLineComment)
|
||||||
graphparser.ignore(cStyleComment)
|
graphparser.ignore(cStyleComment)
|
||||||
|
|
||||||
assignment.setParseAction(push_attr_list)
|
assignment.setParseAction(push_attr_list)
|
||||||
a_list.setParseAction(push_attr_list)
|
a_list.setParseAction(push_attr_list)
|
||||||
edge_stmt.setParseAction(push_edge_stmt)
|
edge_stmt.setParseAction(push_edge_stmt)
|
||||||
node_stmt.setParseAction(push_node_stmt)
|
node_stmt.setParseAction(push_node_stmt)
|
||||||
attr_stmt.setParseAction(push_default_stmt)
|
attr_stmt.setParseAction(push_default_stmt)
|
||||||
|
|
||||||
subgraph.setParseAction(push_subgraph_stmt)
|
subgraph.setParseAction(push_subgraph_stmt)
|
||||||
graph_stmt.setParseAction(push_graph_stmt)
|
graph_stmt.setParseAction(push_graph_stmt)
|
||||||
graphparser.setParseAction(push_top_graph_stmt)
|
graphparser.setParseAction(push_top_graph_stmt)
|
||||||
|
|
||||||
return graphparser
|
return graphparser
|
||||||
|
|
||||||
|
|
||||||
def parse_dot_data(data):
|
def parse_dot_data(data):
|
||||||
try:
|
try:
|
||||||
graphparser = GRAPH_DEF()
|
graphparser = GRAPH_DEF()
|
||||||
if pyparsing_version >= '1.2':
|
if pyparsing_version >= '1.2':
|
||||||
graphparser.parseWithTabs()
|
graphparser.parseWithTabs()
|
||||||
tokens = graphparser.parseString(data)
|
tokens = graphparser.parseString(data)
|
||||||
graph = tokens.graph
|
graph = tokens.graph
|
||||||
return graph
|
return graph
|
||||||
except ParseException, err:
|
except ParseException, err:
|
||||||
print err.line
|
print err.line
|
||||||
print " "*(err.column-1) + "^"
|
print " "*(err.column-1) + "^"
|
||||||
print err
|
print err
|
||||||
return None
|
return None
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -3,21 +3,21 @@
|
||||||
from distutils.core import setup
|
from distutils.core import setup
|
||||||
import pydot
|
import pydot
|
||||||
|
|
||||||
setup( name = 'pydot',
|
setup( name = 'pydot',
|
||||||
version = pydot.__version__,
|
version = pydot.__version__,
|
||||||
description = 'Python interface to Graphiz\'s Dot',
|
description = 'Python interface to Graphiz\'s Dot',
|
||||||
author = 'Ero Carrera',
|
author = 'Ero Carrera',
|
||||||
author_email = 'ero@dkbza.org',
|
author_email = 'ero@dkbza.org',
|
||||||
url = 'http://dkbza.org/pydot.html',
|
url = 'http://dkbza.org/pydot.html',
|
||||||
license = 'MIT',
|
license = 'MIT',
|
||||||
platforms = ["any"],
|
platforms = ["any"],
|
||||||
classifiers = ['Development Status :: 5 - Production/Stable', \
|
classifiers = ['Development Status :: 5 - Production/Stable', \
|
||||||
'Intended Audience :: Science/Research', \
|
'Intended Audience :: Science/Research', \
|
||||||
'License :: OSI Approved :: MIT License',\
|
'License :: OSI Approved :: MIT License',\
|
||||||
'Natural Language :: English', \
|
'Natural Language :: English', \
|
||||||
'Operating System :: OS Independent', \
|
'Operating System :: OS Independent', \
|
||||||
'Programming Language :: Python', \
|
'Programming Language :: Python', \
|
||||||
'Topic :: Scientific/Engineering :: Visualization',\
|
'Topic :: Scientific/Engineering :: Visualization',\
|
||||||
'Topic :: Software Development :: Libraries :: Python Modules'],
|
'Topic :: Software Development :: Libraries :: Python Modules'],
|
||||||
long_description = "\n".join(pydot.__doc__.split('\n')),
|
long_description = "\n".join(pydot.__doc__.split('\n')),
|
||||||
py_modules = ['pydot', 'dot_parser'])
|
py_modules = ['pydot', 'dot_parser'])
|
||||||
|
|
|
@ -32,157 +32,157 @@ from tools import graph
|
||||||
import netsvc
|
import netsvc
|
||||||
|
|
||||||
class workflow(osv.osv):
|
class workflow(osv.osv):
|
||||||
_name = "workflow"
|
_name = "workflow"
|
||||||
_table = "wkf"
|
_table = "wkf"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'osv': fields.char('Resource Model', size=64, required=True),
|
'osv': fields.char('Resource Model', size=64, required=True),
|
||||||
'on_create': fields.boolean('On Create'),
|
'on_create': fields.boolean('On Create'),
|
||||||
'activities': fields.one2many('workflow.activity', 'wkf_id', 'Activities'),
|
'activities': fields.one2many('workflow.activity', 'wkf_id', 'Activities'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'on_create': lambda *a: True
|
'on_create': lambda *a: True
|
||||||
}
|
}
|
||||||
|
|
||||||
def write(self, cr, user, ids, vals, context=None):
|
def write(self, cr, user, ids, vals, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
wf_service = netsvc.LocalService("workflow")
|
wf_service = netsvc.LocalService("workflow")
|
||||||
wf_service.clear_cache(cr, user)
|
wf_service.clear_cache(cr, user)
|
||||||
return super(workflow, self).write(cr, user, ids, vals, context=context)
|
return super(workflow, self).write(cr, user, ids, vals, context=context)
|
||||||
|
|
||||||
#
|
#
|
||||||
# scale = [stepx, stepy, posx, posy ]
|
# scale = [stepx, stepy, posx, posy ]
|
||||||
#
|
#
|
||||||
|
|
||||||
def graph_get(self, cr, uid, id, scale, context={}):
|
def graph_get(self, cr, uid, id, scale, context={}):
|
||||||
|
|
||||||
nodes= []
|
nodes= []
|
||||||
transitions = []
|
transitions = []
|
||||||
start = []
|
start = []
|
||||||
tres = {}
|
tres = {}
|
||||||
workflow = self.browse(cr, uid, id, context)
|
workflow = self.browse(cr, uid, id, context)
|
||||||
for a in workflow.activities:
|
for a in workflow.activities:
|
||||||
nodes.append((a.id,a.name))
|
nodes.append((a.id,a.name))
|
||||||
if a.flow_start:
|
if a.flow_start:
|
||||||
start.append((a.id,a.name))
|
start.append((a.id,a.name))
|
||||||
for t in a.out_transitions:
|
for t in a.out_transitions:
|
||||||
transitions.append( ((a.id,a.name), (t.act_to.id,t.act_to.name)) )
|
transitions.append( ((a.id,a.name), (t.act_to.id,t.act_to.name)) )
|
||||||
tres[t.id] = (a.id,t.act_to.id)
|
tres[t.id] = (a.id,t.act_to.id)
|
||||||
g = graph(nodes, transitions)
|
g = graph(nodes, transitions)
|
||||||
g.process(start)
|
g.process(start)
|
||||||
g.scale(*scale)
|
g.scale(*scale)
|
||||||
result = g.result_get()
|
result = g.result_get()
|
||||||
results = {}
|
results = {}
|
||||||
|
|
||||||
|
|
||||||
for r in result.items():
|
for r in result.items():
|
||||||
r[1]['name'] = r[0][1]
|
r[1]['name'] = r[0][1]
|
||||||
results[str(r[0][0])] = r[1]
|
results[str(r[0][0])] = r[1]
|
||||||
return {'node': results, 'transition': tres}
|
return {'node': results, 'transition': tres}
|
||||||
|
|
||||||
def create(self, cr, user, vals, context=None):
|
def create(self, cr, user, vals, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
wf_service = netsvc.LocalService("workflow")
|
wf_service = netsvc.LocalService("workflow")
|
||||||
wf_service.clear_cache(cr, user)
|
wf_service.clear_cache(cr, user)
|
||||||
return super(workflow, self).create(cr, user, vals, context=context)
|
return super(workflow, self).create(cr, user, vals, context=context)
|
||||||
workflow()
|
workflow()
|
||||||
|
|
||||||
class wkf_activity(osv.osv):
|
class wkf_activity(osv.osv):
|
||||||
_name = "workflow.activity"
|
_name = "workflow.activity"
|
||||||
_table = "wkf_activity"
|
_table = "wkf_activity"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
|
'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
|
||||||
'split_mode': fields.selection([('XOR', 'Xor'), ('OR','Or'), ('AND','And')], 'Split Mode', size=3, required=True),
|
'split_mode': fields.selection([('XOR', 'Xor'), ('OR','Or'), ('AND','And')], 'Split Mode', size=3, required=True),
|
||||||
'join_mode': fields.selection([('XOR', 'Xor'), ('AND', 'And')], 'Join Mode', size=3, required=True),
|
'join_mode': fields.selection([('XOR', 'Xor'), ('AND', 'And')], 'Join Mode', size=3, required=True),
|
||||||
'kind': fields.selection([('dummy', 'Dummy'), ('function', 'Function'), ('subflow', 'Subflow'), ('stopall', 'Stop All')], 'Kind', size=64, required=True),
|
'kind': fields.selection([('dummy', 'Dummy'), ('function', 'Function'), ('subflow', 'Subflow'), ('stopall', 'Stop All')], 'Kind', size=64, required=True),
|
||||||
'action': fields.text('Python Action'),
|
'action': fields.text('Python Action'),
|
||||||
'action_id': fields.many2one('ir.actions.server', 'Server Action', ondelete='set null'),
|
'action_id': fields.many2one('ir.actions.server', 'Server Action', ondelete='set null'),
|
||||||
'flow_start': fields.boolean('Flow Start'),
|
'flow_start': fields.boolean('Flow Start'),
|
||||||
'flow_stop': fields.boolean('Flow Stop'),
|
'flow_stop': fields.boolean('Flow Stop'),
|
||||||
'subflow_id': fields.many2one('workflow', 'Subflow'),
|
'subflow_id': fields.many2one('workflow', 'Subflow'),
|
||||||
'signal_send': fields.char('Signal (subflow.*)', size=32),
|
'signal_send': fields.char('Signal (subflow.*)', size=32),
|
||||||
'out_transitions': fields.one2many('workflow.transition', 'act_from', 'Outgoing transitions'),
|
'out_transitions': fields.one2many('workflow.transition', 'act_from', 'Outgoing transitions'),
|
||||||
'in_transitions': fields.one2many('workflow.transition', 'act_to', 'Incoming transitions'),
|
'in_transitions': fields.one2many('workflow.transition', 'act_to', 'Incoming transitions'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'kind': lambda *a: 'dummy',
|
'kind': lambda *a: 'dummy',
|
||||||
'join_mode': lambda *a: 'XOR',
|
'join_mode': lambda *a: 'XOR',
|
||||||
'split_mode': lambda *a: 'XOR',
|
'split_mode': lambda *a: 'XOR',
|
||||||
}
|
}
|
||||||
wkf_activity()
|
wkf_activity()
|
||||||
|
|
||||||
class wkf_transition(osv.osv):
|
class wkf_transition(osv.osv):
|
||||||
_table = "wkf_transition"
|
_table = "wkf_transition"
|
||||||
_name = "workflow.transition"
|
_name = "workflow.transition"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_rec_name = 'signal' #TODO: pas top mais bon...
|
_rec_name = 'signal' #TODO: pas top mais bon...
|
||||||
_columns = {
|
_columns = {
|
||||||
'trigger_model': fields.char('Trigger Type', size=128),
|
'trigger_model': fields.char('Trigger Type', size=128),
|
||||||
'trigger_expr_id': fields.char('Trigger Expr ID', size=128),
|
'trigger_expr_id': fields.char('Trigger Expr ID', size=128),
|
||||||
'signal': fields.char('Signal (button Name)', size=64),
|
'signal': fields.char('Signal (button Name)', size=64),
|
||||||
'role_id': fields.many2one('res.roles', 'Role Required'),
|
'role_id': fields.many2one('res.roles', 'Role Required'),
|
||||||
'condition': fields.char('Condition', required=True, size=128),
|
'condition': fields.char('Condition', required=True, size=128),
|
||||||
'act_from': fields.many2one('workflow.activity', 'Source Activity', required=True, select=True, ondelete='cascade'),
|
'act_from': fields.many2one('workflow.activity', 'Source Activity', required=True, select=True, ondelete='cascade'),
|
||||||
'act_to': fields.many2one('workflow.activity', 'Destination Activity', required=True, select=True, ondelete='cascade'),
|
'act_to': fields.many2one('workflow.activity', 'Destination Activity', required=True, select=True, ondelete='cascade'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'condition': lambda *a: 'True',
|
'condition': lambda *a: 'True',
|
||||||
}
|
}
|
||||||
wkf_transition()
|
wkf_transition()
|
||||||
|
|
||||||
class wkf_instance(osv.osv):
|
class wkf_instance(osv.osv):
|
||||||
_table = "wkf_instance"
|
_table = "wkf_instance"
|
||||||
_name = "workflow.instance"
|
_name = "workflow.instance"
|
||||||
_rec_name = 'res_type'
|
_rec_name = 'res_type'
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_columns = {
|
_columns = {
|
||||||
'wkf_id': fields.many2one('workflow', 'Workflow', ondelete='restrict'),
|
'wkf_id': fields.many2one('workflow', 'Workflow', ondelete='restrict'),
|
||||||
'uid': fields.integer('User ID'),
|
'uid': fields.integer('User ID'),
|
||||||
'res_id': fields.integer('Resource ID'),
|
'res_id': fields.integer('Resource ID'),
|
||||||
'res_type': fields.char('Resource Model', size=64),
|
'res_type': fields.char('Resource Model', size=64),
|
||||||
'state': fields.char('State', size=32),
|
'state': fields.char('State', size=32),
|
||||||
}
|
}
|
||||||
def _auto_init(self, cr, context={}):
|
def _auto_init(self, cr, context={}):
|
||||||
super(wkf_instance, self)._auto_init(cr, context)
|
super(wkf_instance, self)._auto_init(cr, context)
|
||||||
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_id_res_type_state_index\'')
|
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_id_res_type_state_index\'')
|
||||||
if not cr.fetchone():
|
if not cr.fetchone():
|
||||||
cr.execute('CREATE INDEX wkf_instance_res_id_res_type_state_index ON wkf_instance (res_id, res_type, state)')
|
cr.execute('CREATE INDEX wkf_instance_res_id_res_type_state_index ON wkf_instance (res_id, res_type, state)')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
wkf_instance()
|
wkf_instance()
|
||||||
|
|
||||||
class wkf_workitem(osv.osv):
|
class wkf_workitem(osv.osv):
|
||||||
_table = "wkf_workitem"
|
_table = "wkf_workitem"
|
||||||
_name = "workflow.workitem"
|
_name = "workflow.workitem"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_rec_name = 'state'
|
_rec_name = 'state'
|
||||||
_columns = {
|
_columns = {
|
||||||
'act_id': fields.many2one('workflow.activity', 'Activity', required=True, ondelete="cascade"),
|
'act_id': fields.many2one('workflow.activity', 'Activity', required=True, ondelete="cascade"),
|
||||||
'subflow_id': fields.many2one('workflow.instance', 'Subflow', ondelete="cascade"),
|
'subflow_id': fields.many2one('workflow.instance', 'Subflow', ondelete="cascade"),
|
||||||
'inst_id': fields.many2one('workflow.instance', 'Instance', required=True, ondelete="cascade", select=1),
|
'inst_id': fields.many2one('workflow.instance', 'Instance', required=True, ondelete="cascade", select=1),
|
||||||
'state': fields.char('State', size=64),
|
'state': fields.char('State', size=64),
|
||||||
}
|
}
|
||||||
wkf_workitem()
|
wkf_workitem()
|
||||||
|
|
||||||
class wkf_triggers(osv.osv):
|
class wkf_triggers(osv.osv):
|
||||||
_table = "wkf_triggers"
|
_table = "wkf_triggers"
|
||||||
_name = "workflow.triggers"
|
_name = "workflow.triggers"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_columns = {
|
_columns = {
|
||||||
'res_id': fields.integer('Resource ID', size=128),
|
'res_id': fields.integer('Resource ID', size=128),
|
||||||
'model': fields.char('Model', size=128),
|
'model': fields.char('Model', size=128),
|
||||||
'instance_id': fields.many2one('workflow.instance', 'Destination Instance', ondelete="cascade"),
|
'instance_id': fields.many2one('workflow.instance', 'Destination Instance', ondelete="cascade"),
|
||||||
'workitem_id': fields.many2one('workflow.workitem', 'Workitem', required=True, ondelete="cascade"),
|
'workitem_id': fields.many2one('workflow.workitem', 'Workitem', required=True, ondelete="cascade"),
|
||||||
}
|
}
|
||||||
def _auto_init(self, cr, context={}):
|
def _auto_init(self, cr, context={}):
|
||||||
super(wkf_triggers, self)._auto_init(cr, context)
|
super(wkf_triggers, self)._auto_init(cr, context)
|
||||||
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_triggers_res_id_model_index\'')
|
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_triggers_res_id_model_index\'')
|
||||||
if not cr.fetchone():
|
if not cr.fetchone():
|
||||||
cr.execute('CREATE INDEX wkf_triggers_res_id_model_index ON wkf_triggers (res_id, model)')
|
cr.execute('CREATE INDEX wkf_triggers_res_id_model_index ON wkf_triggers (res_id, model)')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
wkf_triggers()
|
wkf_triggers()
|
||||||
|
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -31,33 +31,33 @@ import time
|
||||||
from report import report_sxw
|
from report import report_sxw
|
||||||
|
|
||||||
class ir_module_reference_print(report_sxw.rml_parse):
|
class ir_module_reference_print(report_sxw.rml_parse):
|
||||||
def __init__(self, cr, uid, name, context):
|
def __init__(self, cr, uid, name, context):
|
||||||
super(ir_module_reference_print, self).__init__(cr, uid, name, context)
|
super(ir_module_reference_print, self).__init__(cr, uid, name, context)
|
||||||
self.localcontext.update({
|
self.localcontext.update({
|
||||||
'time': time,
|
'time': time,
|
||||||
'findobj': self._object_find,
|
'findobj': self._object_find,
|
||||||
'objdoc': self._object_doc,
|
'objdoc': self._object_doc,
|
||||||
'findflds': self._fields_find,
|
'findflds': self._fields_find,
|
||||||
})
|
})
|
||||||
def _object_doc(self, obj):
|
def _object_doc(self, obj):
|
||||||
modobj = self.pool.get(obj)
|
modobj = self.pool.get(obj)
|
||||||
return modobj.__doc__
|
return modobj.__doc__
|
||||||
|
|
||||||
def _object_find(self, module):
|
def _object_find(self, module):
|
||||||
modobj = self.pool.get('ir.model')
|
modobj = self.pool.get('ir.model')
|
||||||
if module=='base':
|
if module=='base':
|
||||||
ids = modobj.search(self.cr, self.uid, [('model','=like','res%')])
|
ids = modobj.search(self.cr, self.uid, [('model','=like','res%')])
|
||||||
ids += modobj.search(self.cr, self.uid, [('model','=like','ir%')])
|
ids += modobj.search(self.cr, self.uid, [('model','=like','ir%')])
|
||||||
else:
|
else:
|
||||||
ids = modobj.search(self.cr, self.uid, [('model','=like',module+'%')])
|
ids = modobj.search(self.cr, self.uid, [('model','=like',module+'%')])
|
||||||
return modobj.browse(self.cr, self.uid, ids)
|
return modobj.browse(self.cr, self.uid, ids)
|
||||||
|
|
||||||
def _fields_find(self, obj):
|
def _fields_find(self, obj):
|
||||||
modobj = self.pool.get(obj)
|
modobj = self.pool.get(obj)
|
||||||
res = modobj.fields_get(self.cr, self.uid).items()
|
res = modobj.fields_get(self.cr, self.uid).items()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
report_sxw.report_sxw('report.ir.module.reference', 'ir.module.module',
|
report_sxw.report_sxw('report.ir.module.reference', 'ir.module.module',
|
||||||
'addons/base/module/report/ir_module_reference.rml',
|
'addons/base/module/report/ir_module_reference.rml',
|
||||||
parser=ir_module_reference_print, header=False)
|
parser=ir_module_reference_print, header=False)
|
||||||
|
|
||||||
|
|
|
@ -49,48 +49,48 @@ _info_arch = '''<?xml version="1.0"?>
|
||||||
_info_fields = {}
|
_info_fields = {}
|
||||||
|
|
||||||
class wizard_install_module(wizard.interface):
|
class wizard_install_module(wizard.interface):
|
||||||
def watch_dir(self, cr, uid, data, context):
|
def watch_dir(self, cr, uid, data, context):
|
||||||
mod_obj = pooler.get_pool(cr.dbname).get('ir.module.module')
|
mod_obj = pooler.get_pool(cr.dbname).get('ir.module.module')
|
||||||
all_mods = mod_obj.read(cr, uid, mod_obj.search(cr, uid, []), ['name', 'state'])
|
all_mods = mod_obj.read(cr, uid, mod_obj.search(cr, uid, []), ['name', 'state'])
|
||||||
known_modules = [x['name'] for x in all_mods]
|
known_modules = [x['name'] for x in all_mods]
|
||||||
ls_ad = glob.glob(os.path.join(tools.config['addons_path'], '*', '__terp__.py'))
|
ls_ad = glob.glob(os.path.join(tools.config['addons_path'], '*', '__terp__.py'))
|
||||||
modules = [module_name_re.match(name).group(1) for name in ls_ad]
|
modules = [module_name_re.match(name).group(1) for name in ls_ad]
|
||||||
for fname in os.listdir(tools.config['addons_path']):
|
for fname in os.listdir(tools.config['addons_path']):
|
||||||
if zipfile.is_zipfile(fname):
|
if zipfile.is_zipfile(fname):
|
||||||
modules.append( fname.split('.')[0])
|
modules.append( fname.split('.')[0])
|
||||||
for module in modules:
|
for module in modules:
|
||||||
if module in known_modules:
|
if module in known_modules:
|
||||||
continue
|
continue
|
||||||
terp = mod_obj.get_module_info(module)
|
terp = mod_obj.get_module_info(module)
|
||||||
if not terp.get('installable', True):
|
if not terp.get('installable', True):
|
||||||
continue
|
continue
|
||||||
imp.load_module(module, *imp.find_module(module))
|
imp.load_module(module, *imp.find_module(module))
|
||||||
mod_id = mod_obj.create(cr, uid, {
|
mod_id = mod_obj.create(cr, uid, {
|
||||||
'name': module,
|
'name': module,
|
||||||
'state': 'uninstalled',
|
'state': 'uninstalled',
|
||||||
'description': terp.get('description', ''),
|
'description': terp.get('description', ''),
|
||||||
'shortdesc': terp.get('name', ''),
|
'shortdesc': terp.get('name', ''),
|
||||||
'author': terp.get('author', 'Unknown')})
|
'author': terp.get('author', 'Unknown')})
|
||||||
dependencies = terp.get('depends', [])
|
dependencies = terp.get('depends', [])
|
||||||
for d in dependencies:
|
for d in dependencies:
|
||||||
cr.execute('insert into ir_module_module_dependency (module_id,name) values (%s, %s)', (mod_id, d))
|
cr.execute('insert into ir_module_module_dependency (module_id,name) values (%s, %s)', (mod_id, d))
|
||||||
for module in known_modules:
|
for module in known_modules:
|
||||||
terp = mod_obj.get_module_info(module)
|
terp = mod_obj.get_module_info(module)
|
||||||
if terp.get('installable', True):
|
if terp.get('installable', True):
|
||||||
for mod in all_mods:
|
for mod in all_mods:
|
||||||
if mod['name'] == module and mod['state'] == 'uninstallable':
|
if mod['name'] == module and mod['state'] == 'uninstallable':
|
||||||
mod_obj.write(cr, uid, [mod['id']], {'state': 'uninstalled'})
|
mod_obj.write(cr, uid, [mod['id']], {'state': 'uninstalled'})
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type':'form', 'arch': _info_arch, 'fields': _info_fields, 'state':[('end','Cancel','gtk-cancel'),('addmod','Check new modules','gtk-ok')]}
|
'result': {'type':'form', 'arch': _info_arch, 'fields': _info_fields, 'state':[('end','Cancel','gtk-cancel'),('addmod','Check new modules','gtk-ok')]}
|
||||||
},
|
},
|
||||||
'addmod': {
|
'addmod': {
|
||||||
'actions': [watch_dir],
|
'actions': [watch_dir],
|
||||||
'result': {'type':'state', 'state':'end'}
|
'result': {'type':'state', 'state':'end'}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
wizard_install_module('module.module.scan')
|
wizard_install_module('module.module.scan')
|
||||||
|
|
||||||
|
|
|
@ -40,114 +40,114 @@ from osv import fields,osv
|
||||||
'''
|
'''
|
||||||
view_form_init="""<?xml version="1.0"?>
|
view_form_init="""<?xml version="1.0"?>
|
||||||
<form string="Export language">
|
<form string="Export language">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<separator string="Export translation file" colspan="4"/>
|
<separator string="Export translation file" colspan="4"/>
|
||||||
<label align="0.0" string="Choose a language to export:" colspan="4"/>
|
<label align="0.0" string="Choose a language to export:" colspan="4"/>
|
||||||
<field name="lang" colspan="4"/>
|
<field name="lang" colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
view_form_finish="""<?xml version="1.0"?>
|
view_form_finish="""<?xml version="1.0"?>
|
||||||
<form string="Export language">
|
<form string="Export language">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<separator string="Export done" colspan="4"/>
|
<separator string="Export done" colspan="4"/>
|
||||||
<field name="data" readonly="1" colspan="3"/>
|
<field name="data" readonly="1" colspan="3"/>
|
||||||
<label align="0.0" string="Save this document to a .CSV file and open it with\n your favourite spreadsheet software. The file\n encoding is UTF-8. You have to translate the latest\n column before reimporting it." colspan="4"/>
|
<label align="0.0" string="Save this document to a .CSV file and open it with\n your favourite spreadsheet software. The file\n encoding is UTF-8. You have to translate the latest\n column before reimporting it." colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
class wizard_export_lang(wizard.interface):
|
class wizard_export_lang(wizard.interface):
|
||||||
def _get_language(self, cr, uid, context):
|
def _get_language(self, cr, uid, context):
|
||||||
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
|
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
|
||||||
ids=lang_obj.search(cr, uid, [('active', '=', True),])
|
ids=lang_obj.search(cr, uid, [('active', '=', True),])
|
||||||
langs=lang_obj.browse(cr, uid, ids)
|
langs=lang_obj.browse(cr, uid, ids)
|
||||||
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
|
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
|
||||||
|
|
||||||
def _get_file(self, cr, uid, data, context):
|
def _get_file(self, cr, uid, data, context):
|
||||||
file=tools.trans_generate(data['form']['lang'], 'all', dbname=cr.dbname)
|
file=tools.trans_generate(data['form']['lang'], 'all', dbname=cr.dbname)
|
||||||
buf=StringIO.StringIO()
|
buf=StringIO.StringIO()
|
||||||
writer=csv.writer(buf, 'UNIX')
|
writer=csv.writer(buf, 'UNIX')
|
||||||
for row in file:
|
for row in file:
|
||||||
writer.writerow(row)
|
writer.writerow(row)
|
||||||
del file
|
del file
|
||||||
out=base64.encodestring(buf.getvalue())
|
out=base64.encodestring(buf.getvalue())
|
||||||
buf.close()
|
buf.close()
|
||||||
return {'data': out}
|
return {'data': out}
|
||||||
|
|
||||||
fields_form={
|
fields_form={
|
||||||
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,},
|
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,},
|
||||||
}
|
}
|
||||||
fields_form_finish={
|
fields_form_finish={
|
||||||
'data': {'string':'File', 'type':'binary', 'readonly': True,},
|
'data': {'string':'File', 'type':'binary', 'readonly': True,},
|
||||||
}
|
}
|
||||||
states={
|
states={
|
||||||
'init':{
|
'init':{
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'form', 'arch': view_form_init, 'fields': fields_form,
|
'result': {'type': 'form', 'arch': view_form_init, 'fields': fields_form,
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('finish', 'Ok', 'gtk-ok', True)
|
('finish', 'Ok', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'finish':{
|
'finish':{
|
||||||
'actions': [_get_file],
|
'actions': [_get_file],
|
||||||
'result': {'type': 'form', 'arch': view_form_finish,
|
'result': {'type': 'form', 'arch': view_form_finish,
|
||||||
'fields': fields_form_finish,
|
'fields': fields_form_finish,
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Close', 'gtk-cancel', True)
|
('end', 'Close', 'gtk-cancel', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
wizard_export_lang('module.lang.export')
|
wizard_export_lang('module.lang.export')
|
||||||
'''
|
'''
|
||||||
|
|
||||||
class wizard_export_lang(osv.osv_memory):
|
class wizard_export_lang(osv.osv_memory):
|
||||||
|
|
||||||
def _get_languages(self, cr, uid, context):
|
def _get_languages(self, cr, uid, context):
|
||||||
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
|
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
|
||||||
ids=lang_obj.search(cr, uid, [('active', '=', True),])
|
ids=lang_obj.search(cr, uid, [('active', '=', True),])
|
||||||
langs=lang_obj.browse(cr, uid, ids)
|
langs=lang_obj.browse(cr, uid, ids)
|
||||||
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
|
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
|
||||||
|
|
||||||
|
|
||||||
def act_cancel(self, cr, uid, ids, context=None):
|
def act_cancel(self, cr, uid, ids, context=None):
|
||||||
#self.unlink(cr, uid, ids, context)
|
#self.unlink(cr, uid, ids, context)
|
||||||
return {'type':'ir.actions.act_window_close' }
|
return {'type':'ir.actions.act_window_close' }
|
||||||
|
|
||||||
def act_destroy(self, *args):
|
def act_destroy(self, *args):
|
||||||
return {'type':'ir.actions.act_window_close' }
|
return {'type':'ir.actions.act_window_close' }
|
||||||
|
|
||||||
def act_getfile(self, cr, uid, ids, context=None):
|
def act_getfile(self, cr, uid, ids, context=None):
|
||||||
this = self.browse(cr, uid, ids)[0]
|
this = self.browse(cr, uid, ids)[0]
|
||||||
mods = map(lambda m: m.name, this.modules)
|
mods = map(lambda m: m.name, this.modules)
|
||||||
mods.sort()
|
mods.sort()
|
||||||
buf=StringIO.StringIO()
|
buf=StringIO.StringIO()
|
||||||
|
|
||||||
tools.trans_export(this.lang, mods, buf, this.format, dbname=cr.dbname)
|
tools.trans_export(this.lang, mods, buf, this.format, dbname=cr.dbname)
|
||||||
|
|
||||||
if this.format == 'csv':
|
if this.format == 'csv':
|
||||||
this.advice = _("Save this document to a .CSV file and open it with your favourite spreadsheet software. The file encoding is UTF-8. You have to translate the latest column before reimporting it.")
|
this.advice = _("Save this document to a .CSV file and open it with your favourite spreadsheet software. The file encoding is UTF-8. You have to translate the latest column before reimporting it.")
|
||||||
elif this.format == 'po':
|
elif this.format == 'po':
|
||||||
this.advice = _("Save this document to a .po file and edit it with a specific software or a text editor. The file encoding is UTF-8.")
|
this.advice = _("Save this document to a .po file and edit it with a specific software or a text editor. The file encoding is UTF-8.")
|
||||||
|
|
||||||
out=base64.encodestring(buf.getvalue())
|
out=base64.encodestring(buf.getvalue())
|
||||||
buf.close()
|
buf.close()
|
||||||
return self.write(cr, uid, ids, {'state':'get', 'data':out, 'advice':this.advice}, context=context)
|
return self.write(cr, uid, ids, {'state':'get', 'data':out, 'advice':this.advice}, context=context)
|
||||||
|
|
||||||
_name = "wizard.module.lang.export"
|
_name = "wizard.module.lang.export"
|
||||||
_columns = {
|
_columns = {
|
||||||
'lang': fields.selection(_get_languages, 'Language',required=True),
|
'lang': fields.selection(_get_languages, 'Language',required=True),
|
||||||
'format': fields.selection( ( ('csv','CSV File'), ('po','PO File') ), 'File Format', required=True),
|
'format': fields.selection( ( ('csv','CSV File'), ('po','PO File') ), 'File Format', required=True),
|
||||||
'modules': fields.many2many('ir.module.module', 'rel_modules_langexport', 'wiz_id', 'module_id', 'Modules', domain=[('state','=','installed')]),
|
'modules': fields.many2many('ir.module.module', 'rel_modules_langexport', 'wiz_id', 'module_id', 'Modules', domain=[('state','=','installed')]),
|
||||||
'data': fields.binary('File', readonly=True),
|
'data': fields.binary('File', readonly=True),
|
||||||
'advice': fields.text('', readonly=True),
|
'advice': fields.text('', readonly=True),
|
||||||
'state': fields.selection( ( ('choose','choose'), # choose language
|
'state': fields.selection( ( ('choose','choose'), # choose language
|
||||||
('get','get'), # get the file
|
('get','get'), # get the file
|
||||||
) ),
|
) ),
|
||||||
}
|
}
|
||||||
_defaults = { 'state': lambda *a: 'choose', }
|
_defaults = { 'state': lambda *a: 'choose', }
|
||||||
wizard_export_lang()
|
wizard_export_lang()
|
||||||
|
|
|
@ -36,53 +36,53 @@ from tempfile import TemporaryFile
|
||||||
|
|
||||||
view_form="""<?xml version="1.0"?>
|
view_form="""<?xml version="1.0"?>
|
||||||
<form string="Import language">
|
<form string="Import language">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<separator string="Import new language" colspan="4"/>
|
<separator string="Import new language" colspan="4"/>
|
||||||
<field name="name"/>
|
<field name="name"/>
|
||||||
<field name="code"/>
|
<field name="code"/>
|
||||||
<field name="data" colspan="3"/>
|
<field name="data" colspan="3"/>
|
||||||
<label string="You have to import a .CSV file wich is encoded in UTF-8.\nPlease check that the first line of your file is:" colspan="4" align="0.0"/>
|
<label string="You have to import a .CSV file wich is encoded in UTF-8.\nPlease check that the first line of your file is:" colspan="4" align="0.0"/>
|
||||||
<label string="type,name,res_id,src,value" colspan="4"/>
|
<label string="type,name,res_id,src,value" colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
fields_form={
|
fields_form={
|
||||||
'name':{'string':'Language name', 'type':'char', 'size':64, 'required':True},
|
'name':{'string':'Language name', 'type':'char', 'size':64, 'required':True},
|
||||||
'code':{'string':'Code (eg:en__US)', 'type':'char', 'size':5, 'required':True},
|
'code':{'string':'Code (eg:en__US)', 'type':'char', 'size':5, 'required':True},
|
||||||
'data':{'string':'File', 'type':'binary', 'required':True},
|
'data':{'string':'File', 'type':'binary', 'required':True},
|
||||||
}
|
}
|
||||||
|
|
||||||
class wizard_import_lang(wizard.interface):
|
class wizard_import_lang(wizard.interface):
|
||||||
|
|
||||||
def _import_lang(self, cr, uid, data, context):
|
def _import_lang(self, cr, uid, data, context):
|
||||||
form=data['form']
|
form=data['form']
|
||||||
fileobj = TemporaryFile('w+')
|
fileobj = TemporaryFile('w+')
|
||||||
fileobj.write( base64.decodestring(form['data']) )
|
fileobj.write( base64.decodestring(form['data']) )
|
||||||
|
|
||||||
# now we determine the file format
|
# now we determine the file format
|
||||||
fileobj.seek(0)
|
fileobj.seek(0)
|
||||||
first_line = fileobj.readline().strip()
|
first_line = fileobj.readline().strip()
|
||||||
fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
|
fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
|
||||||
fileobj.seek(0)
|
fileobj.seek(0)
|
||||||
|
|
||||||
tools.trans_load_data(cr.dbname, fileobj, fileformat, form['code'], lang_name=form['name'])
|
tools.trans_load_data(cr.dbname, fileobj, fileformat, form['code'], lang_name=form['name'])
|
||||||
fileobj.close()
|
fileobj.close()
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
states={
|
states={
|
||||||
'init':{
|
'init':{
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
|
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
|
||||||
'state':[
|
'state':[
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('finish', 'Ok', 'gtk-ok', True)
|
('finish', 'Ok', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'finish':{
|
'finish':{
|
||||||
'actions':[],
|
'actions':[],
|
||||||
'result':{'type':'action', 'action':_import_lang, 'state':'end'}
|
'result':{'type':'action', 'action':_import_lang, 'state':'end'}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
wizard_import_lang('module.lang.import')
|
wizard_import_lang('module.lang.import')
|
||||||
|
|
|
@ -40,82 +40,82 @@ import base64
|
||||||
|
|
||||||
finish_form ='''<?xml version="1.0"?>
|
finish_form ='''<?xml version="1.0"?>
|
||||||
<form string="Module import">
|
<form string="Module import">
|
||||||
<label string="Module successfully imported !" colspan="4"/>
|
<label string="Module successfully imported !" colspan="4"/>
|
||||||
</form>
|
</form>
|
||||||
'''
|
'''
|
||||||
|
|
||||||
ask_form ='''<?xml version="1.0"?>
|
ask_form ='''<?xml version="1.0"?>
|
||||||
<form string="Module import">
|
<form string="Module import">
|
||||||
<separator string="Module import" colspan="4"/>
|
<separator string="Module import" colspan="4"/>
|
||||||
<label string="Please give your module .ZIP file to import." colspan="4"/>
|
<label string="Please give your module .ZIP file to import." colspan="4"/>
|
||||||
<field name="module_file"/>
|
<field name="module_file"/>
|
||||||
</form>
|
</form>
|
||||||
'''
|
'''
|
||||||
|
|
||||||
ask_fields = {
|
ask_fields = {
|
||||||
'module_file': {'string': 'Module .ZIP file', 'type': 'binary', 'required': True},
|
'module_file': {'string': 'Module .ZIP file', 'type': 'binary', 'required': True},
|
||||||
}
|
}
|
||||||
|
|
||||||
class move_module_wizard(wizard.interface):
|
class move_module_wizard(wizard.interface):
|
||||||
def importzip(self, cr, uid, data, context):
|
def importzip(self, cr, uid, data, context):
|
||||||
module_obj=pooler.get_pool(cr.dbname).get('ir.module.module')
|
module_obj=pooler.get_pool(cr.dbname).get('ir.module.module')
|
||||||
module_data = data['form']['module_file']
|
module_data = data['form']['module_file']
|
||||||
|
|
||||||
val =base64.decodestring(module_data)
|
val =base64.decodestring(module_data)
|
||||||
fp = StringIO.StringIO(val)
|
fp = StringIO.StringIO(val)
|
||||||
fdata = zipfile.ZipFile(fp, 'r')
|
fdata = zipfile.ZipFile(fp, 'r')
|
||||||
fname = fdata.namelist()[0]
|
fname = fdata.namelist()[0]
|
||||||
module_name = os.path.split(fname)[0]
|
module_name = os.path.split(fname)[0]
|
||||||
|
|
||||||
ad = tools.config['addons_path']
|
ad = tools.config['addons_path']
|
||||||
|
|
||||||
fname = os.path.join(ad,module_name+'.zip')
|
fname = os.path.join(ad,module_name+'.zip')
|
||||||
try:
|
try:
|
||||||
fp = file(fname, 'wb')
|
fp = file(fname, 'wb')
|
||||||
fp.write(val)
|
fp.write(val)
|
||||||
fp.close()
|
fp.close()
|
||||||
except IOError, e:
|
except IOError, e:
|
||||||
raise wizard.except_wizard(_('Error !'), _('Can not create the module file: %s !') % (fname,) )
|
raise wizard.except_wizard(_('Error !'), _('Can not create the module file: %s !') % (fname,) )
|
||||||
|
|
||||||
pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
|
pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
|
||||||
return {'module_name': module_name}
|
return {'module_name': module_name}
|
||||||
|
|
||||||
def _action_module_open(self, cr, uid, data, context):
|
def _action_module_open(self, cr, uid, data, context):
|
||||||
return {
|
return {
|
||||||
'domain': str([('name', '=', data['form']['module_name'])]),
|
'domain': str([('name', '=', data['form']['module_name'])]),
|
||||||
'name': 'Module List',
|
'name': 'Module List',
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
'view_mode': 'tree,form',
|
'view_mode': 'tree,form',
|
||||||
'res_model': 'ir.module.module',
|
'res_model': 'ir.module.module',
|
||||||
'view_id': False,
|
'view_id': False,
|
||||||
'type': 'ir.actions.act_window'
|
'type': 'ir.actions.act_window'
|
||||||
}
|
}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {
|
'result': {
|
||||||
'type': 'form',
|
'type': 'form',
|
||||||
'arch': ask_form,
|
'arch': ask_form,
|
||||||
'fields': ask_fields,
|
'fields': ask_fields,
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('import', 'Import module', 'gtk-ok', True)
|
('import', 'Import module', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'import': {
|
'import': {
|
||||||
'actions': [importzip],
|
'actions': [importzip],
|
||||||
'result': {
|
'result': {
|
||||||
'type':'form',
|
'type':'form',
|
||||||
'arch':finish_form,
|
'arch':finish_form,
|
||||||
'fields':{},
|
'fields':{},
|
||||||
'state':[('open_window','Close')]
|
'state':[('open_window','Close')]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'open_window': {
|
'open_window': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
|
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
move_module_wizard('base.module.import')
|
move_module_wizard('base.module.import')
|
||||||
|
|
|
@ -33,59 +33,59 @@ import tools
|
||||||
|
|
||||||
view_form_end = """<?xml version="1.0"?>
|
view_form_end = """<?xml version="1.0"?>
|
||||||
<form string="Language file loaded.">
|
<form string="Language file loaded.">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<separator string="Installation done" colspan="4"/>
|
<separator string="Installation done" colspan="4"/>
|
||||||
<label align="0.0" string="The selected language has been successfully installed.\nYou must change the preferences of the user and open a new menu to view changes." colspan="4"/>
|
<label align="0.0" string="The selected language has been successfully installed.\nYou must change the preferences of the user and open a new menu to view changes." colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
view_form = """<?xml version="1.0"?>
|
view_form = """<?xml version="1.0"?>
|
||||||
<form string="System Upgrade">
|
<form string="System Upgrade">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<separator string="System Upgrade" colspan="4"/>
|
<separator string="System Upgrade" colspan="4"/>
|
||||||
<label align="0.0" string="Choose a language to install:" colspan="4"/>
|
<label align="0.0" string="Choose a language to install:" colspan="4"/>
|
||||||
<field name="lang" colspan="4"/>
|
<field name="lang" colspan="4"/>
|
||||||
<label align="0.0" string="Note that this operation may take a few minutes." colspan="4"/>
|
<label align="0.0" string="Note that this operation may take a few minutes." colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
|
|
||||||
class wizard_lang_install(wizard.interface):
|
class wizard_lang_install(wizard.interface):
|
||||||
def _lang_install(self, cr, uid, data, context):
|
def _lang_install(self, cr, uid, data, context):
|
||||||
lang = data['form']['lang']
|
lang = data['form']['lang']
|
||||||
if lang and lang != 'en_US':
|
if lang and lang != 'en_US':
|
||||||
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
|
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
|
||||||
tools.trans_load(cr.dbname, filename, lang)
|
tools.trans_load(cr.dbname, filename, lang)
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def _get_language(sel, cr, uid, context):
|
def _get_language(sel, cr, uid, context):
|
||||||
return tools.scan_languages()
|
return tools.scan_languages()
|
||||||
|
|
||||||
fields_form = {
|
fields_form = {
|
||||||
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,
|
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
|
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('start', 'Start installation', 'gtk-ok', True)
|
('start', 'Start installation', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'start': {
|
'start': {
|
||||||
'actions': [_lang_install],
|
'actions': [_lang_install],
|
||||||
'result': {'type': 'form', 'arch': view_form_end, 'fields': {},
|
'result': {'type': 'form', 'arch': view_form_end, 'fields': {},
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Ok', 'gtk-ok', True)
|
('end', 'Ok', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
wizard_lang_install('module.lang.install')
|
wizard_lang_install('module.lang.install')
|
||||||
|
|
||||||
|
|
|
@ -35,121 +35,121 @@ import tools
|
||||||
|
|
||||||
view_form_end = """<?xml version="1.0"?>
|
view_form_end = """<?xml version="1.0"?>
|
||||||
<form string="System upgrade done">
|
<form string="System upgrade done">
|
||||||
<separator string="System upgrade done"/>
|
<separator string="System upgrade done"/>
|
||||||
<label align="0.0" string="The modules have been upgraded / installed !" colspan="4"/>
|
<label align="0.0" string="The modules have been upgraded / installed !" colspan="4"/>
|
||||||
<label align="0.0" string="You may have to reinstall some language pack." colspan="4"/>
|
<label align="0.0" string="You may have to reinstall some language pack." colspan="4"/>
|
||||||
<label align="0.0" string="We suggest you to reload the menu tab (Ctrl+t Ctrl+r)." colspan="4"/>
|
<label align="0.0" string="We suggest you to reload the menu tab (Ctrl+t Ctrl+r)." colspan="4"/>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
view_form = """<?xml version="1.0"?>
|
view_form = """<?xml version="1.0"?>
|
||||||
<form string="System Upgrade">
|
<form string="System Upgrade">
|
||||||
<image name="gtk-dialog-info" colspan="2"/>
|
<image name="gtk-dialog-info" colspan="2"/>
|
||||||
<group colspan="2" col="4">
|
<group colspan="2" col="4">
|
||||||
<label align="0.0" string="Your system will be upgraded." colspan="4"/>
|
<label align="0.0" string="Your system will be upgraded." colspan="4"/>
|
||||||
<label align="0.0" string="Note that this operation my take a few minutes." colspan="4"/>
|
<label align="0.0" string="Note that this operation my take a few minutes." colspan="4"/>
|
||||||
<separator string="Modules to update"/>
|
<separator string="Modules to update"/>
|
||||||
<field name="module_info" nolabel="1" colspan="4"/>
|
<field name="module_info" nolabel="1" colspan="4"/>
|
||||||
<separator string="Modules to download"/>
|
<separator string="Modules to download"/>
|
||||||
<field name="module_download" nolabel="1" colspan="4"/>
|
<field name="module_download" nolabel="1" colspan="4"/>
|
||||||
</group>
|
</group>
|
||||||
</form>"""
|
</form>"""
|
||||||
|
|
||||||
view_field = {
|
view_field = {
|
||||||
"module_info": {'type': 'text', 'string': 'Modules to update',
|
"module_info": {'type': 'text', 'string': 'Modules to update',
|
||||||
'readonly': True},
|
'readonly': True},
|
||||||
"module_download": {'type': 'text', 'string': 'Modules to download',
|
"module_download": {'type': 'text', 'string': 'Modules to download',
|
||||||
'readonly': True},
|
'readonly': True},
|
||||||
}
|
}
|
||||||
|
|
||||||
class wizard_info_get(wizard.interface):
|
class wizard_info_get(wizard.interface):
|
||||||
def _get_install(self, cr, uid, data, context):
|
def _get_install(self, cr, uid, data, context):
|
||||||
pool=pooler.get_pool(cr.dbname)
|
pool=pooler.get_pool(cr.dbname)
|
||||||
mod_obj = pool.get('ir.module.module')
|
mod_obj = pool.get('ir.module.module')
|
||||||
ids = mod_obj.search(cr, uid, [
|
ids = mod_obj.search(cr, uid, [
|
||||||
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
||||||
res = mod_obj.read(cr, uid, ids, ['name','state'], context)
|
res = mod_obj.read(cr, uid, ids, ['name','state'], context)
|
||||||
url = mod_obj.download(cr, uid, ids, download=False, context=context)
|
url = mod_obj.download(cr, uid, ids, download=False, context=context)
|
||||||
return {'module_info': '\n'.join(map(lambda x: x['name']+' : '+x['state'], res)),
|
return {'module_info': '\n'.join(map(lambda x: x['name']+' : '+x['state'], res)),
|
||||||
'module_download': '\n'.join(url)}
|
'module_download': '\n'.join(url)}
|
||||||
|
|
||||||
def _check_upgrade_module(self,cr,uid,data,context):
|
def _check_upgrade_module(self,cr,uid,data,context):
|
||||||
db, pool = pooler.get_db_and_pool(cr.dbname)
|
db, pool = pooler.get_db_and_pool(cr.dbname)
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
mod_obj = pool.get('ir.module.module')
|
mod_obj = pool.get('ir.module.module')
|
||||||
ids = mod_obj.search(cr, uid, [
|
ids = mod_obj.search(cr, uid, [
|
||||||
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
||||||
if ids and len(ids):
|
if ids and len(ids):
|
||||||
return 'next'
|
return 'next'
|
||||||
else:
|
else:
|
||||||
return 'end'
|
return 'end'
|
||||||
|
|
||||||
def _upgrade_module(self, cr, uid, data, context):
|
def _upgrade_module(self, cr, uid, data, context):
|
||||||
db, pool = pooler.get_db_and_pool(cr.dbname)
|
db, pool = pooler.get_db_and_pool(cr.dbname)
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
mod_obj = pool.get('ir.module.module')
|
mod_obj = pool.get('ir.module.module')
|
||||||
ids = mod_obj.search(cr, uid, [
|
ids = mod_obj.search(cr, uid, [
|
||||||
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
|
||||||
mod_obj.download(cr, uid, ids, context=context)
|
mod_obj.download(cr, uid, ids, context=context)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
db, pool = pooler.restart_pool(cr.dbname, update_module=True)
|
db, pool = pooler.restart_pool(cr.dbname, update_module=True)
|
||||||
|
|
||||||
lang_obj=pool.get('res.lang')
|
lang_obj=pool.get('res.lang')
|
||||||
lang_ids=lang_obj.search(cr, uid, [])
|
lang_ids=lang_obj.search(cr, uid, [])
|
||||||
langs=lang_obj.browse(cr, uid, lang_ids)
|
langs=lang_obj.browse(cr, uid, lang_ids)
|
||||||
for lang in langs:
|
for lang in langs:
|
||||||
if lang.code and lang.code != 'en_US':
|
if lang.code and lang.code != 'en_US':
|
||||||
filename=os.path.join(tools.config["root_path"], "i18n", lang.code + ".csv")
|
filename=os.path.join(tools.config["root_path"], "i18n", lang.code + ".csv")
|
||||||
tools.trans_load(cr.dbname, filename, lang.code)
|
tools.trans_load(cr.dbname, filename, lang.code)
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def _config(self, cr, uid, data, context=None):
|
def _config(self, cr, uid, data, context=None):
|
||||||
return {
|
return {
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'ir.module.module.configuration.wizard',
|
'res_model': 'ir.module.module.configuration.wizard',
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result' : {'type': 'choice', 'next_state': _check_upgrade_module }
|
'result' : {'type': 'choice', 'next_state': _check_upgrade_module }
|
||||||
},
|
},
|
||||||
'next': {
|
'next': {
|
||||||
'actions': [_get_install],
|
'actions': [_get_install],
|
||||||
'result': {'type':'form', 'arch':view_form, 'fields': view_field,
|
'result': {'type':'form', 'arch':view_form, 'fields': view_field,
|
||||||
'state':[
|
'state':[
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('start', 'Start Upgrade', 'gtk-ok', True)
|
('start', 'Start Upgrade', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'start': {
|
'start': {
|
||||||
'actions': [_upgrade_module],
|
'actions': [_upgrade_module],
|
||||||
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
|
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
|
||||||
'state':[
|
'state':[
|
||||||
('end', 'Close', 'gtk-close', True),
|
('end', 'Close', 'gtk-close', True),
|
||||||
('config', 'Start configuration', 'gtk-ok', True)
|
('config', 'Start configuration', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'end': {
|
'end': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
|
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
|
||||||
'state':[
|
'state':[
|
||||||
('end', 'Close', 'gtk-close', True),
|
('end', 'Close', 'gtk-close', True),
|
||||||
('config', 'Start configuration', 'gtk-ok', True)
|
('config', 'Start configuration', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'config':{
|
'config':{
|
||||||
'result': {
|
'result': {
|
||||||
'type': 'action',
|
'type': 'action',
|
||||||
'action': _config,
|
'action': _config,
|
||||||
'state': 'end',
|
'state': 'end',
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
wizard_info_get('module.upgrade')
|
wizard_info_get('module.upgrade')
|
||||||
|
|
||||||
|
|
|
@ -34,70 +34,70 @@ import pooler
|
||||||
|
|
||||||
class wizard_update_module(wizard.interface):
|
class wizard_update_module(wizard.interface):
|
||||||
|
|
||||||
arch = '''<?xml version="1.0"?>
|
arch = '''<?xml version="1.0"?>
|
||||||
<form string="Scan for new modules">
|
<form string="Scan for new modules">
|
||||||
<label string="This function will check for new modules in the 'addons' path and on module repositories:" colspan="4" align="0.0"/>
|
<label string="This function will check for new modules in the 'addons' path and on module repositories:" colspan="4" align="0.0"/>
|
||||||
<field name="repositories" colspan="4" nolabel="1"/>
|
<field name="repositories" colspan="4" nolabel="1"/>
|
||||||
</form>'''
|
</form>'''
|
||||||
fields = {
|
fields = {
|
||||||
'repositories': {'type': 'text', 'string': 'Repositories', 'readonly': True},
|
'repositories': {'type': 'text', 'string': 'Repositories', 'readonly': True},
|
||||||
}
|
}
|
||||||
|
|
||||||
arch_module = '''<?xml version="1.0"?>
|
arch_module = '''<?xml version="1.0"?>
|
||||||
<form string="New modules">
|
<form string="New modules">
|
||||||
<field name="update" colspan="4"/>
|
<field name="update" colspan="4"/>
|
||||||
<field name="add" colspan="4"/>
|
<field name="add" colspan="4"/>
|
||||||
</form>'''
|
</form>'''
|
||||||
|
|
||||||
fields_module = {
|
fields_module = {
|
||||||
'update': {'type': 'integer', 'string': 'Number of modules updated', 'readonly': True},
|
'update': {'type': 'integer', 'string': 'Number of modules updated', 'readonly': True},
|
||||||
'add': {'type': 'integer', 'string': 'Number of modules added', 'readonly': True},
|
'add': {'type': 'integer', 'string': 'Number of modules added', 'readonly': True},
|
||||||
}
|
}
|
||||||
|
|
||||||
def _update_module(self, cr, uid, data, context):
|
def _update_module(self, cr, uid, data, context):
|
||||||
update, add = pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
|
update, add = pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
|
||||||
return {'update': update, 'add': add}
|
return {'update': update, 'add': add}
|
||||||
|
|
||||||
def _action_module_open(self, cr, uid, data, context):
|
def _action_module_open(self, cr, uid, data, context):
|
||||||
return {
|
return {
|
||||||
'domain': str([]),
|
'domain': str([]),
|
||||||
'name': 'Module List',
|
'name': 'Module List',
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
'view_mode': 'tree,form',
|
'view_mode': 'tree,form',
|
||||||
'res_model': 'ir.module.module',
|
'res_model': 'ir.module.module',
|
||||||
'view_id': False,
|
'view_id': False,
|
||||||
'type': 'ir.actions.act_window'
|
'type': 'ir.actions.act_window'
|
||||||
}
|
}
|
||||||
|
|
||||||
def _get_repositories(self, cr, uid, data, context):
|
def _get_repositories(self, cr, uid, data, context):
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
repository_obj = pool.get('ir.module.repository')
|
repository_obj = pool.get('ir.module.repository')
|
||||||
ids = repository_obj.search(cr, uid, [])
|
ids = repository_obj.search(cr, uid, [])
|
||||||
res = repository_obj.read(cr, uid, ids, ['name', 'url'], context)
|
res = repository_obj.read(cr, uid, ids, ['name', 'url'], context)
|
||||||
return {'repositories': '\n'.join(map(lambda x: x['name']+': '+x['url'], res))}
|
return {'repositories': '\n'.join(map(lambda x: x['name']+': '+x['url'], res))}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [_get_repositories],
|
'actions': [_get_repositories],
|
||||||
'result': {'type': 'form', 'arch': arch, 'fields': fields,
|
'result': {'type': 'form', 'arch': arch, 'fields': fields,
|
||||||
'state': [
|
'state': [
|
||||||
('end', 'Cancel', 'gtk-cancel'),
|
('end', 'Cancel', 'gtk-cancel'),
|
||||||
('update', 'Check new modules', 'gtk-ok', True)
|
('update', 'Check new modules', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'update': {
|
'update': {
|
||||||
'actions': [_update_module],
|
'actions': [_update_module],
|
||||||
'result': {'type': 'form', 'arch': arch_module, 'fields': fields_module,
|
'result': {'type': 'form', 'arch': arch_module, 'fields': fields_module,
|
||||||
'state': [
|
'state': [
|
||||||
('open_window', 'Ok', 'gtk-ok', True)
|
('open_window', 'Ok', 'gtk-ok', True)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'open_window': {
|
'open_window': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
|
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
wizard_update_module('module.module.update')
|
wizard_update_module('module.module.update')
|
||||||
|
|
||||||
|
|
|
@ -32,27 +32,27 @@ from osv import fields, osv
|
||||||
|
|
||||||
|
|
||||||
class Bank(osv.osv):
|
class Bank(osv.osv):
|
||||||
_description='Bank'
|
_description='Bank'
|
||||||
_name = 'res.bank'
|
_name = 'res.bank'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=128, required=True),
|
'name': fields.char('Name', size=128, required=True),
|
||||||
'code': fields.char('Code', size=64),
|
'code': fields.char('Code', size=64),
|
||||||
'street': fields.char('Street', size=128),
|
'street': fields.char('Street', size=128),
|
||||||
'street2': fields.char('Street2', size=128),
|
'street2': fields.char('Street2', size=128),
|
||||||
'zip': fields.char('Zip', change_default=True, size=24),
|
'zip': fields.char('Zip', change_default=True, size=24),
|
||||||
'city': fields.char('City', size=128),
|
'city': fields.char('City', size=128),
|
||||||
'state': fields.many2one("res.country.state", 'State',
|
'state': fields.many2one("res.country.state", 'State',
|
||||||
domain="[('country_id', '=', country)]"),
|
domain="[('country_id', '=', country)]"),
|
||||||
'country': fields.many2one('res.country', 'Country'),
|
'country': fields.many2one('res.country', 'Country'),
|
||||||
'email': fields.char('E-Mail', size=64),
|
'email': fields.char('E-Mail', size=64),
|
||||||
'phone': fields.char('Phone', size=64),
|
'phone': fields.char('Phone', size=64),
|
||||||
'fax': fields.char('Fax', size=64),
|
'fax': fields.char('Fax', size=64),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'bic': fields.char('BIC/Swift code', size=11,
|
'bic': fields.char('BIC/Swift code', size=11,
|
||||||
help="Bank Identifier Code"),
|
help="Bank Identifier Code"),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
}
|
}
|
||||||
|
|
||||||
Bank()
|
Bank()
|
||||||
|
|
|
@ -32,76 +32,76 @@ from osv import fields, osv
|
||||||
|
|
||||||
|
|
||||||
class Country(osv.osv):
|
class Country(osv.osv):
|
||||||
_name = 'res.country'
|
_name = 'res.country'
|
||||||
_description = 'Country'
|
_description = 'Country'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Country Name', size=64,
|
'name': fields.char('Country Name', size=64,
|
||||||
help='The full name of the country.', required=True),
|
help='The full name of the country.', required=True),
|
||||||
'code': fields.char('Country Code', size=2,
|
'code': fields.char('Country Code', size=2,
|
||||||
help='The ISO country code in two chars.\n'
|
help='The ISO country code in two chars.\n'
|
||||||
'You can use this field for quick search.', required=True),
|
'You can use this field for quick search.', required=True),
|
||||||
}
|
}
|
||||||
_sql_constraints = [
|
_sql_constraints = [
|
||||||
('name_uniq', 'unique (name)',
|
('name_uniq', 'unique (name)',
|
||||||
'The name of the country must be unique !'),
|
'The name of the country must be unique !'),
|
||||||
('code_uniq', 'unique (code)',
|
('code_uniq', 'unique (code)',
|
||||||
'The code of the country must be unique !')
|
'The code of the country must be unique !')
|
||||||
]
|
]
|
||||||
|
|
||||||
def name_search(self, cr, user, name='', args=None, operator='ilike',
|
def name_search(self, cr, user, name='', args=None, operator='ilike',
|
||||||
context=None, limit=80):
|
context=None, limit=80):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
ids = False
|
ids = False
|
||||||
if len(name) == 2:
|
if len(name) == 2:
|
||||||
ids = self.search(cr, user, [('code', '=', name.upper())] + args,
|
ids = self.search(cr, user, [('code', '=', name.upper())] + args,
|
||||||
limit=limit, context=context)
|
limit=limit, context=context)
|
||||||
if not ids:
|
if not ids:
|
||||||
ids = self.search(cr, user, [('name', operator, name)] + args,
|
ids = self.search(cr, user, [('name', operator, name)] + args,
|
||||||
limit=limit, context=context)
|
limit=limit, context=context)
|
||||||
return self.name_get(cr, user, ids, context)
|
return self.name_get(cr, user, ids, context)
|
||||||
_order='name'
|
_order='name'
|
||||||
|
|
||||||
def create(self, cursor, user, vals, context=None):
|
def create(self, cursor, user, vals, context=None):
|
||||||
if 'code' in vals:
|
if 'code' in vals:
|
||||||
vals['code'] = vals['code'].upper()
|
vals['code'] = vals['code'].upper()
|
||||||
return super(Country, self).create(cursor, user, vals,
|
return super(Country, self).create(cursor, user, vals,
|
||||||
context=context)
|
context=context)
|
||||||
|
|
||||||
def write(self, cursor, user, ids, vals, context=None):
|
def write(self, cursor, user, ids, vals, context=None):
|
||||||
if 'code' in vals:
|
if 'code' in vals:
|
||||||
vals['code'] = vals['code'].upper()
|
vals['code'] = vals['code'].upper()
|
||||||
return super(Country, self).write(cursor, user, ids, vals,
|
return super(Country, self).write(cursor, user, ids, vals,
|
||||||
context=context)
|
context=context)
|
||||||
|
|
||||||
Country()
|
Country()
|
||||||
|
|
||||||
|
|
||||||
class CountryState(osv.osv):
|
class CountryState(osv.osv):
|
||||||
_description="Country state"
|
_description="Country state"
|
||||||
_name = 'res.country.state'
|
_name = 'res.country.state'
|
||||||
_columns = {
|
_columns = {
|
||||||
'country_id': fields.many2one('res.country', 'Country',
|
'country_id': fields.many2one('res.country', 'Country',
|
||||||
required=True),
|
required=True),
|
||||||
'name': fields.char('State Name', size=64, required=True),
|
'name': fields.char('State Name', size=64, required=True),
|
||||||
'code': fields.char('State Code', size=3, required=True),
|
'code': fields.char('State Code', size=3, required=True),
|
||||||
}
|
}
|
||||||
def name_search(self, cr, user, name='', args=None, operator='ilike',
|
def name_search(self, cr, user, name='', args=None, operator='ilike',
|
||||||
context=None, limit=80):
|
context=None, limit=80):
|
||||||
if not args:
|
if not args:
|
||||||
args = []
|
args = []
|
||||||
if not context:
|
if not context:
|
||||||
context = {}
|
context = {}
|
||||||
ids = self.search(cr, user, [('code', '=', name)] + args, limit=limit,
|
ids = self.search(cr, user, [('code', '=', name)] + args, limit=limit,
|
||||||
context=context)
|
context=context)
|
||||||
if not ids:
|
if not ids:
|
||||||
ids = self.search(cr, user, [('name', operator, name)] + args,
|
ids = self.search(cr, user, [('name', operator, name)] + args,
|
||||||
limit=limit, context=context)
|
limit=limit, context=context)
|
||||||
return self.name_get(cr, user, ids, context)
|
return self.name_get(cr, user, ids, context)
|
||||||
|
|
||||||
_order = 'code'
|
_order = 'code'
|
||||||
CountryState()
|
CountryState()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -34,52 +34,52 @@ from osv import osv,fields
|
||||||
# -------------------------------------------------------------------------
|
# -------------------------------------------------------------------------
|
||||||
|
|
||||||
def _models_get2(self, cr, uid, context={}):
|
def _models_get2(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('ir.model.fields')
|
obj = self.pool.get('ir.model.fields')
|
||||||
ids = obj.search(cr, uid, [('view_load','=',1)])
|
ids = obj.search(cr, uid, [('view_load','=',1)])
|
||||||
res = []
|
res = []
|
||||||
done = {}
|
done = {}
|
||||||
for o in obj.browse(cr, uid, ids, context=context):
|
for o in obj.browse(cr, uid, ids, context=context):
|
||||||
if o.relation not in done:
|
if o.relation not in done:
|
||||||
res.append( [o.relation, o.relation])
|
res.append( [o.relation, o.relation])
|
||||||
done[o.relation] = True
|
done[o.relation] = True
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _models_get(self, cr, uid, context={}):
|
def _models_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('ir.model.fields')
|
obj = self.pool.get('ir.model.fields')
|
||||||
ids = obj.search(cr, uid, [('view_load','=',1)])
|
ids = obj.search(cr, uid, [('view_load','=',1)])
|
||||||
res = []
|
res = []
|
||||||
done = {}
|
done = {}
|
||||||
for o in obj.browse(cr, uid, ids, context=context):
|
for o in obj.browse(cr, uid, ids, context=context):
|
||||||
if o.model_id.id not in done:
|
if o.model_id.id not in done:
|
||||||
res.append( [o.model_id.model, o.model_id.name])
|
res.append( [o.model_id.model, o.model_id.name])
|
||||||
done[o.model_id.id] = True
|
done[o.model_id.id] = True
|
||||||
return res
|
return res
|
||||||
|
|
||||||
class ir_property(osv.osv):
|
class ir_property(osv.osv):
|
||||||
_name = 'ir.property'
|
_name = 'ir.property'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=128),
|
'name': fields.char('Name', size=128),
|
||||||
'value': fields.reference('Value', selection=_models_get2, size=128),
|
'value': fields.reference('Value', selection=_models_get2, size=128),
|
||||||
'res_id': fields.reference('Resource', selection=_models_get, size=128),
|
'res_id': fields.reference('Resource', selection=_models_get, size=128),
|
||||||
'company_id': fields.many2one('res.company', 'Company'),
|
'company_id': fields.many2one('res.company', 'Company'),
|
||||||
'fields_id': fields.many2one('ir.model.fields', 'Fields', ondelete='cascade', required=True)
|
'fields_id': fields.many2one('ir.model.fields', 'Fields', ondelete='cascade', required=True)
|
||||||
}
|
}
|
||||||
def unlink(self, cr, uid, ids, context={}):
|
def unlink(self, cr, uid, ids, context={}):
|
||||||
if ids:
|
if ids:
|
||||||
cr.execute('delete from ir_model_fields where id in (select fields_id from ir_property where (fields_id is not null) and (id in ('+','.join(map(str,ids))+')))')
|
cr.execute('delete from ir_model_fields where id in (select fields_id from ir_property where (fields_id is not null) and (id in ('+','.join(map(str,ids))+')))')
|
||||||
res = super(ir_property, self).unlink(cr, uid, ids, context)
|
res = super(ir_property, self).unlink(cr, uid, ids, context)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def get(self, cr, uid, name, model, res_id=False, context={}):
|
def get(self, cr, uid, name, model, res_id=False, context={}):
|
||||||
cr.execute('select id from ir_model_fields where name=%s and model=%s', (name, model))
|
cr.execute('select id from ir_model_fields where name=%s and model=%s', (name, model))
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
if res:
|
if res:
|
||||||
ucid = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
|
ucid = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
|
||||||
nid = self.search(cr, uid, [('fields_id','=',res[0]),('res_id','=',res_id),('company_id','=',ucid)])
|
nid = self.search(cr, uid, [('fields_id','=',res[0]),('res_id','=',res_id),('company_id','=',ucid)])
|
||||||
if nid:
|
if nid:
|
||||||
d = self.browse(cr, uid, nid[0], context).value
|
d = self.browse(cr, uid, nid[0], context).value
|
||||||
return (d and int(d.split(',')[1])) or False
|
return (d and int(d.split(',')[1])) or False
|
||||||
return False
|
return False
|
||||||
ir_property()
|
ir_property()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -35,72 +35,72 @@ from osv import fields,osv
|
||||||
# Sale/Purchase Canal, Media
|
# Sale/Purchase Canal, Media
|
||||||
#
|
#
|
||||||
class res_partner_canal(osv.osv):
|
class res_partner_canal(osv.osv):
|
||||||
_name = "res.partner.canal"
|
_name = "res.partner.canal"
|
||||||
_description = "Channels"
|
_description = "Channels"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Channel Name',size=64, required=True),
|
'name': fields.char('Channel Name',size=64, required=True),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
}
|
}
|
||||||
res_partner_canal()
|
res_partner_canal()
|
||||||
|
|
||||||
#
|
#
|
||||||
# Partner: State of Mind
|
# Partner: State of Mind
|
||||||
#
|
#
|
||||||
class res_partner_som(osv.osv):
|
class res_partner_som(osv.osv):
|
||||||
_name = "res.partner.som"
|
_name = "res.partner.som"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('State of Mind',size=64, required=True),
|
'name': fields.char('State of Mind',size=64, required=True),
|
||||||
'factor': fields.float('Factor', required=True)
|
'factor': fields.float('Factor', required=True)
|
||||||
}
|
}
|
||||||
res_partner_som()
|
res_partner_som()
|
||||||
|
|
||||||
def _links_get(self, cr, uid, context={}):
|
def _links_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.request.link')
|
obj = self.pool.get('res.request.link')
|
||||||
ids = obj.search(cr, uid, [])
|
ids = obj.search(cr, uid, [])
|
||||||
res = obj.read(cr, uid, ids, ['object', 'name'], context)
|
res = obj.read(cr, uid, ids, ['object', 'name'], context)
|
||||||
return [(r['object'], r['name']) for r in res]
|
return [(r['object'], r['name']) for r in res]
|
||||||
|
|
||||||
class res_partner_event(osv.osv):
|
class res_partner_event(osv.osv):
|
||||||
_name = "res.partner.event"
|
_name = "res.partner.event"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Events',size=64, required=True),
|
'name': fields.char('Events',size=64, required=True),
|
||||||
'som': fields.many2one('res.partner.som', 'State of Mind'),
|
'som': fields.many2one('res.partner.som', 'State of Mind'),
|
||||||
'description': fields.text('Description'),
|
'description': fields.text('Description'),
|
||||||
'planned_cost': fields.float('Planned Cost'),
|
'planned_cost': fields.float('Planned Cost'),
|
||||||
'planned_revenue': fields.float('Planned Revenue'),
|
'planned_revenue': fields.float('Planned Revenue'),
|
||||||
'probability': fields.float('Probability (0.50)'),
|
'probability': fields.float('Probability (0.50)'),
|
||||||
'document': fields.reference('Document', selection=_links_get, size=128),
|
'document': fields.reference('Document', selection=_links_get, size=128),
|
||||||
'partner_id': fields.many2one('res.partner', 'Partner', select=True),
|
'partner_id': fields.many2one('res.partner', 'Partner', select=True),
|
||||||
'date': fields.datetime('Date', size=16),
|
'date': fields.datetime('Date', size=16),
|
||||||
'user_id': fields.many2one('res.users', 'User'),
|
'user_id': fields.many2one('res.users', 'User'),
|
||||||
'canal_id': fields.many2one('res.partner.canal', 'Channel'),
|
'canal_id': fields.many2one('res.partner.canal', 'Channel'),
|
||||||
'partner_type': fields.selection([('customer','Customer'),('retailer','Retailer'),('prospect','Commercial Prospect')], 'Partner Relation'),
|
'partner_type': fields.selection([('customer','Customer'),('retailer','Retailer'),('prospect','Commercial Prospect')], 'Partner Relation'),
|
||||||
'type': fields.selection([('sale','Sale Opportunity'),('purchase','Purchase Offer'),('prospect','Prospect Contact')], 'Type of Event'),
|
'type': fields.selection([('sale','Sale Opportunity'),('purchase','Purchase Offer'),('prospect','Prospect Contact')], 'Type of Event'),
|
||||||
'event_ical_id': fields.char('iCal id', size=64),
|
'event_ical_id': fields.char('iCal id', size=64),
|
||||||
}
|
}
|
||||||
_order = 'date desc'
|
_order = 'date desc'
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
}
|
}
|
||||||
res_partner_event()
|
res_partner_event()
|
||||||
|
|
||||||
|
|
||||||
class res_partner_event_type(osv.osv):
|
class res_partner_event_type(osv.osv):
|
||||||
_name = "res.partner.event.type"
|
_name = "res.partner.event.type"
|
||||||
_description = "Partner Events"
|
_description = "Partner Events"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Event Type',size=64, required=True),
|
'name': fields.char('Event Type',size=64, required=True),
|
||||||
'key': fields.char('Key', size=64, required=True),
|
'key': fields.char('Key', size=64, required=True),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1
|
'active': lambda *a: 1
|
||||||
}
|
}
|
||||||
def check(self, cr, uid, key, context={}):
|
def check(self, cr, uid, key, context={}):
|
||||||
return self.search(cr, uid, [('key','=',key)])
|
return self.search(cr, uid, [('key','=',key)])
|
||||||
res_partner_event_type()
|
res_partner_event_type()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -35,399 +35,399 @@ import ir
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
class res_partner_function(osv.osv):
|
class res_partner_function(osv.osv):
|
||||||
_name = 'res.partner.function'
|
_name = 'res.partner.function'
|
||||||
_description = 'Function of the contact'
|
_description = 'Function of the contact'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Function name', size=64, required=True),
|
'name': fields.char('Function name', size=64, required=True),
|
||||||
'code': fields.char('Code', size=8),
|
'code': fields.char('Code', size=8),
|
||||||
}
|
}
|
||||||
_order = 'name'
|
_order = 'name'
|
||||||
res_partner_function()
|
res_partner_function()
|
||||||
|
|
||||||
|
|
||||||
class res_payterm(osv.osv):
|
class res_payterm(osv.osv):
|
||||||
_description = 'Payment term'
|
_description = 'Payment term'
|
||||||
_name = 'res.payterm'
|
_name = 'res.payterm'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Payment term (short name)', size=64),
|
'name': fields.char('Payment term (short name)', size=64),
|
||||||
}
|
}
|
||||||
res_payterm()
|
res_payterm()
|
||||||
|
|
||||||
class res_partner_category(osv.osv):
|
class res_partner_category(osv.osv):
|
||||||
def name_get(self, cr, uid, ids, context={}):
|
def name_get(self, cr, uid, ids, context={}):
|
||||||
if not len(ids):
|
if not len(ids):
|
||||||
return []
|
return []
|
||||||
reads = self.read(cr, uid, ids, ['name','parent_id'], context)
|
reads = self.read(cr, uid, ids, ['name','parent_id'], context)
|
||||||
res = []
|
res = []
|
||||||
for record in reads:
|
for record in reads:
|
||||||
name = record['name']
|
name = record['name']
|
||||||
if record['parent_id']:
|
if record['parent_id']:
|
||||||
name = record['parent_id'][1]+' / '+name
|
name = record['parent_id'][1]+' / '+name
|
||||||
res.append((record['id'], name))
|
res.append((record['id'], name))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, unknow_dict):
|
def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, unknow_dict):
|
||||||
res = self.name_get(cr, uid, ids)
|
res = self.name_get(cr, uid, ids)
|
||||||
return dict(res)
|
return dict(res)
|
||||||
def _check_recursion(self, cr, uid, ids):
|
def _check_recursion(self, cr, uid, ids):
|
||||||
level = 100
|
level = 100
|
||||||
while len(ids):
|
while len(ids):
|
||||||
cr.execute('select distinct parent_id from res_partner_category where id in ('+','.join(map(str,ids))+')')
|
cr.execute('select distinct parent_id from res_partner_category where id in ('+','.join(map(str,ids))+')')
|
||||||
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
|
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
|
||||||
if not level:
|
if not level:
|
||||||
return False
|
return False
|
||||||
level -= 1
|
level -= 1
|
||||||
return True
|
return True
|
||||||
|
|
||||||
_description='Partner Categories'
|
_description='Partner Categories'
|
||||||
_name = 'res.partner.category'
|
_name = 'res.partner.category'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Category Name', required=True, size=64),
|
'name': fields.char('Category Name', required=True, size=64),
|
||||||
'parent_id': fields.many2one('res.partner.category', 'Parent Category', select=True),
|
'parent_id': fields.many2one('res.partner.category', 'Parent Category', select=True),
|
||||||
'complete_name': fields.function(_name_get_fnc, method=True, type="char", string='Name'),
|
'complete_name': fields.function(_name_get_fnc, method=True, type="char", string='Name'),
|
||||||
'child_ids': fields.one2many('res.partner.category', 'parent_id', 'Childs Category'),
|
'child_ids': fields.one2many('res.partner.category', 'parent_id', 'Childs Category'),
|
||||||
'active' : fields.boolean('Active', help="The active field allows you to hide the category, without removing it."),
|
'active' : fields.boolean('Active', help="The active field allows you to hide the category, without removing it."),
|
||||||
}
|
}
|
||||||
_constraints = [
|
_constraints = [
|
||||||
(_check_recursion, 'Error ! You can not create recursive categories.', ['parent_id'])
|
(_check_recursion, 'Error ! You can not create recursive categories.', ['parent_id'])
|
||||||
]
|
]
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active' : lambda *a: 1,
|
'active' : lambda *a: 1,
|
||||||
}
|
}
|
||||||
_order = 'parent_id,name'
|
_order = 'parent_id,name'
|
||||||
res_partner_category()
|
res_partner_category()
|
||||||
|
|
||||||
class res_partner_title(osv.osv):
|
class res_partner_title(osv.osv):
|
||||||
_name = 'res.partner.title'
|
_name = 'res.partner.title'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Title', required=True, size=46, translate=True),
|
'name': fields.char('Title', required=True, size=46, translate=True),
|
||||||
'shortcut': fields.char('Shortcut', required=True, size=16),
|
'shortcut': fields.char('Shortcut', required=True, size=16),
|
||||||
'domain': fields.selection([('partner','Partner'),('contact','Contact')], 'Domain', required=True, size=24)
|
'domain': fields.selection([('partner','Partner'),('contact','Contact')], 'Domain', required=True, size=24)
|
||||||
}
|
}
|
||||||
_order = 'name'
|
_order = 'name'
|
||||||
res_partner_title()
|
res_partner_title()
|
||||||
|
|
||||||
def _contact_title_get(self, cr, uid, context={}):
|
def _contact_title_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.partner.title')
|
obj = self.pool.get('res.partner.title')
|
||||||
ids = obj.search(cr, uid, [('domain', '=', 'contact')])
|
ids = obj.search(cr, uid, [('domain', '=', 'contact')])
|
||||||
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
||||||
return [(r['shortcut'], r['name']) for r in res]
|
return [(r['shortcut'], r['name']) for r in res]
|
||||||
|
|
||||||
def _partner_title_get(self, cr, uid, context={}):
|
def _partner_title_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.partner.title')
|
obj = self.pool.get('res.partner.title')
|
||||||
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
|
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
|
||||||
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
|
||||||
return [(r['shortcut'], r['name']) for r in res]
|
return [(r['shortcut'], r['name']) for r in res]
|
||||||
|
|
||||||
def _lang_get(self, cr, uid, context={}):
|
def _lang_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.lang')
|
obj = self.pool.get('res.lang')
|
||||||
ids = obj.search(cr, uid, [])
|
ids = obj.search(cr, uid, [])
|
||||||
res = obj.read(cr, uid, ids, ['code', 'name'], context)
|
res = obj.read(cr, uid, ids, ['code', 'name'], context)
|
||||||
res = [(r['code'], r['name']) for r in res]
|
res = [(r['code'], r['name']) for r in res]
|
||||||
return res + [(False, '')]
|
return res + [(False, '')]
|
||||||
|
|
||||||
class res_partner(osv.osv):
|
class res_partner(osv.osv):
|
||||||
_description='Partner'
|
_description='Partner'
|
||||||
_name = "res.partner"
|
_name = "res.partner"
|
||||||
_order = "name"
|
_order = "name"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=128, required=True, select=True),
|
'name': fields.char('Name', size=128, required=True, select=True),
|
||||||
'date': fields.date('Date', select=1),
|
'date': fields.date('Date', select=1),
|
||||||
'title': fields.selection(_partner_title_get, 'Title', size=32),
|
'title': fields.selection(_partner_title_get, 'Title', size=32),
|
||||||
'parent_id': fields.many2one('res.partner','Main Company', select=2),
|
'parent_id': fields.many2one('res.partner','Main Company', select=2),
|
||||||
'child_ids': fields.one2many('res.partner', 'parent_id', 'Partner Ref.'),
|
'child_ids': fields.one2many('res.partner', 'parent_id', 'Partner Ref.'),
|
||||||
'ref': fields.char('Code', size=64),
|
'ref': fields.char('Code', size=64),
|
||||||
'lang': fields.selection(_lang_get, 'Language', size=5),
|
'lang': fields.selection(_lang_get, 'Language', size=5),
|
||||||
'user_id': fields.many2one('res.users', 'Dedicated Salesman', help='The internal user that is in charge of communicating with this partner if any.'),
|
'user_id': fields.many2one('res.users', 'Dedicated Salesman', help='The internal user that is in charge of communicating with this partner if any.'),
|
||||||
'responsible': fields.many2one('res.users', 'Users'),
|
'responsible': fields.many2one('res.users', 'Users'),
|
||||||
'vat': fields.char('VAT',size=32 ,help="Value Added Tax number"),
|
'vat': fields.char('VAT',size=32 ,help="Value Added Tax number"),
|
||||||
'bank_ids': fields.one2many('res.partner.bank', 'partner_id', 'Banks'),
|
'bank_ids': fields.one2many('res.partner.bank', 'partner_id', 'Banks'),
|
||||||
'website': fields.char('Website',size=64),
|
'website': fields.char('Website',size=64),
|
||||||
'comment': fields.text('Notes'),
|
'comment': fields.text('Notes'),
|
||||||
'address': fields.one2many('res.partner.address', 'partner_id', 'Contacts'),
|
'address': fields.one2many('res.partner.address', 'partner_id', 'Contacts'),
|
||||||
'category_id': fields.many2many('res.partner.category', 'res_partner_category_rel', 'partner_id', 'category_id', 'Categories'),
|
'category_id': fields.many2many('res.partner.category', 'res_partner_category_rel', 'partner_id', 'category_id', 'Categories'),
|
||||||
'events': fields.one2many('res.partner.event', 'partner_id', 'Events'),
|
'events': fields.one2many('res.partner.event', 'partner_id', 'Events'),
|
||||||
'credit_limit': fields.float(string='Credit Limit'),
|
'credit_limit': fields.float(string='Credit Limit'),
|
||||||
'ean13': fields.char('EAN13', size=13),
|
'ean13': fields.char('EAN13', size=13),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
}
|
}
|
||||||
_sql_constraints = [
|
_sql_constraints = [
|
||||||
('name_uniq', 'unique (name)', 'The name of the partner must be unique !')
|
('name_uniq', 'unique (name)', 'The name of the partner must be unique !')
|
||||||
]
|
]
|
||||||
|
|
||||||
def copy(self, cr, uid, id, default=None, context={}):
|
def copy(self, cr, uid, id, default=None, context={}):
|
||||||
name = self.read(cr, uid, [id], ['name'])[0]['name']
|
name = self.read(cr, uid, [id], ['name'])[0]['name']
|
||||||
default.update({'name': name+' (copy)'})
|
default.update({'name': name+' (copy)'})
|
||||||
return super(res_partner, self).copy(cr, uid, id, default, context)
|
return super(res_partner, self).copy(cr, uid, id, default, context)
|
||||||
|
|
||||||
def _check_ean_key(self, cr, uid, ids):
|
def _check_ean_key(self, cr, uid, ids):
|
||||||
for partner_o in pooler.get_pool(cr.dbname).get('res.partner').read(cr, uid, ids, ['ean13',]):
|
for partner_o in pooler.get_pool(cr.dbname).get('res.partner').read(cr, uid, ids, ['ean13',]):
|
||||||
thisean=partner_o['ean13']
|
thisean=partner_o['ean13']
|
||||||
if thisean and thisean!='':
|
if thisean and thisean!='':
|
||||||
if len(thisean)!=13:
|
if len(thisean)!=13:
|
||||||
return False
|
return False
|
||||||
sum=0
|
sum=0
|
||||||
for i in range(12):
|
for i in range(12):
|
||||||
if not (i % 2):
|
if not (i % 2):
|
||||||
sum+=int(thisean[i])
|
sum+=int(thisean[i])
|
||||||
else:
|
else:
|
||||||
sum+=3*int(thisean[i])
|
sum+=3*int(thisean[i])
|
||||||
if math.ceil(sum/10.0)*10-sum!=int(thisean[12]):
|
if math.ceil(sum/10.0)*10-sum!=int(thisean[12]):
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# _constraints = [(_check_ean_key, 'Error: Invalid ean code', ['ean13'])]
|
# _constraints = [(_check_ean_key, 'Error: Invalid ean code', ['ean13'])]
|
||||||
|
|
||||||
def name_get(self, cr, uid, ids, context={}):
|
def name_get(self, cr, uid, ids, context={}):
|
||||||
if not len(ids):
|
if not len(ids):
|
||||||
return []
|
return []
|
||||||
if context.get('show_ref', False):
|
if context.get('show_ref', False):
|
||||||
rec_name = 'ref'
|
rec_name = 'ref'
|
||||||
else:
|
else:
|
||||||
rec_name = 'name'
|
rec_name = 'name'
|
||||||
|
|
||||||
res = [(r['id'], r[rec_name]) for r in self.read(cr, uid, ids, [rec_name], context)]
|
res = [(r['id'], r[rec_name]) for r in self.read(cr, uid, ids, [rec_name], context)]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=80):
|
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=80):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
if name:
|
if name:
|
||||||
ids = self.search(cr, uid, [('ref', '=', name)] + args, limit=limit, context=context)
|
ids = self.search(cr, uid, [('ref', '=', name)] + args, limit=limit, context=context)
|
||||||
if not ids:
|
if not ids:
|
||||||
ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
|
ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
|
||||||
else:
|
else:
|
||||||
ids = self.search(cr, uid, args, limit=limit, context=context)
|
ids = self.search(cr, uid, args, limit=limit, context=context)
|
||||||
return self.name_get(cr, uid, ids, context)
|
return self.name_get(cr, uid, ids, context)
|
||||||
|
|
||||||
def _email_send(self, cr, uid, ids, email_from, subject, body, on_error=None):
|
def _email_send(self, cr, uid, ids, email_from, subject, body, on_error=None):
|
||||||
partners = self.browse(cr, uid, ids)
|
partners = self.browse(cr, uid, ids)
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
if len(partner.address):
|
if len(partner.address):
|
||||||
if partner.address[0].email:
|
if partner.address[0].email:
|
||||||
tools.email_send(email_from, [partner.address[0].email], subject, body, on_error)
|
tools.email_send(email_from, [partner.address[0].email], subject, body, on_error)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def email_send(self, cr, uid, ids, email_from, subject, body, on_error=''):
|
def email_send(self, cr, uid, ids, email_from, subject, body, on_error=''):
|
||||||
while len(ids):
|
while len(ids):
|
||||||
self.pool.get('ir.cron').create(cr, uid, {
|
self.pool.get('ir.cron').create(cr, uid, {
|
||||||
'name': 'Send Partner Emails',
|
'name': 'Send Partner Emails',
|
||||||
'user_id': uid,
|
'user_id': uid,
|
||||||
# 'nextcall': False,
|
# 'nextcall': False,
|
||||||
'model': 'res.partner',
|
'model': 'res.partner',
|
||||||
'function': '_email_send',
|
'function': '_email_send',
|
||||||
'args': repr([ids[:16], email_from, subject, body, on_error])
|
'args': repr([ids[:16], email_from, subject, body, on_error])
|
||||||
})
|
})
|
||||||
ids = ids[16:]
|
ids = ids[16:]
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def address_get(self, cr, uid, ids, adr_pref=['default']):
|
def address_get(self, cr, uid, ids, adr_pref=['default']):
|
||||||
cr.execute('select type,id from res_partner_address where partner_id in ('+','.join(map(str,ids))+')')
|
cr.execute('select type,id from res_partner_address where partner_id in ('+','.join(map(str,ids))+')')
|
||||||
res = cr.fetchall()
|
res = cr.fetchall()
|
||||||
adr = dict(res)
|
adr = dict(res)
|
||||||
# get the id of the (first) default address if there is one,
|
# get the id of the (first) default address if there is one,
|
||||||
# otherwise get the id of the first address in the list
|
# otherwise get the id of the first address in the list
|
||||||
if res:
|
if res:
|
||||||
default_address = adr.get('default', res[0][1])
|
default_address = adr.get('default', res[0][1])
|
||||||
else:
|
else:
|
||||||
default_address = False
|
default_address = False
|
||||||
result = {}
|
result = {}
|
||||||
for a in adr_pref:
|
for a in adr_pref:
|
||||||
result[a] = adr.get(a, default_address)
|
result[a] = adr.get(a, default_address)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def gen_next_ref(self, cr, uid, ids):
|
def gen_next_ref(self, cr, uid, ids):
|
||||||
if len(ids) != 1:
|
if len(ids) != 1:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# compute the next number ref
|
# compute the next number ref
|
||||||
cr.execute("select ref from res_partner where ref is not null order by char_length(ref) desc, ref desc limit 1")
|
cr.execute("select ref from res_partner where ref is not null order by char_length(ref) desc, ref desc limit 1")
|
||||||
res = cr.dictfetchall()
|
res = cr.dictfetchall()
|
||||||
ref = res and res[0]['ref'] or '0'
|
ref = res and res[0]['ref'] or '0'
|
||||||
try:
|
try:
|
||||||
nextref = int(ref)+1
|
nextref = int(ref)+1
|
||||||
except e:
|
except e:
|
||||||
raise osv.except_osv(_('Warning'), _("Couldn't generate the next id because some partners have an alphabetic id !"))
|
raise osv.except_osv(_('Warning'), _("Couldn't generate the next id because some partners have an alphabetic id !"))
|
||||||
|
|
||||||
# update the current partner
|
# update the current partner
|
||||||
cr.execute("update res_partner set ref=%d where id=%d", (nextref, ids[0]))
|
cr.execute("update res_partner set ref=%d where id=%d", (nextref, ids[0]))
|
||||||
return True
|
return True
|
||||||
res_partner()
|
res_partner()
|
||||||
|
|
||||||
class res_partner_address(osv.osv):
|
class res_partner_address(osv.osv):
|
||||||
_description ='Partner Contact'
|
_description ='Partner Contact'
|
||||||
_name = 'res.partner.address'
|
_name = 'res.partner.address'
|
||||||
_order = 'id'
|
_order = 'id'
|
||||||
_columns = {
|
_columns = {
|
||||||
'partner_id': fields.many2one('res.partner', 'Partner', required=True, ondelete='cascade', select=True),
|
'partner_id': fields.many2one('res.partner', 'Partner', required=True, ondelete='cascade', select=True),
|
||||||
'type': fields.selection( [ ('default','Default'),('invoice','Invoice'), ('delivery','Delivery'), ('contact','Contact'), ('other','Other') ],'Address Type'),
|
'type': fields.selection( [ ('default','Default'),('invoice','Invoice'), ('delivery','Delivery'), ('contact','Contact'), ('other','Other') ],'Address Type'),
|
||||||
'function': fields.many2one('res.partner.function', 'Function'),
|
'function': fields.many2one('res.partner.function', 'Function'),
|
||||||
'title': fields.selection(_contact_title_get, 'Title', size=32),
|
'title': fields.selection(_contact_title_get, 'Title', size=32),
|
||||||
'name': fields.char('Contact Name', size=64),
|
'name': fields.char('Contact Name', size=64),
|
||||||
'street': fields.char('Street', size=128),
|
'street': fields.char('Street', size=128),
|
||||||
'street2': fields.char('Street2', size=128),
|
'street2': fields.char('Street2', size=128),
|
||||||
'zip': fields.char('Zip', change_default=True, size=24),
|
'zip': fields.char('Zip', change_default=True, size=24),
|
||||||
'city': fields.char('City', size=128),
|
'city': fields.char('City', size=128),
|
||||||
'state_id': fields.many2one("res.country.state", 'State', change_default=True, domain="[('country_id','=',country_id)]"),
|
'state_id': fields.many2one("res.country.state", 'State', change_default=True, domain="[('country_id','=',country_id)]"),
|
||||||
'country_id': fields.many2one('res.country', 'Country', change_default=True),
|
'country_id': fields.many2one('res.country', 'Country', change_default=True),
|
||||||
'email': fields.char('E-Mail', size=240),
|
'email': fields.char('E-Mail', size=240),
|
||||||
'phone': fields.char('Phone', size=64),
|
'phone': fields.char('Phone', size=64),
|
||||||
'fax': fields.char('Fax', size=64),
|
'fax': fields.char('Fax', size=64),
|
||||||
'mobile': fields.char('Mobile', size=64),
|
'mobile': fields.char('Mobile', size=64),
|
||||||
'birthdate': fields.char('Birthdate', size=64),
|
'birthdate': fields.char('Birthdate', size=64),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
}
|
}
|
||||||
|
|
||||||
def name_get(self, cr, user, ids, context={}):
|
def name_get(self, cr, user, ids, context={}):
|
||||||
if not len(ids):
|
if not len(ids):
|
||||||
return []
|
return []
|
||||||
res = []
|
res = []
|
||||||
for r in self.read(cr, user, ids, ['name','zip','city','partner_id', 'street']):
|
for r in self.read(cr, user, ids, ['name','zip','city','partner_id', 'street']):
|
||||||
if context.get('contact_display', 'contact')=='partner':
|
if context.get('contact_display', 'contact')=='partner':
|
||||||
res.append((r['id'], r['partner_id'][1]))
|
res.append((r['id'], r['partner_id'][1]))
|
||||||
else:
|
else:
|
||||||
addr = str(r['name'] or '')
|
addr = str(r['name'] or '')
|
||||||
if r['name'] and (r['zip'] or r['city']):
|
if r['name'] and (r['zip'] or r['city']):
|
||||||
addr += ', '
|
addr += ', '
|
||||||
addr += str(r['street'] or '') + ' ' + str(r['zip'] or '') + ' ' + str(r['city'] or '')
|
addr += str(r['street'] or '') + ' ' + str(r['zip'] or '') + ' ' + str(r['city'] or '')
|
||||||
res.append((r['id'], addr.strip() or '/'))
|
res.append((r['id'], addr.strip() or '/'))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
|
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
if context.get('contact_display', 'contact')=='partner':
|
if context.get('contact_display', 'contact')=='partner':
|
||||||
ids = self.search(cr, user, [('partner_id',operator,name)], limit=limit, context=context)
|
ids = self.search(cr, user, [('partner_id',operator,name)], limit=limit, context=context)
|
||||||
else:
|
else:
|
||||||
ids = self.search(cr, user, [('zip','=',name)] + args, limit=limit, context=context)
|
ids = self.search(cr, user, [('zip','=',name)] + args, limit=limit, context=context)
|
||||||
if not ids:
|
if not ids:
|
||||||
ids = self.search(cr, user, [('city',operator,name)] + args, limit=limit, context=context)
|
ids = self.search(cr, user, [('city',operator,name)] + args, limit=limit, context=context)
|
||||||
if name:
|
if name:
|
||||||
ids += self.search(cr, user, [('name',operator,name)] + args, limit=limit, context=context)
|
ids += self.search(cr, user, [('name',operator,name)] + args, limit=limit, context=context)
|
||||||
ids += self.search(cr, user, [('partner_id',operator,name)] + args, limit=limit, context=context)
|
ids += self.search(cr, user, [('partner_id',operator,name)] + args, limit=limit, context=context)
|
||||||
return self.name_get(cr, user, ids, context=context)
|
return self.name_get(cr, user, ids, context=context)
|
||||||
res_partner_address()
|
res_partner_address()
|
||||||
|
|
||||||
class res_partner_bank_type(osv.osv):
|
class res_partner_bank_type(osv.osv):
|
||||||
_description='Bank Account Type'
|
_description='Bank Account Type'
|
||||||
_name = 'res.partner.bank.type'
|
_name = 'res.partner.bank.type'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'code': fields.char('Code', size=64, required=True),
|
'code': fields.char('Code', size=64, required=True),
|
||||||
'field_ids': fields.one2many('res.partner.bank.type.field', 'bank_type_id', 'Type fields'),
|
'field_ids': fields.one2many('res.partner.bank.type.field', 'bank_type_id', 'Type fields'),
|
||||||
}
|
}
|
||||||
res_partner_bank_type()
|
res_partner_bank_type()
|
||||||
|
|
||||||
class res_partner_bank_type_fields(osv.osv):
|
class res_partner_bank_type_fields(osv.osv):
|
||||||
_description='Bank type fields'
|
_description='Bank type fields'
|
||||||
_name = 'res.partner.bank.type.field'
|
_name = 'res.partner.bank.type.field'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Field name', size=64, required=True),
|
'name': fields.char('Field name', size=64, required=True),
|
||||||
'bank_type_id': fields.many2one('res.partner.bank.type', 'Bank type', required=True, ondelete='cascade'),
|
'bank_type_id': fields.many2one('res.partner.bank.type', 'Bank type', required=True, ondelete='cascade'),
|
||||||
'required': fields.boolean('Required'),
|
'required': fields.boolean('Required'),
|
||||||
'readonly': fields.boolean('Readonly'),
|
'readonly': fields.boolean('Readonly'),
|
||||||
'size': fields.integer('Max. Size'),
|
'size': fields.integer('Max. Size'),
|
||||||
}
|
}
|
||||||
res_partner_bank_type_fields()
|
res_partner_bank_type_fields()
|
||||||
|
|
||||||
|
|
||||||
class res_partner_bank(osv.osv):
|
class res_partner_bank(osv.osv):
|
||||||
'''Bank Accounts'''
|
'''Bank Accounts'''
|
||||||
_name = "res.partner.bank"
|
_name = "res.partner.bank"
|
||||||
_rec_name = "state"
|
_rec_name = "state"
|
||||||
_description = __doc__
|
_description = __doc__
|
||||||
_order = 'sequence'
|
_order = 'sequence'
|
||||||
|
|
||||||
def _bank_type_get(self, cr, uid, context=None):
|
def _bank_type_get(self, cr, uid, context=None):
|
||||||
bank_type_obj = self.pool.get('res.partner.bank.type')
|
bank_type_obj = self.pool.get('res.partner.bank.type')
|
||||||
|
|
||||||
result = []
|
result = []
|
||||||
type_ids = bank_type_obj.search(cr, uid, [])
|
type_ids = bank_type_obj.search(cr, uid, [])
|
||||||
bank_types = bank_type_obj.browse(cr, uid, type_ids)
|
bank_types = bank_type_obj.browse(cr, uid, type_ids)
|
||||||
for bank_type in bank_types:
|
for bank_type in bank_types:
|
||||||
result.append((bank_type.code, bank_type.name))
|
result.append((bank_type.code, bank_type.name))
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _default_value(self, cursor, user, field, context=None):
|
def _default_value(self, cursor, user, field, context=None):
|
||||||
if field in ('country_id', 'state_id'):
|
if field in ('country_id', 'state_id'):
|
||||||
value = False
|
value = False
|
||||||
else:
|
else:
|
||||||
value = ''
|
value = ''
|
||||||
if not context.get('address', False):
|
if not context.get('address', False):
|
||||||
return value
|
return value
|
||||||
for ham, spam, address in context['address']:
|
for ham, spam, address in context['address']:
|
||||||
if address.get('type', False) == 'default':
|
if address.get('type', False) == 'default':
|
||||||
return address.get(field, value)
|
return address.get(field, value)
|
||||||
elif not address.get('type', False):
|
elif not address.get('type', False):
|
||||||
value = address.get(field, value)
|
value = address.get(field, value)
|
||||||
return value
|
return value
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Description', size=128),
|
'name': fields.char('Description', size=128),
|
||||||
'acc_number': fields.char('Account number', size=64, required=False),
|
'acc_number': fields.char('Account number', size=64, required=False),
|
||||||
'bank': fields.many2one('res.bank', 'Bank'),
|
'bank': fields.many2one('res.bank', 'Bank'),
|
||||||
'owner_name': fields.char('Account owner', size=64),
|
'owner_name': fields.char('Account owner', size=64),
|
||||||
'street': fields.char('Street', size=128),
|
'street': fields.char('Street', size=128),
|
||||||
'zip': fields.char('Zip', change_default=True, size=24),
|
'zip': fields.char('Zip', change_default=True, size=24),
|
||||||
'city': fields.char('City', size=128),
|
'city': fields.char('City', size=128),
|
||||||
'country_id': fields.many2one('res.country', 'Country',
|
'country_id': fields.many2one('res.country', 'Country',
|
||||||
change_default=True),
|
change_default=True),
|
||||||
'state_id': fields.many2one("res.country.state", 'State',
|
'state_id': fields.many2one("res.country.state", 'State',
|
||||||
change_default=True, domain="[('country_id','=',country_id)]"),
|
change_default=True, domain="[('country_id','=',country_id)]"),
|
||||||
'partner_id': fields.many2one('res.partner', 'Partner', required=True,
|
'partner_id': fields.many2one('res.partner', 'Partner', required=True,
|
||||||
ondelete='cascade', select=True),
|
ondelete='cascade', select=True),
|
||||||
'state': fields.selection(_bank_type_get, 'Bank type', required=True,
|
'state': fields.selection(_bank_type_get, 'Bank type', required=True,
|
||||||
change_default=True),
|
change_default=True),
|
||||||
'sequence': fields.integer('Sequence'),
|
'sequence': fields.integer('Sequence'),
|
||||||
'state_id': fields.many2one('res.country.state', 'State',
|
'state_id': fields.many2one('res.country.state', 'State',
|
||||||
domain="[('country_id', '=', country_id)]"),
|
domain="[('country_id', '=', country_id)]"),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'owner_name': lambda obj, cursor, user, context: obj._default_value(
|
'owner_name': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'name', context=context),
|
cursor, user, 'name', context=context),
|
||||||
'street': lambda obj, cursor, user, context: obj._default_value(
|
'street': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'street', context=context),
|
cursor, user, 'street', context=context),
|
||||||
'city': lambda obj, cursor, user, context: obj._default_value(
|
'city': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'city', context=context),
|
cursor, user, 'city', context=context),
|
||||||
'zip': lambda obj, cursor, user, context: obj._default_value(
|
'zip': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'zip', context=context),
|
cursor, user, 'zip', context=context),
|
||||||
'country_id': lambda obj, cursor, user, context: obj._default_value(
|
'country_id': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'country_id', context=context),
|
cursor, user, 'country_id', context=context),
|
||||||
'state_id': lambda obj, cursor, user, context: obj._default_value(
|
'state_id': lambda obj, cursor, user, context: obj._default_value(
|
||||||
cursor, user, 'state_id', context=context),
|
cursor, user, 'state_id', context=context),
|
||||||
}
|
}
|
||||||
|
|
||||||
def fields_get(self, cr, uid, fields=None, context=None):
|
def fields_get(self, cr, uid, fields=None, context=None):
|
||||||
res = super(res_partner_bank, self).fields_get(cr, uid, fields, context)
|
res = super(res_partner_bank, self).fields_get(cr, uid, fields, context)
|
||||||
bank_type_obj = self.pool.get('res.partner.bank.type')
|
bank_type_obj = self.pool.get('res.partner.bank.type')
|
||||||
type_ids = bank_type_obj.search(cr, uid, [])
|
type_ids = bank_type_obj.search(cr, uid, [])
|
||||||
types = bank_type_obj.browse(cr, uid, type_ids)
|
types = bank_type_obj.browse(cr, uid, type_ids)
|
||||||
for type in types:
|
for type in types:
|
||||||
for field in type.field_ids:
|
for field in type.field_ids:
|
||||||
if field.name in res:
|
if field.name in res:
|
||||||
res[field.name].setdefault('states', {})
|
res[field.name].setdefault('states', {})
|
||||||
res[field.name]['states'][type.code] = [
|
res[field.name]['states'][type.code] = [
|
||||||
('readonly', field.readonly),
|
('readonly', field.readonly),
|
||||||
('required', field.required)]
|
('required', field.required)]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def name_get(self, cr, uid, ids, context=None):
|
def name_get(self, cr, uid, ids, context=None):
|
||||||
if not len(ids):
|
if not len(ids):
|
||||||
return []
|
return []
|
||||||
res = []
|
res = []
|
||||||
for id in self.browse(cr, uid, ids):
|
for id in self.browse(cr, uid, ids):
|
||||||
res.append((id.id,id.acc_number))
|
res.append((id.id,id.acc_number))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
res_partner_bank()
|
res_partner_bank()
|
||||||
|
|
||||||
|
|
|
@ -31,16 +31,16 @@ import wizard
|
||||||
import netsvc
|
import netsvc
|
||||||
|
|
||||||
class wizard_clear_ids(wizard.interface):
|
class wizard_clear_ids(wizard.interface):
|
||||||
def _clear_ids(self, cr, uid, data, context):
|
def _clear_ids(self, cr, uid, data, context):
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
service.execute(cr.dbname, uid, 'res.partner', 'write', data['ids'], {'ref': False})
|
service.execute(cr.dbname, uid, 'res.partner', 'write', data['ids'], {'ref': False})
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [_clear_ids],
|
'actions': [_clear_ids],
|
||||||
'result': {'type':'state', 'state':'end'}
|
'result': {'type':'state', 'state':'end'}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
wizard_clear_ids('res.partner.clear_ids')
|
wizard_clear_ids('res.partner.clear_ids')
|
||||||
|
|
||||||
|
|
|
@ -34,70 +34,70 @@ from tools.misc import UpdateableStr
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
def _is_pair(x):
|
def _is_pair(x):
|
||||||
return not x%2
|
return not x%2
|
||||||
|
|
||||||
def _get_ean_key(string):
|
def _get_ean_key(string):
|
||||||
if not string or string=='':
|
if not string or string=='':
|
||||||
return '0'
|
return '0'
|
||||||
if len(string)!=12:
|
if len(string)!=12:
|
||||||
return '0'
|
return '0'
|
||||||
sum=0
|
sum=0
|
||||||
for i in range(12):
|
for i in range(12):
|
||||||
if _is_pair(i):
|
if _is_pair(i):
|
||||||
sum+=int(string[i])
|
sum+=int(string[i])
|
||||||
else:
|
else:
|
||||||
sum+=3*int(string[i])
|
sum+=3*int(string[i])
|
||||||
return str(int(math.ceil(sum/10.0)*10-sum))
|
return str(int(math.ceil(sum/10.0)*10-sum))
|
||||||
|
|
||||||
#FIXME: this is not concurrency safe !!!!
|
#FIXME: this is not concurrency safe !!!!
|
||||||
_check_arch = UpdateableStr()
|
_check_arch = UpdateableStr()
|
||||||
_check_fields = {}
|
_check_fields = {}
|
||||||
|
|
||||||
def _check_key(self, cr, uid, data, context):
|
def _check_key(self, cr, uid, data, context):
|
||||||
partner_table=pooler.get_pool(cr.dbname).get('res.partner')
|
partner_table=pooler.get_pool(cr.dbname).get('res.partner')
|
||||||
partners = partner_table.browse(cr, uid, data['ids'])
|
partners = partner_table.browse(cr, uid, data['ids'])
|
||||||
_check_arch_lst=['<?xml version="1.0"?>', '<form string="Check EAN13">', '<label string=""/>', '<label string=""/>','<label string="Original" />', '<label string="Computed" />']
|
_check_arch_lst=['<?xml version="1.0"?>', '<form string="Check EAN13">', '<label string=""/>', '<label string=""/>','<label string="Original" />', '<label string="Computed" />']
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
if partner['ean13'] and len(partner['ean13'])>11 and len(partner['ean13'])<14:
|
if partner['ean13'] and len(partner['ean13'])>11 and len(partner['ean13'])<14:
|
||||||
_check_arch_lst.append('<label colspan="2" string="%s" />' % partner['ean13']);
|
_check_arch_lst.append('<label colspan="2" string="%s" />' % partner['ean13']);
|
||||||
key=_get_ean_key(partner['ean13'][:12])
|
key=_get_ean_key(partner['ean13'][:12])
|
||||||
_check_arch_lst.append('<label string=""/>')
|
_check_arch_lst.append('<label string=""/>')
|
||||||
if len(partner['ean13'])==12:
|
if len(partner['ean13'])==12:
|
||||||
_check_arch_lst.append('<label string="" />');
|
_check_arch_lst.append('<label string="" />');
|
||||||
else:
|
else:
|
||||||
_check_arch_lst.append('<label string="%s" />' % partner['ean13'][12])
|
_check_arch_lst.append('<label string="%s" />' % partner['ean13'][12])
|
||||||
_check_arch_lst.append('<label string="%s" />' % key)
|
_check_arch_lst.append('<label string="%s" />' % key)
|
||||||
_check_arch_lst.append('</form>')
|
_check_arch_lst.append('</form>')
|
||||||
_check_arch.string = '\n'.join(_check_arch_lst)
|
_check_arch.string = '\n'.join(_check_arch_lst)
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def _update_ean(self, cr, uid, data, context):
|
def _update_ean(self, cr, uid, data, context):
|
||||||
partner_table = pooler.get_pool(cr.dbname).get('res.partner')
|
partner_table = pooler.get_pool(cr.dbname).get('res.partner')
|
||||||
partners = partner_table.browse(cr, uid, data['ids'])
|
partners = partner_table.browse(cr, uid, data['ids'])
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
partner_table.write(cr, uid, data['ids'], {
|
partner_table.write(cr, uid, data['ids'], {
|
||||||
'ean13': "%s%s" % (partner['ean13'][:12], _get_ean_key(partner['ean13'][:12]))
|
'ean13': "%s%s" % (partner['ean13'][:12], _get_ean_key(partner['ean13'][:12]))
|
||||||
})
|
})
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
class wiz_ean_check(wizard.interface):
|
class wiz_ean_check(wizard.interface):
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [_check_key],
|
'actions': [_check_key],
|
||||||
'result': {
|
'result': {
|
||||||
'type': 'form',
|
'type': 'form',
|
||||||
'arch': _check_arch,
|
'arch': _check_arch,
|
||||||
'fields': _check_fields,
|
'fields': _check_fields,
|
||||||
'state': (('end', 'Ignore'), ('correct', 'Correct EAN13'))
|
'state': (('end', 'Ignore'), ('correct', 'Correct EAN13'))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'correct' : {
|
'correct' : {
|
||||||
'actions': [_update_ean],
|
'actions': [_update_ean],
|
||||||
'result': {
|
'result': {
|
||||||
'type': 'state',
|
'type': 'state',
|
||||||
'state': 'end'
|
'state': 'end'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
wiz_ean_check('res.partner.ean13')
|
wiz_ean_check('res.partner.ean13')
|
||||||
|
|
|
@ -33,47 +33,47 @@ import tools
|
||||||
|
|
||||||
sms_send_form = '''<?xml version="1.0"?>
|
sms_send_form = '''<?xml version="1.0"?>
|
||||||
<form string="%s">
|
<form string="%s">
|
||||||
<separator string="%s" colspan="4"/>
|
<separator string="%s" colspan="4"/>
|
||||||
<field name="app_id"/>
|
<field name="app_id"/>
|
||||||
<newline/>
|
<newline/>
|
||||||
<field name="user"/>
|
<field name="user"/>
|
||||||
<field name="password"/>
|
<field name="password"/>
|
||||||
<newline/>
|
<newline/>
|
||||||
<field name="text" colspan="4"/>
|
<field name="text" colspan="4"/>
|
||||||
</form>''' % ('SMS - Gateway: clickatell','Bulk SMS send')
|
</form>''' % ('SMS - Gateway: clickatell','Bulk SMS send')
|
||||||
|
|
||||||
sms_send_fields = {
|
sms_send_fields = {
|
||||||
'app_id': {'string':'API ID', 'type':'char', 'required':True},
|
'app_id': {'string':'API ID', 'type':'char', 'required':True},
|
||||||
'user': {'string':'Login', 'type':'char', 'required':True},
|
'user': {'string':'Login', 'type':'char', 'required':True},
|
||||||
'password': {'string':'Password', 'type':'char', 'required':True},
|
'password': {'string':'Password', 'type':'char', 'required':True},
|
||||||
'text': {'string':'SMS Message', 'type':'text', 'required':True}
|
'text': {'string':'SMS Message', 'type':'text', 'required':True}
|
||||||
}
|
}
|
||||||
|
|
||||||
def _sms_send(self, cr, uid, data, context):
|
def _sms_send(self, cr, uid, data, context):
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
|
|
||||||
res_ids = service.execute(cr.dbname, uid, 'res.partner.address', 'search', [('partner_id','in',data['ids']),('type','=','default')])
|
res_ids = service.execute(cr.dbname, uid, 'res.partner.address', 'search', [('partner_id','in',data['ids']),('type','=','default')])
|
||||||
res = service.execute(cr.dbname, uid, 'res.partner.address', 'read', res_ids, ['mobile'])
|
res = service.execute(cr.dbname, uid, 'res.partner.address', 'read', res_ids, ['mobile'])
|
||||||
|
|
||||||
nbr = 0
|
nbr = 0
|
||||||
for r in res:
|
for r in res:
|
||||||
to = r['mobile']
|
to = r['mobile']
|
||||||
if to:
|
if to:
|
||||||
tools.sms_send(data['form']['user'], data['form']['password'], data['form']['app_id'], unicode(data['form']['text'], 'utf-8').encode('latin1'), to)
|
tools.sms_send(data['form']['user'], data['form']['password'], data['form']['app_id'], unicode(data['form']['text'], 'utf-8').encode('latin1'), to)
|
||||||
nbr += 1
|
nbr += 1
|
||||||
return {'sms_sent': nbr}
|
return {'sms_sent': nbr}
|
||||||
|
|
||||||
class part_sms(wizard.interface):
|
class part_sms(wizard.interface):
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'form', 'arch':sms_send_form, 'fields': sms_send_fields, 'state':[('end','Cancel'), ('send','Send SMS')]}
|
'result': {'type': 'form', 'arch':sms_send_form, 'fields': sms_send_fields, 'state':[('end','Cancel'), ('send','Send SMS')]}
|
||||||
},
|
},
|
||||||
'send': {
|
'send': {
|
||||||
'actions': [_sms_send],
|
'actions': [_sms_send],
|
||||||
'result': {'type': 'state', 'state':'end'}
|
'result': {'type': 'state', 'state':'end'}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
part_sms('res.partner.sms_send')
|
part_sms('res.partner.sms_send')
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -33,48 +33,48 @@ import tools
|
||||||
|
|
||||||
email_send_form = '''<?xml version="1.0"?>
|
email_send_form = '''<?xml version="1.0"?>
|
||||||
<form string="Mass Mailing">
|
<form string="Mass Mailing">
|
||||||
<field name="from"/>
|
<field name="from"/>
|
||||||
<newline/>
|
<newline/>
|
||||||
<field name="subject"/>
|
<field name="subject"/>
|
||||||
<newline/>
|
<newline/>
|
||||||
<field name="text"/>
|
<field name="text"/>
|
||||||
</form>'''
|
</form>'''
|
||||||
|
|
||||||
email_send_fields = {
|
email_send_fields = {
|
||||||
'from': {'string':"Sender's email", 'type':'char', 'size':64, 'required':True},
|
'from': {'string':"Sender's email", 'type':'char', 'size':64, 'required':True},
|
||||||
'subject': {'string':'Subject', 'type':'char', 'size':64, 'required':True},
|
'subject': {'string':'Subject', 'type':'char', 'size':64, 'required':True},
|
||||||
'text': {'string':'Message', 'type':'text_tag', 'required':True}
|
'text': {'string':'Message', 'type':'text_tag', 'required':True}
|
||||||
}
|
}
|
||||||
|
|
||||||
# this sends an email to ALL the addresses of the selected partners.
|
# this sends an email to ALL the addresses of the selected partners.
|
||||||
def _mass_mail_send(self, cr, uid, data, context):
|
def _mass_mail_send(self, cr, uid, data, context):
|
||||||
nbr = 0
|
nbr = 0
|
||||||
partners = pooler.get_pool(cr.dbname).get('res.partner').browse(cr, uid, data['ids'], context)
|
partners = pooler.get_pool(cr.dbname).get('res.partner').browse(cr, uid, data['ids'], context)
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
for adr in partner.address:
|
for adr in partner.address:
|
||||||
if adr.email:
|
if adr.email:
|
||||||
name = adr.name or partner.name
|
name = adr.name or partner.name
|
||||||
to = '%s <%s>' % (name, adr.email)
|
to = '%s <%s>' % (name, adr.email)
|
||||||
#TODO: add some tests to check for invalid email addresses
|
#TODO: add some tests to check for invalid email addresses
|
||||||
#CHECKME: maybe we should use res.partner/email_send
|
#CHECKME: maybe we should use res.partner/email_send
|
||||||
tools.email_send(data['form']['from'], [to], data['form']['subject'], data['form']['text'])
|
tools.email_send(data['form']['from'], [to], data['form']['subject'], data['form']['text'])
|
||||||
nbr += 1
|
nbr += 1
|
||||||
pooler.get_pool(cr.dbname).get('res.partner.event').create(cr, uid,
|
pooler.get_pool(cr.dbname).get('res.partner.event').create(cr, uid,
|
||||||
{'name': 'Email sent through mass mailing',
|
{'name': 'Email sent through mass mailing',
|
||||||
'partner_id': partner.id,
|
'partner_id': partner.id,
|
||||||
'description': data['form']['text'], })
|
'description': data['form']['text'], })
|
||||||
#TODO: log number of message sent
|
#TODO: log number of message sent
|
||||||
return {'email_sent': nbr}
|
return {'email_sent': nbr}
|
||||||
|
|
||||||
class part_email(wizard.interface):
|
class part_email(wizard.interface):
|
||||||
states = {
|
states = {
|
||||||
'init': {
|
'init': {
|
||||||
'actions': [],
|
'actions': [],
|
||||||
'result': {'type': 'form', 'arch': email_send_form, 'fields': email_send_fields, 'state':[('end','Cancel'), ('send','Send Email')]}
|
'result': {'type': 'form', 'arch': email_send_form, 'fields': email_send_fields, 'state':[('end','Cancel'), ('send','Send Email')]}
|
||||||
},
|
},
|
||||||
'send': {
|
'send': {
|
||||||
'actions': [_mass_mail_send],
|
'actions': [_mass_mail_send],
|
||||||
'result': {'type': 'state', 'state':'end'}
|
'result': {'type': 'state', 'state':'end'}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
part_email('res.partner.spam_send')
|
part_email('res.partner.spam_send')
|
||||||
|
|
|
@ -31,152 +31,152 @@ from osv import fields,osv
|
||||||
import tools
|
import tools
|
||||||
|
|
||||||
class res_company(osv.osv):
|
class res_company(osv.osv):
|
||||||
_name = "res.company"
|
_name = "res.company"
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Company Name', size=64, required=True),
|
'name': fields.char('Company Name', size=64, required=True),
|
||||||
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
|
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
|
||||||
'child_ids': fields.one2many('res.company', 'parent_id', 'Childs Company'),
|
'child_ids': fields.one2many('res.company', 'parent_id', 'Childs Company'),
|
||||||
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
|
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
|
||||||
'rml_header1': fields.char('Report Header', size=200),
|
'rml_header1': fields.char('Report Header', size=200),
|
||||||
'rml_footer1': fields.char('Report Footer 1', size=200),
|
'rml_footer1': fields.char('Report Footer 1', size=200),
|
||||||
'rml_footer2': fields.char('Report Footer 2', size=200),
|
'rml_footer2': fields.char('Report Footer 2', size=200),
|
||||||
'rml_header' : fields.text('RML Header'),
|
'rml_header' : fields.text('RML Header'),
|
||||||
'rml_header2' : fields.text('RML Internal Header'),
|
'rml_header2' : fields.text('RML Internal Header'),
|
||||||
'logo' : fields.binary('Logo'),
|
'logo' : fields.binary('Logo'),
|
||||||
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
|
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
|
||||||
}
|
}
|
||||||
|
|
||||||
def _get_child_ids(self, cr, uid, uid2, context={}):
|
def _get_child_ids(self, cr, uid, uid2, context={}):
|
||||||
company = self.pool.get('res.users').company_get(cr, uid, uid2)
|
company = self.pool.get('res.users').company_get(cr, uid, uid2)
|
||||||
ids = self._get_company_children(cr, uid, company)
|
ids = self._get_company_children(cr, uid, company)
|
||||||
return ids
|
return ids
|
||||||
|
|
||||||
def _get_company_children(self, cr, uid=None, company=None):
|
def _get_company_children(self, cr, uid=None, company=None):
|
||||||
if not company:
|
if not company:
|
||||||
return []
|
return []
|
||||||
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
|
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
|
||||||
return ids
|
return ids
|
||||||
_get_company_children = tools.cache()(_get_company_children)
|
_get_company_children = tools.cache()(_get_company_children)
|
||||||
|
|
||||||
def _get_partner_hierarchy(self, cr, uid, company_id, context={}):
|
def _get_partner_hierarchy(self, cr, uid, company_id, context={}):
|
||||||
if company_id:
|
if company_id:
|
||||||
parent_id = self.browse(cr, uid, company_id)['parent_id']
|
parent_id = self.browse(cr, uid, company_id)['parent_id']
|
||||||
if parent_id:
|
if parent_id:
|
||||||
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
|
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
|
||||||
else:
|
else:
|
||||||
return self._get_partner_descendance(cr, uid, company_id, [], context)
|
return self._get_partner_descendance(cr, uid, company_id, [], context)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def _get_partner_descendance(self, cr, uid, company_id, descendance, context={}):
|
def _get_partner_descendance(self, cr, uid, company_id, descendance, context={}):
|
||||||
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
|
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
|
||||||
for child_id in self._get_company_children(cr, uid, company_id):
|
for child_id in self._get_company_children(cr, uid, company_id):
|
||||||
if child_id != company_id:
|
if child_id != company_id:
|
||||||
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
|
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
|
||||||
return descendance
|
return descendance
|
||||||
|
|
||||||
def __init__(self, *args, **argv):
|
def __init__(self, *args, **argv):
|
||||||
return super(res_company, self).__init__(*args, **argv)
|
return super(res_company, self).__init__(*args, **argv)
|
||||||
|
|
||||||
#
|
#
|
||||||
# This function restart the cache on the _get_company_children method
|
# This function restart the cache on the _get_company_children method
|
||||||
#
|
#
|
||||||
def cache_restart(self, uid=None):
|
def cache_restart(self, uid=None):
|
||||||
self._get_company_children()
|
self._get_company_children()
|
||||||
|
|
||||||
def create(self, *args, **argv):
|
def create(self, *args, **argv):
|
||||||
self.cache_restart()
|
self.cache_restart()
|
||||||
return super(res_company, self).create(*args, **argv)
|
return super(res_company, self).create(*args, **argv)
|
||||||
|
|
||||||
def write(self, *args, **argv):
|
def write(self, *args, **argv):
|
||||||
self.cache_restart()
|
self.cache_restart()
|
||||||
# Restart the cache on the company_get method
|
# Restart the cache on the company_get method
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return super(res_company, self).write(*args, **argv)
|
return super(res_company, self).write(*args, **argv)
|
||||||
|
|
||||||
def _get_euro(self, cr, uid, context={}):
|
def _get_euro(self, cr, uid, context={}):
|
||||||
try:
|
try:
|
||||||
return self.pool.get('res.currency').search(cr, uid, [('rate', '=', 1.0),])[0]
|
return self.pool.get('res.currency').search(cr, uid, [('rate', '=', 1.0),])[0]
|
||||||
except:
|
except:
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def _check_recursion(self, cr, uid, ids):
|
def _check_recursion(self, cr, uid, ids):
|
||||||
level = 100
|
level = 100
|
||||||
while len(ids):
|
while len(ids):
|
||||||
cr.execute('select distinct parent_id from res_company where id in ('+','.join(map(str,ids))+')')
|
cr.execute('select distinct parent_id from res_company where id in ('+','.join(map(str,ids))+')')
|
||||||
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
|
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
|
||||||
if not level:
|
if not level:
|
||||||
return False
|
return False
|
||||||
level -= 1
|
level -= 1
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _get_header2(self,cr,uid,ids):
|
def _get_header2(self,cr,uid,ids):
|
||||||
return """
|
return """
|
||||||
<header>
|
<header>
|
||||||
<pageTemplate>
|
<pageTemplate>
|
||||||
<frame id="first" x1="1.3cm" y1="1.5cm" width="18.4cm" height="26.5cm"/>
|
<frame id="first" x1="1.3cm" y1="1.5cm" width="18.4cm" height="26.5cm"/>
|
||||||
<pageGraphics>
|
<pageGraphics>
|
||||||
<fill color="black"/>
|
<fill color="black"/>
|
||||||
<stroke color="black"/>
|
<stroke color="black"/>
|
||||||
<setFont name="Helvetica" size="8"/>
|
<setFont name="Helvetica" size="8"/>
|
||||||
<drawString x="1.3cm" y="28.3cm"> [[ formatLang(time.strftime("%Y-%m-%d"), date=True) ]] [[ time.strftime("%H:%M") ]]</drawString>
|
<drawString x="1.3cm" y="28.3cm"> [[ formatLang(time.strftime("%Y-%m-%d"), date=True) ]] [[ time.strftime("%H:%M") ]]</drawString>
|
||||||
<setFont name="Helvetica-Bold" size="10"/>
|
<setFont name="Helvetica-Bold" size="10"/>
|
||||||
<drawString x="9.8cm" y="28.3cm">[[ company.partner_id.name ]]</drawString>
|
<drawString x="9.8cm" y="28.3cm">[[ company.partner_id.name ]]</drawString>
|
||||||
<setFont name="Helvetica" size="8"/>
|
<setFont name="Helvetica" size="8"/>
|
||||||
<drawRightString x="19.7cm" y="28.3cm"><pageNumber/> / </drawRightString>
|
<drawRightString x="19.7cm" y="28.3cm"><pageNumber/> / </drawRightString>
|
||||||
<drawString x="19.8cm" y="28.3cm"><pageCount/></drawString>
|
<drawString x="19.8cm" y="28.3cm"><pageCount/></drawString>
|
||||||
<stroke color="#aaaaaa"/>
|
<stroke color="#aaaaaa"/>
|
||||||
<lines>1.3cm 28.1cm 20cm 28.1cm</lines>
|
<lines>1.3cm 28.1cm 20cm 28.1cm</lines>
|
||||||
</pageGraphics>
|
</pageGraphics>
|
||||||
</pageTemplate>
|
</pageTemplate>
|
||||||
</header>"""
|
</header>"""
|
||||||
def _get_header(self,cr,uid,ids):
|
def _get_header(self,cr,uid,ids):
|
||||||
try :
|
try :
|
||||||
return tools.file_open('custom/corporate_rml_header.rml').read()
|
return tools.file_open('custom/corporate_rml_header.rml').read()
|
||||||
except:
|
except:
|
||||||
return """
|
return """
|
||||||
<header>
|
<header>
|
||||||
<pageTemplate>
|
<pageTemplate>
|
||||||
<frame id="first" x1="1.3cm" y1="2.5cm" height="23.0cm" width="19cm"/>
|
<frame id="first" x1="1.3cm" y1="2.5cm" height="23.0cm" width="19cm"/>
|
||||||
<pageGraphics>
|
<pageGraphics>
|
||||||
<!-- You Logo - Change X,Y,Width and Height -->
|
<!-- You Logo - Change X,Y,Width and Height -->
|
||||||
<image x="1.3cm" y="27.6cm" height="40.0" >[[company.logo]]</image>
|
<image x="1.3cm" y="27.6cm" height="40.0" >[[company.logo]]</image>
|
||||||
<setFont name="Helvetica" size="8"/>
|
<setFont name="Helvetica" size="8"/>
|
||||||
<fill color="black"/>
|
<fill color="black"/>
|
||||||
<stroke color="black"/>
|
<stroke color="black"/>
|
||||||
<lines>1.3cm 27.7cm 20cm 27.7cm</lines>
|
<lines>1.3cm 27.7cm 20cm 27.7cm</lines>
|
||||||
|
|
||||||
<drawRightString x="20cm" y="27.8cm">[[ company.rml_header1 ]]</drawRightString>
|
<drawRightString x="20cm" y="27.8cm">[[ company.rml_header1 ]]</drawRightString>
|
||||||
|
|
||||||
|
|
||||||
<drawString x="1.3cm" y="27.2cm">[[ company.partner_id.name ]]</drawString>
|
<drawString x="1.3cm" y="27.2cm">[[ company.partner_id.name ]]</drawString>
|
||||||
<drawString x="1.3cm" y="26.8cm">[[ company.partner_id.address and company.partner_id.address[0].street ]]</drawString>
|
<drawString x="1.3cm" y="26.8cm">[[ company.partner_id.address and company.partner_id.address[0].street ]]</drawString>
|
||||||
<drawString x="1.3cm" y="26.4cm">[[ company.partner_id.address and company.partner_id.address[0].zip ]] [[ company.partner_id.address and company.partner_id.address[0].city ]] - [[ company.partner_id.address and company.partner_id.address[0].country_id and company.partner_id.address[0].country_id.name ]]</drawString>
|
<drawString x="1.3cm" y="26.4cm">[[ company.partner_id.address and company.partner_id.address[0].zip ]] [[ company.partner_id.address and company.partner_id.address[0].city ]] - [[ company.partner_id.address and company.partner_id.address[0].country_id and company.partner_id.address[0].country_id.name ]]</drawString>
|
||||||
<drawString x="1.3cm" y="26.0cm">Phone:</drawString>
|
<drawString x="1.3cm" y="26.0cm">Phone:</drawString>
|
||||||
<drawRightString x="7cm" y="26.0cm">[[ company.partner_id.address and company.partner_id.address[0].phone ]]</drawRightString>
|
<drawRightString x="7cm" y="26.0cm">[[ company.partner_id.address and company.partner_id.address[0].phone ]]</drawRightString>
|
||||||
<drawString x="1.3cm" y="25.6cm">Mail:</drawString>
|
<drawString x="1.3cm" y="25.6cm">Mail:</drawString>
|
||||||
<drawRightString x="7cm" y="25.6cm">[[ company.partner_id.address and company.partner_id.address[0].email ]]</drawRightString>
|
<drawRightString x="7cm" y="25.6cm">[[ company.partner_id.address and company.partner_id.address[0].email ]]</drawRightString>
|
||||||
<lines>1.3cm 25.5cm 7cm 25.5cm</lines>
|
<lines>1.3cm 25.5cm 7cm 25.5cm</lines>
|
||||||
|
|
||||||
<!--page bottom-->
|
<!--page bottom-->
|
||||||
|
|
||||||
<lines>1.2cm 2.15cm 19.9cm 2.15cm</lines>
|
<lines>1.2cm 2.15cm 19.9cm 2.15cm</lines>
|
||||||
|
|
||||||
<drawCentredString x="10.5cm" y="1.7cm">[[ company.rml_footer1 ]]</drawCentredString>
|
<drawCentredString x="10.5cm" y="1.7cm">[[ company.rml_footer1 ]]</drawCentredString>
|
||||||
<drawCentredString x="10.5cm" y="1.25cm">[[ company.rml_footer2 ]]</drawCentredString>
|
<drawCentredString x="10.5cm" y="1.25cm">[[ company.rml_footer2 ]]</drawCentredString>
|
||||||
<drawCentredString x="10.5cm" y="0.8cm">Contact : [[ user.name ]] - Page: <pageNumber/></drawCentredString>
|
<drawCentredString x="10.5cm" y="0.8cm">Contact : [[ user.name ]] - Page: <pageNumber/></drawCentredString>
|
||||||
</pageGraphics>
|
</pageGraphics>
|
||||||
</pageTemplate>
|
</pageTemplate>
|
||||||
</header>"""
|
</header>"""
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'currency_id': _get_euro,
|
'currency_id': _get_euro,
|
||||||
'rml_header':_get_header,
|
'rml_header':_get_header,
|
||||||
'rml_header2': _get_header2
|
'rml_header2': _get_header2
|
||||||
}
|
}
|
||||||
|
|
||||||
_constraints = [
|
_constraints = [
|
||||||
(_check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
|
(_check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
|
||||||
]
|
]
|
||||||
|
|
||||||
res_company()
|
res_company()
|
||||||
|
|
||||||
|
|
|
@ -39,90 +39,90 @@ import mx.DateTime
|
||||||
from mx.DateTime import RelativeDateTime, now, DateTime, localtime
|
from mx.DateTime import RelativeDateTime, now, DateTime, localtime
|
||||||
|
|
||||||
class res_currency(osv.osv):
|
class res_currency(osv.osv):
|
||||||
def _current_rate(self, cr, uid, ids, name, arg, context={}):
|
def _current_rate(self, cr, uid, ids, name, arg, context={}):
|
||||||
res={}
|
res={}
|
||||||
if 'date' in context:
|
if 'date' in context:
|
||||||
date=context['date']
|
date=context['date']
|
||||||
else:
|
else:
|
||||||
date=time.strftime('%Y-%m-%d')
|
date=time.strftime('%Y-%m-%d')
|
||||||
for id in ids:
|
for id in ids:
|
||||||
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %d AND name <= '%s' ORDER BY name desc LIMIT 1" % (id, date))
|
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %d AND name <= '%s' ORDER BY name desc LIMIT 1" % (id, date))
|
||||||
if cr.rowcount:
|
if cr.rowcount:
|
||||||
id, rate=cr.fetchall()[0]
|
id, rate=cr.fetchall()[0]
|
||||||
res[id]=rate
|
res[id]=rate
|
||||||
else:
|
else:
|
||||||
res[id]=0
|
res[id]=0
|
||||||
return res
|
return res
|
||||||
_name = "res.currency"
|
_name = "res.currency"
|
||||||
_description = "Currency"
|
_description = "Currency"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Currency', size=32, required=True),
|
'name': fields.char('Currency', size=32, required=True),
|
||||||
'code': fields.char('Code', size=3),
|
'code': fields.char('Code', size=3),
|
||||||
'rate': fields.function(_current_rate, method=True, string='Current rate', digits=(12,6),
|
'rate': fields.function(_current_rate, method=True, string='Current rate', digits=(12,6),
|
||||||
help='The rate of the currency to the currency of rate 1'),
|
help='The rate of the currency to the currency of rate 1'),
|
||||||
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
|
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
|
||||||
'accuracy': fields.integer('Computational Accuracy'),
|
'accuracy': fields.integer('Computational Accuracy'),
|
||||||
'rounding': fields.float('Rounding factor', digits=(12,6)),
|
'rounding': fields.float('Rounding factor', digits=(12,6)),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
}
|
}
|
||||||
_order = "code"
|
_order = "code"
|
||||||
|
|
||||||
def round(self, cr, uid, currency, amount):
|
def round(self, cr, uid, currency, amount):
|
||||||
return round(amount / currency.rounding) * currency.rounding
|
return round(amount / currency.rounding) * currency.rounding
|
||||||
|
|
||||||
def is_zero(self, cr, uid, currency, amount):
|
def is_zero(self, cr, uid, currency, amount):
|
||||||
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
|
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
|
||||||
|
|
||||||
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context={}):
|
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context={}):
|
||||||
if not from_currency_id:
|
if not from_currency_id:
|
||||||
from_currency_id = to_currency_id
|
from_currency_id = to_currency_id
|
||||||
xc=self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
|
xc=self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
|
||||||
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
|
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
|
||||||
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
|
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
|
||||||
if from_currency['rate'] == 0 or to_currency['rate'] == 0:
|
if from_currency['rate'] == 0 or to_currency['rate'] == 0:
|
||||||
date = context.get('date', time.strftime('%Y-%m-%d'))
|
date = context.get('date', time.strftime('%Y-%m-%d'))
|
||||||
if from_currency['rate'] == 0:
|
if from_currency['rate'] == 0:
|
||||||
code = from_currency.code
|
code = from_currency.code
|
||||||
else:
|
else:
|
||||||
code = to_currency.code
|
code = to_currency.code
|
||||||
raise osv.except_osv(_('Error'), _('No rate found \n' \
|
raise osv.except_osv(_('Error'), _('No rate found \n' \
|
||||||
'for the currency: %s \n' \
|
'for the currency: %s \n' \
|
||||||
'at the date: %s') % (code, date))
|
'at the date: %s') % (code, date))
|
||||||
if to_currency_id==from_currency_id:
|
if to_currency_id==from_currency_id:
|
||||||
if round:
|
if round:
|
||||||
return self.round(cr, uid, to_currency, from_amount)
|
return self.round(cr, uid, to_currency, from_amount)
|
||||||
else:
|
else:
|
||||||
return from_amount
|
return from_amount
|
||||||
else:
|
else:
|
||||||
if round:
|
if round:
|
||||||
return self.round(cr, uid, to_currency, from_amount * to_currency.rate/from_currency.rate)
|
return self.round(cr, uid, to_currency, from_amount * to_currency.rate/from_currency.rate)
|
||||||
else:
|
else:
|
||||||
return (from_amount * to_currency.rate/from_currency.rate)
|
return (from_amount * to_currency.rate/from_currency.rate)
|
||||||
def name_search(self, cr, uid, name, args=[], operator='ilike', context={}, limit=80):
|
def name_search(self, cr, uid, name, args=[], operator='ilike', context={}, limit=80):
|
||||||
args2 = args[:]
|
args2 = args[:]
|
||||||
if name:
|
if name:
|
||||||
args += [('name', operator, name)]
|
args += [('name', operator, name)]
|
||||||
args2 += [('code', operator, name)]
|
args2 += [('code', operator, name)]
|
||||||
ids = self.search(cr, uid, args, limit=limit)
|
ids = self.search(cr, uid, args, limit=limit)
|
||||||
ids += self.search(cr, uid, args2, limit=limit)
|
ids += self.search(cr, uid, args2, limit=limit)
|
||||||
res = self.name_get(cr, uid, ids, context)
|
res = self.name_get(cr, uid, ids, context)
|
||||||
return res
|
return res
|
||||||
res_currency()
|
res_currency()
|
||||||
|
|
||||||
class res_currency_rate(osv.osv):
|
class res_currency_rate(osv.osv):
|
||||||
_name = "res.currency.rate"
|
_name = "res.currency.rate"
|
||||||
_description = "Currency Rate"
|
_description = "Currency Rate"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.date('Date', required=True, select=True),
|
'name': fields.date('Date', required=True, select=True),
|
||||||
'rate': fields.float('Rate', digits=(12,6), required=True,
|
'rate': fields.float('Rate', digits=(12,6), required=True,
|
||||||
help='The rate of the currency to the currency of rate 1'),
|
help='The rate of the currency to the currency of rate 1'),
|
||||||
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
|
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'name': lambda *a: time.strftime('%Y-%m-%d'),
|
'name': lambda *a: time.strftime('%Y-%m-%d'),
|
||||||
}
|
}
|
||||||
_order = "name desc"
|
_order = "name desc"
|
||||||
res_currency_rate()
|
res_currency_rate()
|
||||||
|
|
|
@ -30,19 +30,19 @@
|
||||||
from osv import fields, osv
|
from osv import fields, osv
|
||||||
|
|
||||||
class lang(osv.osv):
|
class lang(osv.osv):
|
||||||
_name = "res.lang"
|
_name = "res.lang"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True),
|
'name': fields.char('Name', size=64, required=True),
|
||||||
'code': fields.char('Code', size=5, required=True),
|
'code': fields.char('Code', size=5, required=True),
|
||||||
'translatable': fields.boolean('Translatable'),
|
'translatable': fields.boolean('Translatable'),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'direction': fields.selection([('ltr', 'Left-to-right'), ('rtl', 'Right-to-left')], 'Direction',resuired=True),
|
'direction': fields.selection([('ltr', 'Left-to-right'), ('rtl', 'Right-to-left')], 'Direction',resuired=True),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'active': lambda *a: 1,
|
'active': lambda *a: 1,
|
||||||
'translatable': lambda *a: 0,
|
'translatable': lambda *a: 0,
|
||||||
'direction': lambda *a: 'ltr',
|
'direction': lambda *a: 'ltr',
|
||||||
}
|
}
|
||||||
|
|
||||||
lang()
|
lang()
|
||||||
|
|
||||||
|
|
|
@ -31,97 +31,97 @@ from osv import osv, fields
|
||||||
import time
|
import time
|
||||||
|
|
||||||
def _links_get(self, cr, uid, context={}):
|
def _links_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.request.link')
|
obj = self.pool.get('res.request.link')
|
||||||
ids = obj.search(cr, uid, [])
|
ids = obj.search(cr, uid, [])
|
||||||
res = obj.read(cr, uid, ids, ['object', 'name'], context)
|
res = obj.read(cr, uid, ids, ['object', 'name'], context)
|
||||||
return [(r['object'], r['name']) for r in res]
|
return [(r['object'], r['name']) for r in res]
|
||||||
|
|
||||||
class res_request(osv.osv):
|
class res_request(osv.osv):
|
||||||
_name = 'res.request'
|
_name = 'res.request'
|
||||||
|
|
||||||
def request_send(self, cr, uid, ids, *args):
|
def request_send(self, cr, uid, ids, *args):
|
||||||
for id in ids:
|
for id in ids:
|
||||||
cr.execute('update res_request set state=%s,date_sent=%s where id=%d', ('waiting', time.strftime('%Y-%m-%d %H:%M:%S'), id))
|
cr.execute('update res_request set state=%s,date_sent=%s where id=%d', ('waiting', time.strftime('%Y-%m-%d %H:%M:%S'), id))
|
||||||
cr.execute('select act_from,act_to,body,date_sent from res_request where id=%d', (id,))
|
cr.execute('select act_from,act_to,body,date_sent from res_request where id=%d', (id,))
|
||||||
values = cr.dictfetchone()
|
values = cr.dictfetchone()
|
||||||
if values['body'] and (len(values['body']) > 128):
|
if values['body'] and (len(values['body']) > 128):
|
||||||
values['name'] = values['body'][:125] + '...'
|
values['name'] = values['body'][:125] + '...'
|
||||||
else:
|
else:
|
||||||
values['name'] = values['body'] or '/'
|
values['name'] = values['body'] or '/'
|
||||||
values['req_id'] = id
|
values['req_id'] = id
|
||||||
self.pool.get('res.request.history').create(cr, uid, values)
|
self.pool.get('res.request.history').create(cr, uid, values)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def request_reply(self, cr, uid, ids, *args):
|
def request_reply(self, cr, uid, ids, *args):
|
||||||
for id in ids:
|
for id in ids:
|
||||||
cr.execute("update res_request set state='active', act_from=%d, act_to=act_from, trigger_date=NULL, body='' where id=%d", (uid,id))
|
cr.execute("update res_request set state='active', act_from=%d, act_to=act_from, trigger_date=NULL, body='' where id=%d", (uid,id))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def request_close(self, cr, uid, ids, *args):
|
def request_close(self, cr, uid, ids, *args):
|
||||||
self.write(cr, uid, ids, {'state':'closed', 'active':False})
|
self.write(cr, uid, ids, {'state':'closed', 'active':False})
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def request_get(self, cr, uid):
|
def request_get(self, cr, uid):
|
||||||
cr.execute('select id from res_request where act_to=%d and (trigger_date<=%s or trigger_date is null) and active=True', (uid,time.strftime('%Y-%m-%d')))
|
cr.execute('select id from res_request where act_to=%d and (trigger_date<=%s or trigger_date is null) and active=True', (uid,time.strftime('%Y-%m-%d')))
|
||||||
ids = map(lambda x:x[0], cr.fetchall())
|
ids = map(lambda x:x[0], cr.fetchall())
|
||||||
cr.execute('select id from res_request where act_from=%d and (act_to<>%d) and (trigger_date<=%s or trigger_date is null) and active=True', (uid,uid,time.strftime('%Y-%m-%d')))
|
cr.execute('select id from res_request where act_from=%d and (act_to<>%d) and (trigger_date<=%s or trigger_date is null) and active=True', (uid,uid,time.strftime('%Y-%m-%d')))
|
||||||
ids2 = map(lambda x:x[0], cr.fetchall())
|
ids2 = map(lambda x:x[0], cr.fetchall())
|
||||||
return (ids, ids2)
|
return (ids, ids2)
|
||||||
|
|
||||||
_columns = {
|
_columns = {
|
||||||
'create_date': fields.datetime('Created date', readonly=True),
|
'create_date': fields.datetime('Created date', readonly=True),
|
||||||
'name': fields.char('Subject', states={'waiting':[('readonly',True)],'active':[('readonly',True)],'closed':[('readonly',True)]}, required=True, size=128),
|
'name': fields.char('Subject', states={'waiting':[('readonly',True)],'active':[('readonly',True)],'closed':[('readonly',True)]}, required=True, size=128),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'priority': fields.selection([('0','Low'),('1','Normal'),('2','High')], 'Priority', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}, required=True),
|
'priority': fields.selection([('0','Low'),('1','Normal'),('2','High')], 'Priority', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}, required=True),
|
||||||
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True, states={'closed':[('readonly',True)]}),
|
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True, states={'closed':[('readonly',True)]}),
|
||||||
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
||||||
'body': fields.text('Request', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
'body': fields.text('Request', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
||||||
'date_sent': fields.datetime('Date', readonly=True),
|
'date_sent': fields.datetime('Date', readonly=True),
|
||||||
'trigger_date': fields.datetime('Trigger Date', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
'trigger_date': fields.datetime('Trigger Date', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
|
||||||
'ref_partner_id':fields.many2one('res.partner', 'Partner Ref.', states={'closed':[('readonly',True)]}),
|
'ref_partner_id':fields.many2one('res.partner', 'Partner Ref.', states={'closed':[('readonly',True)]}),
|
||||||
'ref_doc1':fields.reference('Document Ref 1', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
|
'ref_doc1':fields.reference('Document Ref 1', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
|
||||||
'ref_doc2':fields.reference('Document Ref 2', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
|
'ref_doc2':fields.reference('Document Ref 2', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
|
||||||
'state': fields.selection([('draft','draft'),('waiting','waiting'),('active','active'),('closed','closed')], 'State', required=True, readonly=True),
|
'state': fields.selection([('draft','draft'),('waiting','waiting'),('active','active'),('closed','closed')], 'State', required=True, readonly=True),
|
||||||
'history': fields.one2many('res.request.history','req_id', 'History')
|
'history': fields.one2many('res.request.history','req_id', 'History')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'act_from': lambda obj,cr,uid,context={}: uid,
|
'act_from': lambda obj,cr,uid,context={}: uid,
|
||||||
'state': lambda obj,cr,uid,context={}: 'draft',
|
'state': lambda obj,cr,uid,context={}: 'draft',
|
||||||
'active': lambda obj,cr,uid,context={}: True,
|
'active': lambda obj,cr,uid,context={}: True,
|
||||||
'priority': lambda obj,cr,uid,context={}: '1',
|
'priority': lambda obj,cr,uid,context={}: '1',
|
||||||
}
|
}
|
||||||
_order = 'priority desc, trigger_date, create_date desc'
|
_order = 'priority desc, trigger_date, create_date desc'
|
||||||
_table = 'res_request'
|
_table = 'res_request'
|
||||||
res_request()
|
res_request()
|
||||||
|
|
||||||
class res_request_link(osv.osv):
|
class res_request_link(osv.osv):
|
||||||
_name = 'res.request.link'
|
_name = 'res.request.link'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True, translate=True),
|
'name': fields.char('Name', size=64, required=True, translate=True),
|
||||||
'object': fields.char('Object', size=64, required=True),
|
'object': fields.char('Object', size=64, required=True),
|
||||||
'priority': fields.integer('Priority'),
|
'priority': fields.integer('Priority'),
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'priority': lambda *a: 5,
|
'priority': lambda *a: 5,
|
||||||
}
|
}
|
||||||
_order = 'priority'
|
_order = 'priority'
|
||||||
res_request_link()
|
res_request_link()
|
||||||
|
|
||||||
class res_request_history(osv.osv):
|
class res_request_history(osv.osv):
|
||||||
_name = 'res.request.history'
|
_name = 'res.request.history'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Summary', size=128, states={'active':[('readonly',True)],'waiting':[('readonly',True)]}, required=True),
|
'name': fields.char('Summary', size=128, states={'active':[('readonly',True)],'waiting':[('readonly',True)]}, required=True),
|
||||||
'req_id': fields.many2one('res.request', 'Request', required=True, ondelete='cascade', select=True),
|
'req_id': fields.many2one('res.request', 'Request', required=True, ondelete='cascade', select=True),
|
||||||
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True),
|
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True),
|
||||||
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)]}),
|
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)]}),
|
||||||
'body': fields.text('Body', states={'waiting':[('readonly',True)]}),
|
'body': fields.text('Body', states={'waiting':[('readonly',True)]}),
|
||||||
'date_sent': fields.datetime('Date sent', states={'waiting':[('readonly',True)]}, required=True)
|
'date_sent': fields.datetime('Date sent', states={'waiting':[('readonly',True)]}, required=True)
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'name': lambda *a: 'NoName',
|
'name': lambda *a: 'NoName',
|
||||||
'act_from': lambda obj,cr,uid,context={}: uid,
|
'act_from': lambda obj,cr,uid,context={}: uid,
|
||||||
'act_to': lambda obj,cr,uid,context={}: uid,
|
'act_to': lambda obj,cr,uid,context={}: uid,
|
||||||
'date_sent': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
'date_sent': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
}
|
}
|
||||||
res_request_history()
|
res_request_history()
|
||||||
|
|
||||||
|
|
|
@ -32,243 +32,243 @@ import tools
|
||||||
import pytz
|
import pytz
|
||||||
|
|
||||||
class groups(osv.osv):
|
class groups(osv.osv):
|
||||||
_name = "res.groups"
|
_name = "res.groups"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Group Name', size=64, required=True),
|
'name': fields.char('Group Name', size=64, required=True),
|
||||||
'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
|
'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
|
||||||
'rule_groups': fields.many2many('ir.rule.group', 'group_rule_group_rel',
|
'rule_groups': fields.many2many('ir.rule.group', 'group_rule_group_rel',
|
||||||
'group_id', 'rule_group_id', 'Rules', domain="[('global', '<>', True)]"),
|
'group_id', 'rule_group_id', 'Rules', domain="[('global', '<>', True)]"),
|
||||||
'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
|
'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
|
||||||
'comment' : fields.text('Comment',size=250),
|
'comment' : fields.text('Comment',size=250),
|
||||||
}
|
}
|
||||||
_sql_constraints = [
|
_sql_constraints = [
|
||||||
('name_uniq', 'unique (name)', 'The name of the group must be unique !')
|
('name_uniq', 'unique (name)', 'The name of the group must be unique !')
|
||||||
]
|
]
|
||||||
|
|
||||||
def write(self, cr, uid, ids, vals, context=None):
|
def write(self, cr, uid, ids, vals, context=None):
|
||||||
if 'name' in vals:
|
if 'name' in vals:
|
||||||
if vals['name'].startswith('-'):
|
if vals['name'].startswith('-'):
|
||||||
raise osv.except_osv(_('Error'),
|
raise osv.except_osv(_('Error'),
|
||||||
_('The name of the group can not start with "-"'))
|
_('The name of the group can not start with "-"'))
|
||||||
res = super(groups, self).write(cr, uid, ids, vals, context=context)
|
res = super(groups, self).write(cr, uid, ids, vals, context=context)
|
||||||
# Restart the cache on the company_get method
|
# Restart the cache on the company_get method
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def create(self, cr, uid, vals, context=None):
|
def create(self, cr, uid, vals, context=None):
|
||||||
if 'name' in vals:
|
if 'name' in vals:
|
||||||
if vals['name'].startswith('-'):
|
if vals['name'].startswith('-'):
|
||||||
raise osv.except_osv(_('Error'),
|
raise osv.except_osv(_('Error'),
|
||||||
_('The name of the group can not start with "-"'))
|
_('The name of the group can not start with "-"'))
|
||||||
return super(groups, self).create(cr, uid, vals, context=context)
|
return super(groups, self).create(cr, uid, vals, context=context)
|
||||||
|
|
||||||
groups()
|
groups()
|
||||||
|
|
||||||
|
|
||||||
class roles(osv.osv):
|
class roles(osv.osv):
|
||||||
_name = "res.roles"
|
_name = "res.roles"
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Role Name', size=64, required=True),
|
'name': fields.char('Role Name', size=64, required=True),
|
||||||
'parent_id': fields.many2one('res.roles', 'Parent', select=True),
|
'parent_id': fields.many2one('res.roles', 'Parent', select=True),
|
||||||
'child_id': fields.one2many('res.roles', 'parent_id', 'Childs')
|
'child_id': fields.one2many('res.roles', 'parent_id', 'Childs')
|
||||||
}
|
}
|
||||||
_defaults = {
|
_defaults = {
|
||||||
}
|
}
|
||||||
def check(self, cr, uid, ids, role_id):
|
def check(self, cr, uid, ids, role_id):
|
||||||
if role_id in ids:
|
if role_id in ids:
|
||||||
return True
|
return True
|
||||||
cr.execute('select parent_id from res_roles where id=%d', (role_id,))
|
cr.execute('select parent_id from res_roles where id=%d', (role_id,))
|
||||||
roles = cr.fetchone()[0]
|
roles = cr.fetchone()[0]
|
||||||
if roles:
|
if roles:
|
||||||
return self.check(cr, uid, ids, roles)
|
return self.check(cr, uid, ids, roles)
|
||||||
return False
|
return False
|
||||||
roles()
|
roles()
|
||||||
|
|
||||||
def _lang_get(self, cr, uid, context={}):
|
def _lang_get(self, cr, uid, context={}):
|
||||||
obj = self.pool.get('res.lang')
|
obj = self.pool.get('res.lang')
|
||||||
ids = obj.search(cr, uid, [])
|
ids = obj.search(cr, uid, [])
|
||||||
res = obj.read(cr, uid, ids, ['code', 'name'], context)
|
res = obj.read(cr, uid, ids, ['code', 'name'], context)
|
||||||
res = [(r['code'], r['name']) for r in res]
|
res = [(r['code'], r['name']) for r in res]
|
||||||
return res
|
return res
|
||||||
def _tz_get(self,cr,uid, context={}):
|
def _tz_get(self,cr,uid, context={}):
|
||||||
return [(x, x) for x in pytz.all_timezones]
|
return [(x, x) for x in pytz.all_timezones]
|
||||||
|
|
||||||
class users(osv.osv):
|
class users(osv.osv):
|
||||||
_name = "res.users"
|
_name = "res.users"
|
||||||
_log_access = False
|
_log_access = False
|
||||||
_columns = {
|
_columns = {
|
||||||
'name': fields.char('Name', size=64, required=True, select=True),
|
'name': fields.char('Name', size=64, required=True, select=True),
|
||||||
'login': fields.char('Login', size=64, required=True),
|
'login': fields.char('Login', size=64, required=True),
|
||||||
'password': fields.char('Password', size=64, invisible=True),
|
'password': fields.char('Password', size=64, invisible=True),
|
||||||
'signature': fields.text('Signature', size=64),
|
'signature': fields.text('Signature', size=64),
|
||||||
'address_id': fields.many2one('res.partner.address', 'Address'),
|
'address_id': fields.many2one('res.partner.address', 'Address'),
|
||||||
'active': fields.boolean('Active'),
|
'active': fields.boolean('Active'),
|
||||||
'action_id': fields.many2one('ir.actions.actions', 'Home Action'),
|
'action_id': fields.many2one('ir.actions.actions', 'Home Action'),
|
||||||
'menu_id': fields.many2one('ir.actions.actions', 'Menu Action'),
|
'menu_id': fields.many2one('ir.actions.actions', 'Menu Action'),
|
||||||
'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
|
'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
|
||||||
'roles_id': fields.many2many('res.roles', 'res_roles_users_rel', 'uid', 'rid', 'Roles'),
|
'roles_id': fields.many2many('res.roles', 'res_roles_users_rel', 'uid', 'rid', 'Roles'),
|
||||||
'company_id': fields.many2one('res.company', 'Company'),
|
'company_id': fields.many2one('res.company', 'Company'),
|
||||||
'context_lang': fields.selection(_lang_get, 'Language', required=True),
|
'context_lang': fields.selection(_lang_get, 'Language', required=True),
|
||||||
'context_tz': fields.selection(_tz_get, 'Timezone', size=64)
|
'context_tz': fields.selection(_tz_get, 'Timezone', size=64)
|
||||||
}
|
}
|
||||||
def read(self,cr, uid, ids, fields=None, context=None, load='_classic_read'):
|
def read(self,cr, uid, ids, fields=None, context=None, load='_classic_read'):
|
||||||
result = super(users, self).read(cr, uid, ids, fields, context, load)
|
result = super(users, self).read(cr, uid, ids, fields, context, load)
|
||||||
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', raise_exception=False)
|
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', raise_exception=False)
|
||||||
if not canwrite:
|
if not canwrite:
|
||||||
for r in result:
|
for r in result:
|
||||||
if 'password' in r:
|
if 'password' in r:
|
||||||
r['password'] = '********'
|
r['password'] = '********'
|
||||||
return result
|
return result
|
||||||
|
|
||||||
_sql_constraints = [
|
_sql_constraints = [
|
||||||
('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')
|
('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')
|
||||||
]
|
]
|
||||||
def _get_action(self,cr, uid, context={}):
|
def _get_action(self,cr, uid, context={}):
|
||||||
ids = self.pool.get('ir.ui.menu').search(cr, uid, [('usage','=','menu')])
|
ids = self.pool.get('ir.ui.menu').search(cr, uid, [('usage','=','menu')])
|
||||||
return ids and ids[0] or False
|
return ids and ids[0] or False
|
||||||
|
|
||||||
def _get_company(self,cr, uid, context={}):
|
def _get_company(self,cr, uid, context={}):
|
||||||
return self.pool.get('res.users').browse(cr, uid, uid, context).company_id.id
|
return self.pool.get('res.users').browse(cr, uid, uid, context).company_id.id
|
||||||
|
|
||||||
def _get_menu(self,cr, uid, context={}):
|
def _get_menu(self,cr, uid, context={}):
|
||||||
ids = self.pool.get('ir.actions.act_window').search(cr, uid, [('usage','=','menu')])
|
ids = self.pool.get('ir.actions.act_window').search(cr, uid, [('usage','=','menu')])
|
||||||
return ids and ids[0] or False
|
return ids and ids[0] or False
|
||||||
|
|
||||||
_defaults = {
|
_defaults = {
|
||||||
'password' : lambda obj,cr,uid,context={} : '',
|
'password' : lambda obj,cr,uid,context={} : '',
|
||||||
'context_lang': lambda *args: 'en_US',
|
'context_lang': lambda *args: 'en_US',
|
||||||
'active' : lambda obj,cr,uid,context={} : True,
|
'active' : lambda obj,cr,uid,context={} : True,
|
||||||
'menu_id': _get_menu,
|
'menu_id': _get_menu,
|
||||||
'action_id': _get_menu,
|
'action_id': _get_menu,
|
||||||
'company_id': _get_company,
|
'company_id': _get_company,
|
||||||
}
|
}
|
||||||
def company_get(self, cr, uid, uid2):
|
def company_get(self, cr, uid, uid2):
|
||||||
company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
|
company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
|
||||||
return company_id
|
return company_id
|
||||||
company_get = tools.cache()(company_get)
|
company_get = tools.cache()(company_get)
|
||||||
|
|
||||||
def write(self, cr, uid, ids, values, *args, **argv):
|
def write(self, cr, uid, ids, values, *args, **argv):
|
||||||
if (ids == [uid]):
|
if (ids == [uid]):
|
||||||
ok = True
|
ok = True
|
||||||
for k in values.keys():
|
for k in values.keys():
|
||||||
if k not in ('password','signature','action_id', 'context_lang', 'context_tz'):
|
if k not in ('password','signature','action_id', 'context_lang', 'context_tz'):
|
||||||
ok=False
|
ok=False
|
||||||
if ok:
|
if ok:
|
||||||
uid = 1
|
uid = 1
|
||||||
res = super(users, self).write(cr, uid, ids, values, *args, **argv)
|
res = super(users, self).write(cr, uid, ids, values, *args, **argv)
|
||||||
self.company_get()
|
self.company_get()
|
||||||
# Restart the cache on the company_get method
|
# Restart the cache on the company_get method
|
||||||
self.pool.get('ir.rule').domain_get()
|
self.pool.get('ir.rule').domain_get()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def unlink(self, cr, uid, ids):
|
def unlink(self, cr, uid, ids):
|
||||||
if 1 in ids:
|
if 1 in ids:
|
||||||
raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the root user as it is used internally for resources created by Tiny ERP (updates, module installation, ...)'))
|
raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the root user as it is used internally for resources created by Tiny ERP (updates, module installation, ...)'))
|
||||||
return super(users, self).unlink(cr, uid, ids)
|
return super(users, self).unlink(cr, uid, ids)
|
||||||
|
|
||||||
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=80):
|
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=80):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
ids = []
|
ids = []
|
||||||
if name:
|
if name:
|
||||||
ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit)
|
ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit)
|
||||||
if not ids:
|
if not ids:
|
||||||
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
|
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
|
||||||
return self.name_get(cr, user, ids)
|
return self.name_get(cr, user, ids)
|
||||||
|
|
||||||
def copy(self, cr, uid, id, default=None, context={}):
|
def copy(self, cr, uid, id, default=None, context={}):
|
||||||
login = self.read(cr, uid, [id], ['login'])[0]['login']
|
login = self.read(cr, uid, [id], ['login'])[0]['login']
|
||||||
default.update({'login': login+' (copy)'})
|
default.update({'login': login+' (copy)'})
|
||||||
return super(users, self).copy(cr, uid, id, default, context)
|
return super(users, self).copy(cr, uid, id, default, context)
|
||||||
|
|
||||||
def context_get(self, cr, uid, context={}):
|
def context_get(self, cr, uid, context={}):
|
||||||
user = self.browse(cr, uid, uid, context)
|
user = self.browse(cr, uid, uid, context)
|
||||||
result = {}
|
result = {}
|
||||||
for k in self._columns.keys():
|
for k in self._columns.keys():
|
||||||
if k.startswith('context_'):
|
if k.startswith('context_'):
|
||||||
result[k[8:]] = getattr(user,k)
|
result[k[8:]] = getattr(user,k)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def action_get(self, cr, uid, context={}):
|
def action_get(self, cr, uid, context={}):
|
||||||
dataobj = self.pool.get('ir.model.data')
|
dataobj = self.pool.get('ir.model.data')
|
||||||
data_id = dataobj._get_id(cr, 1, 'base', 'action_res_users_my')
|
data_id = dataobj._get_id(cr, 1, 'base', 'action_res_users_my')
|
||||||
return dataobj.browse(cr, uid, data_id, context).res_id
|
return dataobj.browse(cr, uid, data_id, context).res_id
|
||||||
|
|
||||||
def action_next(self,cr,uid,ids,context=None):
|
def action_next(self,cr,uid,ids,context=None):
|
||||||
return{
|
return{
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'ir.module.module.configuration.wizard',
|
'res_model': 'ir.module.module.configuration.wizard',
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
|
|
||||||
def action_continue(self,cr,uid,ids,context={}):
|
def action_continue(self,cr,uid,ids,context={}):
|
||||||
return {
|
return {
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'ir.module.module.configuration.wizard',
|
'res_model': 'ir.module.module.configuration.wizard',
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
def action_new(self,cr,uid,ids,context={}):
|
def action_new(self,cr,uid,ids,context={}):
|
||||||
return {
|
return {
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'res.users',
|
'res_model': 'res.users',
|
||||||
'view_id':self.pool.get('ir.ui.view').search(cr,uid,[('name','=','res.users.confirm.form')]),
|
'view_id':self.pool.get('ir.ui.view').search(cr,uid,[('name','=','res.users.confirm.form')]),
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
users()
|
users()
|
||||||
|
|
||||||
class groups2(osv.osv):
|
class groups2(osv.osv):
|
||||||
_inherit = 'res.groups'
|
_inherit = 'res.groups'
|
||||||
_columns = {
|
_columns = {
|
||||||
'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
|
'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
|
||||||
}
|
}
|
||||||
groups2()
|
groups2()
|
||||||
|
|
||||||
|
|
||||||
class res_config_view(osv.osv_memory):
|
class res_config_view(osv.osv_memory):
|
||||||
_name='res.config.view'
|
_name='res.config.view'
|
||||||
_columns = {
|
_columns = {
|
||||||
'name':fields.char('Name', size=64),
|
'name':fields.char('Name', size=64),
|
||||||
'view': fields.selection([('simple','Simple'),('extended','Extended')], 'View', required=True ),
|
'view': fields.selection([('simple','Simple'),('extended','Extended')], 'View', required=True ),
|
||||||
|
|
||||||
}
|
}
|
||||||
_defaults={
|
_defaults={
|
||||||
'view':lambda *args: 'simple',
|
'view':lambda *args: 'simple',
|
||||||
}
|
}
|
||||||
|
|
||||||
def action_cancel(self,cr,uid,ids,conect=None):
|
def action_cancel(self,cr,uid,ids,conect=None):
|
||||||
print ' Cancel action'
|
print ' Cancel action'
|
||||||
return {
|
return {
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'ir.module.module.configuration.wizard',
|
'res_model': 'ir.module.module.configuration.wizard',
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
def action_set(self, cr, uid, ids, context=None):
|
def action_set(self, cr, uid, ids, context=None):
|
||||||
res=self.read(cr,uid,ids)[0]
|
res=self.read(cr,uid,ids)[0]
|
||||||
users_obj = self.pool.get('res.users')
|
users_obj = self.pool.get('res.users')
|
||||||
group_obj=self.pool.get('res.groups')
|
group_obj=self.pool.get('res.groups')
|
||||||
if 'view' in res and res['view'] and res['view']=='extended':
|
if 'view' in res and res['view'] and res['view']=='extended':
|
||||||
group_ids=group_obj.search(cr,uid,[('name','=','Extended View')])
|
group_ids=group_obj.search(cr,uid,[('name','=','Extended View')])
|
||||||
if group_ids and len(group_ids):
|
if group_ids and len(group_ids):
|
||||||
users_obj.write(cr, uid, [3],{
|
users_obj.write(cr, uid, [3],{
|
||||||
'groups_id':[(4,group_ids[0])]
|
'groups_id':[(4,group_ids[0])]
|
||||||
}, context=context)
|
}, context=context)
|
||||||
return {
|
return {
|
||||||
'view_type': 'form',
|
'view_type': 'form',
|
||||||
"view_mode": 'form',
|
"view_mode": 'form',
|
||||||
'res_model': 'ir.module.module.configuration.wizard',
|
'res_model': 'ir.module.module.configuration.wizard',
|
||||||
'type': 'ir.actions.act_window',
|
'type': 'ir.actions.act_window',
|
||||||
'target':'new',
|
'target':'new',
|
||||||
}
|
}
|
||||||
|
|
||||||
res_config_view()
|
res_config_view()
|
||||||
|
|
||||||
|
|
|
@ -45,13 +45,13 @@ done = []
|
||||||
|
|
||||||
print 'digraph G {'
|
print 'digraph G {'
|
||||||
while len(modules):
|
while len(modules):
|
||||||
f = modules.pop(0)
|
f = modules.pop(0)
|
||||||
done.append(f)
|
done.append(f)
|
||||||
if os.path.isfile(os.path.join(f,"__terp__.py")):
|
if os.path.isfile(os.path.join(f,"__terp__.py")):
|
||||||
info=eval(file(os.path.join(f,"__terp__.py")).read())
|
info=eval(file(os.path.join(f,"__terp__.py")).read())
|
||||||
if info.get('installable', True):
|
if info.get('installable', True):
|
||||||
for name in info['depends']:
|
for name in info['depends']:
|
||||||
if name not in done+modules:
|
if name not in done+modules:
|
||||||
modules.append(name)
|
modules.append(name)
|
||||||
print '\t%s -> %s;' % (f, name)
|
print '\t%s -> %s;' % (f, name)
|
||||||
print '}'
|
print '}'
|
||||||
|
|
14
bin/ir/ir.py
14
bin/ir/ir.py
|
@ -31,14 +31,14 @@ import osv
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
def ir_set(cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None):
|
def ir_set(cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None):
|
||||||
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
||||||
return obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
|
return obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
|
||||||
|
|
||||||
def ir_del(cr, uid, id):
|
def ir_del(cr, uid, id):
|
||||||
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
||||||
return obj.unlink(cr, uid, [id])
|
return obj.unlink(cr, uid, [id])
|
||||||
|
|
||||||
def ir_get(cr, uid, key, key2, models, meta=False, context={}, res_id_req=False):
|
def ir_get(cr, uid, key, key2, models, meta=False, context={}, res_id_req=False):
|
||||||
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
obj = pooler.get_pool(cr.dbname).get('ir.values')
|
||||||
res = obj.get(cr, uid, key, key2, models, meta=meta, context=context, res_id_req=res_id_req)
|
res = obj.get(cr, uid, key, key2, models, meta=meta, context=context, res_id_req=res_id_req)
|
||||||
return res
|
return res
|
||||||
|
|
576
bin/netsvc.py
576
bin/netsvc.py
|
@ -42,97 +42,97 @@ _res_id=1
|
||||||
_res={}
|
_res={}
|
||||||
|
|
||||||
class ServiceEndPointCall(object):
|
class ServiceEndPointCall(object):
|
||||||
def __init__(self,id,method):
|
def __init__(self,id,method):
|
||||||
self._id=id
|
self._id=id
|
||||||
self._meth=method
|
self._meth=method
|
||||||
def __call__(self,*args):
|
def __call__(self,*args):
|
||||||
_res[self._id]=self._meth(*args)
|
_res[self._id]=self._meth(*args)
|
||||||
return self._id
|
return self._id
|
||||||
|
|
||||||
class ServiceEndPoint(object):
|
class ServiceEndPoint(object):
|
||||||
def __init__(self, name, id):
|
def __init__(self, name, id):
|
||||||
self._id = id
|
self._id = id
|
||||||
self._meth={}
|
self._meth={}
|
||||||
s=_service[name]
|
s=_service[name]
|
||||||
for m in s._method:
|
for m in s._method:
|
||||||
self._meth[m]=s._method[m]
|
self._meth[m]=s._method[m]
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
return ServiceEndPointCall(self._id, self._meth[name])
|
return ServiceEndPointCall(self._id, self._meth[name])
|
||||||
|
|
||||||
class Service(object):
|
class Service(object):
|
||||||
_serviceEndPointID = 0
|
_serviceEndPointID = 0
|
||||||
def __init__(self, name, audience=''):
|
def __init__(self, name, audience=''):
|
||||||
_service[name]=self
|
_service[name]=self
|
||||||
self.__name=name
|
self.__name=name
|
||||||
self._method={}
|
self._method={}
|
||||||
self.exportedMethods=None
|
self.exportedMethods=None
|
||||||
self._response_process=None
|
self._response_process=None
|
||||||
self._response_process_id=None
|
self._response_process_id=None
|
||||||
self._response=None
|
self._response=None
|
||||||
|
|
||||||
def joinGroup(self,name):
|
def joinGroup(self,name):
|
||||||
if not name in _group:
|
if not name in _group:
|
||||||
_group[name]={}
|
_group[name]={}
|
||||||
_group[name][self.__name]=self
|
_group[name][self.__name]=self
|
||||||
|
|
||||||
def exportMethod(self, m):
|
def exportMethod(self, m):
|
||||||
if callable(m):
|
if callable(m):
|
||||||
self._method[m.__name__]=m
|
self._method[m.__name__]=m
|
||||||
|
|
||||||
def serviceEndPoint(self,s):
|
def serviceEndPoint(self,s):
|
||||||
if Service._serviceEndPointID >= 2**16:
|
if Service._serviceEndPointID >= 2**16:
|
||||||
Service._serviceEndPointID = 0
|
Service._serviceEndPointID = 0
|
||||||
Service._serviceEndPointID += 1
|
Service._serviceEndPointID += 1
|
||||||
return ServiceEndPoint(s, self._serviceEndPointID)
|
return ServiceEndPoint(s, self._serviceEndPointID)
|
||||||
|
|
||||||
def conversationId(self):
|
def conversationId(self):
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def processResponse(self,s,id):
|
def processResponse(self,s,id):
|
||||||
self._response_process, self._response_process_id = s, id
|
self._response_process, self._response_process_id = s, id
|
||||||
|
|
||||||
def processFailure(self,s,id):
|
def processFailure(self,s,id):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def resumeResponse(self,s):
|
def resumeResponse(self,s):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def cancelResponse(self,s):
|
def cancelResponse(self,s):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def suspendResponse(self,s):
|
def suspendResponse(self,s):
|
||||||
if self._response_process:
|
if self._response_process:
|
||||||
self._response_process(self._response_process_id,
|
self._response_process(self._response_process_id,
|
||||||
_res[self._response_process_id])
|
_res[self._response_process_id])
|
||||||
self._response_process=None
|
self._response_process=None
|
||||||
self._response=s(self._response_process_id)
|
self._response=s(self._response_process_id)
|
||||||
|
|
||||||
def abortResponse(self, error, description, origin, details):
|
def abortResponse(self, error, description, origin, details):
|
||||||
import tools
|
import tools
|
||||||
if not tools.config['debug_mode']:
|
if not tools.config['debug_mode']:
|
||||||
raise Exception("%s -- %s\n\n%s"%(origin,description,details))
|
raise Exception("%s -- %s\n\n%s"%(origin,description,details))
|
||||||
else:
|
else:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def currentFailure(self,s):
|
def currentFailure(self,s):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
class LocalService(Service):
|
class LocalService(Service):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
self.__name=name
|
self.__name=name
|
||||||
s=_service[name]
|
s=_service[name]
|
||||||
self._service=s
|
self._service=s
|
||||||
for m in s._method:
|
for m in s._method:
|
||||||
setattr(self,m,s._method[m])
|
setattr(self,m,s._method[m])
|
||||||
|
|
||||||
class ServiceUnavailable(Exception):
|
class ServiceUnavailable(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def service_exist(name):
|
def service_exist(name):
|
||||||
return (name in _service) and bool(_service[name])
|
return (name in _service) and bool(_service[name])
|
||||||
|
|
||||||
def get_rpc_paths():
|
def get_rpc_paths():
|
||||||
return map(lambda s: '/xmlrpc/%s' % s, _service)
|
return map(lambda s: '/xmlrpc/%s' % s, _service)
|
||||||
|
|
||||||
LOG_DEBUG='debug'
|
LOG_DEBUG='debug'
|
||||||
LOG_INFO='info'
|
LOG_INFO='info'
|
||||||
|
@ -141,269 +141,269 @@ LOG_ERROR='error'
|
||||||
LOG_CRITICAL='critical'
|
LOG_CRITICAL='critical'
|
||||||
|
|
||||||
def init_logger():
|
def init_logger():
|
||||||
from tools import config
|
from tools import config
|
||||||
import os
|
import os
|
||||||
|
|
||||||
if config['logfile']:
|
if config['logfile']:
|
||||||
logf = config['logfile']
|
logf = config['logfile']
|
||||||
# test if the directories exist, else create them
|
# test if the directories exist, else create them
|
||||||
try:
|
try:
|
||||||
if not os.path.exists(os.path.dirname(logf)):
|
if not os.path.exists(os.path.dirname(logf)):
|
||||||
os.makedirs(os.path.dirname(logf))
|
os.makedirs(os.path.dirname(logf))
|
||||||
try:
|
try:
|
||||||
fd = open(logf, 'a')
|
fd = open(logf, 'a')
|
||||||
handler = logging.StreamHandler(fd)
|
handler = logging.StreamHandler(fd)
|
||||||
except IOError:
|
except IOError:
|
||||||
sys.stderr.write("ERROR: couldn't open the logfile\n")
|
sys.stderr.write("ERROR: couldn't open the logfile\n")
|
||||||
handler = logging.StreamHandler(sys.stdout)
|
handler = logging.StreamHandler(sys.stdout)
|
||||||
except OSError:
|
except OSError:
|
||||||
sys.stderr.write("ERROR: couldn't create the logfile directory\n")
|
sys.stderr.write("ERROR: couldn't create the logfile directory\n")
|
||||||
handler = logging.StreamHandler(sys.stdout)
|
handler = logging.StreamHandler(sys.stdout)
|
||||||
else:
|
else:
|
||||||
handler = logging.StreamHandler(sys.stdout)
|
handler = logging.StreamHandler(sys.stdout)
|
||||||
|
|
||||||
# create a format for log messages and dates
|
# create a format for log messages and dates
|
||||||
formatter = logging.Formatter('[%(asctime)s] %(levelname)s:%(name)s:%(message)s', '%a %b %d %H:%M:%S %Y')
|
formatter = logging.Formatter('[%(asctime)s] %(levelname)s:%(name)s:%(message)s', '%a %b %d %H:%M:%S %Y')
|
||||||
|
|
||||||
# tell the handler to use this format
|
# tell the handler to use this format
|
||||||
handler.setFormatter(formatter)
|
handler.setFormatter(formatter)
|
||||||
|
|
||||||
# add the handler to the root logger
|
# add the handler to the root logger
|
||||||
logging.getLogger().addHandler(handler)
|
logging.getLogger().addHandler(handler)
|
||||||
logging.getLogger().setLevel(logging.INFO)
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
class Logger(object):
|
class Logger(object):
|
||||||
def notifyChannel(self,name,level,msg):
|
def notifyChannel(self,name,level,msg):
|
||||||
log = logging.getLogger(name)
|
log = logging.getLogger(name)
|
||||||
getattr(log,level)(msg)
|
getattr(log,level)(msg)
|
||||||
|
|
||||||
class Agent(object):
|
class Agent(object):
|
||||||
_timers = []
|
_timers = []
|
||||||
_logger = Logger()
|
_logger = Logger()
|
||||||
|
|
||||||
def setAlarm(self, fn, dt, args=None, kwargs=None):
|
def setAlarm(self, fn, dt, args=None, kwargs=None):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not kwargs:
|
if not kwargs:
|
||||||
kwargs={}
|
kwargs={}
|
||||||
wait = dt - time.time()
|
wait = dt - time.time()
|
||||||
if wait > 0:
|
if wait > 0:
|
||||||
self._logger.notifyChannel('timers', LOG_DEBUG, "Job scheduled in %s seconds for %s.%s" % (wait, fn.im_class.__name__, fn.func_name))
|
self._logger.notifyChannel('timers', LOG_DEBUG, "Job scheduled in %s seconds for %s.%s" % (wait, fn.im_class.__name__, fn.func_name))
|
||||||
timer = threading.Timer(wait, fn, args, kwargs)
|
timer = threading.Timer(wait, fn, args, kwargs)
|
||||||
timer.start()
|
timer.start()
|
||||||
self._timers.append(timer)
|
self._timers.append(timer)
|
||||||
for timer in self._timers[:]:
|
for timer in self._timers[:]:
|
||||||
if not timer.isAlive():
|
if not timer.isAlive():
|
||||||
self._timers.remove(timer)
|
self._timers.remove(timer)
|
||||||
|
|
||||||
def quit(cls):
|
def quit(cls):
|
||||||
for timer in cls._timers:
|
for timer in cls._timers:
|
||||||
timer.cancel()
|
timer.cancel()
|
||||||
quit=classmethod(quit)
|
quit=classmethod(quit)
|
||||||
|
|
||||||
class RpcGateway(object):
|
class RpcGateway(object):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
self.name=name
|
self.name=name
|
||||||
|
|
||||||
class Dispatcher(object):
|
class Dispatcher(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
pass
|
pass
|
||||||
def monitor(self,signal):
|
def monitor(self,signal):
|
||||||
pass
|
pass
|
||||||
def run(self):
|
def run(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
class xmlrpc(object):
|
class xmlrpc(object):
|
||||||
class RpcGateway(object):
|
class RpcGateway(object):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
self.name=name
|
self.name=name
|
||||||
|
|
||||||
class GenericXMLRPCRequestHandler:
|
class GenericXMLRPCRequestHandler:
|
||||||
def _dispatch(self, method, params):
|
def _dispatch(self, method, params):
|
||||||
import traceback
|
import traceback
|
||||||
try:
|
try:
|
||||||
n=self.path.split("/")[-1]
|
n=self.path.split("/")[-1]
|
||||||
s=LocalService(n)
|
s=LocalService(n)
|
||||||
m=getattr(s,method)
|
m=getattr(s,method)
|
||||||
s._service._response=None
|
s._service._response=None
|
||||||
r=m(*params)
|
r=m(*params)
|
||||||
res=s._service._response
|
res=s._service._response
|
||||||
if res!=None:
|
if res!=None:
|
||||||
r=res
|
r=res
|
||||||
return r
|
return r
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
||||||
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
s=str(e)
|
s=str(e)
|
||||||
import tools
|
import tools
|
||||||
if tools.config['debug_mode']:
|
if tools.config['debug_mode']:
|
||||||
import pdb
|
import pdb
|
||||||
tb = sys.exc_info()[2]
|
tb = sys.exc_info()[2]
|
||||||
pdb.post_mortem(tb)
|
pdb.post_mortem(tb)
|
||||||
raise xmlrpclib.Fault(s, tb_s)
|
raise xmlrpclib.Fault(s, tb_s)
|
||||||
|
|
||||||
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
|
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
|
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
|
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
|
||||||
|
|
||||||
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
|
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCServer):
|
SimpleXMLRPCServer.SimpleXMLRPCServer):
|
||||||
|
|
||||||
def server_bind(self):
|
def server_bind(self):
|
||||||
self.socket.setsockopt(socket.SOL_SOCKET,
|
self.socket.setsockopt(socket.SOL_SOCKET,
|
||||||
socket.SO_REUSEADDR, 1)
|
socket.SO_REUSEADDR, 1)
|
||||||
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
|
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
|
||||||
|
|
||||||
class HttpDaemon(threading.Thread):
|
class HttpDaemon(threading.Thread):
|
||||||
|
|
||||||
def __init__(self, interface,port, secure=False):
|
def __init__(self, interface,port, secure=False):
|
||||||
threading.Thread.__init__(self)
|
threading.Thread.__init__(self)
|
||||||
self.__port=port
|
self.__port=port
|
||||||
self.__interface=interface
|
self.__interface=interface
|
||||||
self.secure = secure
|
self.secure = secure
|
||||||
if secure:
|
if secure:
|
||||||
from ssl import SecureXMLRPCServer
|
from ssl import SecureXMLRPCServer
|
||||||
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
|
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
|
||||||
SecureXMLRPCServer.SecureXMLRPCRequestHandler):
|
SecureXMLRPCServer.SecureXMLRPCRequestHandler):
|
||||||
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
|
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
|
||||||
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
|
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
|
||||||
SecureXMLRPCServer.SecureXMLRPCServer):
|
SecureXMLRPCServer.SecureXMLRPCServer):
|
||||||
|
|
||||||
def server_bind(self):
|
def server_bind(self):
|
||||||
self.socket.setsockopt(socket.SOL_SOCKET,
|
self.socket.setsockopt(socket.SOL_SOCKET,
|
||||||
socket.SO_REUSEADDR, 1)
|
socket.SO_REUSEADDR, 1)
|
||||||
SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
|
SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
|
||||||
|
|
||||||
self.server = SecureThreadedXMLRPCServer((interface, port),
|
self.server = SecureThreadedXMLRPCServer((interface, port),
|
||||||
SecureXMLRPCRequestHandler,0)
|
SecureXMLRPCRequestHandler,0)
|
||||||
else:
|
else:
|
||||||
self.server = SimpleThreadedXMLRPCServer((interface, port),
|
self.server = SimpleThreadedXMLRPCServer((interface, port),
|
||||||
SimpleXMLRPCRequestHandler,0)
|
SimpleXMLRPCRequestHandler,0)
|
||||||
|
|
||||||
def attach(self,path,gw):
|
def attach(self,path,gw):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
self.running = False
|
self.running = False
|
||||||
if os.name <> 'nt':
|
if os.name <> 'nt':
|
||||||
if hasattr(socket, 'SHUT_RDWR'):
|
if hasattr(socket, 'SHUT_RDWR'):
|
||||||
if self.secure:
|
if self.secure:
|
||||||
self.server.socket.sock_shutdown(socket.SHUT_RDWR)
|
self.server.socket.sock_shutdown(socket.SHUT_RDWR)
|
||||||
else:
|
else:
|
||||||
self.server.socket.shutdown(socket.SHUT_RDWR)
|
self.server.socket.shutdown(socket.SHUT_RDWR)
|
||||||
else:
|
else:
|
||||||
if self.secure:
|
if self.secure:
|
||||||
self.server.socket.sock_shutdown(2)
|
self.server.socket.sock_shutdown(2)
|
||||||
else:
|
else:
|
||||||
self.server.socket.shutdown(2)
|
self.server.socket.shutdown(2)
|
||||||
self.server.socket.close()
|
self.server.socket.close()
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
self.server.register_introspection_functions()
|
self.server.register_introspection_functions()
|
||||||
|
|
||||||
self.running = True
|
self.running = True
|
||||||
while self.running:
|
while self.running:
|
||||||
self.server.handle_request()
|
self.server.handle_request()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# If the server need to be run recursively
|
# If the server need to be run recursively
|
||||||
#
|
#
|
||||||
#signal.signal(signal.SIGALRM, self.my_handler)
|
#signal.signal(signal.SIGALRM, self.my_handler)
|
||||||
#signal.alarm(6)
|
#signal.alarm(6)
|
||||||
#while True:
|
#while True:
|
||||||
# self.server.handle_request()
|
# self.server.handle_request()
|
||||||
#signal.alarm(0) # Disable the alarm
|
#signal.alarm(0) # Disable the alarm
|
||||||
|
|
||||||
import tiny_socket
|
import tiny_socket
|
||||||
class TinySocketClientThread(threading.Thread):
|
class TinySocketClientThread(threading.Thread):
|
||||||
def __init__(self, sock, threads):
|
def __init__(self, sock, threads):
|
||||||
threading.Thread.__init__(self)
|
threading.Thread.__init__(self)
|
||||||
self.sock = sock
|
self.sock = sock
|
||||||
self.threads = threads
|
self.threads = threads
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
import traceback
|
import traceback
|
||||||
import time
|
import time
|
||||||
import select
|
import select
|
||||||
self.running = True
|
self.running = True
|
||||||
try:
|
try:
|
||||||
ts = tiny_socket.mysocket(self.sock)
|
ts = tiny_socket.mysocket(self.sock)
|
||||||
except:
|
except:
|
||||||
self.sock.close()
|
self.sock.close()
|
||||||
self.threads.remove(self)
|
self.threads.remove(self)
|
||||||
return False
|
return False
|
||||||
while self.running:
|
while self.running:
|
||||||
try:
|
try:
|
||||||
msg = ts.myreceive()
|
msg = ts.myreceive()
|
||||||
except:
|
except:
|
||||||
self.sock.close()
|
self.sock.close()
|
||||||
self.threads.remove(self)
|
self.threads.remove(self)
|
||||||
return False
|
return False
|
||||||
try:
|
try:
|
||||||
s=LocalService(msg[0])
|
s=LocalService(msg[0])
|
||||||
m=getattr(s,msg[1])
|
m=getattr(s,msg[1])
|
||||||
s._service._response=None
|
s._service._response=None
|
||||||
r=m(*msg[2:])
|
r=m(*msg[2:])
|
||||||
res=s._service._response
|
res=s._service._response
|
||||||
if res!=None:
|
if res!=None:
|
||||||
r=res
|
r=res
|
||||||
ts.mysend(r)
|
ts.mysend(r)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
s=str(e)
|
s=str(e)
|
||||||
import tools
|
import tools
|
||||||
if tools.config['debug_mode']:
|
if tools.config['debug_mode']:
|
||||||
import pdb
|
import pdb
|
||||||
tb = sys.exc_info()[2]
|
tb = sys.exc_info()[2]
|
||||||
pdb.post_mortem(tb)
|
pdb.post_mortem(tb)
|
||||||
ts.mysend(e, exception=True, traceback=tb_s)
|
ts.mysend(e, exception=True, traceback=tb_s)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
self.sock.close()
|
self.sock.close()
|
||||||
self.threads.remove(self)
|
self.threads.remove(self)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
self.running = False
|
self.running = False
|
||||||
|
|
||||||
class TinySocketServerThread(threading.Thread):
|
class TinySocketServerThread(threading.Thread):
|
||||||
def __init__(self, interface, port, secure=False):
|
def __init__(self, interface, port, secure=False):
|
||||||
threading.Thread.__init__(self)
|
threading.Thread.__init__(self)
|
||||||
self.__port=port
|
self.__port=port
|
||||||
self.__interface=interface
|
self.__interface=interface
|
||||||
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
self.socket.bind((self.__interface, self.__port))
|
self.socket.bind((self.__interface, self.__port))
|
||||||
self.socket.listen(5)
|
self.socket.listen(5)
|
||||||
self.threads = []
|
self.threads = []
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
import select
|
import select
|
||||||
try:
|
try:
|
||||||
self.running = True
|
self.running = True
|
||||||
while self.running:
|
while self.running:
|
||||||
(clientsocket, address) = self.socket.accept()
|
(clientsocket, address) = self.socket.accept()
|
||||||
ct = TinySocketClientThread(clientsocket, self.threads)
|
ct = TinySocketClientThread(clientsocket, self.threads)
|
||||||
self.threads.append(ct)
|
self.threads.append(ct)
|
||||||
ct.start()
|
ct.start()
|
||||||
self.socket.close()
|
self.socket.close()
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
self.socket.close()
|
self.socket.close()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
self.running=False
|
self.running=False
|
||||||
for t in self.threads:
|
for t in self.threads:
|
||||||
t.stop()
|
t.stop()
|
||||||
try:
|
try:
|
||||||
if hasattr(socket, 'SHUT_RDWR'):
|
if hasattr(socket, 'SHUT_RDWR'):
|
||||||
self.socket.shutdown(socket.SHUT_RDWR)
|
self.socket.shutdown(socket.SHUT_RDWR)
|
||||||
else:
|
else:
|
||||||
self.socket.shutdown(2)
|
self.socket.shutdown(2)
|
||||||
self.socket.close()
|
self.socket.close()
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# vim:noexpandtab:
|
# vim:noexpandtab:
|
||||||
|
|
||||||
|
|
1000
bin/osv/fields.py
1000
bin/osv/fields.py
File diff suppressed because it is too large
Load Diff
4900
bin/osv/orm.py
4900
bin/osv/orm.py
File diff suppressed because it is too large
Load Diff
510
bin/osv/osv.py
510
bin/osv/osv.py
|
@ -45,315 +45,315 @@ module_class_list = {}
|
||||||
class_pool = {}
|
class_pool = {}
|
||||||
|
|
||||||
class except_osv(Exception):
|
class except_osv(Exception):
|
||||||
def __init__(self, name, value, exc_type='warning'):
|
def __init__(self, name, value, exc_type='warning'):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.exc_type = exc_type
|
self.exc_type = exc_type
|
||||||
self.value = value
|
self.value = value
|
||||||
self.args = (exc_type,name)
|
self.args = (exc_type,name)
|
||||||
|
|
||||||
class osv_pool(netsvc.Service):
|
class osv_pool(netsvc.Service):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.obj_pool = {}
|
self.obj_pool = {}
|
||||||
self.module_object_list = {}
|
self.module_object_list = {}
|
||||||
self.created = []
|
self.created = []
|
||||||
self._sql_error = {}
|
self._sql_error = {}
|
||||||
self._store_function = {}
|
self._store_function = {}
|
||||||
netsvc.Service.__init__(self, 'object_proxy', audience='')
|
netsvc.Service.__init__(self, 'object_proxy', audience='')
|
||||||
self.joinGroup('web-services')
|
self.joinGroup('web-services')
|
||||||
self.exportMethod(self.exportedMethods)
|
self.exportMethod(self.exportedMethods)
|
||||||
self.exportMethod(self.obj_list)
|
self.exportMethod(self.obj_list)
|
||||||
self.exportMethod(self.exec_workflow)
|
self.exportMethod(self.exec_workflow)
|
||||||
self.exportMethod(self.execute)
|
self.exportMethod(self.execute)
|
||||||
self.exportMethod(self.execute_cr)
|
self.exportMethod(self.execute_cr)
|
||||||
|
|
||||||
def execute_cr(self, cr, uid, obj, method, *args, **kw):
|
def execute_cr(self, cr, uid, obj, method, *args, **kw):
|
||||||
try:
|
try:
|
||||||
object = pooler.get_pool(cr.dbname).get(obj)
|
object = pooler.get_pool(cr.dbname).get(obj)
|
||||||
if not object:
|
if not object:
|
||||||
self.abortResponse(1, 'Object Error', 'warning',
|
self.abortResponse(1, 'Object Error', 'warning',
|
||||||
'Object %s doesn\'t exist' % str(obj))
|
'Object %s doesn\'t exist' % str(obj))
|
||||||
return getattr(object,method)(cr, uid, *args, **kw)
|
return getattr(object,method)(cr, uid, *args, **kw)
|
||||||
except orm.except_orm, inst:
|
except orm.except_orm, inst:
|
||||||
self.abortResponse(1, inst.name, 'warning', inst.value)
|
self.abortResponse(1, inst.name, 'warning', inst.value)
|
||||||
except except_osv, inst:
|
except except_osv, inst:
|
||||||
self.abortResponse(1, inst.name, inst.exc_type, inst.value)
|
self.abortResponse(1, inst.name, inst.exc_type, inst.value)
|
||||||
except psycopg.IntegrityError, inst:
|
except psycopg.IntegrityError, inst:
|
||||||
for key in self._sql_error.keys():
|
for key in self._sql_error.keys():
|
||||||
if key in inst[0]:
|
if key in inst[0]:
|
||||||
self.abortResponse(1, 'Constraint Error', 'warning',
|
self.abortResponse(1, 'Constraint Error', 'warning',
|
||||||
self._sql_error[key])
|
self._sql_error[key])
|
||||||
self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
|
self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
import traceback
|
import traceback
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
||||||
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
logger = Logger()
|
logger = Logger()
|
||||||
logger.notifyChannel("web-services", LOG_ERROR,
|
logger.notifyChannel("web-services", LOG_ERROR,
|
||||||
'Exception in call: ' + tb_s)
|
'Exception in call: ' + tb_s)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def execute(self, db, uid, obj, method, *args, **kw):
|
def execute(self, db, uid, obj, method, *args, **kw):
|
||||||
db, pool = pooler.get_db_and_pool(db)
|
db, pool = pooler.get_db_and_pool(db)
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
res = pool.execute_cr(cr, uid, obj, method, *args, **kw)
|
res = pool.execute_cr(cr, uid, obj, method, *args, **kw)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
except Exception:
|
except Exception:
|
||||||
cr.rollback()
|
cr.rollback()
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def exec_workflow_cr(self, cr, uid, obj, method, *args):
|
def exec_workflow_cr(self, cr, uid, obj, method, *args):
|
||||||
wf_service = netsvc.LocalService("workflow")
|
wf_service = netsvc.LocalService("workflow")
|
||||||
return wf_service.trg_validate(uid, obj, args[0], method, cr)
|
return wf_service.trg_validate(uid, obj, args[0], method, cr)
|
||||||
|
|
||||||
def exec_workflow(self, db, uid, obj, method, *args):
|
def exec_workflow(self, db, uid, obj, method, *args):
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
res = self.exec_workflow_cr(cr, uid, obj, method, *args)
|
res = self.exec_workflow_cr(cr, uid, obj, method, *args)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
except orm.except_orm, inst:
|
except orm.except_orm, inst:
|
||||||
cr.rollback()
|
cr.rollback()
|
||||||
self.abortResponse(1, inst.name, 'warning', inst.value)
|
self.abortResponse(1, inst.name, 'warning', inst.value)
|
||||||
except except_osv, inst:
|
except except_osv, inst:
|
||||||
cr.rollback()
|
cr.rollback()
|
||||||
self.abortResponse(1, inst.name, inst[0], inst.value)
|
self.abortResponse(1, inst.name, inst[0], inst.value)
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def obj_list(self):
|
def obj_list(self):
|
||||||
return self.obj_pool.keys()
|
return self.obj_pool.keys()
|
||||||
|
|
||||||
# adds a new object instance to the object pool.
|
# adds a new object instance to the object pool.
|
||||||
# if it already existed, the instance is replaced
|
# if it already existed, the instance is replaced
|
||||||
def add(self, name, obj_inst):
|
def add(self, name, obj_inst):
|
||||||
if self.obj_pool.has_key(name):
|
if self.obj_pool.has_key(name):
|
||||||
del self.obj_pool[name]
|
del self.obj_pool[name]
|
||||||
self.obj_pool[name] = obj_inst
|
self.obj_pool[name] = obj_inst
|
||||||
|
|
||||||
module = str(obj_inst.__class__)[6:]
|
module = str(obj_inst.__class__)[6:]
|
||||||
module = module[:len(module)-1]
|
module = module[:len(module)-1]
|
||||||
module = module.split('.')[0][2:]
|
module = module.split('.')[0][2:]
|
||||||
self.module_object_list.setdefault(module, []).append(obj_inst)
|
self.module_object_list.setdefault(module, []).append(obj_inst)
|
||||||
|
|
||||||
def get(self, name):
|
def get(self, name):
|
||||||
obj = self.obj_pool.get(name, None)
|
obj = self.obj_pool.get(name, None)
|
||||||
# We cannot uncomment this line because it breaks initialisation since objects do not initialize
|
# We cannot uncomment this line because it breaks initialisation since objects do not initialize
|
||||||
# in the correct order and the ORM doesnt support correctly when some objets do not exist yet
|
# in the correct order and the ORM doesnt support correctly when some objets do not exist yet
|
||||||
# assert obj, "object %s does not exist !" % name
|
# assert obj, "object %s does not exist !" % name
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
#TODO: pass a list of modules to load
|
#TODO: pass a list of modules to load
|
||||||
def instanciate(self, module, cr):
|
def instanciate(self, module, cr):
|
||||||
# print "module list:", module_list
|
# print "module list:", module_list
|
||||||
# for module in module_list:
|
# for module in module_list:
|
||||||
res = []
|
res = []
|
||||||
class_list = module_class_list.get(module, [])
|
class_list = module_class_list.get(module, [])
|
||||||
# if module not in self.module_object_list:
|
# if module not in self.module_object_list:
|
||||||
# print "%s class_list:" % module, class_list
|
# print "%s class_list:" % module, class_list
|
||||||
for klass in class_list:
|
for klass in class_list:
|
||||||
res.append(klass.createInstance(self, module, cr))
|
res.append(klass.createInstance(self, module, cr))
|
||||||
return res
|
return res
|
||||||
# else:
|
# else:
|
||||||
# print "skipping module", module
|
# print "skipping module", module
|
||||||
|
|
||||||
#pooler.get_pool(cr.dbname) = osv_pool()
|
#pooler.get_pool(cr.dbname) = osv_pool()
|
||||||
|
|
||||||
class osv_memory(orm.orm_memory):
|
class osv_memory(orm.orm_memory):
|
||||||
#__metaclass__ = inheritor
|
#__metaclass__ = inheritor
|
||||||
def __new__(cls):
|
def __new__(cls):
|
||||||
module = str(cls)[6:]
|
module = str(cls)[6:]
|
||||||
module = module[:len(module)-1]
|
module = module[:len(module)-1]
|
||||||
module = module.split('.')[0][2:]
|
module = module.split('.')[0][2:]
|
||||||
if not hasattr(cls, '_module'):
|
if not hasattr(cls, '_module'):
|
||||||
cls._module = module
|
cls._module = module
|
||||||
module_class_list.setdefault(cls._module, []).append(cls)
|
module_class_list.setdefault(cls._module, []).append(cls)
|
||||||
class_pool[cls._name] = cls
|
class_pool[cls._name] = cls
|
||||||
if module not in module_list:
|
if module not in module_list:
|
||||||
module_list.append(cls._module)
|
module_list.append(cls._module)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
#
|
#
|
||||||
# Goal: try to apply inheritancy at the instanciation level and
|
# Goal: try to apply inheritancy at the instanciation level and
|
||||||
# put objects in the pool var
|
# put objects in the pool var
|
||||||
#
|
#
|
||||||
def createInstance(cls, pool, module, cr):
|
def createInstance(cls, pool, module, cr):
|
||||||
name = hasattr(cls,'_name') and cls._name or cls._inherit
|
name = hasattr(cls,'_name') and cls._name or cls._inherit
|
||||||
parent_name = hasattr(cls, '_inherit') and cls._inherit
|
parent_name = hasattr(cls, '_inherit') and cls._inherit
|
||||||
if parent_name:
|
if parent_name:
|
||||||
print 'Inherit not supported in osv_memory object !'
|
print 'Inherit not supported in osv_memory object !'
|
||||||
obj = object.__new__(cls)
|
obj = object.__new__(cls)
|
||||||
obj.__init__(pool, cr)
|
obj.__init__(pool, cr)
|
||||||
return obj
|
return obj
|
||||||
createInstance = classmethod(createInstance)
|
createInstance = classmethod(createInstance)
|
||||||
|
|
||||||
def __init__(self, pool, cr):
|
def __init__(self, pool, cr):
|
||||||
pool.add(self._name, self)
|
pool.add(self._name, self)
|
||||||
self.pool = pool
|
self.pool = pool
|
||||||
orm.orm_memory.__init__(self, cr)
|
orm.orm_memory.__init__(self, cr)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class osv(orm.orm):
|
class osv(orm.orm):
|
||||||
#__metaclass__ = inheritor
|
#__metaclass__ = inheritor
|
||||||
def __new__(cls):
|
def __new__(cls):
|
||||||
module = str(cls)[6:]
|
module = str(cls)[6:]
|
||||||
module = module[:len(module)-1]
|
module = module[:len(module)-1]
|
||||||
module = module.split('.')[0][2:]
|
module = module.split('.')[0][2:]
|
||||||
if not hasattr(cls, '_module'):
|
if not hasattr(cls, '_module'):
|
||||||
cls._module = module
|
cls._module = module
|
||||||
module_class_list.setdefault(cls._module, []).append(cls)
|
module_class_list.setdefault(cls._module, []).append(cls)
|
||||||
class_pool[cls._name] = cls
|
class_pool[cls._name] = cls
|
||||||
if module not in module_list:
|
if module not in module_list:
|
||||||
module_list.append(cls._module)
|
module_list.append(cls._module)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
#
|
#
|
||||||
# Goal: try to apply inheritancy at the instanciation level and
|
# Goal: try to apply inheritancy at the instanciation level and
|
||||||
# put objects in the pool var
|
# put objects in the pool var
|
||||||
#
|
#
|
||||||
def createInstance(cls, pool, module, cr):
|
def createInstance(cls, pool, module, cr):
|
||||||
parent_name = hasattr(cls, '_inherit') and cls._inherit
|
parent_name = hasattr(cls, '_inherit') and cls._inherit
|
||||||
if parent_name:
|
if parent_name:
|
||||||
parent_class = pool.get(parent_name).__class__
|
parent_class = pool.get(parent_name).__class__
|
||||||
assert parent_class, "parent class %s does not exist !" % parent_name
|
assert parent_class, "parent class %s does not exist !" % parent_name
|
||||||
nattr = {}
|
nattr = {}
|
||||||
for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'):
|
for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'):
|
||||||
new = copy.copy(getattr(pool.get(parent_name), s))
|
new = copy.copy(getattr(pool.get(parent_name), s))
|
||||||
if hasattr(new, 'update'):
|
if hasattr(new, 'update'):
|
||||||
new.update(cls.__dict__.get(s, {}))
|
new.update(cls.__dict__.get(s, {}))
|
||||||
else:
|
else:
|
||||||
new.extend(cls.__dict__.get(s, []))
|
new.extend(cls.__dict__.get(s, []))
|
||||||
nattr[s] = new
|
nattr[s] = new
|
||||||
name = hasattr(cls,'_name') and cls._name or cls._inherit
|
name = hasattr(cls,'_name') and cls._name or cls._inherit
|
||||||
cls = type(name, (cls, parent_class), nattr)
|
cls = type(name, (cls, parent_class), nattr)
|
||||||
obj = object.__new__(cls)
|
obj = object.__new__(cls)
|
||||||
obj.__init__(pool, cr)
|
obj.__init__(pool, cr)
|
||||||
return obj
|
return obj
|
||||||
createInstance = classmethod(createInstance)
|
createInstance = classmethod(createInstance)
|
||||||
|
|
||||||
def __init__(self, pool, cr):
|
def __init__(self, pool, cr):
|
||||||
pool.add(self._name, self)
|
pool.add(self._name, self)
|
||||||
self.pool = pool
|
self.pool = pool
|
||||||
orm.orm.__init__(self, cr)
|
orm.orm.__init__(self, cr)
|
||||||
|
|
||||||
class Cacheable(object):
|
class Cacheable(object):
|
||||||
|
|
||||||
_cache = UpdateableDict()
|
_cache = UpdateableDict()
|
||||||
|
|
||||||
def add(self, key, value):
|
def add(self, key, value):
|
||||||
self._cache[key] = value
|
self._cache[key] = value
|
||||||
|
|
||||||
def invalidate(self, key):
|
def invalidate(self, key):
|
||||||
del self._cache[key]
|
del self._cache[key]
|
||||||
|
|
||||||
def get(self, key):
|
def get(self, key):
|
||||||
try:
|
try:
|
||||||
w = self._cache[key]
|
w = self._cache[key]
|
||||||
return w
|
return w
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def clear(self):
|
def clear(self):
|
||||||
self._cache.clear()
|
self._cache.clear()
|
||||||
self._items = []
|
self._items = []
|
||||||
|
|
||||||
def filter_dict(d, fields):
|
def filter_dict(d, fields):
|
||||||
res = {}
|
res = {}
|
||||||
for f in fields + ['id']:
|
for f in fields + ['id']:
|
||||||
if f in d:
|
if f in d:
|
||||||
res[f] = d[f]
|
res[f] = d[f]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
class cacheable_osv(osv, Cacheable):
|
class cacheable_osv(osv, Cacheable):
|
||||||
|
|
||||||
_relevant = ['lang']
|
_relevant = ['lang']
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super(cacheable_osv, self).__init__()
|
super(cacheable_osv, self).__init__()
|
||||||
|
|
||||||
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
|
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
|
||||||
if not fields:
|
if not fields:
|
||||||
fields=[]
|
fields=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
fields = fields or self._columns.keys()
|
fields = fields or self._columns.keys()
|
||||||
ctx = [context.get(x, False) for x in self._relevant]
|
ctx = [context.get(x, False) for x in self._relevant]
|
||||||
result, tofetch = [], []
|
result, tofetch = [], []
|
||||||
for id in ids:
|
for id in ids:
|
||||||
res = self.get(self._name, id, ctx)
|
res = self.get(self._name, id, ctx)
|
||||||
if not res:
|
if not res:
|
||||||
tofetch.append(id)
|
tofetch.append(id)
|
||||||
else:
|
else:
|
||||||
result.append(filter_dict(res, fields))
|
result.append(filter_dict(res, fields))
|
||||||
|
|
||||||
# gen the list of "local" (ie not inherited) fields which are classic or many2one
|
# gen the list of "local" (ie not inherited) fields which are classic or many2one
|
||||||
nfields = filter(lambda x: x[1]._classic_write, self._columns.items())
|
nfields = filter(lambda x: x[1]._classic_write, self._columns.items())
|
||||||
# gen the list of inherited fields
|
# gen the list of inherited fields
|
||||||
inherits = map(lambda x: (x[0], x[1][2]), self._inherit_fields.items())
|
inherits = map(lambda x: (x[0], x[1][2]), self._inherit_fields.items())
|
||||||
# complete the field list with the inherited fields which are classic or many2one
|
# complete the field list with the inherited fields which are classic or many2one
|
||||||
nfields += filter(lambda x: x[1]._classic_write, inherits)
|
nfields += filter(lambda x: x[1]._classic_write, inherits)
|
||||||
nfields = [x[0] for x in nfields]
|
nfields = [x[0] for x in nfields]
|
||||||
|
|
||||||
res = super(cacheable_osv, self).read(cr, user, tofetch, nfields, context, load)
|
res = super(cacheable_osv, self).read(cr, user, tofetch, nfields, context, load)
|
||||||
for r in res:
|
for r in res:
|
||||||
self.add((self._name, r['id'], ctx), r)
|
self.add((self._name, r['id'], ctx), r)
|
||||||
result.append(filter_dict(r, fields))
|
result.append(filter_dict(r, fields))
|
||||||
|
|
||||||
# Appel de fonction si necessaire
|
# Appel de fonction si necessaire
|
||||||
tofetch = []
|
tofetch = []
|
||||||
for f in fields:
|
for f in fields:
|
||||||
if f not in nfields:
|
if f not in nfields:
|
||||||
tofetch.append(f)
|
tofetch.append(f)
|
||||||
for f in tofetch:
|
for f in tofetch:
|
||||||
fvals = self._columns[f].get(cr, self, ids, f, user, context=context)
|
fvals = self._columns[f].get(cr, self, ids, f, user, context=context)
|
||||||
for r in result:
|
for r in result:
|
||||||
r[f] = fvals[r['id']]
|
r[f] = fvals[r['id']]
|
||||||
|
|
||||||
# TODO: tri par self._order !!
|
# TODO: tri par self._order !!
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def invalidate(self, key):
|
def invalidate(self, key):
|
||||||
del self._cache[key[0]][key[1]]
|
del self._cache[key[0]][key[1]]
|
||||||
|
|
||||||
def write(self, cr, user, ids, values, context=None):
|
def write(self, cr, user, ids, values, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
for id in ids:
|
for id in ids:
|
||||||
self.invalidate((self._name, id))
|
self.invalidate((self._name, id))
|
||||||
return super(cacheable_osv, self).write(cr, user, ids, values, context)
|
return super(cacheable_osv, self).write(cr, user, ids, values, context)
|
||||||
|
|
||||||
def unlink(self, cr, user, ids):
|
def unlink(self, cr, user, ids):
|
||||||
self.clear()
|
self.clear()
|
||||||
return super(cacheable_osv, self).unlink(cr, user, ids)
|
return super(cacheable_osv, self).unlink(cr, user, ids)
|
||||||
|
|
||||||
#cacheable_osv = osv
|
#cacheable_osv = osv
|
||||||
|
|
||||||
# vim:noexpandtab:
|
# vim:noexpandtab:
|
||||||
|
|
||||||
#class FakePool(object):
|
#class FakePool(object):
|
||||||
# def __init__(self, module):
|
# def __init__(self, module):
|
||||||
# self.preferred_module = module
|
# self.preferred_module = module
|
||||||
|
|
||||||
# def get(self, name):
|
# def get(self, name):
|
||||||
# localpool = module_objects_dict.get(self.preferred_module, {'dict': {}})['dict']
|
# localpool = module_objects_dict.get(self.preferred_module, {'dict': {}})['dict']
|
||||||
# if name in localpool:
|
# if name in localpool:
|
||||||
# obj = localpool[name]
|
# obj = localpool[name]
|
||||||
# else:
|
# else:
|
||||||
# obj = pooler.get_pool(cr.dbname).get(name)
|
# obj = pooler.get_pool(cr.dbname).get(name)
|
||||||
# return obj
|
# return obj
|
||||||
|
|
||||||
# fake_pool = self
|
# fake_pool = self
|
||||||
# class fake_class(obj.__class__):
|
# class fake_class(obj.__class__):
|
||||||
# def __init__(self):
|
# def __init__(self):
|
||||||
# super(fake_class, self).__init__()
|
# super(fake_class, self).__init__()
|
||||||
# self.pool = fake_pool
|
# self.pool = fake_pool
|
||||||
|
|
||||||
# return fake_class()
|
# return fake_class()
|
||||||
|
|
||||||
|
|
|
@ -36,65 +36,65 @@ db_dic = {}
|
||||||
pool_dic = {}
|
pool_dic = {}
|
||||||
|
|
||||||
def get_db_and_pool(db_name, force_demo=False, status=None, update_module=False):
|
def get_db_and_pool(db_name, force_demo=False, status=None, update_module=False):
|
||||||
if not status:
|
if not status:
|
||||||
status={}
|
status={}
|
||||||
if db_name in db_dic:
|
if db_name in db_dic:
|
||||||
db = db_dic[db_name]
|
db = db_dic[db_name]
|
||||||
else:
|
else:
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
logger.notifyChannel('pooler', netsvc.LOG_INFO, 'Connecting to %s' % (db_name))
|
logger.notifyChannel('pooler', netsvc.LOG_INFO, 'Connecting to %s' % (db_name))
|
||||||
db = sql_db.db_connect(db_name)
|
db = sql_db.db_connect(db_name)
|
||||||
db_dic[db_name] = db
|
db_dic[db_name] = db
|
||||||
|
|
||||||
if db_name in pool_dic:
|
if db_name in pool_dic:
|
||||||
pool = pool_dic[db_name]
|
pool = pool_dic[db_name]
|
||||||
else:
|
else:
|
||||||
pool = osv.osv.osv_pool()
|
pool = osv.osv.osv_pool()
|
||||||
pool_dic[db_name] = pool
|
pool_dic[db_name] = pool
|
||||||
addons.load_modules(db, force_demo, status, update_module)
|
addons.load_modules(db, force_demo, status, update_module)
|
||||||
|
|
||||||
if not update_module:
|
if not update_module:
|
||||||
import report
|
import report
|
||||||
report.interface.register_all(db)
|
report.interface.register_all(db)
|
||||||
pool.get('ir.cron')._poolJobs(db.dbname)
|
pool.get('ir.cron')._poolJobs(db.dbname)
|
||||||
return db, pool
|
return db, pool
|
||||||
|
|
||||||
def restart_pool(db_name, force_demo=False, update_module=False):
|
def restart_pool(db_name, force_demo=False, update_module=False):
|
||||||
# del db_dic[db_name]
|
# del db_dic[db_name]
|
||||||
del pool_dic[db_name]
|
del pool_dic[db_name]
|
||||||
return get_db_and_pool(db_name, force_demo, update_module=update_module)
|
return get_db_and_pool(db_name, force_demo, update_module=update_module)
|
||||||
|
|
||||||
def close_db(db_name):
|
def close_db(db_name):
|
||||||
if db_name in db_dic:
|
if db_name in db_dic:
|
||||||
db_dic[db_name].truedb.close()
|
db_dic[db_name].truedb.close()
|
||||||
del db_dic[db_name]
|
del db_dic[db_name]
|
||||||
if db_name in pool_dic:
|
if db_name in pool_dic:
|
||||||
del pool_dic[db_name]
|
del pool_dic[db_name]
|
||||||
|
|
||||||
def get_db_only(db_name):
|
def get_db_only(db_name):
|
||||||
if db_name in db_dic:
|
if db_name in db_dic:
|
||||||
db = db_dic[db_name]
|
db = db_dic[db_name]
|
||||||
else:
|
else:
|
||||||
db = sql_db.db_connect(db_name)
|
db = sql_db.db_connect(db_name)
|
||||||
db_dic[db_name] = db
|
db_dic[db_name] = db
|
||||||
return db
|
return db
|
||||||
|
|
||||||
def get_db(db_name):
|
def get_db(db_name):
|
||||||
# print "get_db", db_name
|
# print "get_db", db_name
|
||||||
return get_db_and_pool(db_name)[0]
|
return get_db_and_pool(db_name)[0]
|
||||||
|
|
||||||
def get_pool(db_name, force_demo=False, status=None, update_module=False):
|
def get_pool(db_name, force_demo=False, status=None, update_module=False):
|
||||||
# print "get_pool", db_name
|
# print "get_pool", db_name
|
||||||
pool = get_db_and_pool(db_name, force_demo, status, update_module)[1]
|
pool = get_db_and_pool(db_name, force_demo, status, update_module)[1]
|
||||||
# addons.load_modules(db_name, False)
|
# addons.load_modules(db_name, False)
|
||||||
# if not pool.obj_list():
|
# if not pool.obj_list():
|
||||||
# pool.instanciate()
|
# pool.instanciate()
|
||||||
# print "pool", pool
|
# print "pool", pool
|
||||||
return pool
|
return pool
|
||||||
# return get_db_and_pool(db_name)[1]
|
# return get_db_and_pool(db_name)[1]
|
||||||
|
|
||||||
def init():
|
def init():
|
||||||
global db
|
global db
|
||||||
# db = get_db_only(tools.config['db_name'])
|
# db = get_db_only(tools.config['db_name'])
|
||||||
sql_db.init()
|
sql_db.init()
|
||||||
|
|
||||||
|
|
|
@ -28,6 +28,6 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
pageSize = {
|
pageSize = {
|
||||||
'A4': (210,297),
|
'A4': (210,297),
|
||||||
'A5': (148.5,105)
|
'A5': (148.5,105)
|
||||||
}
|
}
|
||||||
|
|
1208
bin/report/custom.py
1208
bin/report/custom.py
File diff suppressed because it is too large
Load Diff
|
@ -28,55 +28,55 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
unites = {
|
unites = {
|
||||||
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
|
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
|
||||||
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
|
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
|
||||||
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
|
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
|
||||||
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
|
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
|
||||||
}
|
}
|
||||||
|
|
||||||
dizaine = {
|
dizaine = {
|
||||||
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
|
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
|
||||||
}
|
}
|
||||||
|
|
||||||
centaine = {
|
centaine = {
|
||||||
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
|
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
|
||||||
}
|
}
|
||||||
|
|
||||||
mille = {
|
mille = {
|
||||||
0:'', 1:'mille'
|
0:'', 1:'mille'
|
||||||
}
|
}
|
||||||
|
|
||||||
def _100_to_text(chiffre):
|
def _100_to_text(chiffre):
|
||||||
if chiffre in unites:
|
if chiffre in unites:
|
||||||
return unites[chiffre]
|
return unites[chiffre]
|
||||||
else:
|
else:
|
||||||
if chiffre%10>0:
|
if chiffre%10>0:
|
||||||
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
|
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
|
||||||
else:
|
else:
|
||||||
return dizaine[chiffre / 10]
|
return dizaine[chiffre / 10]
|
||||||
|
|
||||||
def _1000_to_text(chiffre):
|
def _1000_to_text(chiffre):
|
||||||
d = _100_to_text(chiffre % 100)
|
d = _100_to_text(chiffre % 100)
|
||||||
d2 = chiffre/100
|
d2 = chiffre/100
|
||||||
if d2>0 and d:
|
if d2>0 and d:
|
||||||
return centaine[d2]+' '+d
|
return centaine[d2]+' '+d
|
||||||
elif d2>1 and not(d):
|
elif d2>1 and not(d):
|
||||||
return centaine[d2]+'s'
|
return centaine[d2]+'s'
|
||||||
else:
|
else:
|
||||||
return centaine[d2] or d
|
return centaine[d2] or d
|
||||||
|
|
||||||
def _10000_to_text(chiffre):
|
def _10000_to_text(chiffre):
|
||||||
if chiffre==0:
|
if chiffre==0:
|
||||||
return 'zero'
|
return 'zero'
|
||||||
part1 = _1000_to_text(chiffre % 1000)
|
part1 = _1000_to_text(chiffre % 1000)
|
||||||
part2 = mille.get(chiffre / 1000, _1000_to_text(chiffre / 1000)+' mille')
|
part2 = mille.get(chiffre / 1000, _1000_to_text(chiffre / 1000)+' mille')
|
||||||
if part2 and part1:
|
if part2 and part1:
|
||||||
part1 = ' '+part1
|
part1 = ' '+part1
|
||||||
return part2+part1
|
return part2+part1
|
||||||
|
|
||||||
def int_to_text(i):
|
def int_to_text(i):
|
||||||
return _10000_to_text(i)
|
return _10000_to_text(i)
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
for i in range(1,999999,139):
|
for i in range(1,999999,139):
|
||||||
print int_to_text(i)
|
print int_to_text(i)
|
||||||
|
|
|
@ -46,187 +46,187 @@ import urllib
|
||||||
# encode a value to a string in utf8 and converts XML entities
|
# encode a value to a string in utf8 and converts XML entities
|
||||||
#
|
#
|
||||||
def toxml(val):
|
def toxml(val):
|
||||||
if isinstance(val, str):
|
if isinstance(val, str):
|
||||||
str_utf8 = val
|
str_utf8 = val
|
||||||
elif isinstance(val, unicode):
|
elif isinstance(val, unicode):
|
||||||
str_utf8 = val.encode('utf-8')
|
str_utf8 = val.encode('utf-8')
|
||||||
else:
|
else:
|
||||||
str_utf8 = str(val)
|
str_utf8 = str(val)
|
||||||
return str_utf8.replace('&', '&').replace('<','<').replace('>','>')
|
return str_utf8.replace('&', '&').replace('<','<').replace('>','>')
|
||||||
|
|
||||||
class report_int(netsvc.Service):
|
class report_int(netsvc.Service):
|
||||||
def __init__(self, name, audience='*'):
|
def __init__(self, name, audience='*'):
|
||||||
assert not netsvc.service_exist(name), 'The report "%s" already exist!' % name
|
assert not netsvc.service_exist(name), 'The report "%s" already exist!' % name
|
||||||
super(report_int, self).__init__(name, audience)
|
super(report_int, self).__init__(name, audience)
|
||||||
if name[0:7]<>'report.':
|
if name[0:7]<>'report.':
|
||||||
raise Exception, 'ConceptionError, bad report name, should start with "report."'
|
raise Exception, 'ConceptionError, bad report name, should start with "report."'
|
||||||
self.name = name
|
self.name = name
|
||||||
self.id = 0
|
self.id = 0
|
||||||
self.name2 = '.'.join(name.split('.')[1:])
|
self.name2 = '.'.join(name.split('.')[1:])
|
||||||
self.joinGroup('report')
|
self.joinGroup('report')
|
||||||
self.exportMethod(self.create)
|
self.exportMethod(self.create)
|
||||||
|
|
||||||
def create(self, cr, uid, ids, datas, context=None):
|
def create(self, cr, uid, ids, datas, context=None):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Class to automatically build a document using the transformation process:
|
Class to automatically build a document using the transformation process:
|
||||||
XML -> DATAS -> RML -> PDF
|
XML -> DATAS -> RML -> PDF
|
||||||
-> HTML
|
-> HTML
|
||||||
using a XSL:RML transformation
|
using a XSL:RML transformation
|
||||||
"""
|
"""
|
||||||
class report_rml(report_int):
|
class report_rml(report_int):
|
||||||
def __init__(self, name, table, tmpl, xsl):
|
def __init__(self, name, table, tmpl, xsl):
|
||||||
super(report_rml, self).__init__(name)
|
super(report_rml, self).__init__(name)
|
||||||
self.table = table
|
self.table = table
|
||||||
self.tmpl = tmpl
|
self.tmpl = tmpl
|
||||||
self.xsl = xsl
|
self.xsl = xsl
|
||||||
self.bin_datas = {}
|
self.bin_datas = {}
|
||||||
self.generators = {
|
self.generators = {
|
||||||
'pdf': self.create_pdf,
|
'pdf': self.create_pdf,
|
||||||
'html': self.create_html,
|
'html': self.create_html,
|
||||||
'raw': self.create_raw,
|
'raw': self.create_raw,
|
||||||
'sxw': self.create_sxw,
|
'sxw': self.create_sxw,
|
||||||
}
|
}
|
||||||
|
|
||||||
def create(self, cr, uid, ids, datas, context):
|
def create(self, cr, uid, ids, datas, context):
|
||||||
xml = self.create_xml(cr, uid, ids, datas, context)
|
xml = self.create_xml(cr, uid, ids, datas, context)
|
||||||
# file('/tmp/terp.xml','wb+').write(xml)
|
# file('/tmp/terp.xml','wb+').write(xml)
|
||||||
if datas.get('report_type', 'pdf') == 'raw':
|
if datas.get('report_type', 'pdf') == 'raw':
|
||||||
return xml
|
return xml
|
||||||
rml = self.create_rml(cr, xml, uid, context)
|
rml = self.create_rml(cr, xml, uid, context)
|
||||||
# file('/tmp/terp.rml','wb+').write(rml)
|
# file('/tmp/terp.rml','wb+').write(rml)
|
||||||
report_type = datas.get('report_type', 'pdf')
|
report_type = datas.get('report_type', 'pdf')
|
||||||
create_doc = self.generators[report_type]
|
create_doc = self.generators[report_type]
|
||||||
pdf = create_doc(rml)
|
pdf = create_doc(rml)
|
||||||
return (pdf, report_type)
|
return (pdf, report_type)
|
||||||
|
|
||||||
def create_xml(self, cr, uid, ids, datas, context=None):
|
def create_xml(self, cr, uid, ids, datas, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
doc = print_xml.document(cr, uid, datas, {})
|
doc = print_xml.document(cr, uid, datas, {})
|
||||||
self.bin_datas.update( doc.bin_datas or {})
|
self.bin_datas.update( doc.bin_datas or {})
|
||||||
doc.parse(self.tmpl, ids, self.table, context)
|
doc.parse(self.tmpl, ids, self.table, context)
|
||||||
xml = doc.xml_get()
|
xml = doc.xml_get()
|
||||||
doc.close()
|
doc.close()
|
||||||
return self.post_process_xml_data(cr, uid, xml, context)
|
return self.post_process_xml_data(cr, uid, xml, context)
|
||||||
|
|
||||||
def post_process_xml_data(self, cr, uid, xml, context=None):
|
def post_process_xml_data(self, cr, uid, xml, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
# find the position of the 3rd tag
|
# find the position of the 3rd tag
|
||||||
# (skip the <?xml ...?> and the "root" tag)
|
# (skip the <?xml ...?> and the "root" tag)
|
||||||
iter = re.finditer('<[^>]*>', xml)
|
iter = re.finditer('<[^>]*>', xml)
|
||||||
i = iter.next()
|
i = iter.next()
|
||||||
i = iter.next()
|
i = iter.next()
|
||||||
pos_xml = i.end()
|
pos_xml = i.end()
|
||||||
|
|
||||||
doc = print_xml.document(cr, uid, {}, {})
|
doc = print_xml.document(cr, uid, {}, {})
|
||||||
tmpl_path = addons.get_module_resource('custom', 'corporate_defaults.xml')
|
tmpl_path = addons.get_module_resource('custom', 'corporate_defaults.xml')
|
||||||
doc.parse(tmpl_path, [uid], 'res.users', context)
|
doc.parse(tmpl_path, [uid], 'res.users', context)
|
||||||
corporate_header = doc.xml_get()
|
corporate_header = doc.xml_get()
|
||||||
doc.close()
|
doc.close()
|
||||||
|
|
||||||
# find the position of the tag after the <?xml ...?> tag
|
# find the position of the tag after the <?xml ...?> tag
|
||||||
iter = re.finditer('<[^>]*>', corporate_header)
|
iter = re.finditer('<[^>]*>', corporate_header)
|
||||||
i = iter.next()
|
i = iter.next()
|
||||||
pos_header = i.end()
|
pos_header = i.end()
|
||||||
|
|
||||||
return xml[:pos_xml] + corporate_header[pos_header:] + xml[pos_xml:]
|
return xml[:pos_xml] + corporate_header[pos_header:] + xml[pos_xml:]
|
||||||
|
|
||||||
#
|
#
|
||||||
# TODO: The translation doesn't work for "<tag t="1">textext<tag> tex</tag>text</tag>"
|
# TODO: The translation doesn't work for "<tag t="1">textext<tag> tex</tag>text</tag>"
|
||||||
#
|
#
|
||||||
def create_rml(self, cr, xml, uid, context=None):
|
def create_rml(self, cr, xml, uid, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
|
|
||||||
# In some case we might not use xsl ...
|
# In some case we might not use xsl ...
|
||||||
if not self.xsl:
|
if not self.xsl:
|
||||||
return xml
|
return xml
|
||||||
|
|
||||||
# load XSL (parse it to the XML level)
|
# load XSL (parse it to the XML level)
|
||||||
styledoc = libxml2.parseDoc(tools.file_open(self.xsl).read())
|
styledoc = libxml2.parseDoc(tools.file_open(self.xsl).read())
|
||||||
xsl_path, tail = os.path.split(self.xsl)
|
xsl_path, tail = os.path.split(self.xsl)
|
||||||
for child in styledoc.children:
|
for child in styledoc.children:
|
||||||
if child.name == 'import':
|
if child.name == 'import':
|
||||||
if child.hasProp('href'):
|
if child.hasProp('href'):
|
||||||
imp_file = child.prop('href')
|
imp_file = child.prop('href')
|
||||||
_x, imp_file = tools.file_open(imp_file, subdir=xsl_path, pathinfo=True)
|
_x, imp_file = tools.file_open(imp_file, subdir=xsl_path, pathinfo=True)
|
||||||
child.setProp('href', urllib.quote(str(imp_file)))
|
child.setProp('href', urllib.quote(str(imp_file)))
|
||||||
|
|
||||||
#TODO: get all the translation in one query. That means we have to:
|
#TODO: get all the translation in one query. That means we have to:
|
||||||
# * build a list of items to translate,
|
# * build a list of items to translate,
|
||||||
# * issue the query to translate them,
|
# * issue the query to translate them,
|
||||||
# * (re)build/update the stylesheet with the translated items
|
# * (re)build/update the stylesheet with the translated items
|
||||||
|
|
||||||
# translate the XSL stylesheet
|
# translate the XSL stylesheet
|
||||||
def look_down(child, lang):
|
def look_down(child, lang):
|
||||||
while child is not None:
|
while child is not None:
|
||||||
if (child.type == "element") and child.hasProp('t'):
|
if (child.type == "element") and child.hasProp('t'):
|
||||||
#FIXME: use cursor
|
#FIXME: use cursor
|
||||||
res = service.execute(cr.dbname, uid, 'ir.translation',
|
res = service.execute(cr.dbname, uid, 'ir.translation',
|
||||||
'_get_source', self.name2, 'xsl', lang, child.content)
|
'_get_source', self.name2, 'xsl', lang, child.content)
|
||||||
if res:
|
if res:
|
||||||
child.setContent(res)
|
child.setContent(res)
|
||||||
look_down(child.children, lang)
|
look_down(child.children, lang)
|
||||||
child = child.next
|
child = child.next
|
||||||
|
|
||||||
if context.get('lang', False):
|
if context.get('lang', False):
|
||||||
look_down(styledoc.children, context['lang'])
|
look_down(styledoc.children, context['lang'])
|
||||||
|
|
||||||
# parse XSL
|
# parse XSL
|
||||||
style = libxslt.parseStylesheetDoc(styledoc)
|
style = libxslt.parseStylesheetDoc(styledoc)
|
||||||
# load XML (data)
|
# load XML (data)
|
||||||
doc = libxml2.parseMemory(xml,len(xml))
|
doc = libxml2.parseMemory(xml,len(xml))
|
||||||
# create RML (apply XSL to XML data)
|
# create RML (apply XSL to XML data)
|
||||||
result = style.applyStylesheet(doc, None)
|
result = style.applyStylesheet(doc, None)
|
||||||
# save result to string
|
# save result to string
|
||||||
xml = style.saveResultToString(result)
|
xml = style.saveResultToString(result)
|
||||||
|
|
||||||
style.freeStylesheet()
|
style.freeStylesheet()
|
||||||
doc.freeDoc()
|
doc.freeDoc()
|
||||||
result.freeDoc()
|
result.freeDoc()
|
||||||
return xml
|
return xml
|
||||||
|
|
||||||
def create_pdf(self, xml, logo=None):
|
def create_pdf(self, xml, logo=None):
|
||||||
if logo:
|
if logo:
|
||||||
self.bin_datas['logo'] = logo
|
self.bin_datas['logo'] = logo
|
||||||
else:
|
else:
|
||||||
if 'logo' in self.bin_datas:
|
if 'logo' in self.bin_datas:
|
||||||
del self.bin_datas['logo']
|
del self.bin_datas['logo']
|
||||||
obj = render.rml(xml, self.bin_datas, tools.config['root_path'])
|
obj = render.rml(xml, self.bin_datas, tools.config['root_path'])
|
||||||
obj.render()
|
obj.render()
|
||||||
return obj.get()
|
return obj.get()
|
||||||
|
|
||||||
def create_html(self, xml, logo=None):
|
def create_html(self, xml, logo=None):
|
||||||
obj = render.rml2html(xml, self.bin_datas)
|
obj = render.rml2html(xml, self.bin_datas)
|
||||||
obj.render()
|
obj.render()
|
||||||
return obj.get()
|
return obj.get()
|
||||||
|
|
||||||
def create_raw(self, xml, logo=None):
|
def create_raw(self, xml, logo=None):
|
||||||
return xml
|
return xml
|
||||||
|
|
||||||
def create_sxw(self, path, logo=None):
|
def create_sxw(self, path, logo=None):
|
||||||
return path
|
return path
|
||||||
|
|
||||||
from report_sxw import report_sxw
|
from report_sxw import report_sxw
|
||||||
|
|
||||||
def register_all(db):
|
def register_all(db):
|
||||||
opj = os.path.join
|
opj = os.path.join
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
cr.execute("SELECT * FROM ir_act_report_xml WHERE auto ORDER BY id")
|
cr.execute("SELECT * FROM ir_act_report_xml WHERE auto ORDER BY id")
|
||||||
result = cr.dictfetchall()
|
result = cr.dictfetchall()
|
||||||
cr.close()
|
cr.close()
|
||||||
for r in result:
|
for r in result:
|
||||||
if netsvc.service_exist('report.'+r['report_name']):
|
if netsvc.service_exist('report.'+r['report_name']):
|
||||||
continue
|
continue
|
||||||
if r['report_rml'] or r['report_rml_content_data']:
|
if r['report_rml'] or r['report_rml_content_data']:
|
||||||
report_sxw('report.'+r['report_name'], r['model'],
|
report_sxw('report.'+r['report_name'], r['model'],
|
||||||
opj('addons',r['report_rml'] or '/'), header=r['header'])
|
opj('addons',r['report_rml'] or '/'), header=r['header'])
|
||||||
if r['report_xsl']:
|
if r['report_xsl']:
|
||||||
report_rml('report.'+r['report_name'], r['model'],
|
report_rml('report.'+r['report_name'], r['model'],
|
||||||
opj('addons',r['report_xml']),
|
opj('addons',r['report_xml']),
|
||||||
r['report_xsl'] and opj('addons',r['report_xsl']))
|
r['report_xsl'] and opj('addons',r['report_xsl']))
|
||||||
|
|
||||||
|
|
|
@ -28,14 +28,14 @@
|
||||||
from pychart import *
|
from pychart import *
|
||||||
|
|
||||||
colorline = [color.T(r=((r+3) % 11)/10.0,
|
colorline = [color.T(r=((r+3) % 11)/10.0,
|
||||||
g=((g+6) % 11)/10.0,
|
g=((g+6) % 11)/10.0,
|
||||||
b=((b+9) % 11)/10.0)
|
b=((b+9) % 11)/10.0)
|
||||||
for r in range(11) for g in range(11) for b in range(11)]
|
for r in range(11) for g in range(11) for b in range(11)]
|
||||||
|
|
||||||
def choice_colors(n):
|
def choice_colors(n):
|
||||||
if n:
|
if n:
|
||||||
return colorline[0:-1:len(colorline)/n]
|
return colorline[0:-1:len(colorline)/n]
|
||||||
return []
|
return []
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
print choice_colors(10)
|
print choice_colors(10)
|
||||||
|
|
|
@ -30,13 +30,13 @@
|
||||||
import time
|
import time
|
||||||
|
|
||||||
functions = {
|
functions = {
|
||||||
'today': lambda x: time.strftime('%d/%m/%Y', time.localtime()).decode('latin1')
|
'today': lambda x: time.strftime('%d/%m/%Y', time.localtime()).decode('latin1')
|
||||||
}
|
}
|
||||||
|
|
||||||
#
|
#
|
||||||
# TODO: call an object internal function too
|
# TODO: call an object internal function too
|
||||||
#
|
#
|
||||||
def print_fnc(fnc, arg):
|
def print_fnc(fnc, arg):
|
||||||
if fnc in functions:
|
if fnc in functions:
|
||||||
return functions[fnc](arg)
|
return functions[fnc](arg)
|
||||||
return ''
|
return ''
|
||||||
|
|
|
@ -38,85 +38,85 @@ from osv.orm import browse_null, browse_record
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
class InheritDict(dict):
|
class InheritDict(dict):
|
||||||
# Might be usefull when we're doing name lookup for call or eval.
|
# Might be usefull when we're doing name lookup for call or eval.
|
||||||
|
|
||||||
def __init__(self, parent=None):
|
def __init__(self, parent=None):
|
||||||
self.parent = parent
|
self.parent = parent
|
||||||
|
|
||||||
def __getitem__(self, name):
|
def __getitem__(self, name):
|
||||||
if name in self:
|
if name in self:
|
||||||
return super(InheritDict, self).__getitem__(name)
|
return super(InheritDict, self).__getitem__(name)
|
||||||
else:
|
else:
|
||||||
if not self.parent:
|
if not self.parent:
|
||||||
raise KeyError
|
raise KeyError
|
||||||
else:
|
else:
|
||||||
return self.parent[name]
|
return self.parent[name]
|
||||||
|
|
||||||
def tounicode(val):
|
def tounicode(val):
|
||||||
if isinstance(val, str):
|
if isinstance(val, str):
|
||||||
unicode_val = unicode(val, 'utf-8')
|
unicode_val = unicode(val, 'utf-8')
|
||||||
elif isinstance(val, unicode):
|
elif isinstance(val, unicode):
|
||||||
unicode_val = val
|
unicode_val = val
|
||||||
else:
|
else:
|
||||||
unicode_val = unicode(val)
|
unicode_val = unicode(val)
|
||||||
return unicode_val
|
return unicode_val
|
||||||
|
|
||||||
class document(object):
|
class document(object):
|
||||||
def __init__(self, cr, uid, datas, func=False):
|
def __init__(self, cr, uid, datas, func=False):
|
||||||
# create a new document
|
# create a new document
|
||||||
self.cr = cr
|
self.cr = cr
|
||||||
self.pool = pooler.get_pool(cr.dbname)
|
self.pool = pooler.get_pool(cr.dbname)
|
||||||
self.doc = minidom.Document()
|
self.doc = minidom.Document()
|
||||||
self.func = func or {}
|
self.func = func or {}
|
||||||
self.datas = datas
|
self.datas = datas
|
||||||
self.uid = uid
|
self.uid = uid
|
||||||
self.bin_datas = {}
|
self.bin_datas = {}
|
||||||
|
|
||||||
def node_attrs_get(self, node):
|
def node_attrs_get(self, node):
|
||||||
attrs = {}
|
attrs = {}
|
||||||
nattr = node.attributes
|
nattr = node.attributes
|
||||||
for i in range(nattr.length):
|
for i in range(nattr.length):
|
||||||
attr = nattr.item(i)
|
attr = nattr.item(i)
|
||||||
attrs[attr.localName] = attr.nodeValue
|
attrs[attr.localName] = attr.nodeValue
|
||||||
# attrs[attr.name] = attr.nodeValue
|
# attrs[attr.name] = attr.nodeValue
|
||||||
return attrs
|
return attrs
|
||||||
|
|
||||||
def get_value(self, browser, field_path):
|
def get_value(self, browser, field_path):
|
||||||
fields = field_path.split('.')
|
fields = field_path.split('.')
|
||||||
|
|
||||||
if not len(fields):
|
if not len(fields):
|
||||||
print "WARNING: field name is empty!"
|
print "WARNING: field name is empty!"
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
value = browser
|
value = browser
|
||||||
for f in fields:
|
for f in fields:
|
||||||
if isinstance(value, list):
|
if isinstance(value, list):
|
||||||
if len(value)==0:
|
if len(value)==0:
|
||||||
print "WARNING: empty list found!"
|
print "WARNING: empty list found!"
|
||||||
return ''
|
return ''
|
||||||
# elif len(value)>1:
|
# elif len(value)>1:
|
||||||
# print "WARNING:", len(value), "possibilities for", value[0]._table_name , "picking first..."
|
# print "WARNING:", len(value), "possibilities for", value[0]._table_name , "picking first..."
|
||||||
value = value[0]
|
value = value[0]
|
||||||
if isinstance(value, browse_null):
|
if isinstance(value, browse_null):
|
||||||
return ''
|
return ''
|
||||||
else:
|
else:
|
||||||
value = value[f]
|
value = value[f]
|
||||||
|
|
||||||
if isinstance(value, browse_null) or (type(value)==bool and not value):
|
if isinstance(value, browse_null) or (type(value)==bool and not value):
|
||||||
return ''
|
return ''
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def get_value2(self, browser, field_path):
|
def get_value2(self, browser, field_path):
|
||||||
value = self.get_value(browser, field_path)
|
value = self.get_value(browser, field_path)
|
||||||
if isinstance(value, browse_record):
|
if isinstance(value, browse_record):
|
||||||
return value.id
|
return value.id
|
||||||
elif isinstance(value, browse_null):
|
elif isinstance(value, browse_null):
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def eval(self, record, expr):
|
def eval(self, record, expr):
|
||||||
#TODO: support remote variables (eg address.title) in expr
|
#TODO: support remote variables (eg address.title) in expr
|
||||||
# how to do that: parse the string, find dots, replace those dotted variables by temporary
|
# how to do that: parse the string, find dots, replace those dotted variables by temporary
|
||||||
# "simple ones", fetch the value of those variables and add them (temporarily) to the _data
|
# "simple ones", fetch the value of those variables and add them (temporarily) to the _data
|
||||||
|
@ -126,239 +126,239 @@ class document(object):
|
||||||
# happen if the eval node is the first one using this browse_record
|
# happen if the eval node is the first one using this browse_record
|
||||||
# the next line is a workaround for the problem: it causes the resource to be loaded
|
# the next line is a workaround for the problem: it causes the resource to be loaded
|
||||||
#Pinky: Why not this ? eval(expr, browser) ?
|
#Pinky: Why not this ? eval(expr, browser) ?
|
||||||
# name = browser.name
|
# name = browser.name
|
||||||
# data_dict = browser._data[self.get_value(browser, 'id')]
|
# data_dict = browser._data[self.get_value(browser, 'id')]
|
||||||
return eval(expr)
|
return eval(expr)
|
||||||
|
|
||||||
def parse_node(self, node, parent, browser, datas=None):
|
def parse_node(self, node, parent, browser, datas=None):
|
||||||
# node is the node of the xml template to be parsed
|
# node is the node of the xml template to be parsed
|
||||||
# parent = the parent node in the xml data tree we are creating
|
# parent = the parent node in the xml data tree we are creating
|
||||||
|
|
||||||
if node.nodeType == node.ELEMENT_NODE:
|
if node.nodeType == node.ELEMENT_NODE:
|
||||||
# print '-'*60
|
# print '-'*60
|
||||||
# print "parse_node", node
|
# print "parse_node", node
|
||||||
# print "parent: ", parent
|
# print "parent: ", parent
|
||||||
# print "ids:", ids
|
# print "ids:", ids
|
||||||
# print "model:", model
|
# print "model:", model
|
||||||
# print "datas:", datas
|
# print "datas:", datas
|
||||||
|
|
||||||
# convert the attributes of the node to a dictionary
|
# convert the attributes of the node to a dictionary
|
||||||
|
|
||||||
attrs = self.node_attrs_get(node)
|
attrs = self.node_attrs_get(node)
|
||||||
if 'type' in attrs:
|
if 'type' in attrs:
|
||||||
if attrs['type']=='field':
|
if attrs['type']=='field':
|
||||||
value = self.get_value(browser, attrs['name'])
|
value = self.get_value(browser, attrs['name'])
|
||||||
#TODO: test this
|
#TODO: test this
|
||||||
if value == '' and 'default' in attrs:
|
if value == '' and 'default' in attrs:
|
||||||
value = attrs['default']
|
value = attrs['default']
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
el_txt = self.doc.createTextNode(tounicode(value))
|
el_txt = self.doc.createTextNode(tounicode(value))
|
||||||
el.appendChild(el_txt)
|
el.appendChild(el_txt)
|
||||||
|
|
||||||
#TODO: test this
|
#TODO: test this
|
||||||
for key, value in attrs.iteritems():
|
for key, value in attrs.iteritems():
|
||||||
if key not in ('type', 'name', 'default'):
|
if key not in ('type', 'name', 'default'):
|
||||||
el.setAttribute(key, value)
|
el.setAttribute(key, value)
|
||||||
|
|
||||||
elif attrs['type']=='attachment':
|
elif attrs['type']=='attachment':
|
||||||
if isinstance(browser, list):
|
if isinstance(browser, list):
|
||||||
model = browser[0]._table_name
|
model = browser[0]._table_name
|
||||||
else:
|
else:
|
||||||
model = browser._table_name
|
model = browser._table_name
|
||||||
|
|
||||||
value = self.get_value(browser, attrs['name'])
|
value = self.get_value(browser, attrs['name'])
|
||||||
|
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
ids = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'search', [('res_model','=',model),('res_id','=',int(value))])
|
ids = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'search', [('res_model','=',model),('res_id','=',int(value))])
|
||||||
datas = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'read', ids)
|
datas = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'read', ids)
|
||||||
|
|
||||||
if len(datas):
|
if len(datas):
|
||||||
# if there are several, pick first
|
# if there are several, pick first
|
||||||
datas = datas[0]
|
datas = datas[0]
|
||||||
fname = str(datas['datas_fname'])
|
fname = str(datas['datas_fname'])
|
||||||
ext = fname.split('.')[-1].lower()
|
ext = fname.split('.')[-1].lower()
|
||||||
if ext in ('jpg','jpeg', 'png'):
|
if ext in ('jpg','jpeg', 'png'):
|
||||||
import base64, StringIO
|
import base64, StringIO
|
||||||
dt = base64.decodestring(datas['datas'])
|
dt = base64.decodestring(datas['datas'])
|
||||||
fp = StringIO.StringIO(dt)
|
fp = StringIO.StringIO(dt)
|
||||||
i = str(len(self.bin_datas))
|
i = str(len(self.bin_datas))
|
||||||
self.bin_datas[i] = fp
|
self.bin_datas[i] = fp
|
||||||
|
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
# node content is the length of the image
|
# node content is the length of the image
|
||||||
el_txt = self.doc.createTextNode(i)
|
el_txt = self.doc.createTextNode(i)
|
||||||
el.appendChild(el_txt)
|
el.appendChild(el_txt)
|
||||||
|
|
||||||
elif attrs['type']=='data':
|
elif attrs['type']=='data':
|
||||||
#TODO: test this
|
#TODO: test this
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
txt = self.datas.get('form', {}).get(attrs['name'], '')
|
txt = self.datas.get('form', {}).get(attrs['name'], '')
|
||||||
el_txt = self.doc.createTextNode(tounicode(txt))
|
el_txt = self.doc.createTextNode(tounicode(txt))
|
||||||
el.appendChild(el_txt)
|
el.appendChild(el_txt)
|
||||||
|
|
||||||
elif attrs['type']=='function':
|
elif attrs['type']=='function':
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
if attrs['name'] in self.func:
|
if attrs['name'] in self.func:
|
||||||
txt = self.func[attrs['name']](node)
|
txt = self.func[attrs['name']](node)
|
||||||
else:
|
else:
|
||||||
txt = print_fnc.print_fnc(attrs['name'], node)
|
txt = print_fnc.print_fnc(attrs['name'], node)
|
||||||
el_txt = self.doc.createTextNode(txt)
|
el_txt = self.doc.createTextNode(txt)
|
||||||
el.appendChild(el_txt)
|
el.appendChild(el_txt)
|
||||||
|
|
||||||
elif attrs['type']=='eval':
|
elif attrs['type']=='eval':
|
||||||
#TODO: faire ca plus proprement
|
#TODO: faire ca plus proprement
|
||||||
if isinstance(browser, list):
|
if isinstance(browser, list):
|
||||||
print "ERROR: EVAL!"
|
print "ERROR: EVAL!"
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
value = self.eval(browser, attrs['expr'])
|
value = self.eval(browser, attrs['expr'])
|
||||||
el_txt = self.doc.createTextNode(str(value))
|
el_txt = self.doc.createTextNode(str(value))
|
||||||
el.appendChild(el_txt)
|
el.appendChild(el_txt)
|
||||||
|
|
||||||
elif attrs['type']=='fields':
|
elif attrs['type']=='fields':
|
||||||
fields = attrs['name'].split(',')
|
fields = attrs['name'].split(',')
|
||||||
vals = {}
|
vals = {}
|
||||||
for b in browser:
|
for b in browser:
|
||||||
value = tuple([self.get_value2(b, f) for f in fields])
|
value = tuple([self.get_value2(b, f) for f in fields])
|
||||||
if not value in vals:
|
if not value in vals:
|
||||||
vals[value]=[]
|
vals[value]=[]
|
||||||
vals[value].append(b)
|
vals[value].append(b)
|
||||||
keys = vals.keys()
|
keys = vals.keys()
|
||||||
keys.sort()
|
keys.sort()
|
||||||
|
|
||||||
if 'order' in attrs and attrs['order']=='desc':
|
if 'order' in attrs and attrs['order']=='desc':
|
||||||
keys.reverse()
|
keys.reverse()
|
||||||
|
|
||||||
v_list = [vals[k] for k in keys]
|
v_list = [vals[k] for k in keys]
|
||||||
for v in v_list:
|
for v in v_list:
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
el_cld = node.firstChild
|
el_cld = node.firstChild
|
||||||
while el_cld:
|
while el_cld:
|
||||||
self.parse_node(el_cld, el, v)
|
self.parse_node(el_cld, el, v)
|
||||||
el_cld = el_cld.nextSibling
|
el_cld = el_cld.nextSibling
|
||||||
|
|
||||||
elif attrs['type']=='call':
|
elif attrs['type']=='call':
|
||||||
if len(attrs['args']):
|
if len(attrs['args']):
|
||||||
#TODO: test this
|
#TODO: test this
|
||||||
# fetches the values of the variables which names where passed in the args attribute
|
# fetches the values of the variables which names where passed in the args attribute
|
||||||
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
|
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
|
||||||
else:
|
else:
|
||||||
args = []
|
args = []
|
||||||
|
|
||||||
# get the object
|
# get the object
|
||||||
if attrs.has_key('model'):
|
if attrs.has_key('model'):
|
||||||
obj = self.pool.get(attrs['model'])
|
obj = self.pool.get(attrs['model'])
|
||||||
else:
|
else:
|
||||||
if isinstance(browser, list):
|
if isinstance(browser, list):
|
||||||
obj = browser[0]._table
|
obj = browser[0]._table
|
||||||
else:
|
else:
|
||||||
obj = browser._table
|
obj = browser._table
|
||||||
|
|
||||||
# get the ids
|
# get the ids
|
||||||
if attrs.has_key('ids'):
|
if attrs.has_key('ids'):
|
||||||
ids = self.eval(browser, attrs['ids'])
|
ids = self.eval(browser, attrs['ids'])
|
||||||
else:
|
else:
|
||||||
if isinstance(browser, list):
|
if isinstance(browser, list):
|
||||||
ids = [b.id for b in browser]
|
ids = [b.id for b in browser]
|
||||||
else:
|
else:
|
||||||
ids = [browser.id]
|
ids = [browser.id]
|
||||||
|
|
||||||
# call the method itself
|
# call the method itself
|
||||||
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
|
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
|
||||||
|
|
||||||
def parse_result_tree(node, parent, datas):
|
def parse_result_tree(node, parent, datas):
|
||||||
if node.nodeType == node.ELEMENT_NODE:
|
if node.nodeType == node.ELEMENT_NODE:
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
atr = self.node_attrs_get(node)
|
atr = self.node_attrs_get(node)
|
||||||
if 'value' in atr:
|
if 'value' in atr:
|
||||||
#print "type=>",type(datas[atr['value']])
|
#print "type=>",type(datas[atr['value']])
|
||||||
#print "value=>",datas[atr['value']]
|
#print "value=>",datas[atr['value']]
|
||||||
if not isinstance(datas[atr['value']], (str, unicode)):
|
if not isinstance(datas[atr['value']], (str, unicode)):
|
||||||
txt = self.doc.createTextNode(str(datas[atr['value']]))
|
txt = self.doc.createTextNode(str(datas[atr['value']]))
|
||||||
else:
|
else:
|
||||||
txt = self.doc.createTextNode(datas[atr['value']].decode('utf-8'))
|
txt = self.doc.createTextNode(datas[atr['value']].decode('utf-8'))
|
||||||
el.appendChild(txt)
|
el.appendChild(txt)
|
||||||
else:
|
else:
|
||||||
el_cld = node.firstChild
|
el_cld = node.firstChild
|
||||||
while el_cld:
|
while el_cld:
|
||||||
parse_result_tree(el_cld, el, datas)
|
parse_result_tree(el_cld, el, datas)
|
||||||
el_cld = el_cld.nextSibling
|
el_cld = el_cld.nextSibling
|
||||||
elif node.nodeType==node.TEXT_NODE:
|
elif node.nodeType==node.TEXT_NODE:
|
||||||
el = self.doc.createTextNode(node.nodeValue)
|
el = self.doc.createTextNode(node.nodeValue)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if not isinstance(newdatas, list):
|
if not isinstance(newdatas, list):
|
||||||
newdatas = [newdatas]
|
newdatas = [newdatas]
|
||||||
for newdata in newdatas:
|
for newdata in newdatas:
|
||||||
parse_result_tree(node, parent, newdata)
|
parse_result_tree(node, parent, newdata)
|
||||||
|
|
||||||
elif attrs['type']=='zoom':
|
elif attrs['type']=='zoom':
|
||||||
value = self.get_value(browser, attrs['name'])
|
value = self.get_value(browser, attrs['name'])
|
||||||
|
|
||||||
if value:
|
if value:
|
||||||
if not isinstance(value, list):
|
if not isinstance(value, list):
|
||||||
v_list = [value]
|
v_list = [value]
|
||||||
else:
|
else:
|
||||||
v_list = value
|
v_list = value
|
||||||
for v in v_list:
|
for v in v_list:
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
el_cld = node.firstChild
|
el_cld = node.firstChild
|
||||||
while el_cld:
|
while el_cld:
|
||||||
self.parse_node(el_cld, el, v)
|
self.parse_node(el_cld, el, v)
|
||||||
el_cld = el_cld.nextSibling
|
el_cld = el_cld.nextSibling
|
||||||
else:
|
else:
|
||||||
# if there is no "type" attribute in the node, copy it to the xml data and parse its childs
|
# if there is no "type" attribute in the node, copy it to the xml data and parse its childs
|
||||||
el = self.doc.createElement(node.localName)
|
el = self.doc.createElement(node.localName)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
el_cld = node.firstChild
|
el_cld = node.firstChild
|
||||||
while el_cld:
|
while el_cld:
|
||||||
self.parse_node(el_cld, el, browser)
|
self.parse_node(el_cld, el, browser)
|
||||||
el_cld = el_cld.nextSibling
|
el_cld = el_cld.nextSibling
|
||||||
|
|
||||||
elif node.nodeType==node.TEXT_NODE:
|
elif node.nodeType==node.TEXT_NODE:
|
||||||
# if it's a text node, copy it to the xml data
|
# if it's a text node, copy it to the xml data
|
||||||
el = self.doc.createTextNode(node.nodeValue)
|
el = self.doc.createTextNode(node.nodeValue)
|
||||||
parent.appendChild(el)
|
parent.appendChild(el)
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def xml_get(self):
|
def xml_get(self):
|
||||||
return self.doc.toxml('utf-8')
|
return self.doc.toxml('utf-8')
|
||||||
|
|
||||||
def parse_tree(self, ids, model, context=None):
|
def parse_tree(self, ids, model, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
browser = self.pool.get(model).browse(self.cr, self.uid, ids, context)
|
browser = self.pool.get(model).browse(self.cr, self.uid, ids, context)
|
||||||
self.parse_node(self.dom.documentElement, self.doc, browser)
|
self.parse_node(self.dom.documentElement, self.doc, browser)
|
||||||
|
|
||||||
def parse_string(self, xml, ids, model, context=None):
|
def parse_string(self, xml, ids, model, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
# parses the xml template to memory
|
# parses the xml template to memory
|
||||||
self.dom = minidom.parseString(xml)
|
self.dom = minidom.parseString(xml)
|
||||||
|
|
||||||
# create the xml data from the xml template
|
# create the xml data from the xml template
|
||||||
self.parse_tree(ids, model, context)
|
self.parse_tree(ids, model, context)
|
||||||
|
|
||||||
def parse(self, filename, ids, model, context=None):
|
def parse(self, filename, ids, model, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
# parses the xml template to memory
|
# parses the xml template to memory
|
||||||
self.dom = minidom.parseString(tools.file_open(filename).read())
|
self.dom = minidom.parseString(tools.file_open(filename).read())
|
||||||
|
|
||||||
# create the xml data from the xml template
|
# create the xml data from the xml template
|
||||||
self.parse_tree(ids, model, context)
|
self.parse_tree(ids, model, context)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.doc = None
|
self.doc = None
|
||||||
self.dom = None
|
self.dom = None
|
||||||
|
|
||||||
|
|
|
@ -40,123 +40,123 @@ import libxslt
|
||||||
import time, os
|
import time, os
|
||||||
|
|
||||||
class report_printscreen_list(report_int):
|
class report_printscreen_list(report_int):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
report_int.__init__(self, name)
|
report_int.__init__(self, name)
|
||||||
|
|
||||||
def _parse_node(self, root_node):
|
def _parse_node(self, root_node):
|
||||||
result = []
|
result = []
|
||||||
for node in root_node.childNodes:
|
for node in root_node.childNodes:
|
||||||
if node.localName == 'field':
|
if node.localName == 'field':
|
||||||
attrsa = node.attributes
|
attrsa = node.attributes
|
||||||
attrs = {}
|
attrs = {}
|
||||||
if not attrsa is None:
|
if not attrsa is None:
|
||||||
for i in range(attrsa.length):
|
for i in range(attrsa.length):
|
||||||
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
|
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
|
||||||
result.append(attrs['name'])
|
result.append(attrs['name'])
|
||||||
else:
|
else:
|
||||||
result.extend(self._parse_node(node))
|
result.extend(self._parse_node(node))
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _parse_string(self, view):
|
def _parse_string(self, view):
|
||||||
dom = minidom.parseString(view)
|
dom = minidom.parseString(view)
|
||||||
return self._parse_node(dom)
|
return self._parse_node(dom)
|
||||||
|
|
||||||
def create(self, cr, uid, ids, datas, context=None):
|
def create(self, cr, uid, ids, datas, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
datas['ids'] = ids
|
datas['ids'] = ids
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
|
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
|
||||||
if model_id:
|
if model_id:
|
||||||
model_desc = pool.get('ir.model').browse(cr, uid, model_id, context).name
|
model_desc = pool.get('ir.model').browse(cr, uid, model_id, context).name
|
||||||
else:
|
else:
|
||||||
model_desc = model._description
|
model_desc = model._description
|
||||||
|
|
||||||
model = pool.get(datas['model'])
|
model = pool.get(datas['model'])
|
||||||
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
|
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
|
||||||
|
|
||||||
fields_order = self._parse_string(result['arch'])
|
fields_order = self._parse_string(result['arch'])
|
||||||
rows = model.read(cr, uid, datas['ids'], result['fields'].keys() )
|
rows = model.read(cr, uid, datas['ids'], result['fields'].keys() )
|
||||||
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model._description)
|
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model._description)
|
||||||
return (self.obj.get(), 'pdf')
|
return (self.obj.get(), 'pdf')
|
||||||
|
|
||||||
|
|
||||||
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
|
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
|
||||||
pageSize=[297.0,210.0]
|
pageSize=[297.0,210.0]
|
||||||
|
|
||||||
impl = minidom.getDOMImplementation()
|
impl = minidom.getDOMImplementation()
|
||||||
new_doc = impl.createDocument(None, "report", None)
|
new_doc = impl.createDocument(None, "report", None)
|
||||||
|
|
||||||
# build header
|
# build header
|
||||||
config = new_doc.createElement("config")
|
config = new_doc.createElement("config")
|
||||||
|
|
||||||
def _append_node(name, text):
|
def _append_node(name, text):
|
||||||
n = new_doc.createElement(name)
|
n = new_doc.createElement(name)
|
||||||
t = new_doc.createTextNode(text)
|
t = new_doc.createTextNode(text)
|
||||||
n.appendChild(t)
|
n.appendChild(t)
|
||||||
config.appendChild(n)
|
config.appendChild(n)
|
||||||
|
|
||||||
_append_node('date', time.strftime('%d/%m/%Y'))
|
_append_node('date', time.strftime('%d/%m/%Y'))
|
||||||
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
|
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
|
||||||
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
|
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
|
||||||
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
|
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
|
||||||
|
|
||||||
_append_node('report-header', title)
|
_append_node('report-header', title)
|
||||||
|
|
||||||
l = []
|
l = []
|
||||||
t = 0
|
t = 0
|
||||||
strmax = (pageSize[0]-40) * 2.8346
|
strmax = (pageSize[0]-40) * 2.8346
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
s = 0
|
s = 0
|
||||||
if fields[f]['type'] in ('date','time','float','integer'):
|
if fields[f]['type'] in ('date','time','float','integer'):
|
||||||
s = 60
|
s = 60
|
||||||
strmax -= s
|
strmax -= s
|
||||||
else:
|
else:
|
||||||
t += fields[f].get('size', 56) / 28 + 1
|
t += fields[f].get('size', 56) / 28 + 1
|
||||||
l.append(s)
|
l.append(s)
|
||||||
for pos in range(len(l)):
|
for pos in range(len(l)):
|
||||||
if not l[pos]:
|
if not l[pos]:
|
||||||
s = fields[fields_order[pos]].get('size', 56) / 28 + 1
|
s = fields[fields_order[pos]].get('size', 56) / 28 + 1
|
||||||
l[pos] = strmax * s / t
|
l[pos] = strmax * s / t
|
||||||
_append_node('tableSize', ','.join(map(str,l)) )
|
_append_node('tableSize', ','.join(map(str,l)) )
|
||||||
new_doc.childNodes[0].appendChild(config)
|
new_doc.childNodes[0].appendChild(config)
|
||||||
header = new_doc.createElement("header")
|
header = new_doc.createElement("header")
|
||||||
|
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
field = new_doc.createElement("field")
|
field = new_doc.createElement("field")
|
||||||
field_txt = new_doc.createTextNode(str(fields[f]['string']))
|
field_txt = new_doc.createTextNode(str(fields[f]['string']))
|
||||||
field.appendChild(field_txt)
|
field.appendChild(field_txt)
|
||||||
header.appendChild(field)
|
header.appendChild(field)
|
||||||
|
|
||||||
new_doc.childNodes[0].appendChild(header)
|
new_doc.childNodes[0].appendChild(header)
|
||||||
|
|
||||||
lines = new_doc.createElement("lines")
|
lines = new_doc.createElement("lines")
|
||||||
for line in results:
|
for line in results:
|
||||||
node_line = new_doc.createElement("row")
|
node_line = new_doc.createElement("row")
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
if fields[f]['type']=='many2one' and line[f]:
|
if fields[f]['type']=='many2one' and line[f]:
|
||||||
line[f] = line[f][1]
|
line[f] = line[f][1]
|
||||||
if fields[f]['type'] in ('one2many','many2many') and line[f]:
|
if fields[f]['type'] in ('one2many','many2many') and line[f]:
|
||||||
line[f] = '( '+str(len(line[f])) + ' )'
|
line[f] = '( '+str(len(line[f])) + ' )'
|
||||||
col = new_doc.createElement("col")
|
col = new_doc.createElement("col")
|
||||||
col.setAttribute('tree','no')
|
col.setAttribute('tree','no')
|
||||||
if line[f] != None:
|
if line[f] != None:
|
||||||
txt = new_doc.createTextNode(str(line[f] or ''))
|
txt = new_doc.createTextNode(str(line[f] or ''))
|
||||||
else:
|
else:
|
||||||
txt = new_doc.createTextNode('/')
|
txt = new_doc.createTextNode('/')
|
||||||
col.appendChild(txt)
|
col.appendChild(txt)
|
||||||
node_line.appendChild(col)
|
node_line.appendChild(col)
|
||||||
lines.appendChild(node_line)
|
lines.appendChild(node_line)
|
||||||
new_doc.childNodes[0].appendChild(lines)
|
new_doc.childNodes[0].appendChild(lines)
|
||||||
|
|
||||||
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
|
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
|
||||||
style = libxslt.parseStylesheetDoc(styledoc)
|
style = libxslt.parseStylesheetDoc(styledoc)
|
||||||
doc = libxml2.parseDoc(new_doc.toxml())
|
doc = libxml2.parseDoc(new_doc.toxml())
|
||||||
rml_obj = style.applyStylesheet(doc, None)
|
rml_obj = style.applyStylesheet(doc, None)
|
||||||
rml = style.saveResultToString(rml_obj)
|
rml = style.saveResultToString(rml_obj)
|
||||||
|
|
||||||
self.obj = render.rml(rml)
|
self.obj = render.rml(rml)
|
||||||
self.obj.render()
|
self.obj.render()
|
||||||
return True
|
return True
|
||||||
report_printscreen_list('report.printscreen.form')
|
report_printscreen_list('report.printscreen.form')
|
||||||
|
|
||||||
|
|
|
@ -40,166 +40,166 @@ import libxslt
|
||||||
import time, os
|
import time, os
|
||||||
|
|
||||||
class report_printscreen_list(report_int):
|
class report_printscreen_list(report_int):
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
report_int.__init__(self, name)
|
report_int.__init__(self, name)
|
||||||
|
|
||||||
def _parse_node(self, root_node):
|
def _parse_node(self, root_node):
|
||||||
result = []
|
result = []
|
||||||
for node in root_node.childNodes:
|
for node in root_node.childNodes:
|
||||||
if node.localName == 'field':
|
if node.localName == 'field':
|
||||||
attrsa = node.attributes
|
attrsa = node.attributes
|
||||||
attrs = {}
|
attrs = {}
|
||||||
if not attrsa is None:
|
if not attrsa is None:
|
||||||
for i in range(attrsa.length):
|
for i in range(attrsa.length):
|
||||||
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
|
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
|
||||||
result.append(attrs['name'])
|
result.append(attrs['name'])
|
||||||
else:
|
else:
|
||||||
result.extend(self._parse_node(node))
|
result.extend(self._parse_node(node))
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _parse_string(self, view):
|
def _parse_string(self, view):
|
||||||
dom = minidom.parseString(unicode(view, 'utf-8').encode('utf-8'))
|
dom = minidom.parseString(unicode(view, 'utf-8').encode('utf-8'))
|
||||||
return self._parse_node(dom)
|
return self._parse_node(dom)
|
||||||
|
|
||||||
def create(self, cr, uid, ids, datas, context=None):
|
def create(self, cr, uid, ids, datas, context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
pool = pooler.get_pool(cr.dbname)
|
pool = pooler.get_pool(cr.dbname)
|
||||||
model = pool.get(datas['model'])
|
model = pool.get(datas['model'])
|
||||||
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
|
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
|
||||||
if model_id:
|
if model_id:
|
||||||
model_desc = pool.get('ir.model').browse(cr, uid, model_id[0], context).name
|
model_desc = pool.get('ir.model').browse(cr, uid, model_id[0], context).name
|
||||||
else:
|
else:
|
||||||
model_desc = model._description
|
model_desc = model._description
|
||||||
|
|
||||||
datas['ids'] = ids
|
datas['ids'] = ids
|
||||||
model = pooler.get_pool(cr.dbname).get(datas['model'])
|
model = pooler.get_pool(cr.dbname).get(datas['model'])
|
||||||
|
|
||||||
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
|
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
|
||||||
fields_order = self._parse_string(result['arch'])
|
fields_order = self._parse_string(result['arch'])
|
||||||
rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context )
|
rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context )
|
||||||
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
|
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
|
||||||
return (self.obj.get(), 'pdf')
|
return (self.obj.get(), 'pdf')
|
||||||
|
|
||||||
|
|
||||||
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
|
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
|
||||||
pageSize=[297.0, 210.0]
|
pageSize=[297.0, 210.0]
|
||||||
|
|
||||||
impl = minidom.getDOMImplementation()
|
impl = minidom.getDOMImplementation()
|
||||||
new_doc = impl.createDocument(None, "report", None)
|
new_doc = impl.createDocument(None, "report", None)
|
||||||
|
|
||||||
# build header
|
# build header
|
||||||
config = new_doc.createElement("config")
|
config = new_doc.createElement("config")
|
||||||
|
|
||||||
def _append_node(name, text):
|
def _append_node(name, text):
|
||||||
n = new_doc.createElement(name)
|
n = new_doc.createElement(name)
|
||||||
t = new_doc.createTextNode(text)
|
t = new_doc.createTextNode(text)
|
||||||
n.appendChild(t)
|
n.appendChild(t)
|
||||||
config.appendChild(n)
|
config.appendChild(n)
|
||||||
|
|
||||||
_append_node('date', time.strftime('%d/%m/%Y'))
|
_append_node('date', time.strftime('%d/%m/%Y'))
|
||||||
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
|
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
|
||||||
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
|
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
|
||||||
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
|
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
|
||||||
|
|
||||||
_append_node('report-header', title)
|
_append_node('report-header', title)
|
||||||
l = []
|
l = []
|
||||||
t = 0
|
t = 0
|
||||||
rowcount=0;
|
rowcount=0;
|
||||||
strmax = (pageSize[0]-40) * 2.8346
|
strmax = (pageSize[0]-40) * 2.8346
|
||||||
temp = []
|
temp = []
|
||||||
count = len(fields_order)
|
count = len(fields_order)
|
||||||
for i in range(0,count):
|
for i in range(0,count):
|
||||||
temp.append(0)
|
temp.append(0)
|
||||||
|
|
||||||
ince = -1;
|
ince = -1;
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
s = 0
|
s = 0
|
||||||
ince += 1
|
ince += 1
|
||||||
if fields[f]['type'] in ('date','time','float','integer'):
|
if fields[f]['type'] in ('date','time','float','integer'):
|
||||||
s = 60
|
s = 60
|
||||||
strmax -= s
|
strmax -= s
|
||||||
if fields[f]['type'] in ('float','integer'):
|
if fields[f]['type'] in ('float','integer'):
|
||||||
temp[ince]=1;
|
temp[ince]=1;
|
||||||
else:
|
else:
|
||||||
t += fields[f].get('size', 80) / 28 + 1
|
t += fields[f].get('size', 80) / 28 + 1
|
||||||
|
|
||||||
l.append(s)
|
l.append(s)
|
||||||
|
|
||||||
for pos in range(len(l)):
|
for pos in range(len(l)):
|
||||||
if not l[pos]:
|
if not l[pos]:
|
||||||
s = fields[fields_order[pos]].get('size', 80) / 28 + 1
|
s = fields[fields_order[pos]].get('size', 80) / 28 + 1
|
||||||
l[pos] = strmax * s / t
|
l[pos] = strmax * s / t
|
||||||
|
|
||||||
_append_node('tableSize', ','.join(map(str,l)) )
|
_append_node('tableSize', ','.join(map(str,l)) )
|
||||||
new_doc.childNodes[0].appendChild(config)
|
new_doc.childNodes[0].appendChild(config)
|
||||||
header = new_doc.createElement("header")
|
header = new_doc.createElement("header")
|
||||||
|
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
field = new_doc.createElement("field")
|
field = new_doc.createElement("field")
|
||||||
field_txt = new_doc.createTextNode(str(fields[f]['string'] or ''))
|
field_txt = new_doc.createTextNode(str(fields[f]['string'] or ''))
|
||||||
field.appendChild(field_txt)
|
field.appendChild(field_txt)
|
||||||
header.appendChild(field)
|
header.appendChild(field)
|
||||||
|
|
||||||
new_doc.childNodes[0].appendChild(header)
|
new_doc.childNodes[0].appendChild(header)
|
||||||
|
|
||||||
lines = new_doc.createElement("lines")
|
lines = new_doc.createElement("lines")
|
||||||
|
|
||||||
tsum = []
|
tsum = []
|
||||||
count = len(fields_order)
|
count = len(fields_order)
|
||||||
for i in range(0,count):
|
for i in range(0,count):
|
||||||
tsum.append(0)
|
tsum.append(0)
|
||||||
|
|
||||||
for line in results:
|
for line in results:
|
||||||
node_line = new_doc.createElement("row")
|
node_line = new_doc.createElement("row")
|
||||||
|
|
||||||
count = -1
|
count = -1
|
||||||
for f in fields_order:
|
for f in fields_order:
|
||||||
count += 1
|
count += 1
|
||||||
if fields[f]['type']=='many2one' and line[f]:
|
if fields[f]['type']=='many2one' and line[f]:
|
||||||
line[f] = line[f][1]
|
line[f] = line[f][1]
|
||||||
if fields[f]['type'] in ('one2many','many2many') and line[f]:
|
if fields[f]['type'] in ('one2many','many2many') and line[f]:
|
||||||
line[f] = '( '+str(len(line[f])) + ' )'
|
line[f] = '( '+str(len(line[f])) + ' )'
|
||||||
col = new_doc.createElement("col")
|
col = new_doc.createElement("col")
|
||||||
col.setAttribute('para','yes')
|
col.setAttribute('para','yes')
|
||||||
col.setAttribute('tree','no')
|
col.setAttribute('tree','no')
|
||||||
if line[f] != None:
|
if line[f] != None:
|
||||||
txt = new_doc.createTextNode(str(line[f] or ''))
|
txt = new_doc.createTextNode(str(line[f] or ''))
|
||||||
if temp[count] == 1:
|
if temp[count] == 1:
|
||||||
tsum[count] = tsum[count] + line[f];
|
tsum[count] = tsum[count] + line[f];
|
||||||
|
|
||||||
else:
|
else:
|
||||||
txt = new_doc.createTextNode('/')
|
txt = new_doc.createTextNode('/')
|
||||||
col.appendChild(txt)
|
col.appendChild(txt)
|
||||||
node_line.appendChild(col)
|
node_line.appendChild(col)
|
||||||
lines.appendChild(node_line)
|
lines.appendChild(node_line)
|
||||||
node_line = new_doc.createElement("row")
|
node_line = new_doc.createElement("row")
|
||||||
lines.appendChild(node_line)
|
lines.appendChild(node_line)
|
||||||
node_line = new_doc.createElement("row")
|
node_line = new_doc.createElement("row")
|
||||||
for f in range(0,count+1):
|
for f in range(0,count+1):
|
||||||
col = new_doc.createElement("col")
|
col = new_doc.createElement("col")
|
||||||
col.setAttribute('para','yes')
|
col.setAttribute('para','yes')
|
||||||
col.setAttribute('tree','no')
|
col.setAttribute('tree','no')
|
||||||
if tsum[f] != None:
|
if tsum[f] != None:
|
||||||
txt = new_doc.createTextNode(str(tsum[f] or ''))
|
txt = new_doc.createTextNode(str(tsum[f] or ''))
|
||||||
else:
|
else:
|
||||||
txt = new_doc.createTextNode('/')
|
txt = new_doc.createTextNode('/')
|
||||||
if f == 0:
|
if f == 0:
|
||||||
txt = new_doc.createTextNode('Total')
|
txt = new_doc.createTextNode('Total')
|
||||||
|
|
||||||
col.appendChild(txt)
|
col.appendChild(txt)
|
||||||
node_line.appendChild(col)
|
node_line.appendChild(col)
|
||||||
lines.appendChild(node_line)
|
lines.appendChild(node_line)
|
||||||
|
|
||||||
new_doc.childNodes[0].appendChild(lines)
|
new_doc.childNodes[0].appendChild(lines)
|
||||||
|
|
||||||
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
|
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
|
||||||
style = libxslt.parseStylesheetDoc(styledoc)
|
style = libxslt.parseStylesheetDoc(styledoc)
|
||||||
doc = libxml2.parseDoc(new_doc.toxml())
|
doc = libxml2.parseDoc(new_doc.toxml())
|
||||||
rml_obj = style.applyStylesheet(doc, None)
|
rml_obj = style.applyStylesheet(doc, None)
|
||||||
rml = style.saveResultToString(rml_obj)
|
rml = style.saveResultToString(rml_obj)
|
||||||
self.obj = render.rml(rml)
|
self.obj = render.rml(rml)
|
||||||
self.obj.render()
|
self.obj.render()
|
||||||
return True
|
return True
|
||||||
report_printscreen_list('report.printscreen.list')
|
report_printscreen_list('report.printscreen.list')
|
||||||
|
|
||||||
|
|
|
@ -33,6 +33,6 @@ from rml import rml, rml2html
|
||||||
from render import render
|
from render import render
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import Image
|
import Image
|
||||||
except:
|
except:
|
||||||
print 'WARNING; Python Imaging not installed, you can use only .JPG pictures !'
|
print 'WARNING; Python Imaging not installed, you can use only .JPG pictures !'
|
||||||
|
|
|
@ -44,38 +44,38 @@ import threading
|
||||||
# _render
|
# _render
|
||||||
#
|
#
|
||||||
class render(object):
|
class render(object):
|
||||||
def __init__(self, bin_datas={}, path='.'):
|
def __init__(self, bin_datas={}, path='.'):
|
||||||
self.done = False
|
self.done = False
|
||||||
self.bin_datas = bin_datas
|
self.bin_datas = bin_datas
|
||||||
self.path = path
|
self.path = path
|
||||||
|
|
||||||
def _render(self):
|
def _render(self):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def render(self):
|
def render(self):
|
||||||
self.done = False
|
self.done = False
|
||||||
result = self._render()
|
result = self._render()
|
||||||
self._result = result
|
self._result = result
|
||||||
self.done = True
|
self.done = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def is_done(self):
|
def is_done(self):
|
||||||
res = self.done
|
res = self.done
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
if self.is_done():
|
if self.is_done():
|
||||||
return self._result
|
return self._result
|
||||||
else:
|
else:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
import time
|
import time
|
||||||
print 'Multi-thread code !'
|
print 'Multi-thread code !'
|
||||||
r = render()
|
r = render()
|
||||||
r.render()
|
r.render()
|
||||||
while not r.is_done():
|
while not r.is_done():
|
||||||
print 'not yet!'
|
print 'not yet!'
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
print 'done!'
|
print 'done!'
|
||||||
|
|
||||||
|
|
|
@ -32,20 +32,20 @@ import rml2pdf
|
||||||
import rml2html as htmlizer
|
import rml2html as htmlizer
|
||||||
|
|
||||||
class rml(render.render):
|
class rml(render.render):
|
||||||
def __init__(self, xml, datas={}, path='.'):
|
def __init__(self, xml, datas={}, path='.'):
|
||||||
render.render.__init__(self, datas)
|
render.render.__init__(self, datas)
|
||||||
self.xml = xml
|
self.xml = xml
|
||||||
self.output_type = 'pdf'
|
self.output_type = 'pdf'
|
||||||
self.path = path
|
self.path = path
|
||||||
|
|
||||||
def _render(self):
|
def _render(self):
|
||||||
return rml2pdf.parseString(self.xml, images=self.bin_datas, path=self.path)
|
return rml2pdf.parseString(self.xml, images=self.bin_datas, path=self.path)
|
||||||
|
|
||||||
class rml2html(render.render):
|
class rml2html(render.render):
|
||||||
def __init__(self, xml, datas={}):
|
def __init__(self, xml, datas={}):
|
||||||
super(rml2html, self).__init__(datas)
|
super(rml2html, self).__init__(datas)
|
||||||
self.xml = xml
|
self.xml = xml
|
||||||
self.output_type = 'html'
|
self.output_type = 'html'
|
||||||
|
|
||||||
def _render(self):
|
def _render(self):
|
||||||
return htmlizer.parseString(self.xml)
|
return htmlizer.parseString(self.xml)
|
||||||
|
|
|
@ -52,341 +52,341 @@ import copy
|
||||||
import utils
|
import utils
|
||||||
|
|
||||||
class _flowable(object):
|
class _flowable(object):
|
||||||
def __init__(self, template, doc):
|
def __init__(self, template, doc):
|
||||||
self._tags = {
|
self._tags = {
|
||||||
'title': self._tag_title,
|
'title': self._tag_title,
|
||||||
'spacer': self._tag_spacer,
|
'spacer': self._tag_spacer,
|
||||||
'para': self._tag_para,
|
'para': self._tag_para,
|
||||||
'nextFrame': self._tag_next_frame,
|
'nextFrame': self._tag_next_frame,
|
||||||
'blockTable': self._tag_table,
|
'blockTable': self._tag_table,
|
||||||
'pageBreak': self._tag_page_break,
|
'pageBreak': self._tag_page_break,
|
||||||
'setNextTemplate': self._tag_next_template,
|
'setNextTemplate': self._tag_next_template,
|
||||||
}
|
}
|
||||||
self.template = template
|
self.template = template
|
||||||
self.doc = doc
|
self.doc = doc
|
||||||
|
|
||||||
def _tag_page_break(self, node):
|
def _tag_page_break(self, node):
|
||||||
return '<br/>'*3
|
return '<br/>'*3
|
||||||
|
|
||||||
def _tag_next_template(self, node):
|
def _tag_next_template(self, node):
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
def _tag_next_frame(self, node):
|
def _tag_next_frame(self, node):
|
||||||
result=self.template.frame_stop()
|
result=self.template.frame_stop()
|
||||||
result+='<br/>'
|
result+='<br/>'
|
||||||
result+=self.template.frame_start()
|
result+=self.template.frame_start()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _tag_title(self, node):
|
def _tag_title(self, node):
|
||||||
node.tagName='h1'
|
node.tagName='h1'
|
||||||
return node.toxml()
|
return node.toxml()
|
||||||
|
|
||||||
def _tag_spacer(self, node):
|
def _tag_spacer(self, node):
|
||||||
length = 1+int(utils.unit_get(node.getAttribute('length')))/35
|
length = 1+int(utils.unit_get(node.getAttribute('length')))/35
|
||||||
return "<br/>"*length
|
return "<br/>"*length
|
||||||
|
|
||||||
def _tag_table(self, node):
|
def _tag_table(self, node):
|
||||||
node.tagName='table'
|
node.tagName='table'
|
||||||
if node.hasAttribute('colWidths'):
|
if node.hasAttribute('colWidths'):
|
||||||
sizes = map(lambda x: utils.unit_get(x), node.getAttribute('colWidths').split(','))
|
sizes = map(lambda x: utils.unit_get(x), node.getAttribute('colWidths').split(','))
|
||||||
tr = self.doc.createElement('tr')
|
tr = self.doc.createElement('tr')
|
||||||
for s in sizes:
|
for s in sizes:
|
||||||
td = self.doc.createElement('td')
|
td = self.doc.createElement('td')
|
||||||
td.setAttribute("width", str(s))
|
td.setAttribute("width", str(s))
|
||||||
tr.appendChild(td)
|
tr.appendChild(td)
|
||||||
node.appendChild(tr)
|
node.appendChild(tr)
|
||||||
return node.toxml()
|
return node.toxml()
|
||||||
|
|
||||||
def _tag_para(self, node):
|
def _tag_para(self, node):
|
||||||
node.tagName='p'
|
node.tagName='p'
|
||||||
if node.hasAttribute('style'):
|
if node.hasAttribute('style'):
|
||||||
node.setAttribute('class', node.getAttribute('style'))
|
node.setAttribute('class', node.getAttribute('style'))
|
||||||
return node.toxml()
|
return node.toxml()
|
||||||
|
|
||||||
def render(self, node):
|
def render(self, node):
|
||||||
result = self.template.start()
|
result = self.template.start()
|
||||||
result += self.template.frame_start()
|
result += self.template.frame_start()
|
||||||
for n in node.childNodes:
|
for n in node.childNodes:
|
||||||
if n.nodeType==node.ELEMENT_NODE:
|
if n.nodeType==node.ELEMENT_NODE:
|
||||||
if n.localName in self._tags:
|
if n.localName in self._tags:
|
||||||
result += self._tags[n.localName](n)
|
result += self._tags[n.localName](n)
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
#print 'tag', n.localName, 'not yet implemented!'
|
#print 'tag', n.localName, 'not yet implemented!'
|
||||||
result += self.template.frame_stop()
|
result += self.template.frame_stop()
|
||||||
result += self.template.end()
|
result += self.template.end()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
class _rml_tmpl_tag(object):
|
class _rml_tmpl_tag(object):
|
||||||
def __init__(self, *args):
|
def __init__(self, *args):
|
||||||
pass
|
pass
|
||||||
def tag_start(self):
|
def tag_start(self):
|
||||||
return ''
|
return ''
|
||||||
def tag_end(self):
|
def tag_end(self):
|
||||||
return False
|
return False
|
||||||
def tag_stop(self):
|
def tag_stop(self):
|
||||||
return ''
|
return ''
|
||||||
def tag_mergeable(self):
|
def tag_mergeable(self):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
class _rml_tmpl_frame(_rml_tmpl_tag):
|
class _rml_tmpl_frame(_rml_tmpl_tag):
|
||||||
def __init__(self, posx, width):
|
def __init__(self, posx, width):
|
||||||
self.width = width
|
self.width = width
|
||||||
self.posx = posx
|
self.posx = posx
|
||||||
def tag_start(self):
|
def tag_start(self):
|
||||||
return '<table border="0" width="%d"><tr><td width="%d"> </td><td>' % (self.width+self.posx,self.posx)
|
return '<table border="0" width="%d"><tr><td width="%d"> </td><td>' % (self.width+self.posx,self.posx)
|
||||||
def tag_end(self):
|
def tag_end(self):
|
||||||
return True
|
return True
|
||||||
def tag_stop(self):
|
def tag_stop(self):
|
||||||
return '</td></tr></table><br/>'
|
return '</td></tr></table><br/>'
|
||||||
def tag_mergeable(self):
|
def tag_mergeable(self):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# An awfull workaround since I don't really understand the semantic behind merge.
|
# An awfull workaround since I don't really understand the semantic behind merge.
|
||||||
def merge(self, frame):
|
def merge(self, frame):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
class _rml_tmpl_draw_string(_rml_tmpl_tag):
|
class _rml_tmpl_draw_string(_rml_tmpl_tag):
|
||||||
def __init__(self, node, style):
|
def __init__(self, node, style):
|
||||||
self.posx = utils.unit_get(node.getAttribute('x'))
|
self.posx = utils.unit_get(node.getAttribute('x'))
|
||||||
self.posy = utils.unit_get(node.getAttribute('y'))
|
self.posy = utils.unit_get(node.getAttribute('y'))
|
||||||
aligns = {
|
aligns = {
|
||||||
'drawString': 'left',
|
'drawString': 'left',
|
||||||
'drawRightString': 'right',
|
'drawRightString': 'right',
|
||||||
'drawCentredString': 'center'
|
'drawCentredString': 'center'
|
||||||
}
|
}
|
||||||
align = aligns[node.localName]
|
align = aligns[node.localName]
|
||||||
self.pos = [(self.posx, self.posy, align, utils.text_get(node), style.get('td'), style.font_size_get('td'))]
|
self.pos = [(self.posx, self.posy, align, utils.text_get(node), style.get('td'), style.font_size_get('td'))]
|
||||||
|
|
||||||
def tag_start(self):
|
def tag_start(self):
|
||||||
self.pos.sort()
|
self.pos.sort()
|
||||||
res = '<table border="0" cellpadding="0" cellspacing="0"><tr>'
|
res = '<table border="0" cellpadding="0" cellspacing="0"><tr>'
|
||||||
posx = 0
|
posx = 0
|
||||||
i = 0
|
i = 0
|
||||||
for (x,y,align,txt, style, fs) in self.pos:
|
for (x,y,align,txt, style, fs) in self.pos:
|
||||||
if align=="left":
|
if align=="left":
|
||||||
pos2 = len(txt)*fs
|
pos2 = len(txt)*fs
|
||||||
res+='<td width="%d"></td><td style="%s" width="%d">%s</td>' % (x - posx, style, pos2, txt)
|
res+='<td width="%d"></td><td style="%s" width="%d">%s</td>' % (x - posx, style, pos2, txt)
|
||||||
posx = x+pos2
|
posx = x+pos2
|
||||||
if align=="right":
|
if align=="right":
|
||||||
res+='<td width="%d" align="right" style="%s">%s</td>' % (x - posx, style, txt)
|
res+='<td width="%d" align="right" style="%s">%s</td>' % (x - posx, style, txt)
|
||||||
posx = x
|
posx = x
|
||||||
if align=="center":
|
if align=="center":
|
||||||
res+='<td width="%d" align="center" style="%s">%s</td>' % ((x - posx)*2, style, txt)
|
res+='<td width="%d" align="center" style="%s">%s</td>' % ((x - posx)*2, style, txt)
|
||||||
posx = 2*x-posx
|
posx = 2*x-posx
|
||||||
i+=1
|
i+=1
|
||||||
res+='</tr></table>'
|
res+='</tr></table>'
|
||||||
return res
|
return res
|
||||||
def merge(self, ds):
|
def merge(self, ds):
|
||||||
self.pos+=ds.pos
|
self.pos+=ds.pos
|
||||||
|
|
||||||
class _rml_tmpl_draw_lines(_rml_tmpl_tag):
|
class _rml_tmpl_draw_lines(_rml_tmpl_tag):
|
||||||
def __init__(self, node, style):
|
def __init__(self, node, style):
|
||||||
coord = [utils.unit_get(x) for x in utils.text_get(node).split(' ')]
|
coord = [utils.unit_get(x) for x in utils.text_get(node).split(' ')]
|
||||||
self.ok = False
|
self.ok = False
|
||||||
self.posx = coord[0]
|
self.posx = coord[0]
|
||||||
self.posy = coord[1]
|
self.posy = coord[1]
|
||||||
self.width = coord[2]-coord[0]
|
self.width = coord[2]-coord[0]
|
||||||
self.ok = coord[1]==coord[3]
|
self.ok = coord[1]==coord[3]
|
||||||
self.style = style
|
self.style = style
|
||||||
self.style = style.get('hr')
|
self.style = style.get('hr')
|
||||||
|
|
||||||
def tag_start(self):
|
def tag_start(self):
|
||||||
if self.ok:
|
if self.ok:
|
||||||
return '<table border="0" cellpadding="0" cellspacing="0" width="%d"><tr><td width="%d"></td><td><hr width="100%%" style="margin:0px; %s"></td></tr></table>' % (self.posx+self.width,self.posx,self.style)
|
return '<table border="0" cellpadding="0" cellspacing="0" width="%d"><tr><td width="%d"></td><td><hr width="100%%" style="margin:0px; %s"></td></tr></table>' % (self.posx+self.width,self.posx,self.style)
|
||||||
else:
|
else:
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
class _rml_stylesheet(object):
|
class _rml_stylesheet(object):
|
||||||
def __init__(self, stylesheet, doc):
|
def __init__(self, stylesheet, doc):
|
||||||
self.doc = doc
|
self.doc = doc
|
||||||
self.attrs = {}
|
self.attrs = {}
|
||||||
self._tags = {
|
self._tags = {
|
||||||
'fontSize': lambda x: ('font-size',str(utils.unit_get(x))+'px'),
|
'fontSize': lambda x: ('font-size',str(utils.unit_get(x))+'px'),
|
||||||
'alignment': lambda x: ('text-align',str(x))
|
'alignment': lambda x: ('text-align',str(x))
|
||||||
}
|
}
|
||||||
result = ''
|
result = ''
|
||||||
for ps in stylesheet.getElementsByTagName('paraStyle'):
|
for ps in stylesheet.getElementsByTagName('paraStyle'):
|
||||||
attr = {}
|
attr = {}
|
||||||
attrs = ps.attributes
|
attrs = ps.attributes
|
||||||
for i in range(attrs.length):
|
for i in range(attrs.length):
|
||||||
name = attrs.item(i).localName
|
name = attrs.item(i).localName
|
||||||
attr[name] = ps.getAttribute(name)
|
attr[name] = ps.getAttribute(name)
|
||||||
attrs = []
|
attrs = []
|
||||||
for a in attr:
|
for a in attr:
|
||||||
if a in self._tags:
|
if a in self._tags:
|
||||||
attrs.append("%s:%s" % self._tags[a](attr[a]))
|
attrs.append("%s:%s" % self._tags[a](attr[a]))
|
||||||
if len(attrs):
|
if len(attrs):
|
||||||
result += "p."+attr['name']+" {"+'; '.join(attrs)+"}\n"
|
result += "p."+attr['name']+" {"+'; '.join(attrs)+"}\n"
|
||||||
self.result = result
|
self.result = result
|
||||||
|
|
||||||
def render(self):
|
def render(self):
|
||||||
return self.result
|
return self.result
|
||||||
|
|
||||||
class _rml_draw_style(object):
|
class _rml_draw_style(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.style = {}
|
self.style = {}
|
||||||
self._styles = {
|
self._styles = {
|
||||||
'fill': lambda x: {'td': {'color':x.getAttribute('color')}},
|
'fill': lambda x: {'td': {'color':x.getAttribute('color')}},
|
||||||
'setFont': lambda x: {'td': {'font-size':x.getAttribute('size')+'px'}},
|
'setFont': lambda x: {'td': {'font-size':x.getAttribute('size')+'px'}},
|
||||||
'stroke': lambda x: {'hr': {'color':x.getAttribute('color')}},
|
'stroke': lambda x: {'hr': {'color':x.getAttribute('color')}},
|
||||||
}
|
}
|
||||||
def update(self, node):
|
def update(self, node):
|
||||||
if node.localName in self._styles:
|
if node.localName in self._styles:
|
||||||
result = self._styles[node.localName](node)
|
result = self._styles[node.localName](node)
|
||||||
for key in result:
|
for key in result:
|
||||||
if key in self.style:
|
if key in self.style:
|
||||||
self.style[key].update(result[key])
|
self.style[key].update(result[key])
|
||||||
else:
|
else:
|
||||||
self.style[key] = result[key]
|
self.style[key] = result[key]
|
||||||
def font_size_get(self,tag):
|
def font_size_get(self,tag):
|
||||||
size = utils.unit_get(self.style.get('td', {}).get('font-size','16'))
|
size = utils.unit_get(self.style.get('td', {}).get('font-size','16'))
|
||||||
return size
|
return size
|
||||||
|
|
||||||
def get(self,tag):
|
def get(self,tag):
|
||||||
if not tag in self.style:
|
if not tag in self.style:
|
||||||
return ""
|
return ""
|
||||||
return ';'.join(['%s:%s' % (x[0],x[1]) for x in self.style[tag].items()])
|
return ';'.join(['%s:%s' % (x[0],x[1]) for x in self.style[tag].items()])
|
||||||
|
|
||||||
class _rml_template(object):
|
class _rml_template(object):
|
||||||
def __init__(self, template):
|
def __init__(self, template):
|
||||||
self.frame_pos = -1
|
self.frame_pos = -1
|
||||||
self.frames = []
|
self.frames = []
|
||||||
self.template_order = []
|
self.template_order = []
|
||||||
self.page_template = {}
|
self.page_template = {}
|
||||||
self.loop = 0
|
self.loop = 0
|
||||||
self._tags = {
|
self._tags = {
|
||||||
'drawString': _rml_tmpl_draw_string,
|
'drawString': _rml_tmpl_draw_string,
|
||||||
'drawRightString': _rml_tmpl_draw_string,
|
'drawRightString': _rml_tmpl_draw_string,
|
||||||
'drawCentredString': _rml_tmpl_draw_string,
|
'drawCentredString': _rml_tmpl_draw_string,
|
||||||
'lines': _rml_tmpl_draw_lines
|
'lines': _rml_tmpl_draw_lines
|
||||||
}
|
}
|
||||||
self.style = _rml_draw_style()
|
self.style = _rml_draw_style()
|
||||||
for pt in template.getElementsByTagName('pageTemplate'):
|
for pt in template.getElementsByTagName('pageTemplate'):
|
||||||
frames = {}
|
frames = {}
|
||||||
id = pt.getAttribute('id')
|
id = pt.getAttribute('id')
|
||||||
self.template_order.append(id)
|
self.template_order.append(id)
|
||||||
for tmpl in pt.getElementsByTagName('frame'):
|
for tmpl in pt.getElementsByTagName('frame'):
|
||||||
posy = int(utils.unit_get(tmpl.getAttribute('y1'))) #+utils.unit_get(tmpl.getAttribute('height')))
|
posy = int(utils.unit_get(tmpl.getAttribute('y1'))) #+utils.unit_get(tmpl.getAttribute('height')))
|
||||||
posx = int(utils.unit_get(tmpl.getAttribute('x1')))
|
posx = int(utils.unit_get(tmpl.getAttribute('x1')))
|
||||||
frames[(posy,posx,tmpl.getAttribute('id'))] = _rml_tmpl_frame(posx, utils.unit_get(tmpl.getAttribute('width')))
|
frames[(posy,posx,tmpl.getAttribute('id'))] = _rml_tmpl_frame(posx, utils.unit_get(tmpl.getAttribute('width')))
|
||||||
for tmpl in template.getElementsByTagName('pageGraphics'):
|
for tmpl in template.getElementsByTagName('pageGraphics'):
|
||||||
for n in tmpl.childNodes:
|
for n in tmpl.childNodes:
|
||||||
if n.nodeType==n.ELEMENT_NODE:
|
if n.nodeType==n.ELEMENT_NODE:
|
||||||
if n.localName in self._tags:
|
if n.localName in self._tags:
|
||||||
t = self._tags[n.localName](n, self.style)
|
t = self._tags[n.localName](n, self.style)
|
||||||
frames[(t.posy,t.posx,n.localName)] = t
|
frames[(t.posy,t.posx,n.localName)] = t
|
||||||
else:
|
else:
|
||||||
self.style.update(n)
|
self.style.update(n)
|
||||||
keys = frames.keys()
|
keys = frames.keys()
|
||||||
keys.sort()
|
keys.sort()
|
||||||
keys.reverse()
|
keys.reverse()
|
||||||
self.page_template[id] = []
|
self.page_template[id] = []
|
||||||
for key in range(len(keys)):
|
for key in range(len(keys)):
|
||||||
if key>0 and keys[key-1][0] == keys[key][0]:
|
if key>0 and keys[key-1][0] == keys[key][0]:
|
||||||
if type(self.page_template[id][-1]) == type(frames[keys[key]]):
|
if type(self.page_template[id][-1]) == type(frames[keys[key]]):
|
||||||
if self.page_template[id][-1].tag_mergeable():
|
if self.page_template[id][-1].tag_mergeable():
|
||||||
self.page_template[id][-1].merge(frames[keys[key]])
|
self.page_template[id][-1].merge(frames[keys[key]])
|
||||||
continue
|
continue
|
||||||
self.page_template[id].append(frames[keys[key]])
|
self.page_template[id].append(frames[keys[key]])
|
||||||
self.template = self.template_order[0]
|
self.template = self.template_order[0]
|
||||||
|
|
||||||
def _get_style(self):
|
def _get_style(self):
|
||||||
return self.style
|
return self.style
|
||||||
|
|
||||||
def set_next_template(self):
|
def set_next_template(self):
|
||||||
self.template = self.template_order[(self.template_order.index(name)+1) % self.template_order]
|
self.template = self.template_order[(self.template_order.index(name)+1) % self.template_order]
|
||||||
self.frame_pos = -1
|
self.frame_pos = -1
|
||||||
|
|
||||||
def set_template(self, name):
|
def set_template(self, name):
|
||||||
self.template = name
|
self.template = name
|
||||||
self.frame_pos = -1
|
self.frame_pos = -1
|
||||||
|
|
||||||
def frame_start(self):
|
def frame_start(self):
|
||||||
result = ''
|
result = ''
|
||||||
frames = self.page_template[self.template]
|
frames = self.page_template[self.template]
|
||||||
ok = True
|
ok = True
|
||||||
while ok:
|
while ok:
|
||||||
self.frame_pos += 1
|
self.frame_pos += 1
|
||||||
if self.frame_pos>=len(frames):
|
if self.frame_pos>=len(frames):
|
||||||
self.frame_pos=0
|
self.frame_pos=0
|
||||||
self.loop=1
|
self.loop=1
|
||||||
ok = False
|
ok = False
|
||||||
continue
|
continue
|
||||||
f = frames[self.frame_pos]
|
f = frames[self.frame_pos]
|
||||||
result+=f.tag_start()
|
result+=f.tag_start()
|
||||||
ok = not f.tag_end()
|
ok = not f.tag_end()
|
||||||
if ok:
|
if ok:
|
||||||
result+=f.tag_stop()
|
result+=f.tag_stop()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def frame_stop(self):
|
def frame_stop(self):
|
||||||
frames = self.page_template[self.template]
|
frames = self.page_template[self.template]
|
||||||
f = frames[self.frame_pos]
|
f = frames[self.frame_pos]
|
||||||
result=f.tag_stop()
|
result=f.tag_stop()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
def end(self):
|
def end(self):
|
||||||
result = ''
|
result = ''
|
||||||
while not self.loop:
|
while not self.loop:
|
||||||
result += self.frame_start()
|
result += self.frame_start()
|
||||||
result += self.frame_stop()
|
result += self.frame_stop()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
class _rml_doc(object):
|
class _rml_doc(object):
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.dom = xml.dom.minidom.parseString(data)
|
self.dom = xml.dom.minidom.parseString(data)
|
||||||
self.filename = self.dom.documentElement.getAttribute('filename')
|
self.filename = self.dom.documentElement.getAttribute('filename')
|
||||||
self.result = ''
|
self.result = ''
|
||||||
|
|
||||||
def render(self, out):
|
def render(self, out):
|
||||||
self.result += '''<!DOCTYPE HTML PUBLIC "-//w3c//DTD HTML 4.0 Frameset//EN">
|
self.result += '''<!DOCTYPE HTML PUBLIC "-//w3c//DTD HTML 4.0 Frameset//EN">
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<style type="text/css">
|
<style type="text/css">
|
||||||
p {margin:0px; font-size:12px;}
|
p {margin:0px; font-size:12px;}
|
||||||
td {font-size:14px;}
|
td {font-size:14px;}
|
||||||
'''
|
'''
|
||||||
style = self.dom.documentElement.getElementsByTagName('stylesheet')[0]
|
style = self.dom.documentElement.getElementsByTagName('stylesheet')[0]
|
||||||
s = _rml_stylesheet(style, self.dom)
|
s = _rml_stylesheet(style, self.dom)
|
||||||
self.result += s.render()
|
self.result += s.render()
|
||||||
self.result+='''
|
self.result+='''
|
||||||
</style>
|
</style>
|
||||||
</head>
|
</head>
|
||||||
<body>'''
|
<body>'''
|
||||||
|
|
||||||
template = _rml_template(self.dom.documentElement.getElementsByTagName('template')[0])
|
template = _rml_template(self.dom.documentElement.getElementsByTagName('template')[0])
|
||||||
f = _flowable(template, self.dom)
|
f = _flowable(template, self.dom)
|
||||||
self.result += f.render(self.dom.documentElement.getElementsByTagName('story')[0])
|
self.result += f.render(self.dom.documentElement.getElementsByTagName('story')[0])
|
||||||
del f
|
del f
|
||||||
self.result += '</body></html>'
|
self.result += '</body></html>'
|
||||||
out.write( self.result)
|
out.write( self.result)
|
||||||
|
|
||||||
def parseString(data, fout=None):
|
def parseString(data, fout=None):
|
||||||
r = _rml_doc(data)
|
r = _rml_doc(data)
|
||||||
if fout:
|
if fout:
|
||||||
fp = file(fout,'wb')
|
fp = file(fout,'wb')
|
||||||
r.render(fp)
|
r.render(fp)
|
||||||
fp.close()
|
fp.close()
|
||||||
return fout
|
return fout
|
||||||
else:
|
else:
|
||||||
fp = StringIO.StringIO()
|
fp = StringIO.StringIO()
|
||||||
r.render(fp)
|
r.render(fp)
|
||||||
return fp.getvalue()
|
return fp.getvalue()
|
||||||
|
|
||||||
def trml2pdf_help():
|
def trml2pdf_help():
|
||||||
print 'Usage: rml2html input.rml >output.html'
|
print 'Usage: rml2html input.rml >output.html'
|
||||||
print 'Render the standard input (RML) and output an HTML file'
|
print 'Render the standard input (RML) and output an HTML file'
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
if __name__=="__main__":
|
if __name__=="__main__":
|
||||||
if len(sys.argv)>1:
|
if len(sys.argv)>1:
|
||||||
if sys.argv[1]=='--help':
|
if sys.argv[1]=='--help':
|
||||||
trml2pdf_help()
|
trml2pdf_help()
|
||||||
print parseString(file(sys.argv[1], 'r').read()),
|
print parseString(file(sys.argv[1], 'r').read()),
|
||||||
else:
|
else:
|
||||||
print 'Usage: trml2pdf input.rml >output.pdf'
|
print 'Usage: trml2pdf input.rml >output.pdf'
|
||||||
print 'Try \'trml2pdf --help\' for more information.'
|
print 'Try \'trml2pdf --help\' for more information.'
|
||||||
|
|
|
@ -47,48 +47,48 @@ import reportlab
|
||||||
import reportlab.lib.units
|
import reportlab.lib.units
|
||||||
|
|
||||||
def text_get(node):
|
def text_get(node):
|
||||||
rc = ''
|
rc = ''
|
||||||
for node in node.childNodes:
|
for node in node.childNodes:
|
||||||
if node.nodeType == node.TEXT_NODE:
|
if node.nodeType == node.TEXT_NODE:
|
||||||
rc = rc + node.data
|
rc = rc + node.data
|
||||||
return rc
|
return rc
|
||||||
|
|
||||||
units = [
|
units = [
|
||||||
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
|
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
|
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
|
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*px$'), 0.7),
|
(re.compile('^(-?[0-9\.]+)\s*px$'), 0.7),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
|
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
|
||||||
]
|
]
|
||||||
|
|
||||||
def unit_get(size):
|
def unit_get(size):
|
||||||
global units
|
global units
|
||||||
for unit in units:
|
for unit in units:
|
||||||
res = unit[0].search(size, 0)
|
res = unit[0].search(size, 0)
|
||||||
if res:
|
if res:
|
||||||
return int(unit[1]*float(res.group(1))*1.3)
|
return int(unit[1]*float(res.group(1))*1.3)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def tuple_int_get(node, attr_name, default=None):
|
def tuple_int_get(node, attr_name, default=None):
|
||||||
if not node.hasAttribute(attr_name):
|
if not node.hasAttribute(attr_name):
|
||||||
return default
|
return default
|
||||||
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
|
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def bool_get(value):
|
def bool_get(value):
|
||||||
return (str(value)=="1") or (value.lower()=='yes')
|
return (str(value)=="1") or (value.lower()=='yes')
|
||||||
|
|
||||||
def attr_get(node, attrs, dict={}):
|
def attr_get(node, attrs, dict={}):
|
||||||
res = {}
|
res = {}
|
||||||
for name in attrs:
|
for name in attrs:
|
||||||
if node.hasAttribute(name):
|
if node.hasAttribute(name):
|
||||||
res[name] = unit_get(node.getAttribute(name))
|
res[name] = unit_get(node.getAttribute(name))
|
||||||
for key in dict:
|
for key in dict:
|
||||||
if node.hasAttribute(key):
|
if node.hasAttribute(key):
|
||||||
if dict[key]=='str':
|
if dict[key]=='str':
|
||||||
res[key] = str(node.getAttribute(key))
|
res[key] = str(node.getAttribute(key))
|
||||||
elif dict[key]=='bool':
|
elif dict[key]=='bool':
|
||||||
res[key] = bool_get(node.getAttribute(key))
|
res[key] = bool_get(node.getAttribute(key))
|
||||||
elif dict[key]=='int':
|
elif dict[key]=='int':
|
||||||
res[key] = int(node.getAttribute(key))
|
res[key] = int(node.getAttribute(key))
|
||||||
return res
|
return res
|
||||||
|
|
|
@ -51,13 +51,13 @@ regex_t = re.compile('\(([0-9\.]*),([0-9\.]*),([0-9\.]*)\)')
|
||||||
regex_h = re.compile('#([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])')
|
regex_h = re.compile('#([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])')
|
||||||
|
|
||||||
def get(col_str):
|
def get(col_str):
|
||||||
global allcols
|
global allcols
|
||||||
if col_str in allcols.keys():
|
if col_str in allcols.keys():
|
||||||
return allcols[col_str]
|
return allcols[col_str]
|
||||||
res = regex_t.search(col_str, 0)
|
res = regex_t.search(col_str, 0)
|
||||||
if res:
|
if res:
|
||||||
return (float(res.group(1)),float(res.group(2)),float(res.group(3)))
|
return (float(res.group(1)),float(res.group(2)),float(res.group(3)))
|
||||||
res = regex_h.search(col_str, 0)
|
res = regex_h.search(col_str, 0)
|
||||||
if res:
|
if res:
|
||||||
return tuple([ float(int(res.group(i),16))/255 for i in range(1,4)])
|
return tuple([ float(int(res.group(i),16))/255 for i in range(1,4)])
|
||||||
return colors.red
|
return colors.red
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -48,47 +48,47 @@ import re
|
||||||
import reportlab
|
import reportlab
|
||||||
|
|
||||||
def text_get(node):
|
def text_get(node):
|
||||||
rc = ''
|
rc = ''
|
||||||
for node in node.childNodes:
|
for node in node.childNodes:
|
||||||
if node.nodeType == node.TEXT_NODE:
|
if node.nodeType == node.TEXT_NODE:
|
||||||
rc = rc + node.data
|
rc = rc + node.data
|
||||||
return rc
|
return rc
|
||||||
|
|
||||||
units = [
|
units = [
|
||||||
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
|
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
|
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
|
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
|
||||||
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
|
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
|
||||||
]
|
]
|
||||||
|
|
||||||
def unit_get(size):
|
def unit_get(size):
|
||||||
global units
|
global units
|
||||||
for unit in units:
|
for unit in units:
|
||||||
res = unit[0].search(size, 0)
|
res = unit[0].search(size, 0)
|
||||||
if res:
|
if res:
|
||||||
return unit[1]*float(res.group(1))
|
return unit[1]*float(res.group(1))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def tuple_int_get(node, attr_name, default=None):
|
def tuple_int_get(node, attr_name, default=None):
|
||||||
if not node.hasAttribute(attr_name):
|
if not node.hasAttribute(attr_name):
|
||||||
return default
|
return default
|
||||||
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
|
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def bool_get(value):
|
def bool_get(value):
|
||||||
return (str(value)=="1") or (value.lower()=='yes')
|
return (str(value)=="1") or (value.lower()=='yes')
|
||||||
|
|
||||||
def attr_get(node, attrs, dict={}):
|
def attr_get(node, attrs, dict={}):
|
||||||
res = {}
|
res = {}
|
||||||
for name in attrs:
|
for name in attrs:
|
||||||
if node.hasAttribute(name):
|
if node.hasAttribute(name):
|
||||||
res[name] = unit_get(node.getAttribute(name))
|
res[name] = unit_get(node.getAttribute(name))
|
||||||
for key in dict:
|
for key in dict:
|
||||||
if node.hasAttribute(key):
|
if node.hasAttribute(key):
|
||||||
if dict[key]=='str':
|
if dict[key]=='str':
|
||||||
res[key] = str(node.getAttribute(key))
|
res[key] = str(node.getAttribute(key))
|
||||||
elif dict[key]=='bool':
|
elif dict[key]=='bool':
|
||||||
res[key] = bool_get(node.getAttribute(key))
|
res[key] = bool_get(node.getAttribute(key))
|
||||||
elif dict[key]=='int':
|
elif dict[key]=='int':
|
||||||
res[key] = int(node.getAttribute(key))
|
res[key] = int(node.getAttribute(key))
|
||||||
return res
|
return res
|
||||||
|
|
|
@ -40,52 +40,52 @@ import reportlab.lib
|
||||||
import copy
|
import copy
|
||||||
|
|
||||||
class simple(render.render):
|
class simple(render.render):
|
||||||
def _render(self):
|
def _render(self):
|
||||||
self.result = StringIO()
|
self.result = StringIO()
|
||||||
parser = xml.dom.minidom.parseString(self.xml)
|
parser = xml.dom.minidom.parseString(self.xml)
|
||||||
|
|
||||||
title = parser.documentElement.tagName
|
title = parser.documentElement.tagName
|
||||||
doc = SimpleDocTemplate(self.result, pagesize=A4, title=title,
|
doc = SimpleDocTemplate(self.result, pagesize=A4, title=title,
|
||||||
author='Tiny ERP, Fabien Pinckaers', leftmargin=10*mm, rightmargin=10*mm)
|
author='Tiny ERP, Fabien Pinckaers', leftmargin=10*mm, rightmargin=10*mm)
|
||||||
|
|
||||||
styles = reportlab.lib.styles.getSampleStyleSheet()
|
styles = reportlab.lib.styles.getSampleStyleSheet()
|
||||||
title_style = copy.deepcopy(styles["Heading1"])
|
title_style = copy.deepcopy(styles["Heading1"])
|
||||||
title_style.alignment = reportlab.lib.enums.TA_CENTER
|
title_style.alignment = reportlab.lib.enums.TA_CENTER
|
||||||
story = [ Paragraph(title, title_style) ]
|
story = [ Paragraph(title, title_style) ]
|
||||||
style_level = {}
|
style_level = {}
|
||||||
nodes = [ (parser.documentElement,0) ]
|
nodes = [ (parser.documentElement,0) ]
|
||||||
while len(nodes):
|
while len(nodes):
|
||||||
node = nodes.pop(0)
|
node = nodes.pop(0)
|
||||||
value = ''
|
value = ''
|
||||||
n=len(node[0].childNodes)-1
|
n=len(node[0].childNodes)-1
|
||||||
while n>=0:
|
while n>=0:
|
||||||
if node[0].childNodes[n].nodeType==3:
|
if node[0].childNodes[n].nodeType==3:
|
||||||
value += node[0].childNodes[n].nodeValue
|
value += node[0].childNodes[n].nodeValue
|
||||||
else:
|
else:
|
||||||
nodes.insert( 0, (node[0].childNodes[n], node[1]+1) )
|
nodes.insert( 0, (node[0].childNodes[n], node[1]+1) )
|
||||||
n-=1
|
n-=1
|
||||||
if not node[1] in style_level:
|
if not node[1] in style_level:
|
||||||
style = copy.deepcopy(styles["Normal"])
|
style = copy.deepcopy(styles["Normal"])
|
||||||
style.leftIndent=node[1]*6*mm
|
style.leftIndent=node[1]*6*mm
|
||||||
style.firstLineIndent=-3*mm
|
style.firstLineIndent=-3*mm
|
||||||
style_level[node[1]] = style
|
style_level[node[1]] = style
|
||||||
story.append( Paragraph('<b>%s</b>: %s' % (node[0].tagName, value), style_level[node[1]]))
|
story.append( Paragraph('<b>%s</b>: %s' % (node[0].tagName, value), style_level[node[1]]))
|
||||||
doc.build(story)
|
doc.build(story)
|
||||||
return self.result.getvalue()
|
return self.result.getvalue()
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
import time
|
import time
|
||||||
s = simple('''<test>
|
s = simple('''<test>
|
||||||
<author-list>
|
<author-list>
|
||||||
<author>
|
<author>
|
||||||
<name>Fabien Pinckaers</name>
|
<name>Fabien Pinckaers</name>
|
||||||
<age>23</age>
|
<age>23</age>
|
||||||
</author>
|
</author>
|
||||||
<author>
|
<author>
|
||||||
<name>Michel Pinckaers</name>
|
<name>Michel Pinckaers</name>
|
||||||
<age>53</age>
|
<age>53</age>
|
||||||
</author>
|
</author>
|
||||||
No other
|
No other
|
||||||
</author-list>
|
</author-list>
|
||||||
</test>''')
|
</test>''')
|
||||||
print s.render()
|
print s.render()
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -32,44 +32,44 @@ import tools
|
||||||
_uid_cache = {}
|
_uid_cache = {}
|
||||||
|
|
||||||
def login(db, login, password):
|
def login(db, login, password):
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
cr.execute('select id from res_users where login=%s and password=%s and active', (login.encode('utf-8'), password.encode('utf-8')))
|
cr.execute('select id from res_users where login=%s and password=%s and active', (login.encode('utf-8'), password.encode('utf-8')))
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
cr.close()
|
cr.close()
|
||||||
if res:
|
if res:
|
||||||
return res[0]
|
return res[0]
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def check_super(passwd):
|
def check_super(passwd):
|
||||||
if passwd == tools.config['admin_passwd']:
|
if passwd == tools.config['admin_passwd']:
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
raise Exception('AccessDenied')
|
raise Exception('AccessDenied')
|
||||||
|
|
||||||
def check(db, uid, passwd):
|
def check(db, uid, passwd):
|
||||||
if _uid_cache.get(db, {}).get(uid) == passwd:
|
if _uid_cache.get(db, {}).get(uid) == passwd:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
cr.execute('select count(*) from res_users where id=%d and password=%s', (int(uid), passwd))
|
cr.execute('select count(*) from res_users where id=%d and password=%s', (int(uid), passwd))
|
||||||
res = cr.fetchone()[0]
|
res = cr.fetchone()[0]
|
||||||
cr.close()
|
cr.close()
|
||||||
if not bool(res):
|
if not bool(res):
|
||||||
raise Exception('AccessDenied')
|
raise Exception('AccessDenied')
|
||||||
if res:
|
if res:
|
||||||
if _uid_cache.has_key(db):
|
if _uid_cache.has_key(db):
|
||||||
ulist = _uid_cache[db]
|
ulist = _uid_cache[db]
|
||||||
ulist[uid] = passwd
|
ulist[uid] = passwd
|
||||||
else:
|
else:
|
||||||
_uid_cache[db] = {uid:passwd}
|
_uid_cache[db] = {uid:passwd}
|
||||||
return bool(res)
|
return bool(res)
|
||||||
|
|
||||||
def access(db, uid, passwd, sec_level, ids):
|
def access(db, uid, passwd, sec_level, ids):
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
|
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
cr.close()
|
cr.close()
|
||||||
if not res:
|
if not res:
|
||||||
raise Exception('Bad username or password')
|
raise Exception('Bad username or password')
|
||||||
return res[0]
|
return res[0]
|
||||||
|
|
|
@ -45,292 +45,292 @@ from tools.translate import _
|
||||||
logging.basicConfig()
|
logging.basicConfig()
|
||||||
|
|
||||||
class db(netsvc.Service):
|
class db(netsvc.Service):
|
||||||
def __init__(self, name="db"):
|
def __init__(self, name="db"):
|
||||||
netsvc.Service.__init__(self, name)
|
netsvc.Service.__init__(self, name)
|
||||||
self.joinGroup("web-services")
|
self.joinGroup("web-services")
|
||||||
self.exportMethod(self.create)
|
self.exportMethod(self.create)
|
||||||
self.exportMethod(self.get_progress)
|
self.exportMethod(self.get_progress)
|
||||||
self.exportMethod(self.drop)
|
self.exportMethod(self.drop)
|
||||||
self.exportMethod(self.dump)
|
self.exportMethod(self.dump)
|
||||||
self.exportMethod(self.restore)
|
self.exportMethod(self.restore)
|
||||||
self.exportMethod(self.list)
|
self.exportMethod(self.list)
|
||||||
self.exportMethod(self.list_lang)
|
self.exportMethod(self.list_lang)
|
||||||
self.exportMethod(self.change_admin_password)
|
self.exportMethod(self.change_admin_password)
|
||||||
self.actions = {}
|
self.actions = {}
|
||||||
self.id = 0
|
self.id = 0
|
||||||
self.id_protect = threading.Semaphore()
|
self.id_protect = threading.Semaphore()
|
||||||
|
|
||||||
def create(self, password, db_name, demo, lang):
|
def create(self, password, db_name, demo, lang):
|
||||||
security.check_super(password)
|
security.check_super(password)
|
||||||
self.id_protect.acquire()
|
self.id_protect.acquire()
|
||||||
self.id += 1
|
self.id += 1
|
||||||
id = self.id
|
id = self.id
|
||||||
self.id_protect.release()
|
self.id_protect.release()
|
||||||
|
|
||||||
self.actions[id] = {'clean': False}
|
self.actions[id] = {'clean': False}
|
||||||
|
|
||||||
db = sql_db.db_connect('template1', serialize=1)
|
db = sql_db.db_connect('template1', serialize=1)
|
||||||
db.truedb.autocommit()
|
db.truedb.autocommit()
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
|
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
|
||||||
cr.close()
|
cr.close()
|
||||||
class DBInitialize(object):
|
class DBInitialize(object):
|
||||||
def __call__(self, serv, id, db_name, demo, lang):
|
def __call__(self, serv, id, db_name, demo, lang):
|
||||||
try:
|
try:
|
||||||
serv.actions[id]['progress'] = 0
|
serv.actions[id]['progress'] = 0
|
||||||
clean = False
|
clean = False
|
||||||
cr = sql_db.db_connect(db_name).cursor()
|
cr = sql_db.db_connect(db_name).cursor()
|
||||||
tools.init_db(cr)
|
tools.init_db(cr)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
cr = None
|
cr = None
|
||||||
pool = pooler.get_pool(db_name, demo,serv.actions[id],
|
pool = pooler.get_pool(db_name, demo,serv.actions[id],
|
||||||
update_module=True)
|
update_module=True)
|
||||||
if lang and lang != 'en_US':
|
if lang and lang != 'en_US':
|
||||||
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
|
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
|
||||||
tools.trans_load(db_name, filename, lang)
|
tools.trans_load(db_name, filename, lang)
|
||||||
serv.actions[id]['clean'] = True
|
serv.actions[id]['clean'] = True
|
||||||
cr = sql_db.db_connect(db_name).cursor()
|
cr = sql_db.db_connect(db_name).cursor()
|
||||||
cr.execute('select login, password, name ' \
|
cr.execute('select login, password, name ' \
|
||||||
'from res_users ' \
|
'from res_users ' \
|
||||||
'where login <> \'root\' order by login')
|
'where login <> \'root\' order by login')
|
||||||
serv.actions[id]['users'] = cr.dictfetchall()
|
serv.actions[id]['users'] = cr.dictfetchall()
|
||||||
cr.close()
|
cr.close()
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
serv.actions[id]['clean'] = False
|
serv.actions[id]['clean'] = False
|
||||||
serv.actions[id]['exception'] = e
|
serv.actions[id]['exception'] = e
|
||||||
from StringIO import StringIO
|
from StringIO import StringIO
|
||||||
import traceback
|
import traceback
|
||||||
e_str = StringIO()
|
e_str = StringIO()
|
||||||
traceback.print_exc(file=e_str)
|
traceback.print_exc(file=e_str)
|
||||||
traceback_str = e_str.getvalue()
|
traceback_str = e_str.getvalue()
|
||||||
e_str.close()
|
e_str.close()
|
||||||
print traceback_str
|
print traceback_str
|
||||||
serv.actions[id]['traceback'] = traceback_str
|
serv.actions[id]['traceback'] = traceback_str
|
||||||
if cr:
|
if cr:
|
||||||
cr.close()
|
cr.close()
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
||||||
'CREATE DB: %s' % (db_name))
|
'CREATE DB: %s' % (db_name))
|
||||||
dbi = DBInitialize()
|
dbi = DBInitialize()
|
||||||
create_thread = threading.Thread(target=dbi,
|
create_thread = threading.Thread(target=dbi,
|
||||||
args=(self, id, db_name, demo, lang))
|
args=(self, id, db_name, demo, lang))
|
||||||
create_thread.start()
|
create_thread.start()
|
||||||
self.actions[id]['thread'] = create_thread
|
self.actions[id]['thread'] = create_thread
|
||||||
return id
|
return id
|
||||||
|
|
||||||
def get_progress(self, password, id):
|
def get_progress(self, password, id):
|
||||||
security.check_super(password)
|
security.check_super(password)
|
||||||
if self.actions[id]['thread'].isAlive():
|
if self.actions[id]['thread'].isAlive():
|
||||||
# return addons.init_progress[db_name]
|
# return addons.init_progress[db_name]
|
||||||
return (min(self.actions[id].get('progress', 0),0.95), [])
|
return (min(self.actions[id].get('progress', 0),0.95), [])
|
||||||
else:
|
else:
|
||||||
clean = self.actions[id]['clean']
|
clean = self.actions[id]['clean']
|
||||||
if clean:
|
if clean:
|
||||||
users = self.actions[id]['users']
|
users = self.actions[id]['users']
|
||||||
del self.actions[id]
|
del self.actions[id]
|
||||||
return (1.0, users)
|
return (1.0, users)
|
||||||
else:
|
else:
|
||||||
e = self.actions[id]['exception']
|
e = self.actions[id]['exception']
|
||||||
del self.actions[id]
|
del self.actions[id]
|
||||||
raise Exception, e
|
raise Exception, e
|
||||||
|
|
||||||
def drop(self, password, db_name):
|
def drop(self, password, db_name):
|
||||||
security.check_super(password)
|
security.check_super(password)
|
||||||
pooler.close_db(db_name)
|
pooler.close_db(db_name)
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
||||||
db = sql_db.db_connect('template1', serialize=1)
|
db = sql_db.db_connect('template1', serialize=1)
|
||||||
db.truedb.autocommit()
|
db.truedb.autocommit()
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
cr.execute('DROP DATABASE ' + db_name)
|
cr.execute('DROP DATABASE ' + db_name)
|
||||||
except:
|
except:
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
||||||
'DROP DB: %s failed' % (db_name,))
|
'DROP DB: %s failed' % (db_name,))
|
||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
||||||
'DROP DB: %s' % (db_name))
|
'DROP DB: %s' % (db_name))
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def dump(self, password, db_name):
|
def dump(self, password, db_name):
|
||||||
security.check_super(password)
|
security.check_super(password)
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
||||||
if tools.config['db_password']:
|
if tools.config['db_password']:
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
||||||
'DUMP DB: %s doesn\'t work with password' % (db_name,))
|
'DUMP DB: %s doesn\'t work with password' % (db_name,))
|
||||||
raise Exception, "Couldn't dump database with password"
|
raise Exception, "Couldn't dump database with password"
|
||||||
|
|
||||||
cmd = ['pg_dump', '--format=c']
|
cmd = ['pg_dump', '--format=c']
|
||||||
if tools.config['db_user']:
|
if tools.config['db_user']:
|
||||||
cmd.append('--username=' + tools.config['db_user'])
|
cmd.append('--username=' + tools.config['db_user'])
|
||||||
if tools.config['db_host']:
|
if tools.config['db_host']:
|
||||||
cmd.append('--host=' + tools.config['db_host'])
|
cmd.append('--host=' + tools.config['db_host'])
|
||||||
if tools.config['db_port']:
|
if tools.config['db_port']:
|
||||||
cmd.append('--port=' + tools.config['db_port'])
|
cmd.append('--port=' + tools.config['db_port'])
|
||||||
cmd.append(db_name)
|
cmd.append(db_name)
|
||||||
|
|
||||||
stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
|
stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
|
||||||
stdin.close()
|
stdin.close()
|
||||||
data = stdout.read()
|
data = stdout.read()
|
||||||
res = stdout.close()
|
res = stdout.close()
|
||||||
if res:
|
if res:
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
||||||
'DUMP DB: %s failed\n%s' % (db_name, data))
|
'DUMP DB: %s failed\n%s' % (db_name, data))
|
||||||
raise Exception, "Couldn't dump database"
|
raise Exception, "Couldn't dump database"
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
||||||
'DUMP DB: %s' % (db_name))
|
'DUMP DB: %s' % (db_name))
|
||||||
return base64.encodestring(data)
|
return base64.encodestring(data)
|
||||||
|
|
||||||
def restore(self, password, db_name, data):
|
def restore(self, password, db_name, data):
|
||||||
security.check_super(password)
|
security.check_super(password)
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
|
|
||||||
if self.db_exist(db_name):
|
if self.db_exist(db_name):
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_WARNING,
|
logger.notifyChannel("web-service", netsvc.LOG_WARNING,
|
||||||
'RESTORE DB: %s already exists' % (db_name,))
|
'RESTORE DB: %s already exists' % (db_name,))
|
||||||
raise Exception, "Database already exists"
|
raise Exception, "Database already exists"
|
||||||
|
|
||||||
if tools.config['db_password']:
|
if tools.config['db_password']:
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
|
||||||
'RESTORE DB: %s doesn\'t work with password' % (db_name,))
|
'RESTORE DB: %s doesn\'t work with password' % (db_name,))
|
||||||
raise Exception, "Couldn't restore database with password"
|
raise Exception, "Couldn't restore database with password"
|
||||||
|
|
||||||
db = sql_db.db_connect('template1', serialize=1)
|
db = sql_db.db_connect('template1', serialize=1)
|
||||||
db.truedb.autocommit()
|
db.truedb.autocommit()
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
|
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
||||||
cmd = ['pg_restore']
|
cmd = ['pg_restore']
|
||||||
if tools.config['db_user']:
|
if tools.config['db_user']:
|
||||||
cmd.append('--username=' + tools.config['db_user'])
|
cmd.append('--username=' + tools.config['db_user'])
|
||||||
if tools.config['db_host']:
|
if tools.config['db_host']:
|
||||||
cmd.append('--host=' + tools.config['db_host'])
|
cmd.append('--host=' + tools.config['db_host'])
|
||||||
if tools.config['db_port']:
|
if tools.config['db_port']:
|
||||||
cmd.append('--port=' + tools.config['db_port'])
|
cmd.append('--port=' + tools.config['db_port'])
|
||||||
cmd.append('--dbname=' + db_name)
|
cmd.append('--dbname=' + db_name)
|
||||||
args2 = tuple(cmd)
|
args2 = tuple(cmd)
|
||||||
|
|
||||||
buf=base64.decodestring(data)
|
buf=base64.decodestring(data)
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
tmpfile = (os.environ['TMP'] or 'C:\\') + os.tmpnam()
|
tmpfile = (os.environ['TMP'] or 'C:\\') + os.tmpnam()
|
||||||
file(tmpfile, 'wb').write(buf)
|
file(tmpfile, 'wb').write(buf)
|
||||||
args2=list(args2)
|
args2=list(args2)
|
||||||
args2.append(' ' + tmpfile)
|
args2.append(' ' + tmpfile)
|
||||||
args2=tuple(args2)
|
args2=tuple(args2)
|
||||||
stdin, stdout = tools.exec_pg_command_pipe(*args2)
|
stdin, stdout = tools.exec_pg_command_pipe(*args2)
|
||||||
if not os.name == "nt":
|
if not os.name == "nt":
|
||||||
stdin.write(base64.decodestring(data))
|
stdin.write(base64.decodestring(data))
|
||||||
stdin.close()
|
stdin.close()
|
||||||
res = stdout.close()
|
res = stdout.close()
|
||||||
if res:
|
if res:
|
||||||
raise Exception, "Couldn't restore database"
|
raise Exception, "Couldn't restore database"
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
||||||
'RESTORE DB: %s' % (db_name))
|
'RESTORE DB: %s' % (db_name))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def db_exist(self, db_name):
|
def db_exist(self, db_name):
|
||||||
try:
|
try:
|
||||||
db = sql_db.db_connect(db_name)
|
db = sql_db.db_connect(db_name)
|
||||||
db.truedb.close()
|
db.truedb.close()
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def list(self):
|
def list(self):
|
||||||
db = sql_db.db_connect('template1')
|
db = sql_db.db_connect('template1')
|
||||||
try:
|
try:
|
||||||
cr = db.cursor()
|
cr = db.cursor()
|
||||||
db_user = tools.config["db_user"]
|
db_user = tools.config["db_user"]
|
||||||
if not db_user and os.name == 'posix':
|
if not db_user and os.name == 'posix':
|
||||||
import pwd
|
import pwd
|
||||||
db_user = pwd.getpwuid(os.getuid())[0]
|
db_user = pwd.getpwuid(os.getuid())[0]
|
||||||
if not db_user:
|
if not db_user:
|
||||||
cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
|
cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
|
||||||
res = cr.fetchone()
|
res = cr.fetchone()
|
||||||
db_user = res and res[0]
|
db_user = res and res[0]
|
||||||
if db_user:
|
if db_user:
|
||||||
cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres')", (db_user,))
|
cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres')", (db_user,))
|
||||||
else:
|
else:
|
||||||
cr.execute("select datname from pg_database where datname not in('template0', 'template1','postgres')")
|
cr.execute("select datname from pg_database where datname not in('template0', 'template1','postgres')")
|
||||||
res = [name for (name,) in cr.fetchall()]
|
res = [name for (name,) in cr.fetchall()]
|
||||||
cr.close()
|
cr.close()
|
||||||
except:
|
except:
|
||||||
res = []
|
res = []
|
||||||
db.truedb.close()
|
db.truedb.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def change_admin_password(self, old_password, new_password):
|
def change_admin_password(self, old_password, new_password):
|
||||||
security.check_super(old_password)
|
security.check_super(old_password)
|
||||||
tools.config['admin_passwd'] = new_password
|
tools.config['admin_passwd'] = new_password
|
||||||
tools.config.save()
|
tools.config.save()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def list_lang(self):
|
def list_lang(self):
|
||||||
return tools.scan_languages()
|
return tools.scan_languages()
|
||||||
import glob
|
import glob
|
||||||
file_list = glob.glob(os.path.join(tools.config['root_path'], 'i18n', '*.csv'))
|
file_list = glob.glob(os.path.join(tools.config['root_path'], 'i18n', '*.csv'))
|
||||||
def lang_tuple(fname):
|
def lang_tuple(fname):
|
||||||
lang_dict=tools.get_languages()
|
lang_dict=tools.get_languages()
|
||||||
lang = os.path.basename(fname).split(".")[0]
|
lang = os.path.basename(fname).split(".")[0]
|
||||||
return (lang, lang_dict.get(lang, lang))
|
return (lang, lang_dict.get(lang, lang))
|
||||||
return [lang_tuple(fname) for fname in file_list]
|
return [lang_tuple(fname) for fname in file_list]
|
||||||
db()
|
db()
|
||||||
|
|
||||||
class common(netsvc.Service):
|
class common(netsvc.Service):
|
||||||
def __init__(self,name="common"):
|
def __init__(self,name="common"):
|
||||||
netsvc.Service.__init__(self,name)
|
netsvc.Service.__init__(self,name)
|
||||||
self.joinGroup("web-services")
|
self.joinGroup("web-services")
|
||||||
self.exportMethod(self.ir_get)
|
self.exportMethod(self.ir_get)
|
||||||
self.exportMethod(self.ir_set)
|
self.exportMethod(self.ir_set)
|
||||||
self.exportMethod(self.ir_del)
|
self.exportMethod(self.ir_del)
|
||||||
self.exportMethod(self.about)
|
self.exportMethod(self.about)
|
||||||
self.exportMethod(self.login)
|
self.exportMethod(self.login)
|
||||||
self.exportMethod(self.timezone_get)
|
self.exportMethod(self.timezone_get)
|
||||||
|
|
||||||
def ir_set(self, db, uid, password, keys, args, name, value, replace=True, isobject=False):
|
def ir_set(self, db, uid, password, keys, args, name, value, replace=True, isobject=False):
|
||||||
security.check(db, uid, password)
|
security.check(db, uid, password)
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
res = ir.ir_set(cr,uid, keys, args, name, value, replace, isobject)
|
res = ir.ir_set(cr,uid, keys, args, name, value, replace, isobject)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def ir_del(self, db, uid, password, id):
|
def ir_del(self, db, uid, password, id):
|
||||||
security.check(db, uid, password)
|
security.check(db, uid, password)
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
res = ir.ir_del(cr,uid, id)
|
res = ir.ir_del(cr,uid, id)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def ir_get(self, db, uid, password, keys, args=None, meta=None, context=None):
|
def ir_get(self, db, uid, password, keys, args=None, meta=None, context=None):
|
||||||
if not args:
|
if not args:
|
||||||
args=[]
|
args=[]
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
security.check(db, uid, password)
|
security.check(db, uid, password)
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
res = ir.ir_get(cr,uid, keys, args, meta, context)
|
res = ir.ir_get(cr,uid, keys, args, meta, context)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def login(self, db, login, password):
|
def login(self, db, login, password):
|
||||||
res = security.login(db, login, password)
|
res = security.login(db, login, password)
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
msg = res and 'successful login' or 'bad login or password'
|
msg = res and 'successful login' or 'bad login or password'
|
||||||
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db))
|
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db))
|
||||||
return res or False
|
return res or False
|
||||||
|
|
||||||
def about(self):
|
def about(self):
|
||||||
return _('''
|
return _('''
|
||||||
|
|
||||||
OpenERP is an ERP+CRM program for small and medium businesses.
|
OpenERP is an ERP+CRM program for small and medium businesses.
|
||||||
|
|
||||||
|
@ -339,35 +339,35 @@ GNU Public Licence.
|
||||||
|
|
||||||
(c) 2003-TODAY, Fabien Pinckaers - Tiny sprl''')
|
(c) 2003-TODAY, Fabien Pinckaers - Tiny sprl''')
|
||||||
|
|
||||||
def timezone_get(self, db, login, password):
|
def timezone_get(self, db, login, password):
|
||||||
return time.tzname[0]
|
return time.tzname[0]
|
||||||
common()
|
common()
|
||||||
|
|
||||||
class objects_proxy(netsvc.Service):
|
class objects_proxy(netsvc.Service):
|
||||||
def __init__(self, name="object"):
|
def __init__(self, name="object"):
|
||||||
netsvc.Service.__init__(self,name)
|
netsvc.Service.__init__(self,name)
|
||||||
self.joinGroup('web-services')
|
self.joinGroup('web-services')
|
||||||
self.exportMethod(self.execute)
|
self.exportMethod(self.execute)
|
||||||
self.exportMethod(self.exec_workflow)
|
self.exportMethod(self.exec_workflow)
|
||||||
self.exportMethod(self.obj_list)
|
self.exportMethod(self.obj_list)
|
||||||
|
|
||||||
def exec_workflow(self, db, uid, passwd, object, method, id):
|
def exec_workflow(self, db, uid, passwd, object, method, id):
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
res = service.exec_workflow(db, uid, object, method, id)
|
res = service.exec_workflow(db, uid, object, method, id)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def execute(self, db, uid, passwd, object, method, *args):
|
def execute(self, db, uid, passwd, object, method, *args):
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
res = service.execute(db, uid, object, method, *args)
|
res = service.execute(db, uid, object, method, *args)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def obj_list(self, db, uid, passwd):
|
def obj_list(self, db, uid, passwd):
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
service = netsvc.LocalService("object_proxy")
|
service = netsvc.LocalService("object_proxy")
|
||||||
res = service.obj_list()
|
res = service.obj_list()
|
||||||
return res
|
return res
|
||||||
objects_proxy()
|
objects_proxy()
|
||||||
|
|
||||||
|
|
||||||
|
@ -383,44 +383,44 @@ objects_proxy()
|
||||||
# TODO: change local request to OSE request/reply pattern
|
# TODO: change local request to OSE request/reply pattern
|
||||||
#
|
#
|
||||||
class wizard(netsvc.Service):
|
class wizard(netsvc.Service):
|
||||||
def __init__(self, name='wizard'):
|
def __init__(self, name='wizard'):
|
||||||
netsvc.Service.__init__(self,name)
|
netsvc.Service.__init__(self,name)
|
||||||
self.joinGroup('web-services')
|
self.joinGroup('web-services')
|
||||||
self.exportMethod(self.execute)
|
self.exportMethod(self.execute)
|
||||||
self.exportMethod(self.create)
|
self.exportMethod(self.create)
|
||||||
self.id = 0
|
self.id = 0
|
||||||
self.wiz_datas = {}
|
self.wiz_datas = {}
|
||||||
self.wiz_name = {}
|
self.wiz_name = {}
|
||||||
self.wiz_uid = {}
|
self.wiz_uid = {}
|
||||||
|
|
||||||
def _execute(self, db, uid, wiz_id, datas, action, context):
|
def _execute(self, db, uid, wiz_id, datas, action, context):
|
||||||
self.wiz_datas[wiz_id].update(datas)
|
self.wiz_datas[wiz_id].update(datas)
|
||||||
wiz = netsvc.LocalService('wizard.'+self.wiz_name[wiz_id])
|
wiz = netsvc.LocalService('wizard.'+self.wiz_name[wiz_id])
|
||||||
return wiz.execute(db, uid, self.wiz_datas[wiz_id], action, context)
|
return wiz.execute(db, uid, self.wiz_datas[wiz_id], action, context)
|
||||||
|
|
||||||
def create(self, db, uid, passwd, wiz_name, datas=None):
|
def create(self, db, uid, passwd, wiz_name, datas=None):
|
||||||
if not datas:
|
if not datas:
|
||||||
datas={}
|
datas={}
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
#FIXME: this is not thread-safe
|
#FIXME: this is not thread-safe
|
||||||
self.id += 1
|
self.id += 1
|
||||||
self.wiz_datas[self.id] = {}
|
self.wiz_datas[self.id] = {}
|
||||||
self.wiz_name[self.id] = wiz_name
|
self.wiz_name[self.id] = wiz_name
|
||||||
self.wiz_uid[self.id] = uid
|
self.wiz_uid[self.id] = uid
|
||||||
return self.id
|
return self.id
|
||||||
|
|
||||||
def execute(self, db, uid, passwd, wiz_id, datas, action='init', context=None):
|
def execute(self, db, uid, passwd, wiz_id, datas, action='init', context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
|
|
||||||
if wiz_id in self.wiz_uid:
|
if wiz_id in self.wiz_uid:
|
||||||
if self.wiz_uid[wiz_id] == uid:
|
if self.wiz_uid[wiz_id] == uid:
|
||||||
return self._execute(db, uid, wiz_id, datas, action, context)
|
return self._execute(db, uid, wiz_id, datas, action, context)
|
||||||
else:
|
else:
|
||||||
raise Exception, 'AccessDenied'
|
raise Exception, 'AccessDenied'
|
||||||
else:
|
else:
|
||||||
raise Exception, 'WizardNotFound'
|
raise Exception, 'WizardNotFound'
|
||||||
wizard()
|
wizard()
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -430,85 +430,85 @@ wizard()
|
||||||
# False -> True
|
# False -> True
|
||||||
#
|
#
|
||||||
class report_spool(netsvc.Service):
|
class report_spool(netsvc.Service):
|
||||||
def __init__(self, name='report'):
|
def __init__(self, name='report'):
|
||||||
netsvc.Service.__init__(self, name)
|
netsvc.Service.__init__(self, name)
|
||||||
self.joinGroup('web-services')
|
self.joinGroup('web-services')
|
||||||
self.exportMethod(self.report)
|
self.exportMethod(self.report)
|
||||||
self.exportMethod(self.report_get)
|
self.exportMethod(self.report_get)
|
||||||
self._reports = {}
|
self._reports = {}
|
||||||
self.id = 0
|
self.id = 0
|
||||||
self.id_protect = threading.Semaphore()
|
self.id_protect = threading.Semaphore()
|
||||||
|
|
||||||
def report(self, db, uid, passwd, object, ids, datas=None, context=None):
|
def report(self, db, uid, passwd, object, ids, datas=None, context=None):
|
||||||
if not datas:
|
if not datas:
|
||||||
datas={}
|
datas={}
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
|
|
||||||
self.id_protect.acquire()
|
self.id_protect.acquire()
|
||||||
self.id += 1
|
self.id += 1
|
||||||
id = self.id
|
id = self.id
|
||||||
self.id_protect.release()
|
self.id_protect.release()
|
||||||
|
|
||||||
self._reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None}
|
self._reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None}
|
||||||
|
|
||||||
def go(id, uid, ids, datas, context):
|
def go(id, uid, ids, datas, context):
|
||||||
try:
|
try:
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
obj = netsvc.LocalService('report.'+object)
|
obj = netsvc.LocalService('report.'+object)
|
||||||
(result, format) = obj.create(cr, uid, ids, datas, context)
|
(result, format) = obj.create(cr, uid, ids, datas, context)
|
||||||
cr.close()
|
cr.close()
|
||||||
self._reports[id]['result'] = result
|
self._reports[id]['result'] = result
|
||||||
self._reports[id]['format'] = format
|
self._reports[id]['format'] = format
|
||||||
self._reports[id]['state'] = True
|
self._reports[id]['state'] = True
|
||||||
except Exception, exception:
|
except Exception, exception:
|
||||||
import traceback
|
import traceback
|
||||||
import sys
|
import sys
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
||||||
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
logger.notifyChannel('web-service', netsvc.LOG_ERROR,
|
logger.notifyChannel('web-service', netsvc.LOG_ERROR,
|
||||||
'Exception: %s\n%s' % (str(exception), tb_s))
|
'Exception: %s\n%s' % (str(exception), tb_s))
|
||||||
self._reports[id]['exception'] = exception
|
self._reports[id]['exception'] = exception
|
||||||
self._reports[id]['state'] = True
|
self._reports[id]['state'] = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
thread.start_new_thread(go, (id, uid, ids, datas, context))
|
thread.start_new_thread(go, (id, uid, ids, datas, context))
|
||||||
return id
|
return id
|
||||||
|
|
||||||
def _check_report(self, report_id):
|
def _check_report(self, report_id):
|
||||||
result = self._reports[report_id]
|
result = self._reports[report_id]
|
||||||
if result['exception']:
|
if result['exception']:
|
||||||
raise result['exception']
|
raise result['exception']
|
||||||
res = {'state': result['state']}
|
res = {'state': result['state']}
|
||||||
if res['state']:
|
if res['state']:
|
||||||
if tools.config['reportgz']:
|
if tools.config['reportgz']:
|
||||||
import zlib
|
import zlib
|
||||||
res2 = zlib.compress(result['result'])
|
res2 = zlib.compress(result['result'])
|
||||||
res['code'] = 'zlib'
|
res['code'] = 'zlib'
|
||||||
else:
|
else:
|
||||||
#CHECKME: why is this needed???
|
#CHECKME: why is this needed???
|
||||||
if isinstance(result['result'], unicode):
|
if isinstance(result['result'], unicode):
|
||||||
res2 = result['result'].encode('latin1', 'replace')
|
res2 = result['result'].encode('latin1', 'replace')
|
||||||
else:
|
else:
|
||||||
res2 = result['result']
|
res2 = result['result']
|
||||||
if res2:
|
if res2:
|
||||||
res['result'] = base64.encodestring(res2)
|
res['result'] = base64.encodestring(res2)
|
||||||
res['format'] = result['format']
|
res['format'] = result['format']
|
||||||
del self._reports[report_id]
|
del self._reports[report_id]
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def report_get(self, db, uid, passwd, report_id):
|
def report_get(self, db, uid, passwd, report_id):
|
||||||
security.check(db, uid, passwd)
|
security.check(db, uid, passwd)
|
||||||
|
|
||||||
if report_id in self._reports:
|
if report_id in self._reports:
|
||||||
if self._reports[report_id]['uid'] == uid:
|
if self._reports[report_id]['uid'] == uid:
|
||||||
return self._check_report(report_id)
|
return self._check_report(report_id)
|
||||||
else:
|
else:
|
||||||
raise Exception, 'AccessDenied'
|
raise Exception, 'AccessDenied'
|
||||||
else:
|
else:
|
||||||
raise Exception, 'ReportNotFound'
|
raise Exception, 'ReportNotFound'
|
||||||
|
|
||||||
report_spool()
|
report_spool()
|
||||||
|
|
||||||
|
|
186
bin/sql_db.py
186
bin/sql_db.py
|
@ -31,9 +31,9 @@ import tools
|
||||||
import sys,os
|
import sys,os
|
||||||
|
|
||||||
#try:
|
#try:
|
||||||
# import decimal
|
# import decimal
|
||||||
#except ImportError:
|
#except ImportError:
|
||||||
# from tools import decimal
|
# from tools import decimal
|
||||||
|
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
@ -43,112 +43,112 @@ re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
|
||||||
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$');
|
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$');
|
||||||
|
|
||||||
class fake_cursor:
|
class fake_cursor:
|
||||||
nbr = 0
|
nbr = 0
|
||||||
_tables = {}
|
_tables = {}
|
||||||
sql_from_log = {}
|
sql_from_log = {}
|
||||||
sql_into_log = {}
|
sql_into_log = {}
|
||||||
sql_log = False
|
sql_log = False
|
||||||
count = 0
|
count = 0
|
||||||
|
|
||||||
def __init__(self, db, con, dbname):
|
def __init__(self, db, con, dbname):
|
||||||
self.db = db
|
self.db = db
|
||||||
self.obj = db.cursor()
|
self.obj = db.cursor()
|
||||||
self.con = con
|
self.con = con
|
||||||
self.dbname = dbname
|
self.dbname = dbname
|
||||||
|
|
||||||
def execute(self, sql, params=None):
|
def execute(self, sql, params=None):
|
||||||
if not params:
|
if not params:
|
||||||
params=()
|
params=()
|
||||||
def base_string(s):
|
def base_string(s):
|
||||||
if isinstance(s, unicode):
|
if isinstance(s, unicode):
|
||||||
return s.encode('utf-8')
|
return s.encode('utf-8')
|
||||||
return s
|
return s
|
||||||
p=map(base_string, params)
|
p=map(base_string, params)
|
||||||
if isinstance(sql, unicode):
|
if isinstance(sql, unicode):
|
||||||
sql = sql.encode('utf-8')
|
sql = sql.encode('utf-8')
|
||||||
if self.sql_log:
|
if self.sql_log:
|
||||||
now = mdt.now()
|
now = mdt.now()
|
||||||
if p:
|
if p:
|
||||||
res = self.obj.execute(sql, p)
|
res = self.obj.execute(sql, p)
|
||||||
else:
|
else:
|
||||||
res = self.obj.execute(sql)
|
res = self.obj.execute(sql)
|
||||||
if self.sql_log:
|
if self.sql_log:
|
||||||
print "SQL LOG query:", sql
|
print "SQL LOG query:", sql
|
||||||
print "SQL LOG params:", repr(p)
|
print "SQL LOG params:", repr(p)
|
||||||
self.count+=1
|
self.count+=1
|
||||||
res_from = re_from.match(sql.lower())
|
res_from = re_from.match(sql.lower())
|
||||||
if res_from:
|
if res_from:
|
||||||
self.sql_from_log.setdefault(res_from.group(1), [0, 0])
|
self.sql_from_log.setdefault(res_from.group(1), [0, 0])
|
||||||
self.sql_from_log[res_from.group(1)][0] += 1
|
self.sql_from_log[res_from.group(1)][0] += 1
|
||||||
self.sql_from_log[res_from.group(1)][1] += mdt.now() - now
|
self.sql_from_log[res_from.group(1)][1] += mdt.now() - now
|
||||||
res_into = re_into.match(sql.lower())
|
res_into = re_into.match(sql.lower())
|
||||||
if res_into:
|
if res_into:
|
||||||
self.sql_into_log.setdefault(res_into.group(1), [0, 0])
|
self.sql_into_log.setdefault(res_into.group(1), [0, 0])
|
||||||
self.sql_into_log[res_into.group(1)][0] += 1
|
self.sql_into_log[res_into.group(1)][0] += 1
|
||||||
self.sql_into_log[res_into.group(1)][1] += mdt.now() - now
|
self.sql_into_log[res_into.group(1)][1] += mdt.now() - now
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def print_log(self, type='from'):
|
def print_log(self, type='from'):
|
||||||
print "SQL LOG %s:" % (type,)
|
print "SQL LOG %s:" % (type,)
|
||||||
if type == 'from':
|
if type == 'from':
|
||||||
logs = self.sql_from_log.items()
|
logs = self.sql_from_log.items()
|
||||||
else:
|
else:
|
||||||
logs = self.sql_into_log.items()
|
logs = self.sql_into_log.items()
|
||||||
logs.sort(lambda x, y: cmp(x[1][1], y[1][1]))
|
logs.sort(lambda x, y: cmp(x[1][1], y[1][1]))
|
||||||
sum=0
|
sum=0
|
||||||
for r in logs:
|
for r in logs:
|
||||||
print "table:", r[0], ":", str(r[1][1]), "/", r[1][0]
|
print "table:", r[0], ":", str(r[1][1]), "/", r[1][0]
|
||||||
sum+= r[1][1]
|
sum+= r[1][1]
|
||||||
print "SUM:%s/%d"% (sum, self.count)
|
print "SUM:%s/%d"% (sum, self.count)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
if self.sql_log:
|
if self.sql_log:
|
||||||
self.print_log('from')
|
self.print_log('from')
|
||||||
self.print_log('into')
|
self.print_log('into')
|
||||||
self.obj.close()
|
self.obj.close()
|
||||||
|
|
||||||
# This force the cursor to be freed, and thus, available again. It is
|
# This force the cursor to be freed, and thus, available again. It is
|
||||||
# important because otherwise we can overload the server very easily
|
# important because otherwise we can overload the server very easily
|
||||||
# because of a cursor shortage (because cursors are not garbage
|
# because of a cursor shortage (because cursors are not garbage
|
||||||
# collected as fast as they should). The problem is probably due in
|
# collected as fast as they should). The problem is probably due in
|
||||||
# part because browse records keep a reference to the cursor.
|
# part because browse records keep a reference to the cursor.
|
||||||
del self.obj
|
del self.obj
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
return getattr(self.obj, name)
|
return getattr(self.obj, name)
|
||||||
|
|
||||||
class fakedb:
|
class fakedb:
|
||||||
def __init__(self, truedb, dbname):
|
def __init__(self, truedb, dbname):
|
||||||
self.truedb = truedb
|
self.truedb = truedb
|
||||||
self.dbname = dbname
|
self.dbname = dbname
|
||||||
|
|
||||||
def cursor(self):
|
def cursor(self):
|
||||||
return fake_cursor(self.truedb, {}, self.dbname)
|
return fake_cursor(self.truedb, {}, self.dbname)
|
||||||
|
|
||||||
def decimalize(symb):
|
def decimalize(symb):
|
||||||
if symb is None: return None
|
if symb is None: return None
|
||||||
if isinstance(symb, float):
|
if isinstance(symb, float):
|
||||||
return decimal.Decimal('%f' % symb)
|
return decimal.Decimal('%f' % symb)
|
||||||
return decimal.Decimal(symb)
|
return decimal.Decimal(symb)
|
||||||
|
|
||||||
def db_connect(db_name, serialize=0):
|
def db_connect(db_name, serialize=0):
|
||||||
host = tools.config['db_host'] and "host=%s" % tools.config['db_host'] or ''
|
host = tools.config['db_host'] and "host=%s" % tools.config['db_host'] or ''
|
||||||
port = tools.config['db_port'] and "port=%s" % tools.config['db_port'] or ''
|
port = tools.config['db_port'] and "port=%s" % tools.config['db_port'] or ''
|
||||||
name = "dbname=%s" % db_name
|
name = "dbname=%s" % db_name
|
||||||
user = tools.config['db_user'] and "user=%s" % tools.config['db_user'] or ''
|
user = tools.config['db_user'] and "user=%s" % tools.config['db_user'] or ''
|
||||||
password = tools.config['db_password'] and "password=%s" % tools.config['db_password'] or ''
|
password = tools.config['db_password'] and "password=%s" % tools.config['db_password'] or ''
|
||||||
maxconn = int(tools.config['db_maxconn']) or 64
|
maxconn = int(tools.config['db_maxconn']) or 64
|
||||||
tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password),
|
tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password),
|
||||||
serialize=serialize, maxconn=maxconn)
|
serialize=serialize, maxconn=maxconn)
|
||||||
fdb = fakedb(tdb, db_name)
|
fdb = fakedb(tdb, db_name)
|
||||||
return fdb
|
return fdb
|
||||||
|
|
||||||
def init():
|
def init():
|
||||||
#define DATEOID 1082, define TIMESTAMPOID 1114 see pgtypes.h
|
#define DATEOID 1082, define TIMESTAMPOID 1114 see pgtypes.h
|
||||||
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
|
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
|
||||||
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
|
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
|
||||||
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
|
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
|
||||||
#psycopg.register_type(psycopg.new_type((700, 701, 1700), 'decimal', decimalize))
|
#psycopg.register_type(psycopg.new_type((700, 701, 1700), 'decimal', decimalize))
|
||||||
|
|
||||||
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
|
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
|
||||||
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
|
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
|
||||||
|
|
|
@ -30,61 +30,61 @@ import cPickle
|
||||||
import marshal
|
import marshal
|
||||||
|
|
||||||
class Myexception(Exception):
|
class Myexception(Exception):
|
||||||
def __init__(self, faultCode, faultString):
|
def __init__(self, faultCode, faultString):
|
||||||
self.faultCode = faultCode
|
self.faultCode = faultCode
|
||||||
self.faultString = faultString
|
self.faultString = faultString
|
||||||
self.args = (faultCode, faultString)
|
self.args = (faultCode, faultString)
|
||||||
|
|
||||||
class mysocket:
|
class mysocket:
|
||||||
def __init__(self, sock=None):
|
def __init__(self, sock=None):
|
||||||
if sock is None:
|
if sock is None:
|
||||||
self.sock = socket.socket(
|
self.sock = socket.socket(
|
||||||
socket.AF_INET, socket.SOCK_STREAM)
|
socket.AF_INET, socket.SOCK_STREAM)
|
||||||
else:
|
else:
|
||||||
self.sock = sock
|
self.sock = sock
|
||||||
self.sock.settimeout(120)
|
self.sock.settimeout(120)
|
||||||
def connect(self, host, port=False):
|
def connect(self, host, port=False):
|
||||||
if not port:
|
if not port:
|
||||||
protocol, buf = host.split('//')
|
protocol, buf = host.split('//')
|
||||||
host, port = buf.split(':')
|
host, port = buf.split(':')
|
||||||
self.sock.connect((host, int(port)))
|
self.sock.connect((host, int(port)))
|
||||||
def disconnect(self):
|
def disconnect(self):
|
||||||
self.sock.shutdown(socket.SHUT_RDWR)
|
self.sock.shutdown(socket.SHUT_RDWR)
|
||||||
self.sock.close()
|
self.sock.close()
|
||||||
def mysend(self, msg, exception=False, traceback=None):
|
def mysend(self, msg, exception=False, traceback=None):
|
||||||
msg = cPickle.dumps([msg,traceback])
|
msg = cPickle.dumps([msg,traceback])
|
||||||
size = len(msg)
|
size = len(msg)
|
||||||
self.sock.send('%8d' % size)
|
self.sock.send('%8d' % size)
|
||||||
self.sock.send(exception and "1" or "0")
|
self.sock.send(exception and "1" or "0")
|
||||||
totalsent = 0
|
totalsent = 0
|
||||||
while totalsent < size:
|
while totalsent < size:
|
||||||
sent = self.sock.send(msg[totalsent:])
|
sent = self.sock.send(msg[totalsent:])
|
||||||
if sent == 0:
|
if sent == 0:
|
||||||
raise RuntimeError, "socket connection broken"
|
raise RuntimeError, "socket connection broken"
|
||||||
totalsent = totalsent + sent
|
totalsent = totalsent + sent
|
||||||
def myreceive(self):
|
def myreceive(self):
|
||||||
buf=''
|
buf=''
|
||||||
while len(buf) < 8:
|
while len(buf) < 8:
|
||||||
chunk = self.sock.recv(8 - len(buf))
|
chunk = self.sock.recv(8 - len(buf))
|
||||||
if chunk == '':
|
if chunk == '':
|
||||||
raise RuntimeError, "socket connection broken"
|
raise RuntimeError, "socket connection broken"
|
||||||
buf += chunk
|
buf += chunk
|
||||||
size = int(buf)
|
size = int(buf)
|
||||||
buf = self.sock.recv(1)
|
buf = self.sock.recv(1)
|
||||||
if buf != "0":
|
if buf != "0":
|
||||||
exception = buf
|
exception = buf
|
||||||
else:
|
else:
|
||||||
exception = False
|
exception = False
|
||||||
msg = ''
|
msg = ''
|
||||||
while len(msg) < size:
|
while len(msg) < size:
|
||||||
chunk = self.sock.recv(size-len(msg))
|
chunk = self.sock.recv(size-len(msg))
|
||||||
if chunk == '':
|
if chunk == '':
|
||||||
raise RuntimeError, "socket connection broken"
|
raise RuntimeError, "socket connection broken"
|
||||||
msg = msg + chunk
|
msg = msg + chunk
|
||||||
res = cPickle.loads(msg)
|
res = cPickle.loads(msg)
|
||||||
if isinstance(res[0],Exception):
|
if isinstance(res[0],Exception):
|
||||||
if exception:
|
if exception:
|
||||||
raise Myexception(str(res[0]), str(res[1]))
|
raise Myexception(str(res[0]), str(res[1]))
|
||||||
raise res[0]
|
raise res[0]
|
||||||
else:
|
else:
|
||||||
return res[0]
|
return res[0]
|
||||||
|
|
|
@ -75,8 +75,8 @@ import tools
|
||||||
import time
|
import time
|
||||||
|
|
||||||
if sys.platform=='win32':
|
if sys.platform=='win32':
|
||||||
import mx.DateTime
|
import mx.DateTime
|
||||||
mx.DateTime.strptime = lambda x,y: mx.DateTime.mktime(time.strptime(x, y))
|
mx.DateTime.strptime = lambda x,y: mx.DateTime.mktime(time.strptime(x, y))
|
||||||
|
|
||||||
#os.chdir(tools.file_path_root)
|
#os.chdir(tools.file_path_root)
|
||||||
|
|
||||||
|
@ -98,18 +98,18 @@ import pooler
|
||||||
|
|
||||||
# try to connect to the database
|
# try to connect to the database
|
||||||
try:
|
try:
|
||||||
# pooler.init()
|
# pooler.init()
|
||||||
pass
|
pass
|
||||||
except psycopg.OperationalError, err:
|
except psycopg.OperationalError, err:
|
||||||
logger.notifyChannel("init", netsvc.LOG_ERROR, "could not connect to database '%s'!" % (tools.config["db_name"],))
|
logger.notifyChannel("init", netsvc.LOG_ERROR, "could not connect to database '%s'!" % (tools.config["db_name"],))
|
||||||
|
|
||||||
msg = str(err).replace("FATAL:","").strip()
|
msg = str(err).replace("FATAL:","").strip()
|
||||||
db_msg = "database \"%s\" does not exist" % (tools.config["db_name"],)
|
db_msg = "database \"%s\" does not exist" % (tools.config["db_name"],)
|
||||||
|
|
||||||
# Note: this is ugly but since psycopg only uses one exception for all errors
|
# Note: this is ugly but since psycopg only uses one exception for all errors
|
||||||
# I don't think it's possible to do differently
|
# I don't think it's possible to do differently
|
||||||
if msg == db_msg:
|
if msg == db_msg:
|
||||||
print """
|
print """
|
||||||
this database does not exist
|
this database does not exist
|
||||||
|
|
||||||
You need to create it using the command:
|
You need to create it using the command:
|
||||||
|
@ -126,42 +126,42 @@ Two accounts will be created by default:
|
||||||
2. login: demo password : demo
|
2. login: demo password : demo
|
||||||
|
|
||||||
""" % (tools.config["db_name"])
|
""" % (tools.config["db_name"])
|
||||||
else:
|
else:
|
||||||
print "\n "+msg+"\n"
|
print "\n "+msg+"\n"
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
db_name = tools.config["db_name"]
|
db_name = tools.config["db_name"]
|
||||||
|
|
||||||
# test whether it is needed to initialize the db (the db is empty)
|
# test whether it is needed to initialize the db (the db is empty)
|
||||||
try:
|
try:
|
||||||
cr = pooler.get_db_only(db_name).cursor()
|
cr = pooler.get_db_only(db_name).cursor()
|
||||||
except psycopg.OperationalError:
|
except psycopg.OperationalError:
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO, "could not connect to database '%s'!" % db_name,)
|
logger.notifyChannel("init", netsvc.LOG_INFO, "could not connect to database '%s'!" % db_name,)
|
||||||
cr = None
|
cr = None
|
||||||
if cr:
|
if cr:
|
||||||
cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_ui_menu'")
|
cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_ui_menu'")
|
||||||
if len(cr.fetchall())==0:
|
if len(cr.fetchall())==0:
|
||||||
#if False:
|
#if False:
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
|
logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
|
||||||
tools.init_db(cr)
|
tools.init_db(cr)
|
||||||
# in that case, force --init=all
|
# in that case, force --init=all
|
||||||
tools.config["init"]["all"] = 1
|
tools.config["init"]["all"] = 1
|
||||||
tools.config['update']['all'] = 1
|
tools.config['update']['all'] = 1
|
||||||
if not tools.config['without_demo']:
|
if not tools.config['without_demo']:
|
||||||
tools.config["demo"]['all'] = 1
|
tools.config["demo"]['all'] = 1
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# launch modules install/upgrade/removes if needed
|
# launch modules install/upgrade/removes if needed
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
if tools.config['upgrade']:
|
if tools.config['upgrade']:
|
||||||
print 'Upgrading new modules...'
|
print 'Upgrading new modules...'
|
||||||
import tools.upgrade
|
import tools.upgrade
|
||||||
(toinit, toupdate) = tools.upgrade.upgrade()
|
(toinit, toupdate) = tools.upgrade.upgrade()
|
||||||
for m in toinit:
|
for m in toinit:
|
||||||
tools.config['init'][m] = 1
|
tools.config['init'][m] = 1
|
||||||
for m in toupdate:
|
for m in toupdate:
|
||||||
tools.config['update'][m] = 1
|
tools.config['update'][m] = 1
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# import basic modules
|
# import basic modules
|
||||||
|
@ -175,27 +175,27 @@ import addons
|
||||||
|
|
||||||
addons.register_classes()
|
addons.register_classes()
|
||||||
if tools.config['init'] or tools.config['update']:
|
if tools.config['init'] or tools.config['update']:
|
||||||
pooler.get_db_and_pool(tools.config['db_name'], update_module=True)
|
pooler.get_db_and_pool(tools.config['db_name'], update_module=True)
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# translation stuff
|
# translation stuff
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
if tools.config["translate_out"]:
|
if tools.config["translate_out"]:
|
||||||
import csv
|
import csv
|
||||||
|
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO, 'writing translation file for language %s to %s' % (tools.config["language"], tools.config["translate_out"]))
|
logger.notifyChannel("init", netsvc.LOG_INFO, 'writing translation file for language %s to %s' % (tools.config["language"], tools.config["translate_out"]))
|
||||||
|
|
||||||
fileformat = os.path.splitext(tools.config["translate_out"])[-1][1:].lower()
|
fileformat = os.path.splitext(tools.config["translate_out"])[-1][1:].lower()
|
||||||
buf = file(tools.config["translate_out"], "w")
|
buf = file(tools.config["translate_out"], "w")
|
||||||
tools.trans_export(tools.config["language"], tools.config["translate_modules"], buf, fileformat)
|
tools.trans_export(tools.config["language"], tools.config["translate_modules"], buf, fileformat)
|
||||||
buf.close()
|
buf.close()
|
||||||
|
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written succesfully')
|
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written succesfully')
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
if tools.config["translate_in"]:
|
if tools.config["translate_in"]:
|
||||||
tools.trans_load(tools.config["db_name"], tools.config["translate_in"], tools.config["language"])
|
tools.trans_load(tools.config["db_name"], tools.config["translate_in"], tools.config["language"])
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
#----------------------------------------------------------------------------------
|
#----------------------------------------------------------------------------------
|
||||||
# if we don't want the server to continue to run after initialization, we quit here
|
# if we don't want the server to continue to run after initialization, we quit here
|
||||||
|
@ -209,69 +209,69 @@ if tools.config["stop_after_init"]:
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
|
|
||||||
if tools.config['xmlrpc']:
|
if tools.config['xmlrpc']:
|
||||||
try:
|
try:
|
||||||
port = int(tools.config["port"])
|
port = int(tools.config["port"])
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["port"],))
|
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["port"],))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
interface = tools.config["interface"]
|
interface = tools.config["interface"]
|
||||||
secure = tools.config["secure"]
|
secure = tools.config["secure"]
|
||||||
|
|
||||||
httpd = netsvc.HttpDaemon(interface,port, secure)
|
httpd = netsvc.HttpDaemon(interface,port, secure)
|
||||||
|
|
||||||
if tools.config["xmlrpc"]:
|
if tools.config["xmlrpc"]:
|
||||||
xml_gw = netsvc.xmlrpc.RpcGateway('web-services')
|
xml_gw = netsvc.xmlrpc.RpcGateway('web-services')
|
||||||
httpd.attach("/xmlrpc", xml_gw )
|
httpd.attach("/xmlrpc", xml_gw )
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
logger.notifyChannel("web-services", netsvc.LOG_INFO,
|
||||||
"starting XML-RPC" + \
|
"starting XML-RPC" + \
|
||||||
(tools.config['secure'] and ' Secure' or '') + \
|
(tools.config['secure'] and ' Secure' or '') + \
|
||||||
" services, port " + str(port))
|
" services, port " + str(port))
|
||||||
|
|
||||||
#
|
#
|
||||||
#if tools.config["soap"]:
|
#if tools.config["soap"]:
|
||||||
# soap_gw = netsvc.xmlrpc.RpcGateway('web-services')
|
# soap_gw = netsvc.xmlrpc.RpcGateway('web-services')
|
||||||
# httpd.attach("/soap", soap_gw )
|
# httpd.attach("/soap", soap_gw )
|
||||||
# logger.notifyChannel("web-services", netsvc.LOG_INFO, 'starting SOAP services, port '+str(port))
|
# logger.notifyChannel("web-services", netsvc.LOG_INFO, 'starting SOAP services, port '+str(port))
|
||||||
#
|
#
|
||||||
|
|
||||||
if tools.config['netrpc']:
|
if tools.config['netrpc']:
|
||||||
try:
|
try:
|
||||||
netport = int(tools.config["netport"])
|
netport = int(tools.config["netport"])
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["netport"],))
|
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["netport"],))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
netinterface = tools.config["netinterface"]
|
netinterface = tools.config["netinterface"]
|
||||||
|
|
||||||
tinySocket = netsvc.TinySocketServerThread(netinterface, netport, False)
|
tinySocket = netsvc.TinySocketServerThread(netinterface, netport, False)
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO, "starting netrpc service, port "+str(netport))
|
logger.notifyChannel("web-services", netsvc.LOG_INFO, "starting netrpc service, port "+str(netport))
|
||||||
|
|
||||||
def handler(signum, frame):
|
def handler(signum, frame):
|
||||||
from tools import config
|
from tools import config
|
||||||
if tools.config['netrpc']:
|
if tools.config['netrpc']:
|
||||||
tinySocket.stop()
|
tinySocket.stop()
|
||||||
if tools.config['xmlrpc']:
|
if tools.config['xmlrpc']:
|
||||||
httpd.stop()
|
httpd.stop()
|
||||||
netsvc.Agent.quit()
|
netsvc.Agent.quit()
|
||||||
if config['pidfile']:
|
if config['pidfile']:
|
||||||
os.unlink(config['pidfile'])
|
os.unlink(config['pidfile'])
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
from tools import config
|
from tools import config
|
||||||
if config['pidfile']:
|
if config['pidfile']:
|
||||||
fd=open(config['pidfile'], 'w')
|
fd=open(config['pidfile'], 'w')
|
||||||
pidtext="%d" % (os.getpid())
|
pidtext="%d" % (os.getpid())
|
||||||
fd.write(pidtext)
|
fd.write(pidtext)
|
||||||
fd.close()
|
fd.close()
|
||||||
|
|
||||||
signal.signal(signal.SIGINT, handler)
|
signal.signal(signal.SIGINT, handler)
|
||||||
signal.signal(signal.SIGTERM, handler)
|
signal.signal(signal.SIGTERM, handler)
|
||||||
|
|
||||||
logger.notifyChannel("web-services", netsvc.LOG_INFO, 'the server is running, waiting for connections...')
|
logger.notifyChannel("web-services", netsvc.LOG_INFO, 'the server is running, waiting for connections...')
|
||||||
if tools.config['netrpc']:
|
if tools.config['netrpc']:
|
||||||
tinySocket.start()
|
tinySocket.start()
|
||||||
if tools.config['xmlrpc']:
|
if tools.config['xmlrpc']:
|
||||||
httpd.start()
|
httpd.start()
|
||||||
#dispatcher.run()
|
#dispatcher.run()
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
|
|
|
@ -32,146 +32,146 @@
|
||||||
#-------------------------------------------------------------
|
#-------------------------------------------------------------
|
||||||
|
|
||||||
unites = {
|
unites = {
|
||||||
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
|
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
|
||||||
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
|
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
|
||||||
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
|
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
|
||||||
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
|
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
|
||||||
}
|
}
|
||||||
|
|
||||||
dizaine = {
|
dizaine = {
|
||||||
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
|
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
|
||||||
}
|
}
|
||||||
|
|
||||||
centaine = {
|
centaine = {
|
||||||
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
|
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
|
||||||
}
|
}
|
||||||
|
|
||||||
mille = {
|
mille = {
|
||||||
0:'', 1:'mille'
|
0:'', 1:'mille'
|
||||||
}
|
}
|
||||||
|
|
||||||
def _100_to_text_fr(chiffre):
|
def _100_to_text_fr(chiffre):
|
||||||
if chiffre in unites:
|
if chiffre in unites:
|
||||||
return unites[chiffre]
|
return unites[chiffre]
|
||||||
else:
|
else:
|
||||||
if chiffre%10>0:
|
if chiffre%10>0:
|
||||||
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
|
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
|
||||||
else:
|
else:
|
||||||
return dizaine[chiffre / 10]
|
return dizaine[chiffre / 10]
|
||||||
|
|
||||||
def _1000_to_text_fr(chiffre):
|
def _1000_to_text_fr(chiffre):
|
||||||
d = _100_to_text_fr(chiffre % 100)
|
d = _100_to_text_fr(chiffre % 100)
|
||||||
d2 = chiffre/100
|
d2 = chiffre/100
|
||||||
if d2>0 and d:
|
if d2>0 and d:
|
||||||
return centaine[d2]+' '+d
|
return centaine[d2]+' '+d
|
||||||
elif d2>1 and not(d):
|
elif d2>1 and not(d):
|
||||||
return centaine[d2]+'s'
|
return centaine[d2]+'s'
|
||||||
else:
|
else:
|
||||||
return centaine[d2] or d
|
return centaine[d2] or d
|
||||||
|
|
||||||
def _10000_to_text_fr(chiffre):
|
def _10000_to_text_fr(chiffre):
|
||||||
if chiffre==0:
|
if chiffre==0:
|
||||||
return 'zero'
|
return 'zero'
|
||||||
part1 = _1000_to_text_fr(chiffre % 1000)
|
part1 = _1000_to_text_fr(chiffre % 1000)
|
||||||
part2 = mille.get(chiffre / 1000, _1000_to_text_fr(chiffre / 1000)+' mille')
|
part2 = mille.get(chiffre / 1000, _1000_to_text_fr(chiffre / 1000)+' mille')
|
||||||
if part2 and part1:
|
if part2 and part1:
|
||||||
part1 = ' '+part1
|
part1 = ' '+part1
|
||||||
return part2+part1
|
return part2+part1
|
||||||
|
|
||||||
def amount_to_text_fr(number, currency):
|
def amount_to_text_fr(number, currency):
|
||||||
units_number = int(number)
|
units_number = int(number)
|
||||||
units_name = currency
|
units_name = currency
|
||||||
if units_number > 1:
|
if units_number > 1:
|
||||||
units_name += 's'
|
units_name += 's'
|
||||||
units = _10000_to_text_fr(units_number)
|
units = _10000_to_text_fr(units_number)
|
||||||
units = units_number and '%s %s' % (units, units_name) or ''
|
units = units_number and '%s %s' % (units, units_name) or ''
|
||||||
|
|
||||||
cents_number = int(number * 100) % 100
|
cents_number = int(number * 100) % 100
|
||||||
cents_name = (cents_number > 1) and 'cents' or 'cent'
|
cents_name = (cents_number > 1) and 'cents' or 'cent'
|
||||||
cents = _100_to_text_fr(cents_number)
|
cents = _100_to_text_fr(cents_number)
|
||||||
cents = cents_number and '%s %s' % (cents, cents_name) or ''
|
cents = cents_number and '%s %s' % (cents, cents_name) or ''
|
||||||
|
|
||||||
if units and cents:
|
if units and cents:
|
||||||
cents = ' '+cents
|
cents = ' '+cents
|
||||||
|
|
||||||
return units + cents
|
return units + cents
|
||||||
|
|
||||||
#-------------------------------------------------------------
|
#-------------------------------------------------------------
|
||||||
# Dutch
|
# Dutch
|
||||||
#-------------------------------------------------------------
|
#-------------------------------------------------------------
|
||||||
|
|
||||||
units_nl = {
|
units_nl = {
|
||||||
0:'', 1:'een', 2:'twee', 3:'drie', 4:'vier', 5:'vijf', 6:'zes', 7:'zeven', 8:'acht', 9:'negen',
|
0:'', 1:'een', 2:'twee', 3:'drie', 4:'vier', 5:'vijf', 6:'zes', 7:'zeven', 8:'acht', 9:'negen',
|
||||||
10:'tien', 11:'elf', 12:'twaalf', 13:'dertien', 14:'veertien'
|
10:'tien', 11:'elf', 12:'twaalf', 13:'dertien', 14:'veertien'
|
||||||
}
|
}
|
||||||
|
|
||||||
tens_nl = {
|
tens_nl = {
|
||||||
1: 'tien', 2:'twintig', 3:'dertig',4:'veertig', 5:'vijftig', 6:'zestig', 7:'zeventig', 8:'tachtig', 9:'negentig'
|
1: 'tien', 2:'twintig', 3:'dertig',4:'veertig', 5:'vijftig', 6:'zestig', 7:'zeventig', 8:'tachtig', 9:'negentig'
|
||||||
}
|
}
|
||||||
|
|
||||||
hundreds_nl = {
|
hundreds_nl = {
|
||||||
0:'', 1: 'honderd',
|
0:'', 1: 'honderd',
|
||||||
}
|
}
|
||||||
|
|
||||||
thousands_nl = {
|
thousands_nl = {
|
||||||
0:'', 1:'duizend'
|
0:'', 1:'duizend'
|
||||||
}
|
}
|
||||||
|
|
||||||
def _100_to_text_nl(number):
|
def _100_to_text_nl(number):
|
||||||
if number in units_nl:
|
if number in units_nl:
|
||||||
return units_nl[number]
|
return units_nl[number]
|
||||||
else:
|
else:
|
||||||
if number%10 > 0:
|
if number%10 > 0:
|
||||||
if number>10 and number<20:
|
if number>10 and number<20:
|
||||||
return units_nl[number % 10]+tens_nl[number / 10]
|
return units_nl[number % 10]+tens_nl[number / 10]
|
||||||
else:
|
else:
|
||||||
units = units_nl[number % 10]
|
units = units_nl[number % 10]
|
||||||
if units[-1] == 'e':
|
if units[-1] == 'e':
|
||||||
joinword = 'ën'
|
joinword = 'ën'
|
||||||
else:
|
else:
|
||||||
joinword = 'en'
|
joinword = 'en'
|
||||||
return units+joinword+tens_nl[number / 10]
|
return units+joinword+tens_nl[number / 10]
|
||||||
else:
|
else:
|
||||||
return tens_nl[number / 10]
|
return tens_nl[number / 10]
|
||||||
|
|
||||||
def _1000_to_text_nl(number):
|
def _1000_to_text_nl(number):
|
||||||
part1 = _100_to_text_nl(number % 100)
|
part1 = _100_to_text_nl(number % 100)
|
||||||
part2 = hundreds_nl.get(number / 100, units_nl[number/100] + hundreds_nl[1])
|
part2 = hundreds_nl.get(number / 100, units_nl[number/100] + hundreds_nl[1])
|
||||||
if part2 and part1:
|
if part2 and part1:
|
||||||
part1 = ' ' + part1
|
part1 = ' ' + part1
|
||||||
return part2 + part1
|
return part2 + part1
|
||||||
|
|
||||||
def _10000_to_text_nl(number):
|
def _10000_to_text_nl(number):
|
||||||
if number==0:
|
if number==0:
|
||||||
return 'nul'
|
return 'nul'
|
||||||
part1 = _1000_to_text_nl(number % 1000)
|
part1 = _1000_to_text_nl(number % 1000)
|
||||||
if thousands_nl.has_key(number / 1000):
|
if thousands_nl.has_key(number / 1000):
|
||||||
part2 = thousands_nl[number / 1000]
|
part2 = thousands_nl[number / 1000]
|
||||||
else:
|
else:
|
||||||
if (number / 1000 % 100 > 0) and (number / 1000 > 100):
|
if (number / 1000 % 100 > 0) and (number / 1000 > 100):
|
||||||
space = ' '
|
space = ' '
|
||||||
else:
|
else:
|
||||||
space = ''
|
space = ''
|
||||||
part2 = _1000_to_text_nl(number / 1000) + space + thousands_nl[1]
|
part2 = _1000_to_text_nl(number / 1000) + space + thousands_nl[1]
|
||||||
if part2 and part1:
|
if part2 and part1:
|
||||||
part1 = ' ' + part1
|
part1 = ' ' + part1
|
||||||
return part2 + part1
|
return part2 + part1
|
||||||
|
|
||||||
def amount_to_text_nl(number, currency):
|
def amount_to_text_nl(number, currency):
|
||||||
units_number = int(number)
|
units_number = int(number)
|
||||||
units_name = currency
|
units_name = currency
|
||||||
units = _10000_to_text_nl(units_number)
|
units = _10000_to_text_nl(units_number)
|
||||||
units = units_number and '%s %s' % (units, units_name) or ''
|
units = units_number and '%s %s' % (units, units_name) or ''
|
||||||
|
|
||||||
cents_number = int(number * 100) % 100
|
cents_number = int(number * 100) % 100
|
||||||
cents_name = 'cent'
|
cents_name = 'cent'
|
||||||
cents = _100_to_text_nl(cents_number)
|
cents = _100_to_text_nl(cents_number)
|
||||||
cents = cents_number and '%s %s' % (cents, cents_name) or ''
|
cents = cents_number and '%s %s' % (cents, cents_name) or ''
|
||||||
|
|
||||||
if units and cents:
|
if units and cents:
|
||||||
cents = ' ' + cents
|
cents = ' ' + cents
|
||||||
|
|
||||||
return units + cents
|
return units + cents
|
||||||
|
|
||||||
#-------------------------------------------------------------
|
#-------------------------------------------------------------
|
||||||
# Generic functions
|
# Generic functions
|
||||||
|
@ -182,32 +182,32 @@ _translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
|
||||||
#TODO: we should use the country AND language (ex: septante VS soixante dix)
|
#TODO: we should use the country AND language (ex: septante VS soixante dix)
|
||||||
#TODO: we should use en by default, but the translation func is yet to be implemented
|
#TODO: we should use en by default, but the translation func is yet to be implemented
|
||||||
def amount_to_text(nbr, lang='fr', currency='euro'):
|
def amount_to_text(nbr, lang='fr', currency='euro'):
|
||||||
"""
|
"""
|
||||||
Converts an integer to its textual representation, using the language set in the context if any.
|
Converts an integer to its textual representation, using the language set in the context if any.
|
||||||
Example:
|
Example:
|
||||||
1654: mille six cent cinquante-quatre.
|
1654: mille six cent cinquante-quatre.
|
||||||
"""
|
"""
|
||||||
if nbr > 1000000:
|
if nbr > 1000000:
|
||||||
#TODO: use logger
|
#TODO: use logger
|
||||||
print "WARNING: number too large '%d', can't translate it!" % (nbr,)
|
print "WARNING: number too large '%d', can't translate it!" % (nbr,)
|
||||||
return str(nbr)
|
return str(nbr)
|
||||||
|
|
||||||
if not _translate_funcs.has_key(lang):
|
if not _translate_funcs.has_key(lang):
|
||||||
#TODO: use logger
|
#TODO: use logger
|
||||||
print "WARNING: no translation function found for lang: '%s'" % (lang,)
|
print "WARNING: no translation function found for lang: '%s'" % (lang,)
|
||||||
#TODO: (default should be en) same as above
|
#TODO: (default should be en) same as above
|
||||||
lang = 'fr'
|
lang = 'fr'
|
||||||
return _translate_funcs[lang](nbr, currency)
|
return _translate_funcs[lang](nbr, currency)
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
from sys import argv
|
from sys import argv
|
||||||
|
|
||||||
lang = 'nl'
|
lang = 'nl'
|
||||||
if len(argv) < 2:
|
if len(argv) < 2:
|
||||||
for i in range(1,200):
|
for i in range(1,200):
|
||||||
print i, ">>", amount_to_text(i, lang)
|
print i, ">>", amount_to_text(i, lang)
|
||||||
for i in range(200,999999,139):
|
for i in range(200,999999,139):
|
||||||
print i, ">>", amount_to_text(i, lang)
|
print i, ">>", amount_to_text(i, lang)
|
||||||
else:
|
else:
|
||||||
print amount_to_text(int(argv[1]), lang)
|
print amount_to_text(int(argv[1]), lang)
|
||||||
|
|
||||||
|
|
|
@ -31,208 +31,208 @@ import netsvc,logging
|
||||||
|
|
||||||
|
|
||||||
class configmanager(object):
|
class configmanager(object):
|
||||||
def __init__(self, fname=None):
|
def __init__(self, fname=None):
|
||||||
self.options = {
|
self.options = {
|
||||||
'verbose': False,
|
'verbose': False,
|
||||||
'interface': '', # this will bind the server to all interfaces
|
'interface': '', # this will bind the server to all interfaces
|
||||||
'port': '8069',
|
'port': '8069',
|
||||||
'netinterface': '',
|
'netinterface': '',
|
||||||
'netport': '8070',
|
'netport': '8070',
|
||||||
'db_host': False,
|
'db_host': False,
|
||||||
'db_port': False,
|
'db_port': False,
|
||||||
'db_name': 'terp',
|
'db_name': 'terp',
|
||||||
'db_user': False,
|
'db_user': False,
|
||||||
'db_password': False,
|
'db_password': False,
|
||||||
'db_maxconn': 64,
|
'db_maxconn': 64,
|
||||||
'reportgz': False,
|
'reportgz': False,
|
||||||
'netrpc': True,
|
'netrpc': True,
|
||||||
'xmlrpc': True,
|
'xmlrpc': True,
|
||||||
'soap': False,
|
'soap': False,
|
||||||
'translate_in': None,
|
'translate_in': None,
|
||||||
'translate_out': None,
|
'translate_out': None,
|
||||||
'language': None,
|
'language': None,
|
||||||
'pg_path': None,
|
'pg_path': None,
|
||||||
'admin_passwd': 'admin',
|
'admin_passwd': 'admin',
|
||||||
'addons_path': None,
|
'addons_path': None,
|
||||||
'root_path': None,
|
'root_path': None,
|
||||||
'debug_mode': False,
|
'debug_mode': False,
|
||||||
'commit_mode': False,
|
'commit_mode': False,
|
||||||
'pidfile': None,
|
'pidfile': None,
|
||||||
'logfile': None,
|
'logfile': None,
|
||||||
'secure': False,
|
'secure': False,
|
||||||
'smtp_server': 'localhost',
|
'smtp_server': 'localhost',
|
||||||
'smtp_user': False,
|
'smtp_user': False,
|
||||||
'smtp_password': False,
|
'smtp_password': False,
|
||||||
'stop_after_init': False, # this will stop the server after initialization
|
'stop_after_init': False, # this will stop the server after initialization
|
||||||
'price_accuracy': 2,
|
'price_accuracy': 2,
|
||||||
|
|
||||||
'assert_exit_level': logging.WARNING, # level above which a failed assert will
|
'assert_exit_level': logging.WARNING, # level above which a failed assert will
|
||||||
}
|
}
|
||||||
|
|
||||||
assert_exit_levels = (netsvc.LOG_CRITICAL, netsvc.LOG_ERROR, netsvc.LOG_WARNING, netsvc.LOG_INFO, netsvc.LOG_DEBUG)
|
assert_exit_levels = (netsvc.LOG_CRITICAL, netsvc.LOG_ERROR, netsvc.LOG_WARNING, netsvc.LOG_INFO, netsvc.LOG_DEBUG)
|
||||||
|
|
||||||
parser = optparse.OptionParser(version=tinyerp_version_string)
|
parser = optparse.OptionParser(version=tinyerp_version_string)
|
||||||
|
|
||||||
parser.add_option("-c", "--config", dest="config", help="specify alternate config file")
|
parser.add_option("-c", "--config", dest="config", help="specify alternate config file")
|
||||||
parser.add_option("-s", "--save", action="store_true", dest="save", default=False, help="save configuration to ~/.terp_serverrc")
|
parser.add_option("-s", "--save", action="store_true", dest="save", default=False, help="save configuration to ~/.terp_serverrc")
|
||||||
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable debugging")
|
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable debugging")
|
||||||
parser.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored")
|
parser.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored")
|
||||||
parser.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
|
parser.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
|
||||||
|
|
||||||
parser.add_option("-n", "--interface", dest="interface", help="specify the TCP IP address")
|
parser.add_option("-n", "--interface", dest="interface", help="specify the TCP IP address")
|
||||||
parser.add_option("-p", "--port", dest="port", help="specify the TCP port")
|
parser.add_option("-p", "--port", dest="port", help="specify the TCP port")
|
||||||
parser.add_option("--net_interface", dest="netinterface", help="specify the TCP IP address for netrpc")
|
parser.add_option("--net_interface", dest="netinterface", help="specify the TCP IP address for netrpc")
|
||||||
parser.add_option("--net_port", dest="netport", help="specify the TCP port for netrpc")
|
parser.add_option("--net_port", dest="netport", help="specify the TCP port for netrpc")
|
||||||
parser.add_option("--no-netrpc", dest="netrpc", action="store_false", default=True, help="disable netrpc")
|
parser.add_option("--no-netrpc", dest="netrpc", action="store_false", default=True, help="disable netrpc")
|
||||||
parser.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", default=True, help="disable xmlrpc")
|
parser.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", default=True, help="disable xmlrpc")
|
||||||
|
|
||||||
parser.add_option("-i", "--init", dest="init", help="init a module (use \"all\" for all modules)")
|
parser.add_option("-i", "--init", dest="init", help="init a module (use \"all\" for all modules)")
|
||||||
parser.add_option("--without-demo", dest="without_demo", help="load demo data for a module (use \"all\" for all modules)", default=False)
|
parser.add_option("--without-demo", dest="without_demo", help="load demo data for a module (use \"all\" for all modules)", default=False)
|
||||||
parser.add_option("-u", "--update", dest="update", help="update a module (use \"all\" for all modules)")
|
parser.add_option("-u", "--update", dest="update", help="update a module (use \"all\" for all modules)")
|
||||||
# stops the server from launching after initialization
|
# stops the server from launching after initialization
|
||||||
parser.add_option("--stop-after-init", action="store_true", dest="stop_after_init", default=False, help="stop the server after it initializes")
|
parser.add_option("--stop-after-init", action="store_true", dest="stop_after_init", default=False, help="stop the server after it initializes")
|
||||||
parser.add_option('--debug', dest='debug_mode', action='store_true', default=False, help='enable debug mode')
|
parser.add_option('--debug', dest='debug_mode', action='store_true', default=False, help='enable debug mode')
|
||||||
parser.add_option("--assert-exit-level", dest='assert_exit_level', help="specify the level at which a failed assertion will stop the server " + str(assert_exit_levels))
|
parser.add_option("--assert-exit-level", dest='assert_exit_level', help="specify the level at which a failed assertion will stop the server " + str(assert_exit_levels))
|
||||||
parser.add_option("-S", "--secure", dest="secure", action="store_true", help="launch server over https instead of http", default=False)
|
parser.add_option("-S", "--secure", dest="secure", action="store_true", help="launch server over https instead of http", default=False)
|
||||||
parser.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email')
|
parser.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email')
|
||||||
parser.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email')
|
parser.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email')
|
||||||
parser.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email')
|
parser.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email')
|
||||||
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
|
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
|
||||||
|
|
||||||
group = optparse.OptionGroup(parser, "Modules related options")
|
group = optparse.OptionGroup(parser, "Modules related options")
|
||||||
group.add_option("-g", "--upgrade", action="store_true", dest="upgrade", default=False, help="Upgrade/install/uninstall modules")
|
group.add_option("-g", "--upgrade", action="store_true", dest="upgrade", default=False, help="Upgrade/install/uninstall modules")
|
||||||
|
|
||||||
group = optparse.OptionGroup(parser, "Database related options")
|
group = optparse.OptionGroup(parser, "Database related options")
|
||||||
group.add_option("-d", "--database", dest="db_name", help="specify the database name")
|
group.add_option("-d", "--database", dest="db_name", help="specify the database name")
|
||||||
group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name")
|
group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name")
|
||||||
group.add_option("-w", "--db_password", dest="db_password", help="specify the database password")
|
group.add_option("-w", "--db_password", dest="db_password", help="specify the database password")
|
||||||
group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path")
|
group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path")
|
||||||
group.add_option("--db_host", dest="db_host", help="specify the database host")
|
group.add_option("--db_host", dest="db_host", help="specify the database host")
|
||||||
group.add_option("--db_port", dest="db_port", help="specify the database port")
|
group.add_option("--db_port", dest="db_port", help="specify the database port")
|
||||||
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the the maximum number of physical connections to posgresql")
|
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the the maximum number of physical connections to posgresql")
|
||||||
group.add_option("-C", "--commit-mode", dest="commit_mode", action="store_true", help="Several commit during one file importation. Use this for big data importation.", default=False)
|
group.add_option("-C", "--commit-mode", dest="commit_mode", action="store_true", help="Several commit during one file importation. Use this for big data importation.", default=False)
|
||||||
parser.add_option_group(group)
|
parser.add_option_group(group)
|
||||||
|
|
||||||
group = optparse.OptionGroup(parser, "Internationalisation options",
|
group = optparse.OptionGroup(parser, "Internationalisation options",
|
||||||
"Use these options to translate Tiny ERP to another language."
|
"Use these options to translate Tiny ERP to another language."
|
||||||
"See i18n section of the user manual. Options '-l' and '-d' are mandatory.")
|
"See i18n section of the user manual. Options '-l' and '-d' are mandatory.")
|
||||||
|
|
||||||
group.add_option('-l', "--language", dest="language", help="specify the language of the translation file. Use it with --i18n-export and --i18n-import")
|
group.add_option('-l', "--language", dest="language", help="specify the language of the translation file. Use it with --i18n-export and --i18n-import")
|
||||||
group.add_option("--i18n-export", dest="translate_out", help="export all sentences to be translated to a CSV or a PO file and exit")
|
group.add_option("--i18n-export", dest="translate_out", help="export all sentences to be translated to a CSV or a PO file and exit")
|
||||||
group.add_option("--i18n-import", dest="translate_in", help="import a CSV or a PO file with translations and exit")
|
group.add_option("--i18n-import", dest="translate_in", help="import a CSV or a PO file with translations and exit")
|
||||||
group.add_option("--modules", dest="translate_modules", help="specify modules to export. Use in combination with --i18n-export")
|
group.add_option("--modules", dest="translate_modules", help="specify modules to export. Use in combination with --i18n-export")
|
||||||
group.add_option("--addons-path", dest="addons_path", help="specify an alternative addons path.")
|
group.add_option("--addons-path", dest="addons_path", help="specify an alternative addons path.")
|
||||||
parser.add_option_group(group)
|
parser.add_option_group(group)
|
||||||
|
|
||||||
(opt, args) = parser.parse_args()
|
(opt, args) = parser.parse_args()
|
||||||
|
|
||||||
assert not ((opt.translate_in or opt.translate_out) and (not opt.language or not opt.db_name)), "the i18n-import and i18n-export options cannot be used without the language (-l) and database (-d) options"
|
assert not ((opt.translate_in or opt.translate_out) and (not opt.language or not opt.db_name)), "the i18n-import and i18n-export options cannot be used without the language (-l) and database (-d) options"
|
||||||
|
|
||||||
# place/search the config file on Win32 near the server installation
|
# place/search the config file on Win32 near the server installation
|
||||||
# (../etc from the server)
|
# (../etc from the server)
|
||||||
# if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write,
|
# if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write,
|
||||||
# else he won't be able to save the configurations, or even to start the server...
|
# else he won't be able to save the configurations, or even to start the server...
|
||||||
if os.name == 'nt':
|
if os.name == 'nt':
|
||||||
rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'tinyerp-server.conf')
|
rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'tinyerp-server.conf')
|
||||||
else:
|
else:
|
||||||
rcfilepath = os.path.expanduser('~/.terp_serverrc')
|
rcfilepath = os.path.expanduser('~/.terp_serverrc')
|
||||||
|
|
||||||
self.rcfile = fname or opt.config or os.environ.get('TERP_SERVER') or rcfilepath
|
self.rcfile = fname or opt.config or os.environ.get('TERP_SERVER') or rcfilepath
|
||||||
self.load()
|
self.load()
|
||||||
|
|
||||||
|
|
||||||
# Verify that we want to log or not, if not the output will go to stdout
|
# Verify that we want to log or not, if not the output will go to stdout
|
||||||
if self.options['logfile'] in ('None', 'False'):
|
if self.options['logfile'] in ('None', 'False'):
|
||||||
self.options['logfile'] = False
|
self.options['logfile'] = False
|
||||||
# the same for the pidfile
|
# the same for the pidfile
|
||||||
if self.options['pidfile'] in ('None', 'False'):
|
if self.options['pidfile'] in ('None', 'False'):
|
||||||
self.options['pidfile'] = False
|
self.options['pidfile'] = False
|
||||||
|
|
||||||
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
|
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
|
||||||
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
|
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
|
||||||
if getattr(opt, arg):
|
if getattr(opt, arg):
|
||||||
self.options[arg] = getattr(opt, arg)
|
self.options[arg] = getattr(opt, arg)
|
||||||
|
|
||||||
for arg in ('language', 'translate_out', 'translate_in',
|
for arg in ('language', 'translate_out', 'translate_in',
|
||||||
'upgrade', 'verbose', 'debug_mode',
|
'upgrade', 'verbose', 'debug_mode',
|
||||||
'stop_after_init', 'without_demo', 'netrpc', 'xmlrpc'):
|
'stop_after_init', 'without_demo', 'netrpc', 'xmlrpc'):
|
||||||
self.options[arg] = getattr(opt, arg)
|
self.options[arg] = getattr(opt, arg)
|
||||||
|
|
||||||
if opt.assert_exit_level:
|
if opt.assert_exit_level:
|
||||||
assert opt.assert_exit_level in assert_exit_levels, 'ERROR: The assert-exit-level must be one of those values: '+str(assert_exit_levels)
|
assert opt.assert_exit_level in assert_exit_levels, 'ERROR: The assert-exit-level must be one of those values: '+str(assert_exit_levels)
|
||||||
self.options['assert_exit_level'] = getattr(logging, opt.assert_exit_level.upper())
|
self.options['assert_exit_level'] = getattr(logging, opt.assert_exit_level.upper())
|
||||||
|
|
||||||
if not self.options['root_path'] or self.options['root_path']=='None':
|
if not self.options['root_path'] or self.options['root_path']=='None':
|
||||||
self.options['root_path'] = os.path.abspath(os.path.dirname(sys.argv[0]))
|
self.options['root_path'] = os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||||
if not self.options['addons_path'] or self.options['addons_path']=='None':
|
if not self.options['addons_path'] or self.options['addons_path']=='None':
|
||||||
self.options['addons_path'] = os.path.join(self.options['root_path'], 'addons')
|
self.options['addons_path'] = os.path.join(self.options['root_path'], 'addons')
|
||||||
|
|
||||||
init = {}
|
init = {}
|
||||||
if opt.init:
|
if opt.init:
|
||||||
for i in opt.init.split(','):
|
for i in opt.init.split(','):
|
||||||
init[i] = 1
|
init[i] = 1
|
||||||
self.options['init'] = init
|
self.options['init'] = init
|
||||||
self.options["demo"] = not opt.without_demo and self.options['init'] or {}
|
self.options["demo"] = not opt.without_demo and self.options['init'] or {}
|
||||||
|
|
||||||
update = {}
|
update = {}
|
||||||
if opt.update:
|
if opt.update:
|
||||||
for i in opt.update.split(','):
|
for i in opt.update.split(','):
|
||||||
update[i] = 1
|
update[i] = 1
|
||||||
self.options['update'] = update
|
self.options['update'] = update
|
||||||
|
|
||||||
self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
|
self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
|
||||||
self.options['translate_modules'].sort()
|
self.options['translate_modules'].sort()
|
||||||
|
|
||||||
if opt.pg_path:
|
if opt.pg_path:
|
||||||
self.options['pg_path'] = opt.pg_path
|
self.options['pg_path'] = opt.pg_path
|
||||||
|
|
||||||
if self.options.get('language', False):
|
if self.options.get('language', False):
|
||||||
assert len(self.options['language'])<=5, 'ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE'
|
assert len(self.options['language'])<=5, 'ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE'
|
||||||
if opt.save:
|
if opt.save:
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
def load(self):
|
def load(self):
|
||||||
p = ConfigParser.ConfigParser()
|
p = ConfigParser.ConfigParser()
|
||||||
try:
|
try:
|
||||||
p.read([self.rcfile])
|
p.read([self.rcfile])
|
||||||
for (name,value) in p.items('options'):
|
for (name,value) in p.items('options'):
|
||||||
if value=='True' or value=='true':
|
if value=='True' or value=='true':
|
||||||
value = True
|
value = True
|
||||||
if value=='False' or value=='false':
|
if value=='False' or value=='false':
|
||||||
value = False
|
value = False
|
||||||
self.options[name] = value
|
self.options[name] = value
|
||||||
except IOError:
|
except IOError:
|
||||||
pass
|
pass
|
||||||
except ConfigParser.NoSectionError:
|
except ConfigParser.NoSectionError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
p = ConfigParser.ConfigParser()
|
p = ConfigParser.ConfigParser()
|
||||||
p.add_section('options')
|
p.add_section('options')
|
||||||
for o in [opt for opt in self.options.keys() if opt not in ('version','language','translate_out','translate_in','init','update')]:
|
for o in [opt for opt in self.options.keys() if opt not in ('version','language','translate_out','translate_in','init','update')]:
|
||||||
p.set('options', o, self.options[o])
|
p.set('options', o, self.options[o])
|
||||||
|
|
||||||
# try to create the directories and write the file
|
# try to create the directories and write the file
|
||||||
try:
|
try:
|
||||||
if not os.path.exists(os.path.dirname(self.rcfile)):
|
if not os.path.exists(os.path.dirname(self.rcfile)):
|
||||||
os.makedirs(os.path.dirname(self.rcfile))
|
os.makedirs(os.path.dirname(self.rcfile))
|
||||||
try:
|
try:
|
||||||
p.write(file(self.rcfile, 'w'))
|
p.write(file(self.rcfile, 'w'))
|
||||||
except IOError:
|
except IOError:
|
||||||
sys.stderr.write("ERROR: couldn't write the config file\n")
|
sys.stderr.write("ERROR: couldn't write the config file\n")
|
||||||
|
|
||||||
except OSError:
|
except OSError:
|
||||||
# what to do if impossible?
|
# what to do if impossible?
|
||||||
sys.stderr.write("ERROR: couldn't create the config directory\n")
|
sys.stderr.write("ERROR: couldn't create the config directory\n")
|
||||||
|
|
||||||
def get(self, key, default=None):
|
def get(self, key, default=None):
|
||||||
return self.options.get(key, default)
|
return self.options.get(key, default)
|
||||||
|
|
||||||
def __setitem__(self, key, value):
|
def __setitem__(self, key, value):
|
||||||
self.options[key] = value
|
self.options[key] = value
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
return self.options[key]
|
return self.options[key]
|
||||||
|
|
||||||
config = configmanager()
|
config = configmanager()
|
||||||
|
|
||||||
|
|
1308
bin/tools/convert.py
1308
bin/tools/convert.py
File diff suppressed because it is too large
Load Diff
|
@ -28,306 +28,306 @@
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
class graph(object):
|
class graph(object):
|
||||||
def __init__(self, nodes, transitions):
|
def __init__(self, nodes, transitions):
|
||||||
self.nodes = nodes
|
self.nodes = nodes
|
||||||
self.links = transitions
|
self.links = transitions
|
||||||
trans = {}
|
trans = {}
|
||||||
for t in transitions:
|
for t in transitions:
|
||||||
trans.setdefault(t[0], [])
|
trans.setdefault(t[0], [])
|
||||||
trans[t[0]].append(t[1])
|
trans[t[0]].append(t[1])
|
||||||
self.transitions = trans
|
self.transitions = trans
|
||||||
self.result = {}
|
self.result = {}
|
||||||
self.levels = {}
|
self.levels = {}
|
||||||
|
|
||||||
def get_parent(self,node):
|
def get_parent(self,node):
|
||||||
count = 0
|
count = 0
|
||||||
for item in self.transitions:
|
for item in self.transitions:
|
||||||
if self.transitions[item].__contains__(node):
|
if self.transitions[item].__contains__(node):
|
||||||
count +=1
|
count +=1
|
||||||
return count
|
return count
|
||||||
|
|
||||||
def init_rank(self):
|
def init_rank(self):
|
||||||
self.temp = {}
|
self.temp = {}
|
||||||
for link in self.links:
|
for link in self.links:
|
||||||
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
|
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
|
||||||
|
|
||||||
cnt = 0
|
cnt = 0
|
||||||
list_node = []
|
list_node = []
|
||||||
list_edge = []
|
list_edge = []
|
||||||
|
|
||||||
while self.tight_tree()<self.result.__len__():
|
while self.tight_tree()<self.result.__len__():
|
||||||
cnt+=1
|
cnt+=1
|
||||||
list_node = []
|
list_node = []
|
||||||
|
|
||||||
for node in self.nodes:
|
for node in self.nodes:
|
||||||
if node not in self.reachable_nodes:
|
if node not in self.reachable_nodes:
|
||||||
list_node.append(node)
|
list_node.append(node)
|
||||||
list_edge = []
|
list_edge = []
|
||||||
|
|
||||||
for link in self.temp:
|
for link in self.temp:
|
||||||
if link not in self.tree_edges:
|
if link not in self.tree_edges:
|
||||||
list_edge.append(link)
|
list_edge.append(link)
|
||||||
|
|
||||||
slack = 100
|
slack = 100
|
||||||
|
|
||||||
for edge in list_edge:
|
for edge in list_edge:
|
||||||
if (self.reachable_nodes.__contains__(edge[0]) and edge[1] not in self.reachable_nodes) or ( self.reachable_nodes.__contains__(edge[1]) and edge[0] not in self.reachable_nodes):
|
if (self.reachable_nodes.__contains__(edge[0]) and edge[1] not in self.reachable_nodes) or ( self.reachable_nodes.__contains__(edge[1]) and edge[0] not in self.reachable_nodes):
|
||||||
if(slack>self.temp[edge]-1):
|
if(slack>self.temp[edge]-1):
|
||||||
slack = self.temp[edge]-1
|
slack = self.temp[edge]-1
|
||||||
new_edge = edge
|
new_edge = edge
|
||||||
|
|
||||||
if new_edge[0] not in self.reachable_nodes:
|
if new_edge[0] not in self.reachable_nodes:
|
||||||
delta = -(self.temp[new_edge]-1)
|
delta = -(self.temp[new_edge]-1)
|
||||||
else:
|
else:
|
||||||
delta = self.temp[new_edge]-1
|
delta = self.temp[new_edge]-1
|
||||||
|
|
||||||
for node in self.result:
|
for node in self.result:
|
||||||
if node in self.reachable_nodes:
|
if node in self.reachable_nodes:
|
||||||
self.result[node]['y'] += delta
|
self.result[node]['y'] += delta
|
||||||
|
|
||||||
for link in self.temp:
|
for link in self.temp:
|
||||||
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
|
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
|
||||||
|
|
||||||
self.init_cutvalues()
|
self.init_cutvalues()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def tight_tree(self,):
|
def tight_tree(self,):
|
||||||
self.reachable_nodes = []
|
self.reachable_nodes = []
|
||||||
self.tree_edges = []
|
self.tree_edges = []
|
||||||
self.reachable_node(self.start)
|
self.reachable_node(self.start)
|
||||||
return self.reachable_nodes.__len__()
|
return self.reachable_nodes.__len__()
|
||||||
|
|
||||||
def reachable_node(self,node):
|
def reachable_node(self,node):
|
||||||
if node not in self.reachable_nodes:
|
if node not in self.reachable_nodes:
|
||||||
self.reachable_nodes.append(node)
|
self.reachable_nodes.append(node)
|
||||||
for link in self.temp:
|
for link in self.temp:
|
||||||
if link[0]==node:
|
if link[0]==node:
|
||||||
# print link[0]
|
# print link[0]
|
||||||
if self.temp[link]==1:
|
if self.temp[link]==1:
|
||||||
self.tree_edges.append(link)
|
self.tree_edges.append(link)
|
||||||
if link[1] not in self.reachable_nodes:
|
if link[1] not in self.reachable_nodes:
|
||||||
self.reachable_nodes.append(link[1])
|
self.reachable_nodes.append(link[1])
|
||||||
self.reachable_node(link[1])
|
self.reachable_node(link[1])
|
||||||
|
|
||||||
|
|
||||||
def init_cutvalues(self):
|
def init_cutvalues(self):
|
||||||
self.cut_edges = {}
|
self.cut_edges = {}
|
||||||
self.head_nodes = []
|
self.head_nodes = []
|
||||||
i=0;
|
i=0;
|
||||||
for edge in self.tree_edges:
|
for edge in self.tree_edges:
|
||||||
self.head_nodes = []
|
self.head_nodes = []
|
||||||
rest_edges = []
|
rest_edges = []
|
||||||
rest_edges += self.tree_edges
|
rest_edges += self.tree_edges
|
||||||
rest_edges.__delitem__(i)
|
rest_edges.__delitem__(i)
|
||||||
self.head_component(self.start,rest_edges)
|
self.head_component(self.start,rest_edges)
|
||||||
i+=1
|
i+=1
|
||||||
positive = 0
|
positive = 0
|
||||||
negative = 0
|
negative = 0
|
||||||
for source_node in self.transitions:
|
for source_node in self.transitions:
|
||||||
if source_node in self.head_nodes:
|
if source_node in self.head_nodes:
|
||||||
for dest_node in self.transitions[source_node]:
|
for dest_node in self.transitions[source_node]:
|
||||||
if dest_node not in self.head_nodes:
|
if dest_node not in self.head_nodes:
|
||||||
negative+=1
|
negative+=1
|
||||||
else:
|
else:
|
||||||
for dest_node in self.transitions[source_node]:
|
for dest_node in self.transitions[source_node]:
|
||||||
if dest_node in self.head_nodes:
|
if dest_node in self.head_nodes:
|
||||||
positive+=1
|
positive+=1
|
||||||
|
|
||||||
self.cut_edges[edge] = positive - negative
|
self.cut_edges[edge] = positive - negative
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def head_component(self, node, rest_edges):
|
def head_component(self, node, rest_edges):
|
||||||
if node not in self.head_nodes:
|
if node not in self.head_nodes:
|
||||||
self.head_nodes.append(node)
|
self.head_nodes.append(node)
|
||||||
for link in rest_edges:
|
for link in rest_edges:
|
||||||
if link[0]==node:
|
if link[0]==node:
|
||||||
self.head_component(link[1],rest_edges)
|
self.head_component(link[1],rest_edges)
|
||||||
|
|
||||||
|
|
||||||
def process_ranking(self, node, level=0):
|
def process_ranking(self, node, level=0):
|
||||||
if node not in self.result:
|
if node not in self.result:
|
||||||
self.result[node] = {'x': None, 'y':level, 'mark':0}
|
self.result[node] = {'x': None, 'y':level, 'mark':0}
|
||||||
else:
|
else:
|
||||||
if level > self.result[node]['y']:
|
if level > self.result[node]['y']:
|
||||||
self.result[node]['y'] = level
|
self.result[node]['y'] = level
|
||||||
if self.result[node]['mark']==0:
|
if self.result[node]['mark']==0:
|
||||||
self.result[node]['mark'] = 1
|
self.result[node]['mark'] = 1
|
||||||
for t in self.transitions.get(node, []):
|
for t in self.transitions.get(node, []):
|
||||||
self.process_ranking(t, level+1)
|
self.process_ranking(t, level+1)
|
||||||
|
|
||||||
|
|
||||||
def preprocess_order(self):
|
def preprocess_order(self):
|
||||||
levels = {}
|
levels = {}
|
||||||
for r in self.result:
|
for r in self.result:
|
||||||
l = self.result[r]['y']
|
l = self.result[r]['y']
|
||||||
levels.setdefault(l,[])
|
levels.setdefault(l,[])
|
||||||
levels[l].append(r)
|
levels[l].append(r)
|
||||||
self.levels = levels
|
self.levels = levels
|
||||||
|
|
||||||
def process_order(self, level):
|
def process_order(self, level):
|
||||||
self.levels[level].sort(lambda x,y: cmp(self.result[x]['x'], self.result[y]['x']))
|
self.levels[level].sort(lambda x,y: cmp(self.result[x]['x'], self.result[y]['x']))
|
||||||
for nodepos in range(len(self.levels[level])):
|
for nodepos in range(len(self.levels[level])):
|
||||||
node = self.levels[level][nodepos]
|
node = self.levels[level][nodepos]
|
||||||
if nodepos == 0:
|
if nodepos == 0:
|
||||||
left = self.result[node]['x']- 0.5
|
left = self.result[node]['x']- 0.5
|
||||||
else:
|
else:
|
||||||
left = (self.result[node]['x'] + self.result[self.levels[level][nodepos-1]]['x']) / 2.0
|
left = (self.result[node]['x'] + self.result[self.levels[level][nodepos-1]]['x']) / 2.0
|
||||||
|
|
||||||
if nodepos == (len(self.levels[level])-1):
|
if nodepos == (len(self.levels[level])-1):
|
||||||
right = self.result[node]['x'] + 0.5
|
right = self.result[node]['x'] + 0.5
|
||||||
else:
|
else:
|
||||||
right = (self.result[node]['x'] + self.result[self.levels[level][nodepos+1]]['x']) / 2.0
|
right = (self.result[node]['x'] + self.result[self.levels[level][nodepos+1]]['x']) / 2.0
|
||||||
|
|
||||||
|
|
||||||
if self.transitions.get(node, False):
|
if self.transitions.get(node, False):
|
||||||
if len(self.transitions[node])==1:
|
if len(self.transitions[node])==1:
|
||||||
pos = (left+right)/2.0
|
pos = (left+right)/2.0
|
||||||
step = 0
|
step = 0
|
||||||
else:
|
else:
|
||||||
pos = left
|
pos = left
|
||||||
step = (-left+right) / (len(self.transitions[node])-1)
|
step = (-left+right) / (len(self.transitions[node])-1)
|
||||||
|
|
||||||
for n2 in self.transitions[node]:
|
for n2 in self.transitions[node]:
|
||||||
self.result[n2]['x'] = pos
|
self.result[n2]['x'] = pos
|
||||||
pos += step
|
pos += step
|
||||||
|
|
||||||
def exchange(self,e,f):
|
def exchange(self,e,f):
|
||||||
self.tree_edges.__delitem__(self.tree_edges.index(e))
|
self.tree_edges.__delitem__(self.tree_edges.index(e))
|
||||||
self.tree_edges.append(f)
|
self.tree_edges.append(f)
|
||||||
self.init_cutvalues()
|
self.init_cutvalues()
|
||||||
|
|
||||||
|
|
||||||
def enter_edge(self,edge):
|
def enter_edge(self,edge):
|
||||||
self.head_nodes = []
|
self.head_nodes = []
|
||||||
rest_edges = []
|
rest_edges = []
|
||||||
rest_edges += self.tree_edges
|
rest_edges += self.tree_edges
|
||||||
rest_edges.__delitem__(rest_edges.index(edge))
|
rest_edges.__delitem__(rest_edges.index(edge))
|
||||||
self.head_component(self.start,rest_edges)
|
self.head_component(self.start,rest_edges)
|
||||||
slack = 100
|
slack = 100
|
||||||
for source_node in self.transitions:
|
for source_node in self.transitions:
|
||||||
if source_node in self.head_nodes:
|
if source_node in self.head_nodes:
|
||||||
for dest_node in self.transitions[source_node]:
|
for dest_node in self.transitions[source_node]:
|
||||||
if dest_node not in self.head_nodes:
|
if dest_node not in self.head_nodes:
|
||||||
if(slack>(self.temp[edge]-1)):
|
if(slack>(self.temp[edge]-1)):
|
||||||
slack = self.temp[edge]-1
|
slack = self.temp[edge]-1
|
||||||
new_edge = (source_node,dest_node)
|
new_edge = (source_node,dest_node)
|
||||||
return new_edge
|
return new_edge
|
||||||
|
|
||||||
|
|
||||||
def leave_edge(self):
|
def leave_edge(self):
|
||||||
for edge in self.cut_edges:
|
for edge in self.cut_edges:
|
||||||
if self.cut_edges[edge]<0:
|
if self.cut_edges[edge]<0:
|
||||||
return edge
|
return edge
|
||||||
return ()
|
return ()
|
||||||
|
|
||||||
def process(self, starting_node):
|
def process(self, starting_node):
|
||||||
pos = (len(starting_node) - 1.0)/2.0
|
pos = (len(starting_node) - 1.0)/2.0
|
||||||
self.start = starting_node[0]
|
self.start = starting_node[0]
|
||||||
for s in starting_node:
|
for s in starting_node:
|
||||||
self.process_ranking(s)
|
self.process_ranking(s)
|
||||||
self.result[s]['x'] = pos
|
self.result[s]['x'] = pos
|
||||||
pos += 1.0
|
pos += 1.0
|
||||||
self.init_rank()
|
self.init_rank()
|
||||||
#normalize
|
#normalize
|
||||||
least_rank=100
|
least_rank=100
|
||||||
|
|
||||||
#normalization
|
#normalization
|
||||||
for node in self.result:
|
for node in self.result:
|
||||||
if least_rank>self.result[node]['y']:
|
if least_rank>self.result[node]['y']:
|
||||||
least_rank = self.result[node]['y']
|
least_rank = self.result[node]['y']
|
||||||
|
|
||||||
if(least_rank!=0):
|
if(least_rank!=0):
|
||||||
diff = least_rank
|
diff = least_rank
|
||||||
for node in self.result:
|
for node in self.result:
|
||||||
self.result[node]['y']-=least_rank
|
self.result[node]['y']-=least_rank
|
||||||
|
|
||||||
e = self.leave_edge()
|
e = self.leave_edge()
|
||||||
#while e:
|
#while e:
|
||||||
f = self.enter_edge(e)
|
f = self.enter_edge(e)
|
||||||
self.exchange(e,f)
|
self.exchange(e,f)
|
||||||
e = self.leave_edge()
|
e = self.leave_edge()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
self.preprocess_order()
|
self.preprocess_order()
|
||||||
for n in self.levels:
|
for n in self.levels:
|
||||||
self.process_order(n)
|
self.process_order(n)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
result = ''
|
result = ''
|
||||||
for l in self.levels:
|
for l in self.levels:
|
||||||
result += 'PosY: ' + str(l) + '\n'
|
result += 'PosY: ' + str(l) + '\n'
|
||||||
for node in self.levels[l]:
|
for node in self.levels[l]:
|
||||||
result += '\tPosX: '+ str(self.result[node]['x']) + ' - Node:' + node + "\n"
|
result += '\tPosX: '+ str(self.result[node]['x']) + ' - Node:' + node + "\n"
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def scale(self, maxx, maxy, plusx2=0, plusy2=0):
|
def scale(self, maxx, maxy, plusx2=0, plusy2=0):
|
||||||
plusx = - min(map(lambda x: x['x'],self.result.values()))
|
plusx = - min(map(lambda x: x['x'],self.result.values()))
|
||||||
plusy = - min(map(lambda x: x['y'],self.result.values()))
|
plusy = - min(map(lambda x: x['y'],self.result.values()))
|
||||||
|
|
||||||
maxcurrent = 1.0
|
maxcurrent = 1.0
|
||||||
diff = 1.0
|
diff = 1.0
|
||||||
for l in self.levels:
|
for l in self.levels:
|
||||||
for n in range(1, len(self.levels[l])):
|
for n in range(1, len(self.levels[l])):
|
||||||
n1 = self.levels[l][n]
|
n1 = self.levels[l][n]
|
||||||
n2 = self.levels[l][n-1]
|
n2 = self.levels[l][n-1]
|
||||||
diff = abs(self.result[n2]['x']-self.result[n1]['x'])
|
diff = abs(self.result[n2]['x']-self.result[n1]['x'])
|
||||||
if diff<maxcurrent:
|
if diff<maxcurrent:
|
||||||
maxcurrent=diff
|
maxcurrent=diff
|
||||||
factor = maxx / diff
|
factor = maxx / diff
|
||||||
for r in self.result:
|
for r in self.result:
|
||||||
self.result[r]['x'] = (self.result[r]['x']+plusx) * factor + plusx2
|
self.result[r]['x'] = (self.result[r]['x']+plusx) * factor + plusx2
|
||||||
self.result[r]['y'] = (self.result[r]['y']+plusy) * maxy + plusy2
|
self.result[r]['y'] = (self.result[r]['y']+plusy) * maxy + plusy2
|
||||||
|
|
||||||
def result_get(self):
|
def result_get(self):
|
||||||
return self.result
|
return self.result
|
||||||
|
|
||||||
if __name__=='__main__':
|
if __name__=='__main__':
|
||||||
starting_node = ['profile'] # put here nodes with flow_start=True
|
starting_node = ['profile'] # put here nodes with flow_start=True
|
||||||
nodes = ['project','account','hr','base','product','mrp','test','profile']
|
nodes = ['project','account','hr','base','product','mrp','test','profile']
|
||||||
transitions = [
|
transitions = [
|
||||||
('profile','mrp'),
|
('profile','mrp'),
|
||||||
('mrp','project'),
|
('mrp','project'),
|
||||||
('project','product'),
|
('project','product'),
|
||||||
('mrp','hr'),
|
('mrp','hr'),
|
||||||
('mrp','test'),
|
('mrp','test'),
|
||||||
('project','account'),
|
('project','account'),
|
||||||
('project','hr'),
|
('project','hr'),
|
||||||
('product','base'),
|
('product','base'),
|
||||||
('account','product'),
|
('account','product'),
|
||||||
('account','test'),
|
('account','test'),
|
||||||
('account','base'),
|
('account','base'),
|
||||||
('hr','base'),
|
('hr','base'),
|
||||||
('test','base')
|
('test','base')
|
||||||
]
|
]
|
||||||
|
|
||||||
radius = 20
|
radius = 20
|
||||||
g = graph(nodes, transitions)
|
g = graph(nodes, transitions)
|
||||||
g.process(starting_node)
|
g.process(starting_node)
|
||||||
g.scale(radius*3,radius*3, radius, radius)
|
g.scale(radius*3,radius*3, radius, radius)
|
||||||
|
|
||||||
print g
|
print g
|
||||||
|
|
||||||
import Image
|
import Image
|
||||||
import ImageDraw
|
import ImageDraw
|
||||||
img = Image.new("RGB", (800, 600), "#ffffff")
|
img = Image.new("RGB", (800, 600), "#ffffff")
|
||||||
draw = ImageDraw.Draw(img)
|
draw = ImageDraw.Draw(img)
|
||||||
|
|
||||||
for name,node in g.result.items():
|
for name,node in g.result.items():
|
||||||
draw.arc( (int(node['x']-radius), int(node['y']-radius),int(node['x']+radius), int(node['y']+radius) ), 0, 360, (128,128,128))
|
draw.arc( (int(node['x']-radius), int(node['y']-radius),int(node['x']+radius), int(node['y']+radius) ), 0, 360, (128,128,128))
|
||||||
draw.text( (int(node['x']), int(node['y'])), name, (128,128,128))
|
draw.text( (int(node['x']), int(node['y'])), name, (128,128,128))
|
||||||
|
|
||||||
|
|
||||||
for nodefrom in g.transitions:
|
for nodefrom in g.transitions:
|
||||||
for nodeto in g.transitions[nodefrom]:
|
for nodeto in g.transitions[nodefrom]:
|
||||||
draw.line( (int(g.result[nodefrom]['x']), int(g.result[nodefrom]['y']),int(g.result[nodeto]['x']),int(g.result[nodeto]['y'])),(128,128,128) )
|
draw.line( (int(g.result[nodefrom]['x']), int(g.result[nodefrom]['y']),int(g.result[nodeto]['x']),int(g.result[nodeto]['y'])),(128,128,128) )
|
||||||
|
|
||||||
img.save("graph.png", "PNG")
|
img.save("graph.png", "PNG")
|
||||||
|
|
||||||
|
|
|
@ -43,131 +43,131 @@ Le message avec le sujet "%s" n'a pu être archivé dans l'ERP.
|
||||||
|
|
||||||
class EmailParser(object):
|
class EmailParser(object):
|
||||||
|
|
||||||
def __init__(self, headers, dispatcher):
|
def __init__(self, headers, dispatcher):
|
||||||
self.headers = headers
|
self.headers = headers
|
||||||
self.dispatcher = dispatcher
|
self.dispatcher = dispatcher
|
||||||
|
|
||||||
def parse(self, msg):
|
def parse(self, msg):
|
||||||
dispatcher((self.headers, msg))
|
dispatcher((self.headers, msg))
|
||||||
|
|
||||||
class CommandDispatcher(object):
|
class CommandDispatcher(object):
|
||||||
|
|
||||||
def __init__(self, receiver):
|
def __init__(self, receiver):
|
||||||
self.receiver = receiver
|
self.receiver = receiver
|
||||||
|
|
||||||
def __call__(self, request):
|
def __call__(self, request):
|
||||||
return self.receiver(request)
|
return self.receiver(request)
|
||||||
|
|
||||||
class RPCProxy(object):
|
class RPCProxy(object):
|
||||||
|
|
||||||
def __init__(self, uid, passwd, host='localhost', port=8069, path='object'):
|
def __init__(self, uid, passwd, host='localhost', port=8069, path='object'):
|
||||||
self.rpc = xmlrpclib.ServerProxy('http://%s:%s/%s' % (host, port, path))
|
self.rpc = xmlrpclib.ServerProxy('http://%s:%s/%s' % (host, port, path))
|
||||||
self.user_id = uid
|
self.user_id = uid
|
||||||
self.passwd = passwd
|
self.passwd = passwd
|
||||||
|
|
||||||
def __call__(self, request):
|
def __call__(self, request):
|
||||||
return self.rpc.execute(self.user_id, self.passwd, *request)
|
return self.rpc.execute(self.user_id, self.passwd, *request)
|
||||||
|
|
||||||
class ReceiverEmail2Event(object):
|
class ReceiverEmail2Event(object):
|
||||||
|
|
||||||
email_re = re.compile(r"""
|
email_re = re.compile(r"""
|
||||||
([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
|
([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
|
||||||
@ # mandatory @ sign
|
@ # mandatory @ sign
|
||||||
[a-zA-Z0-9][\w\.-]* # domain must start with a letter
|
[a-zA-Z0-9][\w\.-]* # domain must start with a letter
|
||||||
\.
|
\.
|
||||||
[a-z]{2,3} # TLD
|
[a-z]{2,3} # TLD
|
||||||
)
|
)
|
||||||
""", re.VERBOSE)
|
""", re.VERBOSE)
|
||||||
|
|
||||||
project_re = re.compile(r"^ *\[?(\d{4}\.?\d{0,3})\]?", re.UNICODE)
|
project_re = re.compile(r"^ *\[?(\d{4}\.?\d{0,3})\]?", re.UNICODE)
|
||||||
|
|
||||||
|
|
||||||
def __init__(self, rpc):
|
def __init__(self, rpc):
|
||||||
self.rpc = rpc
|
self.rpc = rpc
|
||||||
|
|
||||||
def get_addresses(self, headers, msg):
|
def get_addresses(self, headers, msg):
|
||||||
hcontent = ''
|
hcontent = ''
|
||||||
for header in [h for h in headers if msg.has_key(h)]:
|
for header in [h for h in headers if msg.has_key(h)]:
|
||||||
hcontent += msg[header]
|
hcontent += msg[header]
|
||||||
return self.email_re.findall(hcontent)
|
return self.email_re.findall(hcontent)
|
||||||
|
|
||||||
def get_partners(self, headers, msg):
|
def get_partners(self, headers, msg):
|
||||||
alladdresses = self.get_addresses(headers, msg)
|
alladdresses = self.get_addresses(headers, msg)
|
||||||
address_ids = self.rpc(('res.partner.address', 'search', [('email', 'in', alladdresses)]))
|
address_ids = self.rpc(('res.partner.address', 'search', [('email', 'in', alladdresses)]))
|
||||||
addresses = self.rpc(('res.partner.address', 'read', address_ids))
|
addresses = self.rpc(('res.partner.address', 'read', address_ids))
|
||||||
return [x['partner_id'][0] for x in addresses]
|
return [x['partner_id'][0] for x in addresses]
|
||||||
|
|
||||||
def __call__(self, request):
|
def __call__(self, request):
|
||||||
headers, msg = request
|
headers, msg = request
|
||||||
partners = self.get_partners(headers, msg)
|
partners = self.get_partners(headers, msg)
|
||||||
subject = u''
|
subject = u''
|
||||||
for string, charset in decode_header(msg['Subject']):
|
for string, charset in decode_header(msg['Subject']):
|
||||||
if charset:
|
if charset:
|
||||||
subject += string.decode(charset)
|
subject += string.decode(charset)
|
||||||
else:
|
else:
|
||||||
subject += unicode(string)
|
subject += unicode(string)
|
||||||
if partners:
|
if partners:
|
||||||
self.save_mail(msg, subject, partners)
|
self.save_mail(msg, subject, partners)
|
||||||
else:
|
else:
|
||||||
warning = MIMEText((warn_msg % (subject,)).encode('utf-8'), 'plain', 'utf-8')
|
warning = MIMEText((warn_msg % (subject,)).encode('utf-8'), 'plain', 'utf-8')
|
||||||
warning['Subject'] = 'Message de TinyERP'
|
warning['Subject'] = 'Message de TinyERP'
|
||||||
warning['From'] = 'erp@steel-sa.com'
|
warning['From'] = 'erp@steel-sa.com'
|
||||||
warning['To'] = msg['From']
|
warning['To'] = msg['From']
|
||||||
s = smtplib.SMTP()
|
s = smtplib.SMTP()
|
||||||
s.connect()
|
s.connect()
|
||||||
s.sendmail('erp@steel-sa.com', self.email_re.findall(msg['From']), warning.as_string())
|
s.sendmail('erp@steel-sa.com', self.email_re.findall(msg['From']), warning.as_string())
|
||||||
s.close()
|
s.close()
|
||||||
|
|
||||||
if msg.is_multipart():
|
if msg.is_multipart():
|
||||||
for message in [m for m in msg.get_payload() if m.get_content_type() == 'message/rfc822']:
|
for message in [m for m in msg.get_payload() if m.get_content_type() == 'message/rfc822']:
|
||||||
self((headers, message.get_payload()[0]))
|
self((headers, message.get_payload()[0]))
|
||||||
|
|
||||||
def save_mail(self, msg, subject, partners):
|
def save_mail(self, msg, subject, partners):
|
||||||
counter, description = 1, u''
|
counter, description = 1, u''
|
||||||
if msg.is_multipart():
|
if msg.is_multipart():
|
||||||
for part in msg.get_payload():
|
for part in msg.get_payload():
|
||||||
stockdir = os.path.join('emails', msg['Message-Id'][1:-1])
|
stockdir = os.path.join('emails', msg['Message-Id'][1:-1])
|
||||||
newdir = os.path.join('/tmp', stockdir)
|
newdir = os.path.join('/tmp', stockdir)
|
||||||
filename = part.get_filename()
|
filename = part.get_filename()
|
||||||
if not filename:
|
if not filename:
|
||||||
ext = mimetypes.guess_extension(part.get_type())
|
ext = mimetypes.guess_extension(part.get_type())
|
||||||
if not ext:
|
if not ext:
|
||||||
ext = '.bin'
|
ext = '.bin'
|
||||||
filename = 'part-%03d%s' % (counter, ext)
|
filename = 'part-%03d%s' % (counter, ext)
|
||||||
|
|
||||||
if part.get_content_maintype() == 'multipart':
|
if part.get_content_maintype() == 'multipart':
|
||||||
continue
|
continue
|
||||||
elif part.get_content_maintype() == 'text':
|
elif part.get_content_maintype() == 'text':
|
||||||
if part.get_content_subtype() == 'plain':
|
if part.get_content_subtype() == 'plain':
|
||||||
description += part.get_payload(decode=1).decode(part.get_charsets()[0])
|
description += part.get_payload(decode=1).decode(part.get_charsets()[0])
|
||||||
description += u'\n\nVous trouverez les éventuels fichiers dans le répertoire: %s' % stockdir
|
description += u'\n\nVous trouverez les éventuels fichiers dans le répertoire: %s' % stockdir
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
description += u'\n\nCe message est en "%s", vous trouverez ce texte dans le répertoire: %s' % (part.get_content_type(), stockdir)
|
description += u'\n\nCe message est en "%s", vous trouverez ce texte dans le répertoire: %s' % (part.get_content_type(), stockdir)
|
||||||
elif part.get_content_type() == 'message/rfc822':
|
elif part.get_content_type() == 'message/rfc822':
|
||||||
continue
|
continue
|
||||||
if not os.path.isdir(newdir):
|
if not os.path.isdir(newdir):
|
||||||
os.mkdir(newdir)
|
os.mkdir(newdir)
|
||||||
|
|
||||||
counter += 1
|
counter += 1
|
||||||
fd = file(os.path.join(newdir, filename), 'w')
|
fd = file(os.path.join(newdir, filename), 'w')
|
||||||
fd.write(part.get_payload(decode=1))
|
fd.write(part.get_payload(decode=1))
|
||||||
fd.close()
|
fd.close()
|
||||||
else:
|
else:
|
||||||
description = msg.get_payload(decode=1).decode(msg.get_charsets()[0])
|
description = msg.get_payload(decode=1).decode(msg.get_charsets()[0])
|
||||||
|
|
||||||
project = self.project_re.search(subject)
|
project = self.project_re.search(subject)
|
||||||
if project:
|
if project:
|
||||||
project = project.groups()[0]
|
project = project.groups()[0]
|
||||||
else:
|
else:
|
||||||
project = ''
|
project = ''
|
||||||
|
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
self.rpc(('res.partner.event', 'create', {'name' : subject, 'partner_id' : partner, 'description' : description, 'project' : project}))
|
self.rpc(('res.partner.event', 'create', {'name' : subject, 'partner_id' : partner, 'description' : description, 'project' : project}))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
rpc_dispatcher = CommandDispatcher(RPCProxy(4, 'admin'))
|
rpc_dispatcher = CommandDispatcher(RPCProxy(4, 'admin'))
|
||||||
dispatcher = CommandDispatcher(ReceiverEmail2Event(rpc_dispatcher))
|
dispatcher = CommandDispatcher(ReceiverEmail2Event(rpc_dispatcher))
|
||||||
parser = EmailParser(['To', 'Cc', 'From'], dispatcher)
|
parser = EmailParser(['To', 'Cc', 'From'], dispatcher)
|
||||||
parser.parse(email.message_from_file(sys.stdin))
|
parser.parse(email.message_from_file(sys.stdin))
|
||||||
|
|
|
@ -44,123 +44,123 @@ import release
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
if sys.version_info[:2] < (2, 4):
|
if sys.version_info[:2] < (2, 4):
|
||||||
from threadinglocal import local
|
from threadinglocal import local
|
||||||
else:
|
else:
|
||||||
from threading import local
|
from threading import local
|
||||||
|
|
||||||
# initialize a database with base/base.sql
|
# initialize a database with base/base.sql
|
||||||
def init_db(cr):
|
def init_db(cr):
|
||||||
import addons
|
import addons
|
||||||
f = addons.get_module_resource('base', 'base.sql')
|
f = addons.get_module_resource('base', 'base.sql')
|
||||||
for line in file(f).read().split(';'):
|
for line in file(f).read().split(';'):
|
||||||
if (len(line)>0) and (not line.isspace()):
|
if (len(line)>0) and (not line.isspace()):
|
||||||
cr.execute(line)
|
cr.execute(line)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
for i in addons.get_modules():
|
for i in addons.get_modules():
|
||||||
terp_file = addons.get_module_resource(i, '__terp__.py')
|
terp_file = addons.get_module_resource(i, '__terp__.py')
|
||||||
mod_path = addons.get_module_path(i)
|
mod_path = addons.get_module_path(i)
|
||||||
info = False
|
info = False
|
||||||
if os.path.isfile(terp_file) and not os.path.isfile(mod_path+'.zip'):
|
if os.path.isfile(terp_file) and not os.path.isfile(mod_path+'.zip'):
|
||||||
info = eval(file(terp_file).read())
|
info = eval(file(terp_file).read())
|
||||||
elif zipfile.is_zipfile(mod_path+'.zip'):
|
elif zipfile.is_zipfile(mod_path+'.zip'):
|
||||||
zfile = zipfile.ZipFile(mod_path+'.zip')
|
zfile = zipfile.ZipFile(mod_path+'.zip')
|
||||||
i = os.path.splitext(i)[0]
|
i = os.path.splitext(i)[0]
|
||||||
info = eval(zfile.read(os.path.join(i, '__terp__.py')))
|
info = eval(zfile.read(os.path.join(i, '__terp__.py')))
|
||||||
if info:
|
if info:
|
||||||
categs = info.get('category', 'Uncategorized').split('/')
|
categs = info.get('category', 'Uncategorized').split('/')
|
||||||
p_id = None
|
p_id = None
|
||||||
while categs:
|
while categs:
|
||||||
if p_id is not None:
|
if p_id is not None:
|
||||||
cr.execute('select id \
|
cr.execute('select id \
|
||||||
from ir_module_category \
|
from ir_module_category \
|
||||||
where name=%s and parent_id=%d', (categs[0], p_id))
|
where name=%s and parent_id=%d', (categs[0], p_id))
|
||||||
else:
|
else:
|
||||||
cr.execute('select id \
|
cr.execute('select id \
|
||||||
from ir_module_category \
|
from ir_module_category \
|
||||||
where name=%s and parent_id is NULL', (categs[0],))
|
where name=%s and parent_id is NULL', (categs[0],))
|
||||||
c_id = cr.fetchone()
|
c_id = cr.fetchone()
|
||||||
if not c_id:
|
if not c_id:
|
||||||
cr.execute('select nextval(\'ir_module_category_id_seq\')')
|
cr.execute('select nextval(\'ir_module_category_id_seq\')')
|
||||||
c_id = cr.fetchone()[0]
|
c_id = cr.fetchone()[0]
|
||||||
cr.execute('insert into ir_module_category \
|
cr.execute('insert into ir_module_category \
|
||||||
(id, name, parent_id) \
|
(id, name, parent_id) \
|
||||||
values (%d, %s, %d)', (c_id, categs[0], p_id))
|
values (%d, %s, %d)', (c_id, categs[0], p_id))
|
||||||
else:
|
else:
|
||||||
c_id = c_id[0]
|
c_id = c_id[0]
|
||||||
p_id = c_id
|
p_id = c_id
|
||||||
categs = categs[1:]
|
categs = categs[1:]
|
||||||
|
|
||||||
active = info.get('active', False)
|
active = info.get('active', False)
|
||||||
installable = info.get('installable', True)
|
installable = info.get('installable', True)
|
||||||
if installable:
|
if installable:
|
||||||
if active:
|
if active:
|
||||||
state = 'to install'
|
state = 'to install'
|
||||||
else:
|
else:
|
||||||
state = 'uninstalled'
|
state = 'uninstalled'
|
||||||
else:
|
else:
|
||||||
state = 'uninstallable'
|
state = 'uninstallable'
|
||||||
cr.execute('select nextval(\'ir_module_module_id_seq\')')
|
cr.execute('select nextval(\'ir_module_module_id_seq\')')
|
||||||
id = cr.fetchone()[0]
|
id = cr.fetchone()[0]
|
||||||
cr.execute('insert into ir_module_module \
|
cr.execute('insert into ir_module_module \
|
||||||
(id, author, latest_version, website, name, shortdesc, description, \
|
(id, author, latest_version, website, name, shortdesc, description, \
|
||||||
category_id, state) \
|
category_id, state) \
|
||||||
values (%d, %s, %s, %s, %s, %s, %s, %d, %s)', (
|
values (%d, %s, %s, %s, %s, %s, %s, %d, %s)', (
|
||||||
id, info.get('author', ''),
|
id, info.get('author', ''),
|
||||||
release.version.rsplit('.', 1)[0] + '.' + info.get('version', ''),
|
release.version.rsplit('.', 1)[0] + '.' + info.get('version', ''),
|
||||||
info.get('website', ''), i, info.get('name', False),
|
info.get('website', ''), i, info.get('name', False),
|
||||||
info.get('description', ''), p_id, state))
|
info.get('description', ''), p_id, state))
|
||||||
dependencies = info.get('depends', [])
|
dependencies = info.get('depends', [])
|
||||||
for d in dependencies:
|
for d in dependencies:
|
||||||
cr.execute('insert into ir_module_module_dependency \
|
cr.execute('insert into ir_module_module_dependency \
|
||||||
(module_id,name) values (%s, %s)', (id, d))
|
(module_id,name) values (%s, %s)', (id, d))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
def find_in_path(name):
|
def find_in_path(name):
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
sep = ';'
|
sep = ';'
|
||||||
else:
|
else:
|
||||||
sep = ':'
|
sep = ':'
|
||||||
path = [dir for dir in os.environ['PATH'].split(sep)
|
path = [dir for dir in os.environ['PATH'].split(sep)
|
||||||
if os.path.isdir(dir)]
|
if os.path.isdir(dir)]
|
||||||
for dir in path:
|
for dir in path:
|
||||||
val = os.path.join(dir, name)
|
val = os.path.join(dir, name)
|
||||||
if os.path.isfile(val) or os.path.islink(val):
|
if os.path.isfile(val) or os.path.islink(val):
|
||||||
return val
|
return val
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def find_pg_tool(name):
|
def find_pg_tool(name):
|
||||||
if config['pg_path'] and config['pg_path'] != 'None':
|
if config['pg_path'] and config['pg_path'] != 'None':
|
||||||
return os.path.join(config['pg_path'], name)
|
return os.path.join(config['pg_path'], name)
|
||||||
else:
|
else:
|
||||||
return find_in_path(name)
|
return find_in_path(name)
|
||||||
|
|
||||||
def exec_pg_command(name, *args):
|
def exec_pg_command(name, *args):
|
||||||
prog = find_pg_tool(name)
|
prog = find_pg_tool(name)
|
||||||
if not prog:
|
if not prog:
|
||||||
raise Exception('Couldn\'t find %s' % name)
|
raise Exception('Couldn\'t find %s' % name)
|
||||||
args2 = (os.path.basename(prog),) + args
|
args2 = (os.path.basename(prog),) + args
|
||||||
return os.spawnv(os.P_WAIT, prog, args2)
|
return os.spawnv(os.P_WAIT, prog, args2)
|
||||||
|
|
||||||
def exec_pg_command_pipe(name, *args):
|
def exec_pg_command_pipe(name, *args):
|
||||||
prog = find_pg_tool(name)
|
prog = find_pg_tool(name)
|
||||||
if not prog:
|
if not prog:
|
||||||
raise Exception('Couldn\'t find %s' % name)
|
raise Exception('Couldn\'t find %s' % name)
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
cmd = '"' + prog + '" ' + ' '.join(args)
|
cmd = '"' + prog + '" ' + ' '.join(args)
|
||||||
else:
|
else:
|
||||||
cmd = prog + ' ' + ' '.join(args)
|
cmd = prog + ' ' + ' '.join(args)
|
||||||
return os.popen2(cmd, 'b')
|
return os.popen2(cmd, 'b')
|
||||||
|
|
||||||
def exec_command_pipe(name, *args):
|
def exec_command_pipe(name, *args):
|
||||||
prog = find_in_path(name)
|
prog = find_in_path(name)
|
||||||
if not prog:
|
if not prog:
|
||||||
raise Exception('Couldn\'t find %s' % name)
|
raise Exception('Couldn\'t find %s' % name)
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
cmd = '"'+prog+'" '+' '.join(args)
|
cmd = '"'+prog+'" '+' '.join(args)
|
||||||
else:
|
else:
|
||||||
cmd = prog+' '+' '.join(args)
|
cmd = prog+' '+' '.join(args)
|
||||||
return os.popen2(cmd, 'b')
|
return os.popen2(cmd, 'b')
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# File paths
|
# File paths
|
||||||
|
@ -169,89 +169,89 @@ def exec_command_pipe(name, *args):
|
||||||
#file_path_addons = os.path.join(file_path_root, 'addons')
|
#file_path_addons = os.path.join(file_path_root, 'addons')
|
||||||
|
|
||||||
def file_open(name, mode="r", subdir='addons', pathinfo=False):
|
def file_open(name, mode="r", subdir='addons', pathinfo=False):
|
||||||
"""Open a file from the Tiny ERP root, using a subdir folder.
|
"""Open a file from the Tiny ERP root, using a subdir folder.
|
||||||
|
|
||||||
>>> file_open('hr/report/timesheer.xsl')
|
>>> file_open('hr/report/timesheer.xsl')
|
||||||
>>> file_open('addons/hr/report/timesheet.xsl')
|
>>> file_open('addons/hr/report/timesheet.xsl')
|
||||||
>>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)
|
>>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)
|
||||||
|
|
||||||
@param name: name of the file
|
@param name: name of the file
|
||||||
@param mode: file open mode
|
@param mode: file open mode
|
||||||
@param subdir: subdirectory
|
@param subdir: subdirectory
|
||||||
@param pathinfo: if True returns tupple (fileobject, filepath)
|
@param pathinfo: if True returns tupple (fileobject, filepath)
|
||||||
|
|
||||||
@return: fileobject if pathinfo is False else (fileobject, filepath)
|
@return: fileobject if pathinfo is False else (fileobject, filepath)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
adp = os.path.normcase(os.path.abspath(config['addons_path']))
|
adp = os.path.normcase(os.path.abspath(config['addons_path']))
|
||||||
rtp = os.path.normcase(os.path.abspath(config['root_path']))
|
rtp = os.path.normcase(os.path.abspath(config['root_path']))
|
||||||
|
|
||||||
if name.replace(os.path.sep, '/').startswith('addons/'):
|
if name.replace(os.path.sep, '/').startswith('addons/'):
|
||||||
subdir = 'addons'
|
subdir = 'addons'
|
||||||
name = name[7:]
|
name = name[7:]
|
||||||
|
|
||||||
# First try to locate in addons_path
|
# First try to locate in addons_path
|
||||||
if subdir:
|
if subdir:
|
||||||
subdir2 = subdir
|
subdir2 = subdir
|
||||||
if subdir2.replace(os.path.sep, '/').startswith('addons/'):
|
if subdir2.replace(os.path.sep, '/').startswith('addons/'):
|
||||||
subdir2 = subdir2[7:]
|
subdir2 = subdir2[7:]
|
||||||
|
|
||||||
subdir2 = (subdir2 != 'addons' or None) and subdir2
|
subdir2 = (subdir2 != 'addons' or None) and subdir2
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if subdir2:
|
if subdir2:
|
||||||
fn = os.path.join(adp, subdir2, name)
|
fn = os.path.join(adp, subdir2, name)
|
||||||
else:
|
else:
|
||||||
fn = os.path.join(adp, name)
|
fn = os.path.join(adp, name)
|
||||||
fn = os.path.normpath(fn)
|
fn = os.path.normpath(fn)
|
||||||
fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
|
fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
|
||||||
if pathinfo:
|
if pathinfo:
|
||||||
return fo, fn
|
return fo, fn
|
||||||
return fo
|
return fo
|
||||||
except IOError, e:
|
except IOError, e:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if subdir:
|
if subdir:
|
||||||
name = os.path.join(rtp, subdir, name)
|
name = os.path.join(rtp, subdir, name)
|
||||||
else:
|
else:
|
||||||
name = os.path.join(rtp, name)
|
name = os.path.join(rtp, name)
|
||||||
|
|
||||||
name = os.path.normpath(name)
|
name = os.path.normpath(name)
|
||||||
|
|
||||||
# Check for a zipfile in the path
|
# Check for a zipfile in the path
|
||||||
head = name
|
head = name
|
||||||
zipname = False
|
zipname = False
|
||||||
name2 = False
|
name2 = False
|
||||||
while True:
|
while True:
|
||||||
head, tail = os.path.split(head)
|
head, tail = os.path.split(head)
|
||||||
if not tail:
|
if not tail:
|
||||||
break
|
break
|
||||||
if zipname:
|
if zipname:
|
||||||
zipname = os.path.join(tail, zipname)
|
zipname = os.path.join(tail, zipname)
|
||||||
else:
|
else:
|
||||||
zipname = tail
|
zipname = tail
|
||||||
if zipfile.is_zipfile(head+'.zip'):
|
if zipfile.is_zipfile(head+'.zip'):
|
||||||
import StringIO
|
import StringIO
|
||||||
zfile = zipfile.ZipFile(head+'.zip')
|
zfile = zipfile.ZipFile(head+'.zip')
|
||||||
try:
|
try:
|
||||||
fo = StringIO.StringIO(zfile.read(os.path.join(
|
fo = StringIO.StringIO(zfile.read(os.path.join(
|
||||||
os.path.basename(head), zipname).replace(
|
os.path.basename(head), zipname).replace(
|
||||||
os.sep, '/')))
|
os.sep, '/')))
|
||||||
|
|
||||||
if pathinfo:
|
if pathinfo:
|
||||||
return fo, name
|
return fo, name
|
||||||
return fo
|
return fo
|
||||||
except:
|
except:
|
||||||
name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
|
name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
|
||||||
pass
|
pass
|
||||||
for i in (name2, name):
|
for i in (name2, name):
|
||||||
if i and os.path.isfile(i):
|
if i and os.path.isfile(i):
|
||||||
fo = file(i, mode)
|
fo = file(i, mode)
|
||||||
if pathinfo:
|
if pathinfo:
|
||||||
return fo, i
|
return fo, i
|
||||||
return fo
|
return fo
|
||||||
|
|
||||||
raise IOError, 'File not found : '+str(name)
|
raise IOError, 'File not found : '+str(name)
|
||||||
|
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
|
@ -294,244 +294,244 @@ def flatten(list):
|
||||||
# Emails
|
# Emails
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, tinycrm=False):
|
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, tinycrm=False):
|
||||||
"""Send an email."""
|
"""Send an email."""
|
||||||
if not email_cc:
|
if not email_cc:
|
||||||
email_cc=[]
|
email_cc=[]
|
||||||
if not email_bcc:
|
if not email_bcc:
|
||||||
email_bcc=[]
|
email_bcc=[]
|
||||||
import smtplib
|
import smtplib
|
||||||
from email.MIMEText import MIMEText
|
from email.MIMEText import MIMEText
|
||||||
from email.MIMEMultipart import MIMEMultipart
|
from email.MIMEMultipart import MIMEMultipart
|
||||||
from email.Header import Header
|
from email.Header import Header
|
||||||
from email.Utils import formatdate, COMMASPACE
|
from email.Utils import formatdate, COMMASPACE
|
||||||
|
|
||||||
msg = MIMEText(body or '', _charset='utf-8')
|
msg = MIMEText(body or '', _charset='utf-8')
|
||||||
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
|
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
|
||||||
msg['From'] = email_from
|
msg['From'] = email_from
|
||||||
del msg['Reply-To']
|
del msg['Reply-To']
|
||||||
if reply_to:
|
if reply_to:
|
||||||
msg['Reply-To'] = msg['From']+', '+reply_to
|
msg['Reply-To'] = msg['From']+', '+reply_to
|
||||||
msg['To'] = COMMASPACE.join(email_to)
|
msg['To'] = COMMASPACE.join(email_to)
|
||||||
if email_cc:
|
if email_cc:
|
||||||
msg['Cc'] = COMMASPACE.join(email_cc)
|
msg['Cc'] = COMMASPACE.join(email_cc)
|
||||||
if email_bcc:
|
if email_bcc:
|
||||||
msg['Bcc'] = COMMASPACE.join(email_bcc)
|
msg['Bcc'] = COMMASPACE.join(email_bcc)
|
||||||
msg['Date'] = formatdate(localtime=True)
|
msg['Date'] = formatdate(localtime=True)
|
||||||
if tinycrm:
|
if tinycrm:
|
||||||
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
|
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
|
||||||
try:
|
try:
|
||||||
s = smtplib.SMTP()
|
s = smtplib.SMTP()
|
||||||
s.connect(config['smtp_server'])
|
s.connect(config['smtp_server'])
|
||||||
if config['smtp_user'] or config['smtp_password']:
|
if config['smtp_user'] or config['smtp_password']:
|
||||||
s.login(config['smtp_user'], config['smtp_password'])
|
s.login(config['smtp_user'], config['smtp_password'])
|
||||||
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
|
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
|
||||||
s.quit()
|
s.quit()
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
import logging
|
import logging
|
||||||
logging.getLogger().error(str(e))
|
logging.getLogger().error(str(e))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# Emails
|
# Emails
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
def email_send_attach(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, attach=None, tinycrm=False):
|
def email_send_attach(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, attach=None, tinycrm=False):
|
||||||
"""Send an email."""
|
"""Send an email."""
|
||||||
if not email_cc:
|
if not email_cc:
|
||||||
email_cc=[]
|
email_cc=[]
|
||||||
if not email_bcc:
|
if not email_bcc:
|
||||||
email_bcc=[]
|
email_bcc=[]
|
||||||
if not attach:
|
if not attach:
|
||||||
attach=[]
|
attach=[]
|
||||||
import smtplib
|
import smtplib
|
||||||
from email.MIMEText import MIMEText
|
from email.MIMEText import MIMEText
|
||||||
from email.MIMEBase import MIMEBase
|
from email.MIMEBase import MIMEBase
|
||||||
from email.MIMEMultipart import MIMEMultipart
|
from email.MIMEMultipart import MIMEMultipart
|
||||||
from email.Header import Header
|
from email.Header import Header
|
||||||
from email.Utils import formatdate, COMMASPACE
|
from email.Utils import formatdate, COMMASPACE
|
||||||
from email import Encoders
|
from email import Encoders
|
||||||
|
|
||||||
msg = MIMEMultipart()
|
msg = MIMEMultipart()
|
||||||
|
|
||||||
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
|
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
|
||||||
msg['From'] = email_from
|
msg['From'] = email_from
|
||||||
del msg['Reply-To']
|
del msg['Reply-To']
|
||||||
if reply_to:
|
if reply_to:
|
||||||
msg['Reply-To'] = reply_to
|
msg['Reply-To'] = reply_to
|
||||||
msg['To'] = COMMASPACE.join(email_to)
|
msg['To'] = COMMASPACE.join(email_to)
|
||||||
if email_cc:
|
if email_cc:
|
||||||
msg['Cc'] = COMMASPACE.join(email_cc)
|
msg['Cc'] = COMMASPACE.join(email_cc)
|
||||||
if email_bcc:
|
if email_bcc:
|
||||||
msg['Bcc'] = COMMASPACE.join(email_bcc)
|
msg['Bcc'] = COMMASPACE.join(email_bcc)
|
||||||
if tinycrm:
|
if tinycrm:
|
||||||
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
|
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
|
||||||
msg['Date'] = formatdate(localtime=True)
|
msg['Date'] = formatdate(localtime=True)
|
||||||
msg.attach( MIMEText(body or '', _charset='utf-8') )
|
msg.attach( MIMEText(body or '', _charset='utf-8') )
|
||||||
for (fname,fcontent) in attach:
|
for (fname,fcontent) in attach:
|
||||||
part = MIMEBase('application', "octet-stream")
|
part = MIMEBase('application', "octet-stream")
|
||||||
part.set_payload( fcontent )
|
part.set_payload( fcontent )
|
||||||
Encoders.encode_base64(part)
|
Encoders.encode_base64(part)
|
||||||
part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
|
part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
|
||||||
msg.attach(part)
|
msg.attach(part)
|
||||||
try:
|
try:
|
||||||
s = smtplib.SMTP()
|
s = smtplib.SMTP()
|
||||||
s.connect(config['smtp_server'])
|
s.connect(config['smtp_server'])
|
||||||
if config['smtp_user'] or config['smtp_password']:
|
if config['smtp_user'] or config['smtp_password']:
|
||||||
s.login(config['smtp_user'], config['smtp_password'])
|
s.login(config['smtp_user'], config['smtp_password'])
|
||||||
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
|
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
|
||||||
s.quit()
|
s.quit()
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
import logging
|
import logging
|
||||||
logging.getLogger().error(str(e))
|
logging.getLogger().error(str(e))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# SMS
|
# SMS
|
||||||
#----------------------------------------------------------
|
#----------------------------------------------------------
|
||||||
# text must be latin-1 encoded
|
# text must be latin-1 encoded
|
||||||
def sms_send(user, password, api_id, text, to):
|
def sms_send(user, password, api_id, text, to):
|
||||||
import urllib
|
import urllib
|
||||||
params = urllib.urlencode({'user': user, 'password': password, 'api_id': api_id, 'text': text, 'to':to})
|
params = urllib.urlencode({'user': user, 'password': password, 'api_id': api_id, 'text': text, 'to':to})
|
||||||
#f = urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
|
#f = urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
|
||||||
f = urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
|
f = urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
|
||||||
print f.read()
|
print f.read()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
#---------------------------------------------------------
|
#---------------------------------------------------------
|
||||||
# Class that stores an updateable string (used in wizards)
|
# Class that stores an updateable string (used in wizards)
|
||||||
#---------------------------------------------------------
|
#---------------------------------------------------------
|
||||||
class UpdateableStr(local):
|
class UpdateableStr(local):
|
||||||
|
|
||||||
def __init__(self, string=''):
|
def __init__(self, string=''):
|
||||||
self.string = string
|
self.string = string
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.string)
|
return str(self.string)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return str(self.string)
|
return str(self.string)
|
||||||
|
|
||||||
def __nonzero__(self):
|
def __nonzero__(self):
|
||||||
return bool(self.string)
|
return bool(self.string)
|
||||||
|
|
||||||
|
|
||||||
class UpdateableDict(local):
|
class UpdateableDict(local):
|
||||||
'''Stores an updateable dict to use in wizards'''
|
'''Stores an updateable dict to use in wizards'''
|
||||||
|
|
||||||
def __init__(self, dict=None):
|
def __init__(self, dict=None):
|
||||||
if dict is None:
|
if dict is None:
|
||||||
dict = {}
|
dict = {}
|
||||||
self.dict = dict
|
self.dict = dict
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.dict)
|
return str(self.dict)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return str(self.dict)
|
return str(self.dict)
|
||||||
|
|
||||||
def clear(self):
|
def clear(self):
|
||||||
return self.dict.clear()
|
return self.dict.clear()
|
||||||
|
|
||||||
def keys(self):
|
def keys(self):
|
||||||
return self.dict.keys()
|
return self.dict.keys()
|
||||||
|
|
||||||
def __setitem__(self, i, y):
|
def __setitem__(self, i, y):
|
||||||
self.dict.__setitem__(i, y)
|
self.dict.__setitem__(i, y)
|
||||||
|
|
||||||
def __getitem__(self, i):
|
def __getitem__(self, i):
|
||||||
return self.dict.__getitem__(i)
|
return self.dict.__getitem__(i)
|
||||||
|
|
||||||
def copy(self):
|
def copy(self):
|
||||||
return self.dict.copy()
|
return self.dict.copy()
|
||||||
|
|
||||||
def iteritems(self):
|
def iteritems(self):
|
||||||
return self.dict.iteritems()
|
return self.dict.iteritems()
|
||||||
|
|
||||||
def iterkeys(self):
|
def iterkeys(self):
|
||||||
return self.dict.iterkeys()
|
return self.dict.iterkeys()
|
||||||
|
|
||||||
def itervalues(self):
|
def itervalues(self):
|
||||||
return self.dict.itervalues()
|
return self.dict.itervalues()
|
||||||
|
|
||||||
def pop(self, k, d=None):
|
def pop(self, k, d=None):
|
||||||
return self.dict.pop(k, d)
|
return self.dict.pop(k, d)
|
||||||
|
|
||||||
def popitem(self):
|
def popitem(self):
|
||||||
return self.dict.popitem()
|
return self.dict.popitem()
|
||||||
|
|
||||||
def setdefault(self, k, d=None):
|
def setdefault(self, k, d=None):
|
||||||
return self.dict.setdefault(k, d)
|
return self.dict.setdefault(k, d)
|
||||||
|
|
||||||
def update(self, E, **F):
|
def update(self, E, **F):
|
||||||
return self.dict.update(E, F)
|
return self.dict.update(E, F)
|
||||||
|
|
||||||
def values(self):
|
def values(self):
|
||||||
return self.dict.values()
|
return self.dict.values()
|
||||||
|
|
||||||
def get(self, k, d=None):
|
def get(self, k, d=None):
|
||||||
return self.dict.get(k, d)
|
return self.dict.get(k, d)
|
||||||
|
|
||||||
def has_key(self, k):
|
def has_key(self, k):
|
||||||
return self.dict.has_key(k)
|
return self.dict.has_key(k)
|
||||||
|
|
||||||
def items(self):
|
def items(self):
|
||||||
return self.dict.items()
|
return self.dict.items()
|
||||||
|
|
||||||
def __cmp__(self, y):
|
def __cmp__(self, y):
|
||||||
return self.dict.__cmp__(y)
|
return self.dict.__cmp__(y)
|
||||||
|
|
||||||
def __contains__(self, k):
|
def __contains__(self, k):
|
||||||
return self.dict.__contains__(k)
|
return self.dict.__contains__(k)
|
||||||
|
|
||||||
def __delitem__(self, y):
|
def __delitem__(self, y):
|
||||||
return self.dict.__delitem__(y)
|
return self.dict.__delitem__(y)
|
||||||
|
|
||||||
def __eq__(self, y):
|
def __eq__(self, y):
|
||||||
return self.dict.__eq__(y)
|
return self.dict.__eq__(y)
|
||||||
|
|
||||||
def __ge__(self, y):
|
def __ge__(self, y):
|
||||||
return self.dict.__ge__(y)
|
return self.dict.__ge__(y)
|
||||||
|
|
||||||
def __getitem__(self, y):
|
def __getitem__(self, y):
|
||||||
return self.dict.__getitem__(y)
|
return self.dict.__getitem__(y)
|
||||||
|
|
||||||
def __gt__(self, y):
|
def __gt__(self, y):
|
||||||
return self.dict.__gt__(y)
|
return self.dict.__gt__(y)
|
||||||
|
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
return self.dict.__hash__()
|
return self.dict.__hash__()
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return self.dict.__iter__()
|
return self.dict.__iter__()
|
||||||
|
|
||||||
def __le__(self, y):
|
def __le__(self, y):
|
||||||
return self.dict.__le__(y)
|
return self.dict.__le__(y)
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
return self.dict.__len__()
|
return self.dict.__len__()
|
||||||
|
|
||||||
def __lt__(self, y):
|
def __lt__(self, y):
|
||||||
return self.dict.__lt__(y)
|
return self.dict.__lt__(y)
|
||||||
|
|
||||||
def __ne__(self, y):
|
def __ne__(self, y):
|
||||||
return self.dict.__ne__(y)
|
return self.dict.__ne__(y)
|
||||||
|
|
||||||
|
|
||||||
# Don't use ! Use res.currency.round()
|
# Don't use ! Use res.currency.round()
|
||||||
class currency(float):
|
class currency(float):
|
||||||
|
|
||||||
def __init__(self, value, accuracy=2, rounding=None):
|
def __init__(self, value, accuracy=2, rounding=None):
|
||||||
if rounding is None:
|
if rounding is None:
|
||||||
rounding=10**-accuracy
|
rounding=10**-accuracy
|
||||||
self.rounding=rounding
|
self.rounding=rounding
|
||||||
self.accuracy=accuracy
|
self.accuracy=accuracy
|
||||||
|
|
||||||
def __new__(cls, value, accuracy=2, rounding=None):
|
def __new__(cls, value, accuracy=2, rounding=None):
|
||||||
return float.__new__(cls, round(value, accuracy))
|
return float.__new__(cls, round(value, accuracy))
|
||||||
|
|
||||||
#def __str__(self):
|
#def __str__(self):
|
||||||
# display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
|
# display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
|
||||||
# return str(display_value)
|
# return str(display_value)
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -539,99 +539,99 @@ class currency(float):
|
||||||
# Timeout: 0 = no timeout, otherwise in seconds
|
# Timeout: 0 = no timeout, otherwise in seconds
|
||||||
#
|
#
|
||||||
class cache(object):
|
class cache(object):
|
||||||
def __init__(self, timeout=10000, skiparg=2):
|
def __init__(self, timeout=10000, skiparg=2):
|
||||||
self.timeout = timeout
|
self.timeout = timeout
|
||||||
self.cache = {}
|
self.cache = {}
|
||||||
|
|
||||||
def __call__(self, fn):
|
def __call__(self, fn):
|
||||||
arg_names = inspect.getargspec(fn)[0][2:]
|
arg_names = inspect.getargspec(fn)[0][2:]
|
||||||
def cached_result(self2, cr=None, *args, **kwargs):
|
def cached_result(self2, cr=None, *args, **kwargs):
|
||||||
if cr is None:
|
if cr is None:
|
||||||
self.cache = {}
|
self.cache = {}
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# Update named arguments with positional argument values
|
# Update named arguments with positional argument values
|
||||||
kwargs.update(dict(zip(arg_names, args)))
|
kwargs.update(dict(zip(arg_names, args)))
|
||||||
kwargs = kwargs.items()
|
kwargs = kwargs.items()
|
||||||
kwargs.sort()
|
kwargs.sort()
|
||||||
|
|
||||||
# Work out key as a tuple of ('argname', value) pairs
|
# Work out key as a tuple of ('argname', value) pairs
|
||||||
key = (('dbname', cr.dbname),) + tuple(kwargs)
|
key = (('dbname', cr.dbname),) + tuple(kwargs)
|
||||||
|
|
||||||
# Check cache and return cached value if possible
|
# Check cache and return cached value if possible
|
||||||
if key in self.cache:
|
if key in self.cache:
|
||||||
(value, last_time) = self.cache[key]
|
(value, last_time) = self.cache[key]
|
||||||
mintime = time.time() - self.timeout
|
mintime = time.time() - self.timeout
|
||||||
if self.timeout <= 0 or mintime <= last_time:
|
if self.timeout <= 0 or mintime <= last_time:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
# Work out new value, cache it and return it
|
# Work out new value, cache it and return it
|
||||||
# Should copy() this value to avoid futur modf of the cacle ?
|
# Should copy() this value to avoid futur modf of the cacle ?
|
||||||
result = fn(self2,cr,**dict(kwargs))
|
result = fn(self2,cr,**dict(kwargs))
|
||||||
|
|
||||||
self.cache[key] = (result, time.time())
|
self.cache[key] = (result, time.time())
|
||||||
return result
|
return result
|
||||||
return cached_result
|
return cached_result
|
||||||
|
|
||||||
def to_xml(s):
|
def to_xml(s):
|
||||||
return s.replace('&','&').replace('<','<').replace('>','>')
|
return s.replace('&','&').replace('<','<').replace('>','>')
|
||||||
|
|
||||||
def get_languages():
|
def get_languages():
|
||||||
languages={
|
languages={
|
||||||
'zh_CN': 'Chinese (CN)',
|
'zh_CN': 'Chinese (CN)',
|
||||||
'zh_TW': 'Chinese (TW)',
|
'zh_TW': 'Chinese (TW)',
|
||||||
'cs_CZ': 'Czech',
|
'cs_CZ': 'Czech',
|
||||||
'de_DE': 'Deutsch',
|
'de_DE': 'Deutsch',
|
||||||
'es_AR': 'Español (Argentina)',
|
'es_AR': 'Español (Argentina)',
|
||||||
'es_ES': 'Español (España)',
|
'es_ES': 'Español (España)',
|
||||||
'fr_FR': 'Français',
|
'fr_FR': 'Français',
|
||||||
'fr_CH': 'Français (Suisse)',
|
'fr_CH': 'Français (Suisse)',
|
||||||
'en_EN': 'English (default)',
|
'en_EN': 'English (default)',
|
||||||
'hu_HU': 'Hungarian',
|
'hu_HU': 'Hungarian',
|
||||||
'it_IT': 'Italiano',
|
'it_IT': 'Italiano',
|
||||||
'pt_BR': 'Portugese (Brasil)',
|
'pt_BR': 'Portugese (Brasil)',
|
||||||
'pt_PT': 'Portugese (Portugal)',
|
'pt_PT': 'Portugese (Portugal)',
|
||||||
'nl_NL': 'Nederlands',
|
'nl_NL': 'Nederlands',
|
||||||
'ro_RO': 'Romanian',
|
'ro_RO': 'Romanian',
|
||||||
'ru_RU': 'Russian',
|
'ru_RU': 'Russian',
|
||||||
'sv_SE': 'Swedish',
|
'sv_SE': 'Swedish',
|
||||||
}
|
}
|
||||||
return languages
|
return languages
|
||||||
|
|
||||||
def scan_languages():
|
def scan_languages():
|
||||||
import glob
|
import glob
|
||||||
file_list = [os.path.splitext(os.path.basename(f))[0] for f in glob.glob(os.path.join(config['root_path'], 'i18n', '*.csv'))]
|
file_list = [os.path.splitext(os.path.basename(f))[0] for f in glob.glob(os.path.join(config['root_path'], 'i18n', '*.csv'))]
|
||||||
lang_dict = get_languages()
|
lang_dict = get_languages()
|
||||||
return [(lang, lang_dict.get(lang, lang)) for lang in file_list]
|
return [(lang, lang_dict.get(lang, lang)) for lang in file_list]
|
||||||
|
|
||||||
|
|
||||||
def get_user_companies(cr, user):
|
def get_user_companies(cr, user):
|
||||||
def _get_company_children(cr, ids):
|
def _get_company_children(cr, ids):
|
||||||
if not ids:
|
if not ids:
|
||||||
return []
|
return []
|
||||||
cr.execute('SELECT id FROM res_company WHERE parent_id = any(array[%s])' %(','.join([str(x) for x in ids]),))
|
cr.execute('SELECT id FROM res_company WHERE parent_id = any(array[%s])' %(','.join([str(x) for x in ids]),))
|
||||||
res=[x[0] for x in cr.fetchall()]
|
res=[x[0] for x in cr.fetchall()]
|
||||||
res.extend(_get_company_children(cr, res))
|
res.extend(_get_company_children(cr, res))
|
||||||
return res
|
return res
|
||||||
cr.execute('SELECT comp.id FROM res_company AS comp, res_users AS u WHERE u.id = %d AND comp.id = u.company_id' % (user,))
|
cr.execute('SELECT comp.id FROM res_company AS comp, res_users AS u WHERE u.id = %d AND comp.id = u.company_id' % (user,))
|
||||||
compids=[cr.fetchone()[0]]
|
compids=[cr.fetchone()[0]]
|
||||||
compids.extend(_get_company_children(cr, compids))
|
compids.extend(_get_company_children(cr, compids))
|
||||||
return compids
|
return compids
|
||||||
|
|
||||||
def mod10r(number):
|
def mod10r(number):
|
||||||
"""
|
"""
|
||||||
Input number : account or invoice number
|
Input number : account or invoice number
|
||||||
Output return: the same number completed with the recursive mod10
|
Output return: the same number completed with the recursive mod10
|
||||||
key
|
key
|
||||||
"""
|
"""
|
||||||
codec=[0,9,4,6,8,2,7,1,3,5]
|
codec=[0,9,4,6,8,2,7,1,3,5]
|
||||||
report = 0
|
report = 0
|
||||||
result=""
|
result=""
|
||||||
for digit in number:
|
for digit in number:
|
||||||
result += digit
|
result += digit
|
||||||
if digit.isdigit():
|
if digit.isdigit():
|
||||||
report = codec[ (int(digit) + report) % 10 ]
|
report = codec[ (int(digit) + report) % 10 ]
|
||||||
return result + str((10 - report) % 10)
|
return result + str((10 - report) % 10)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -38,7 +38,7 @@ import inspect
|
||||||
import mx.DateTime as mxdt
|
import mx.DateTime as mxdt
|
||||||
|
|
||||||
class UNIX_LINE_TERMINATOR(csv.excel):
|
class UNIX_LINE_TERMINATOR(csv.excel):
|
||||||
lineterminator = '\n'
|
lineterminator = '\n'
|
||||||
|
|
||||||
csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
|
csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
|
||||||
|
|
||||||
|
@ -46,15 +46,15 @@ csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
|
||||||
# TODO: a caching method
|
# TODO: a caching method
|
||||||
#
|
#
|
||||||
def translate(cr, name, source_type, lang, source=None):
|
def translate(cr, name, source_type, lang, source=None):
|
||||||
if source and name:
|
if source and name:
|
||||||
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s and src=%s', (lang, source_type, str(name), source))
|
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s and src=%s', (lang, source_type, str(name), source))
|
||||||
elif name:
|
elif name:
|
||||||
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s', (lang, source_type, str(name)))
|
cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s', (lang, source_type, str(name)))
|
||||||
elif source:
|
elif source:
|
||||||
cr.execute('select value from ir_translation where lang=%s and type=%s and src=%s', (lang, source_type, source))
|
cr.execute('select value from ir_translation where lang=%s and type=%s and src=%s', (lang, source_type, source))
|
||||||
res_trans = cr.fetchone()
|
res_trans = cr.fetchone()
|
||||||
res = res_trans and res_trans[0] or False
|
res = res_trans and res_trans[0] or False
|
||||||
return res
|
return res
|
||||||
|
|
||||||
class GettextAlias(object):
|
class GettextAlias(object):
|
||||||
def __call__(self, source):
|
def __call__(self, source):
|
||||||
|
@ -71,469 +71,469 @@ _ = GettextAlias()
|
||||||
|
|
||||||
# class to handle po files
|
# class to handle po files
|
||||||
class TinyPoFile(object):
|
class TinyPoFile(object):
|
||||||
def __init__(self, buffer):
|
def __init__(self, buffer):
|
||||||
self.buffer = buffer
|
self.buffer = buffer
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
self.buffer.seek(0)
|
self.buffer.seek(0)
|
||||||
self.lines = self.buffer.readlines()
|
self.lines = self.buffer.readlines()
|
||||||
self.first = True
|
self.first = True
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def next(self):
|
def next(self):
|
||||||
def unquote(str):
|
def unquote(str):
|
||||||
return str[1:-1].replace("\\n", "\n") \
|
return str[1:-1].replace("\\n", "\n") \
|
||||||
.replace('\\"', "\"")
|
.replace('\\"', "\"")
|
||||||
|
|
||||||
type = name = res_id = source = trad = None
|
type = name = res_id = source = trad = None
|
||||||
|
|
||||||
line = None
|
line = None
|
||||||
while not line:
|
while not line:
|
||||||
if 0 == len(self.lines):
|
if 0 == len(self.lines):
|
||||||
raise StopIteration()
|
raise StopIteration()
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
|
|
||||||
while line.startswith('#'):
|
while line.startswith('#'):
|
||||||
if line.startswith('#:'):
|
if line.startswith('#:'):
|
||||||
type, name, res_id = line[2:].strip().split(':')
|
type, name, res_id = line[2:].strip().split(':')
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
if not line.startswith('msgid'):
|
if not line.startswith('msgid'):
|
||||||
raise Exception("malformed file")
|
raise Exception("malformed file")
|
||||||
source = line[7:-1]
|
source = line[7:-1]
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
if not source and self.first:
|
if not source and self.first:
|
||||||
# if the source is "" and it's the first msgid, it's the special
|
# if the source is "" and it's the first msgid, it's the special
|
||||||
# msgstr with the informations about the traduction and the
|
# msgstr with the informations about the traduction and the
|
||||||
# traductor; we skip it
|
# traductor; we skip it
|
||||||
while line:
|
while line:
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
return next()
|
return next()
|
||||||
|
|
||||||
while not line.startswith('msgstr'):
|
while not line.startswith('msgstr'):
|
||||||
if not line:
|
if not line:
|
||||||
raise Exception('malformed file')
|
raise Exception('malformed file')
|
||||||
source += unquote(line)
|
source += unquote(line)
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
|
|
||||||
trad = line[8:-1]
|
trad = line[8:-1]
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
while line:
|
while line:
|
||||||
trad += unquote(line)
|
trad += unquote(line)
|
||||||
line = self.lines.pop(0).strip()
|
line = self.lines.pop(0).strip()
|
||||||
|
|
||||||
self.first = False
|
self.first = False
|
||||||
return type, name, res_id, source, trad
|
return type, name, res_id, source, trad
|
||||||
|
|
||||||
def write_infos(self, modules):
|
def write_infos(self, modules):
|
||||||
import release
|
import release
|
||||||
self.buffer.write("# Translation of %(project)s.\n" \
|
self.buffer.write("# Translation of %(project)s.\n" \
|
||||||
"# This file containt the translation of the following modules:\n" \
|
"# This file containt the translation of the following modules:\n" \
|
||||||
"%(modules)s" \
|
"%(modules)s" \
|
||||||
"#\n" \
|
"#\n" \
|
||||||
"msgid \"\"\n" \
|
"msgid \"\"\n" \
|
||||||
"msgstr \"\"\n" \
|
"msgstr \"\"\n" \
|
||||||
"\"Project-Id-Version: %(project)s %(version)s\"\n" \
|
"\"Project-Id-Version: %(project)s %(version)s\"\n" \
|
||||||
"\"Report-Msgid-Bugs-To: %(bugmail)s\"\n" \
|
"\"Report-Msgid-Bugs-To: %(bugmail)s\"\n" \
|
||||||
"\"POT-Creation-Date: %(now)s\"\n" \
|
"\"POT-Creation-Date: %(now)s\"\n" \
|
||||||
"\"PO-Revision-Date: %(now)s\"\n" \
|
"\"PO-Revision-Date: %(now)s\"\n" \
|
||||||
"\"Last-Translator: <>\"\n" \
|
"\"Last-Translator: <>\"\n" \
|
||||||
"\"Language-Team: \"\n" \
|
"\"Language-Team: \"\n" \
|
||||||
"\"MIME-Version: 1.0\"\n" \
|
"\"MIME-Version: 1.0\"\n" \
|
||||||
"\"Content-Type: text/plain; charset=UTF-8\"\n" \
|
"\"Content-Type: text/plain; charset=UTF-8\"\n" \
|
||||||
"\"Content-Transfer-Encoding: \"\n" \
|
"\"Content-Transfer-Encoding: \"\n" \
|
||||||
"\"Plural-Forms: \"\n" \
|
"\"Plural-Forms: \"\n" \
|
||||||
"\n"
|
"\n"
|
||||||
|
|
||||||
% { 'project': release.description,
|
% { 'project': release.description,
|
||||||
'version': release.version,
|
'version': release.version,
|
||||||
'modules': reduce(lambda s, m: s + "#\t* %s\n" % m, modules, ""),
|
'modules': reduce(lambda s, m: s + "#\t* %s\n" % m, modules, ""),
|
||||||
'bugmail': 'support@tinyerp.com', #TODO: use variable from release
|
'bugmail': 'support@tinyerp.com', #TODO: use variable from release
|
||||||
'now': mxdt.ISO.strUTC(mxdt.ISO.DateTime.utc()),
|
'now': mxdt.ISO.strUTC(mxdt.ISO.DateTime.utc()),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
def write(self, module, type, name, res_id, source, trad):
|
def write(self, module, type, name, res_id, source, trad):
|
||||||
def quote(str):
|
def quote(str):
|
||||||
return '"%s"' % str.replace('"','\\"') \
|
return '"%s"' % str.replace('"','\\"') \
|
||||||
.replace('\n', '\\n"\n"')
|
.replace('\n', '\\n"\n"')
|
||||||
|
|
||||||
self.buffer.write("#. module: %s\n" \
|
self.buffer.write("#. module: %s\n" \
|
||||||
"#, python-format\n" \
|
"#, python-format\n" \
|
||||||
"#: %s:%s:%s\n" \
|
"#: %s:%s:%s\n" \
|
||||||
"msgid %s\n" \
|
"msgid %s\n" \
|
||||||
"msgstr %s\n\n" \
|
"msgstr %s\n\n" \
|
||||||
% (module, type, name, str(res_id), quote(source), quote(trad))
|
% (module, type, name, str(res_id), quote(source), quote(trad))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Methods to export the translation file
|
# Methods to export the translation file
|
||||||
|
|
||||||
def trans_export(lang, modules, buffer, format, dbname=None):
|
def trans_export(lang, modules, buffer, format, dbname=None):
|
||||||
trans = trans_generate(lang, modules, dbname)
|
trans = trans_generate(lang, modules, dbname)
|
||||||
if format == 'csv':
|
if format == 'csv':
|
||||||
writer=csv.writer(buffer, 'UNIX')
|
writer=csv.writer(buffer, 'UNIX')
|
||||||
for row in trans:
|
for row in trans:
|
||||||
writer.writerow(row)
|
writer.writerow(row)
|
||||||
elif format == 'po':
|
elif format == 'po':
|
||||||
trans.pop(0)
|
trans.pop(0)
|
||||||
writer = tools.TinyPoFile(buffer)
|
writer = tools.TinyPoFile(buffer)
|
||||||
writer.write_infos(modules)
|
writer.write_infos(modules)
|
||||||
for module, type, name, res_id, src, trad in trans:
|
for module, type, name, res_id, src, trad in trans:
|
||||||
writer.write(module, type, name, res_id, src, trad)
|
writer.write(module, type, name, res_id, src, trad)
|
||||||
else:
|
else:
|
||||||
raise Exception(_('Bad file format'))
|
raise Exception(_('Bad file format'))
|
||||||
del trans
|
del trans
|
||||||
|
|
||||||
|
|
||||||
def trans_parse_xsl(de):
|
def trans_parse_xsl(de):
|
||||||
res = []
|
res = []
|
||||||
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
||||||
if n.hasAttribute("t"):
|
if n.hasAttribute("t"):
|
||||||
for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
|
for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
|
||||||
l = m.data.strip().replace('\n',' ')
|
l = m.data.strip().replace('\n',' ')
|
||||||
if len(l):
|
if len(l):
|
||||||
res.append(l.encode("utf8"))
|
res.append(l.encode("utf8"))
|
||||||
res.extend(trans_parse_xsl(n))
|
res.extend(trans_parse_xsl(n))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def trans_parse_rml(de):
|
def trans_parse_rml(de):
|
||||||
res = []
|
res = []
|
||||||
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
||||||
for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
|
for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
|
||||||
string_list = [s.replace('\n', ' ').strip() for s in re.split('\[\[.+?\]\]', m.data)]
|
string_list = [s.replace('\n', ' ').strip() for s in re.split('\[\[.+?\]\]', m.data)]
|
||||||
for s in string_list:
|
for s in string_list:
|
||||||
if s:
|
if s:
|
||||||
res.append(s.encode("utf8"))
|
res.append(s.encode("utf8"))
|
||||||
res.extend(trans_parse_rml(n))
|
res.extend(trans_parse_rml(n))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def trans_parse_view(de):
|
def trans_parse_view(de):
|
||||||
res = []
|
res = []
|
||||||
if de.hasAttribute("string"):
|
if de.hasAttribute("string"):
|
||||||
s = de.getAttribute('string')
|
s = de.getAttribute('string')
|
||||||
if s:
|
if s:
|
||||||
res.append(s.encode("utf8"))
|
res.append(s.encode("utf8"))
|
||||||
if de.hasAttribute("sum"):
|
if de.hasAttribute("sum"):
|
||||||
s = de.getAttribute('sum')
|
s = de.getAttribute('sum')
|
||||||
if s:
|
if s:
|
||||||
res.append(s.encode("utf8"))
|
res.append(s.encode("utf8"))
|
||||||
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
|
||||||
res.extend(trans_parse_view(n))
|
res.extend(trans_parse_view(n))
|
||||||
return res
|
return res
|
||||||
|
|
||||||
# tests whether an object is in a list of modules
|
# tests whether an object is in a list of modules
|
||||||
def in_modules(object_name, modules):
|
def in_modules(object_name, modules):
|
||||||
if 'all' in modules:
|
if 'all' in modules:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
module_dict = {
|
module_dict = {
|
||||||
'ir': 'base',
|
'ir': 'base',
|
||||||
'res': 'base',
|
'res': 'base',
|
||||||
'workflow': 'base',
|
'workflow': 'base',
|
||||||
}
|
}
|
||||||
module = object_name.split('.')[0]
|
module = object_name.split('.')[0]
|
||||||
module = module_dict.get(module, module)
|
module = module_dict.get(module, module)
|
||||||
return module in modules
|
return module in modules
|
||||||
|
|
||||||
def trans_generate(lang, modules, dbname=None):
|
def trans_generate(lang, modules, dbname=None):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
if not dbname:
|
if not dbname:
|
||||||
dbname=tools.config['db_name']
|
dbname=tools.config['db_name']
|
||||||
pool = pooler.get_pool(dbname)
|
pool = pooler.get_pool(dbname)
|
||||||
trans_obj = pool.get('ir.translation')
|
trans_obj = pool.get('ir.translation')
|
||||||
model_data_obj = pool.get('ir.model.data')
|
model_data_obj = pool.get('ir.model.data')
|
||||||
cr = pooler.get_db(dbname).cursor()
|
cr = pooler.get_db(dbname).cursor()
|
||||||
uid = 1
|
uid = 1
|
||||||
l = pool.obj_pool.items()
|
l = pool.obj_pool.items()
|
||||||
l.sort()
|
l.sort()
|
||||||
|
|
||||||
query = 'SELECT name, model, res_id, module' \
|
query = 'SELECT name, model, res_id, module' \
|
||||||
' FROM ir_model_data'
|
' FROM ir_model_data'
|
||||||
if not 'all' in modules:
|
if not 'all' in modules:
|
||||||
query += ' WHERE module IN (%s)' % ','.join(['%s']*len(modules))
|
query += ' WHERE module IN (%s)' % ','.join(['%s']*len(modules))
|
||||||
query += ' ORDER BY module, model, name'
|
query += ' ORDER BY module, model, name'
|
||||||
|
|
||||||
query_param = not 'all' in modules and modules or None
|
query_param = not 'all' in modules and modules or None
|
||||||
cr.execute(query, query_param)
|
cr.execute(query, query_param)
|
||||||
|
|
||||||
#if 'all' in modules:
|
#if 'all' in modules:
|
||||||
# cr.execute('select name,model,res_id,module from ir_model_data')
|
# cr.execute('select name,model,res_id,module from ir_model_data')
|
||||||
#else:
|
#else:
|
||||||
# cr.execute('select name,model,res_id,module from ir_model_data where module in ('+','.join(['%s']*len(modules))+')', modules)
|
# cr.execute('select name,model,res_id,module from ir_model_data where module in ('+','.join(['%s']*len(modules))+')', modules)
|
||||||
|
|
||||||
|
|
||||||
_to_translate = []
|
_to_translate = []
|
||||||
def push_translation(module, type, name, id, source):
|
def push_translation(module, type, name, id, source):
|
||||||
tuple = (module, type, name, id, source)
|
tuple = (module, type, name, id, source)
|
||||||
if not tuple in _to_translate:
|
if not tuple in _to_translate:
|
||||||
_to_translate.append(tuple)
|
_to_translate.append(tuple)
|
||||||
|
|
||||||
|
|
||||||
for (xml_name,model,res_id,module) in cr.fetchall():
|
for (xml_name,model,res_id,module) in cr.fetchall():
|
||||||
xml_name = module+'.'+xml_name
|
xml_name = module+'.'+xml_name
|
||||||
obj = pool.get(model).browse(cr, uid, res_id)
|
obj = pool.get(model).browse(cr, uid, res_id)
|
||||||
if model=='ir.ui.view':
|
if model=='ir.ui.view':
|
||||||
d = xml.dom.minidom.parseString(obj.arch)
|
d = xml.dom.minidom.parseString(obj.arch)
|
||||||
for t in trans_parse_view(d.documentElement):
|
for t in trans_parse_view(d.documentElement):
|
||||||
push_translation(module, 'view', obj.model, 0, t)
|
push_translation(module, 'view', obj.model, 0, t)
|
||||||
elif model=='ir.actions.wizard':
|
elif model=='ir.actions.wizard':
|
||||||
service_name = 'wizard.'+obj.wiz_name
|
service_name = 'wizard.'+obj.wiz_name
|
||||||
obj2 = netsvc._service[service_name]
|
obj2 = netsvc._service[service_name]
|
||||||
for state_name, state_def in obj2.states.iteritems():
|
for state_name, state_def in obj2.states.iteritems():
|
||||||
if 'result' in state_def:
|
if 'result' in state_def:
|
||||||
result = state_def['result']
|
result = state_def['result']
|
||||||
if result['type'] != 'form':
|
if result['type'] != 'form':
|
||||||
continue
|
continue
|
||||||
name = obj.wiz_name + ',' + state_name
|
name = obj.wiz_name + ',' + state_name
|
||||||
|
|
||||||
# export fields
|
# export fields
|
||||||
for field_name, field_def in result['fields'].iteritems():
|
for field_name, field_def in result['fields'].iteritems():
|
||||||
if 'string' in field_def:
|
if 'string' in field_def:
|
||||||
source = field_def['string']
|
source = field_def['string']
|
||||||
res_name = name + ',' + field_name
|
res_name = name + ',' + field_name
|
||||||
push_translation(module, 'wizard_field', res_name, 0, source)
|
push_translation(module, 'wizard_field', res_name, 0, source)
|
||||||
|
|
||||||
# export arch
|
# export arch
|
||||||
arch = result['arch']
|
arch = result['arch']
|
||||||
if not isinstance(arch, UpdateableStr):
|
if not isinstance(arch, UpdateableStr):
|
||||||
d = xml.dom.minidom.parseString(arch)
|
d = xml.dom.minidom.parseString(arch)
|
||||||
for t in trans_parse_view(d.documentElement):
|
for t in trans_parse_view(d.documentElement):
|
||||||
push_translation(module, 'wizard_view', name, 0, t)
|
push_translation(module, 'wizard_view', name, 0, t)
|
||||||
|
|
||||||
# export button labels
|
# export button labels
|
||||||
for but_args in result['state']:
|
for but_args in result['state']:
|
||||||
button_name = but_args[0]
|
button_name = but_args[0]
|
||||||
button_label = but_args[1]
|
button_label = but_args[1]
|
||||||
res_name = name + ',' + button_name
|
res_name = name + ',' + button_name
|
||||||
push_translation(module, 'wizard_button', res_name, 0, button_label)
|
push_translation(module, 'wizard_button', res_name, 0, button_label)
|
||||||
|
|
||||||
elif model=='ir.model.fields':
|
elif model=='ir.model.fields':
|
||||||
field_name = obj.name
|
field_name = obj.name
|
||||||
field_def = pool.get(obj.model)._columns[field_name]
|
field_def = pool.get(obj.model)._columns[field_name]
|
||||||
|
|
||||||
name = obj.model + "," + field_name
|
name = obj.model + "," + field_name
|
||||||
push_translation(module, 'field', name, 0, field_def.string.encode('utf8'))
|
push_translation(module, 'field', name, 0, field_def.string.encode('utf8'))
|
||||||
|
|
||||||
if field_def.help:
|
if field_def.help:
|
||||||
push_translation(module, 'help', name, 0, field_def.help.encode('utf8'))
|
push_translation(module, 'help', name, 0, field_def.help.encode('utf8'))
|
||||||
|
|
||||||
if field_def.translate:
|
if field_def.translate:
|
||||||
ids = pool.get(obj.model).search(cr, uid, [])
|
ids = pool.get(obj.model).search(cr, uid, [])
|
||||||
obj_values = pool.get(obj.model).read(cr, uid, ids, [field_name])
|
obj_values = pool.get(obj.model).read(cr, uid, ids, [field_name])
|
||||||
for obj_value in obj_values:
|
for obj_value in obj_values:
|
||||||
res_id = obj_value['id']
|
res_id = obj_value['id']
|
||||||
if obj.name in ('ir.model', 'ir.ui.menu'):
|
if obj.name in ('ir.model', 'ir.ui.menu'):
|
||||||
res_id = 0
|
res_id = 0
|
||||||
model_data_ids = model_data_obj.search(cr, uid, [
|
model_data_ids = model_data_obj.search(cr, uid, [
|
||||||
('model', '=', model),
|
('model', '=', model),
|
||||||
('res_id', '=', res_id),
|
('res_id', '=', res_id),
|
||||||
])
|
])
|
||||||
if not model_data_ids:
|
if not model_data_ids:
|
||||||
push_translation(module, 'model', name, 0, obj_value[field_name])
|
push_translation(module, 'model', name, 0, obj_value[field_name])
|
||||||
|
|
||||||
if hasattr(field_def, 'selection') and isinstance(field_def.selection, (list, tuple)):
|
if hasattr(field_def, 'selection') and isinstance(field_def.selection, (list, tuple)):
|
||||||
for key, val in field_def.selection:
|
for key, val in field_def.selection:
|
||||||
push_translation(module, 'selection', name, 0, val.encode('utf8'))
|
push_translation(module, 'selection', name, 0, val.encode('utf8'))
|
||||||
|
|
||||||
elif model=='ir.actions.report.xml':
|
elif model=='ir.actions.report.xml':
|
||||||
name = obj.report_name
|
name = obj.report_name
|
||||||
fname = ""
|
fname = ""
|
||||||
if obj.report_rml:
|
if obj.report_rml:
|
||||||
fname = obj.report_rml
|
fname = obj.report_rml
|
||||||
parse_func = trans_parse_rml
|
parse_func = trans_parse_rml
|
||||||
report_type = "rml"
|
report_type = "rml"
|
||||||
elif obj.report_xsl:
|
elif obj.report_xsl:
|
||||||
fname = obj.report_xsl
|
fname = obj.report_xsl
|
||||||
parse_func = trans_parse_xsl
|
parse_func = trans_parse_xsl
|
||||||
report_type = "xsl"
|
report_type = "xsl"
|
||||||
try:
|
try:
|
||||||
xmlstr = tools.file_open(fname).read()
|
xmlstr = tools.file_open(fname).read()
|
||||||
d = xml.dom.minidom.parseString(xmlstr)
|
d = xml.dom.minidom.parseString(xmlstr)
|
||||||
for t in parse_func(d.documentElement):
|
for t in parse_func(d.documentElement):
|
||||||
push_translation(module, report_type, name, 0, t)
|
push_translation(module, report_type, name, 0, t)
|
||||||
except IOError:
|
except IOError:
|
||||||
if fname:
|
if fname:
|
||||||
logger.notifyChannel("init", netsvc.LOG_WARNING, "couldn't export translation for report %s %s %s" % (name, report_type, fname))
|
logger.notifyChannel("init", netsvc.LOG_WARNING, "couldn't export translation for report %s %s %s" % (name, report_type, fname))
|
||||||
|
|
||||||
for constraint in pool.get(model)._constraints:
|
for constraint in pool.get(model)._constraints:
|
||||||
msg = constraint[1]
|
msg = constraint[1]
|
||||||
push_translation(module, 'constraint', model, 0, msg.encode('utf8'))
|
push_translation(module, 'constraint', model, 0, msg.encode('utf8'))
|
||||||
|
|
||||||
for field_name,field_def in pool.get(model)._columns.items():
|
for field_name,field_def in pool.get(model)._columns.items():
|
||||||
if field_def.translate:
|
if field_def.translate:
|
||||||
name = model + "," + field_name
|
name = model + "," + field_name
|
||||||
trad = getattr(obj, field_name) or ''
|
trad = getattr(obj, field_name) or ''
|
||||||
push_translation(module, 'model', name, xml_name, trad)
|
push_translation(module, 'model', name, xml_name, trad)
|
||||||
|
|
||||||
# parse source code for _() calls
|
# parse source code for _() calls
|
||||||
def get_module_from_path(path):
|
def get_module_from_path(path):
|
||||||
relative_addons_path = tools.config['addons_path'][len(tools.config['root_path'])+1:]
|
relative_addons_path = tools.config['addons_path'][len(tools.config['root_path'])+1:]
|
||||||
if path.startswith(relative_addons_path):
|
if path.startswith(relative_addons_path):
|
||||||
path = path[len(relative_addons_path)+1:]
|
path = path[len(relative_addons_path)+1:]
|
||||||
return path.split(os.path.sep)[0]
|
return path.split(os.path.sep)[0]
|
||||||
return 'base' # files that are not in a module are considered as being in 'base' module
|
return 'base' # files that are not in a module are considered as being in 'base' module
|
||||||
|
|
||||||
modobj = pool.get('ir.module.module')
|
modobj = pool.get('ir.module.module')
|
||||||
for root, dirs, files in os.walk(tools.config['root_path']):
|
for root, dirs, files in os.walk(tools.config['root_path']):
|
||||||
for fname in fnmatch.filter(files, '*.py'):
|
for fname in fnmatch.filter(files, '*.py'):
|
||||||
fabsolutepath = join(root, fname)
|
fabsolutepath = join(root, fname)
|
||||||
frelativepath = fabsolutepath[len(tools.config['root_path'])+1:]
|
frelativepath = fabsolutepath[len(tools.config['root_path'])+1:]
|
||||||
module = get_module_from_path(frelativepath)
|
module = get_module_from_path(frelativepath)
|
||||||
is_mod_installed = modobj.search(cr, uid, [('state', '=', 'installed'), ('name', '=', module)]) <> []
|
is_mod_installed = modobj.search(cr, uid, [('state', '=', 'installed'), ('name', '=', module)]) <> []
|
||||||
if (('all' in modules) or (module in modules)) and is_mod_installed:
|
if (('all' in modules) or (module in modules)) and is_mod_installed:
|
||||||
code_string = tools.file_open(fabsolutepath, subdir='').read()
|
code_string = tools.file_open(fabsolutepath, subdir='').read()
|
||||||
iter = re.finditer(
|
iter = re.finditer(
|
||||||
'[^a-zA-Z0-9_]_\([\s]*["\'](.+?)["\'][\s]*\)',
|
'[^a-zA-Z0-9_]_\([\s]*["\'](.+?)["\'][\s]*\)',
|
||||||
code_string)
|
code_string)
|
||||||
for i in iter:
|
for i in iter:
|
||||||
push_translation(module, 'code', frelativepath, 0, i.group(1).encode('utf8'))
|
push_translation(module, 'code', frelativepath, 0, i.group(1).encode('utf8'))
|
||||||
|
|
||||||
|
|
||||||
out = [["module","type","name","res_id","src","value"]] # header
|
out = [["module","type","name","res_id","src","value"]] # header
|
||||||
# translate strings marked as to be translated
|
# translate strings marked as to be translated
|
||||||
for module, type, name, id, source in _to_translate:
|
for module, type, name, id, source in _to_translate:
|
||||||
trans = trans_obj._get_source(cr, uid, name, type, lang, source)
|
trans = trans_obj._get_source(cr, uid, name, type, lang, source)
|
||||||
out.append([module, type, name, id, source, trans or ''])
|
out.append([module, type, name, id, source, trans or ''])
|
||||||
|
|
||||||
cr.close()
|
cr.close()
|
||||||
return out
|
return out
|
||||||
|
|
||||||
def trans_load(db_name, filename, lang, strict=False):
|
def trans_load(db_name, filename, lang, strict=False):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
try:
|
try:
|
||||||
fileobj = open(filename,'r')
|
fileobj = open(filename,'r')
|
||||||
fileformat = os.path.splitext(filename)[-1][1:].lower()
|
fileformat = os.path.splitext(filename)[-1][1:].lower()
|
||||||
r = trans_load_data(db_name, fileobj, fileformat, lang, strict=False)
|
r = trans_load_data(db_name, fileobj, fileformat, lang, strict=False)
|
||||||
fileobj.close()
|
fileobj.close()
|
||||||
return r
|
return r
|
||||||
except IOError:
|
except IOError:
|
||||||
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
|
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None):
|
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None):
|
||||||
logger = netsvc.Logger()
|
logger = netsvc.Logger()
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO,
|
logger.notifyChannel("init", netsvc.LOG_INFO,
|
||||||
'loading translation file for language %s' % (lang))
|
'loading translation file for language %s' % (lang))
|
||||||
pool = pooler.get_pool(db_name)
|
pool = pooler.get_pool(db_name)
|
||||||
lang_obj = pool.get('res.lang')
|
lang_obj = pool.get('res.lang')
|
||||||
trans_obj = pool.get('ir.translation')
|
trans_obj = pool.get('ir.translation')
|
||||||
model_data_obj = pool.get('ir.model.data')
|
model_data_obj = pool.get('ir.model.data')
|
||||||
try:
|
try:
|
||||||
uid = 1
|
uid = 1
|
||||||
cr = pooler.get_db(db_name).cursor()
|
cr = pooler.get_db(db_name).cursor()
|
||||||
|
|
||||||
ids = lang_obj.search(cr, uid, [('code','=',lang)])
|
ids = lang_obj.search(cr, uid, [('code','=',lang)])
|
||||||
if not ids:
|
if not ids:
|
||||||
if not lang_name:
|
if not lang_name:
|
||||||
lang_name=lang
|
lang_name=lang
|
||||||
languages=tools.get_languages()
|
languages=tools.get_languages()
|
||||||
if lang in languages:
|
if lang in languages:
|
||||||
lang_name=languages[lang]
|
lang_name=languages[lang]
|
||||||
ids = lang_obj.create(cr, uid, {
|
ids = lang_obj.create(cr, uid, {
|
||||||
'code': lang,
|
'code': lang,
|
||||||
'name': lang_name,
|
'name': lang_name,
|
||||||
'translatable': 1,
|
'translatable': 1,
|
||||||
})
|
})
|
||||||
else:
|
else:
|
||||||
lang_obj.write(cr, uid, ids, {'translatable':1})
|
lang_obj.write(cr, uid, ids, {'translatable':1})
|
||||||
lang_ids = lang_obj.search(cr, uid, [])
|
lang_ids = lang_obj.search(cr, uid, [])
|
||||||
langs = lang_obj.read(cr, uid, lang_ids)
|
langs = lang_obj.read(cr, uid, lang_ids)
|
||||||
ls = map(lambda x: (x['code'],x['name']), langs)
|
ls = map(lambda x: (x['code'],x['name']), langs)
|
||||||
|
|
||||||
fileobj.seek(0)
|
fileobj.seek(0)
|
||||||
|
|
||||||
if fileformat == 'csv':
|
if fileformat == 'csv':
|
||||||
reader = csv.reader(fileobj, quotechar='"', delimiter=',')
|
reader = csv.reader(fileobj, quotechar='"', delimiter=',')
|
||||||
# read the first line of the file (it contains columns titles)
|
# read the first line of the file (it contains columns titles)
|
||||||
for row in reader:
|
for row in reader:
|
||||||
f = row
|
f = row
|
||||||
break
|
break
|
||||||
elif fileformat == 'po':
|
elif fileformat == 'po':
|
||||||
reader = TinyPoFile(fileobj)
|
reader = TinyPoFile(fileobj)
|
||||||
f = ['type', 'name', 'res_id', 'src', 'value']
|
f = ['type', 'name', 'res_id', 'src', 'value']
|
||||||
else:
|
else:
|
||||||
raise Exception(_('Bad file format'))
|
raise Exception(_('Bad file format'))
|
||||||
|
|
||||||
# read the rest of the file
|
# read the rest of the file
|
||||||
line = 1
|
line = 1
|
||||||
for row in reader:
|
for row in reader:
|
||||||
line += 1
|
line += 1
|
||||||
#try:
|
#try:
|
||||||
# skip empty rows and rows where the translation field (=last fiefd) is empty
|
# skip empty rows and rows where the translation field (=last fiefd) is empty
|
||||||
if (not row) or (not row[-1]):
|
if (not row) or (not row[-1]):
|
||||||
#print "translate: skip %s" % repr(row)
|
#print "translate: skip %s" % repr(row)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# dictionary which holds values for this line of the csv file
|
# dictionary which holds values for this line of the csv file
|
||||||
# {'lang': ..., 'type': ..., 'name': ..., 'res_id': ...,
|
# {'lang': ..., 'type': ..., 'name': ..., 'res_id': ...,
|
||||||
# 'src': ..., 'value': ...}
|
# 'src': ..., 'value': ...}
|
||||||
dic = {'lang': lang}
|
dic = {'lang': lang}
|
||||||
for i in range(len(f)):
|
for i in range(len(f)):
|
||||||
if f[i] in ('module',):
|
if f[i] in ('module',):
|
||||||
continue
|
continue
|
||||||
dic[f[i]] = row[i]
|
dic[f[i]] = row[i]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
dic['res_id'] = int(dic['res_id'])
|
dic['res_id'] = int(dic['res_id'])
|
||||||
except:
|
except:
|
||||||
model_data_ids = model_data_obj.search(cr, uid, [
|
model_data_ids = model_data_obj.search(cr, uid, [
|
||||||
('model', '=', dic['name'].split(',')[0]),
|
('model', '=', dic['name'].split(',')[0]),
|
||||||
('module', '=', dic['res_id'].split('.', 1)[0]),
|
('module', '=', dic['res_id'].split('.', 1)[0]),
|
||||||
('name', '=', dic['res_id'].split('.', 1)[1]),
|
('name', '=', dic['res_id'].split('.', 1)[1]),
|
||||||
])
|
])
|
||||||
if model_data_ids:
|
if model_data_ids:
|
||||||
dic['res_id'] = model_data_obj.browse(cr, uid,
|
dic['res_id'] = model_data_obj.browse(cr, uid,
|
||||||
model_data_ids[0]).res_id
|
model_data_ids[0]).res_id
|
||||||
else:
|
else:
|
||||||
dic['res_id'] = False
|
dic['res_id'] = False
|
||||||
|
|
||||||
if dic['type'] == 'model' and not strict:
|
if dic['type'] == 'model' and not strict:
|
||||||
(model, field) = dic['name'].split(',')
|
(model, field) = dic['name'].split(',')
|
||||||
|
|
||||||
# get the ids of the resources of this model which share
|
# get the ids of the resources of this model which share
|
||||||
# the same source
|
# the same source
|
||||||
obj = pool.get(model)
|
obj = pool.get(model)
|
||||||
if obj:
|
if obj:
|
||||||
ids = obj.search(cr, uid, [(field, '=', dic['src'])])
|
ids = obj.search(cr, uid, [(field, '=', dic['src'])])
|
||||||
|
|
||||||
# if the resource id (res_id) is in that list, use it,
|
# if the resource id (res_id) is in that list, use it,
|
||||||
# otherwise use the whole list
|
# otherwise use the whole list
|
||||||
ids = (dic['res_id'] in ids) and [dic['res_id']] or ids
|
ids = (dic['res_id'] in ids) and [dic['res_id']] or ids
|
||||||
for id in ids:
|
for id in ids:
|
||||||
dic['res_id'] = id
|
dic['res_id'] = id
|
||||||
ids = trans_obj.search(cr, uid, [
|
ids = trans_obj.search(cr, uid, [
|
||||||
('lang', '=', lang),
|
('lang', '=', lang),
|
||||||
('type', '=', dic['type']),
|
('type', '=', dic['type']),
|
||||||
('name', '=', dic['name']),
|
('name', '=', dic['name']),
|
||||||
('src', '=', dic['src']),
|
('src', '=', dic['src']),
|
||||||
('res_id', '=', dic['res_id'])
|
('res_id', '=', dic['res_id'])
|
||||||
])
|
])
|
||||||
if ids:
|
if ids:
|
||||||
trans_obj.write(cr, uid, ids, {'value': dic['value']})
|
trans_obj.write(cr, uid, ids, {'value': dic['value']})
|
||||||
else:
|
else:
|
||||||
trans_obj.create(cr, uid, dic)
|
trans_obj.create(cr, uid, dic)
|
||||||
else:
|
else:
|
||||||
ids = trans_obj.search(cr, uid, [
|
ids = trans_obj.search(cr, uid, [
|
||||||
('lang', '=', lang),
|
('lang', '=', lang),
|
||||||
('type', '=', dic['type']),
|
('type', '=', dic['type']),
|
||||||
('name', '=', dic['name']),
|
('name', '=', dic['name']),
|
||||||
('src', '=', dic['src'])
|
('src', '=', dic['src'])
|
||||||
])
|
])
|
||||||
if ids:
|
if ids:
|
||||||
trans_obj.write(cr, uid, ids, {'value': dic['value']})
|
trans_obj.write(cr, uid, ids, {'value': dic['value']})
|
||||||
else:
|
else:
|
||||||
trans_obj.create(cr, uid, dic)
|
trans_obj.create(cr, uid, dic)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
#except Exception, e:
|
#except Exception, e:
|
||||||
# logger.notifyChannel('init', netsvc.LOG_ERROR,
|
# logger.notifyChannel('init', netsvc.LOG_ERROR,
|
||||||
# 'Import error: %s on line %d: %s!' % (str(e), line, row))
|
# 'Import error: %s on line %d: %s!' % (str(e), line, row))
|
||||||
# cr.rollback()
|
# cr.rollback()
|
||||||
# cr.close()
|
# cr.close()
|
||||||
# cr = pooler.get_db(db_name).cursor()
|
# cr = pooler.get_db(db_name).cursor()
|
||||||
cr.close()
|
cr.close()
|
||||||
logger.notifyChannel("init", netsvc.LOG_INFO,
|
logger.notifyChannel("init", netsvc.LOG_INFO,
|
||||||
"translation file loaded succesfully")
|
"translation file loaded succesfully")
|
||||||
except IOError:
|
except IOError:
|
||||||
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
|
logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
|
||||||
|
|
||||||
|
|
|
@ -36,60 +36,60 @@ import tools
|
||||||
|
|
||||||
# remove an existing version of modules if it exist
|
# remove an existing version of modules if it exist
|
||||||
def remove(name):
|
def remove(name):
|
||||||
adp = tools.config['addons_path']
|
adp = tools.config['addons_path']
|
||||||
addons = os.listdir(adp)
|
addons = os.listdir(adp)
|
||||||
if name in addons:
|
if name in addons:
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(os.path.join(adp, name))
|
shutil.rmtree(os.path.join(adp, name))
|
||||||
except:
|
except:
|
||||||
print "Unable to remove module %s !" % name
|
print "Unable to remove module %s !" % name
|
||||||
|
|
||||||
def install(name, url):
|
def install(name, url):
|
||||||
tar = tarfile.open(mode="r|gz", fileobj=urllib2.urlopen(url))
|
tar = tarfile.open(mode="r|gz", fileobj=urllib2.urlopen(url))
|
||||||
for tarinfo in tar:
|
for tarinfo in tar:
|
||||||
tar.extract(tarinfo, tools.config['addons_path'])
|
tar.extract(tarinfo, tools.config['addons_path'])
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
import pooler
|
import pooler
|
||||||
cr = pooler.db.cursor()
|
cr = pooler.db.cursor()
|
||||||
|
|
||||||
toinit = []
|
toinit = []
|
||||||
toupdate = []
|
toupdate = []
|
||||||
|
|
||||||
# print 'Check for correct rights (create and unlink on addons)...'
|
# print 'Check for correct rights (create and unlink on addons)...'
|
||||||
# todo: touch addons/test.txt
|
# todo: touch addons/test.txt
|
||||||
# todo: rm addons/test.txt
|
# todo: rm addons/test.txt
|
||||||
|
|
||||||
print 'Check for modules to remove...'
|
print 'Check for modules to remove...'
|
||||||
cr.execute('select id,name,url from ir_module_module where state=%s', ('to remove',))
|
cr.execute('select id,name,url from ir_module_module where state=%s', ('to remove',))
|
||||||
for module_id,name,url in cr.fetchall():
|
for module_id,name,url in cr.fetchall():
|
||||||
print '\tremoving module %s' % name
|
print '\tremoving module %s' % name
|
||||||
remove(name)
|
remove(name)
|
||||||
cr.execute('update ir_module_module set state=%s where id=%d', ('uninstalled', module_id))
|
cr.execute('update ir_module_module set state=%s where id=%d', ('uninstalled', module_id))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
print 'Check for modules to upgrade...'
|
print 'Check for modules to upgrade...'
|
||||||
cr.execute('select id,name,url from ir_module_module where state=%s', ('to upgrade',))
|
cr.execute('select id,name,url from ir_module_module where state=%s', ('to upgrade',))
|
||||||
for module_id,name,url in cr.fetchall():
|
for module_id,name,url in cr.fetchall():
|
||||||
print '\tupgrading module %s' % name
|
print '\tupgrading module %s' % name
|
||||||
remove(name)
|
remove(name)
|
||||||
install(name, url)
|
install(name, url)
|
||||||
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
|
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
toupdate.append(name)
|
toupdate.append(name)
|
||||||
|
|
||||||
print 'Check for modules to install...'
|
print 'Check for modules to install...'
|
||||||
cr.execute('select id,name,url from ir_module_module where state=%s', ('to install',))
|
cr.execute('select id,name,url from ir_module_module where state=%s', ('to install',))
|
||||||
for module_id,name,url in cr.fetchall():
|
for module_id,name,url in cr.fetchall():
|
||||||
print '\tinstalling module %s' % name
|
print '\tinstalling module %s' % name
|
||||||
install(name, url)
|
install(name, url)
|
||||||
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
|
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
toinit.append(name)
|
toinit.append(name)
|
||||||
|
|
||||||
print 'Initializing all datas...'
|
print 'Initializing all datas...'
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
return (toinit, toupdate)
|
return (toinit, toupdate)
|
||||||
|
|
||||||
|
|
|
@ -41,138 +41,138 @@ from netsvc import Logger, LOG_ERROR
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
class except_wizard(Exception):
|
class except_wizard(Exception):
|
||||||
def __init__(self, name, value):
|
def __init__(self, name, value):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.value = value
|
self.value = value
|
||||||
self.args = (name, value)
|
self.args = (name, value)
|
||||||
|
|
||||||
class interface(netsvc.Service):
|
class interface(netsvc.Service):
|
||||||
states = {}
|
states = {}
|
||||||
|
|
||||||
def __init__(self, name):
|
def __init__(self, name):
|
||||||
assert not netsvc.service_exist('wizard.'+name), 'The wizard "%s" already exists!'%name
|
assert not netsvc.service_exist('wizard.'+name), 'The wizard "%s" already exists!'%name
|
||||||
super(interface, self).__init__('wizard.'+name)
|
super(interface, self).__init__('wizard.'+name)
|
||||||
self.exportMethod(self.execute)
|
self.exportMethod(self.execute)
|
||||||
self.wiz_name = name
|
self.wiz_name = name
|
||||||
|
|
||||||
def translate_view(self, cr, node, state, lang):
|
def translate_view(self, cr, node, state, lang):
|
||||||
if node.nodeType == node.ELEMENT_NODE:
|
if node.nodeType == node.ELEMENT_NODE:
|
||||||
if node.hasAttribute('string') and node.getAttribute('string'):
|
if node.hasAttribute('string') and node.getAttribute('string'):
|
||||||
trans = translate(cr, self.wiz_name+','+state, 'wizard_view', lang, node.getAttribute('string').encode('utf8'))
|
trans = translate(cr, self.wiz_name+','+state, 'wizard_view', lang, node.getAttribute('string').encode('utf8'))
|
||||||
if trans:
|
if trans:
|
||||||
node.setAttribute('string', trans.decode('utf8'))
|
node.setAttribute('string', trans.decode('utf8'))
|
||||||
for n in node.childNodes:
|
for n in node.childNodes:
|
||||||
self.translate_view(cr, n, state, lang)
|
self.translate_view(cr, n, state, lang)
|
||||||
|
|
||||||
def execute_cr(self, cr, uid, data, state='init', context=None):
|
def execute_cr(self, cr, uid, data, state='init', context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
res = {}
|
res = {}
|
||||||
try:
|
try:
|
||||||
state_def = self.states[state]
|
state_def = self.states[state]
|
||||||
result_def = state_def.get('result', {})
|
result_def = state_def.get('result', {})
|
||||||
|
|
||||||
actions_res = {}
|
actions_res = {}
|
||||||
# iterate through the list of actions defined for this state
|
# iterate through the list of actions defined for this state
|
||||||
for action in state_def.get('actions', []):
|
for action in state_def.get('actions', []):
|
||||||
# execute them
|
# execute them
|
||||||
action_res = action(self, cr, uid, data, context)
|
action_res = action(self, cr, uid, data, context)
|
||||||
assert isinstance(action_res, dict), 'The return value of wizard actions should be a dictionary'
|
assert isinstance(action_res, dict), 'The return value of wizard actions should be a dictionary'
|
||||||
actions_res.update(action_res)
|
actions_res.update(action_res)
|
||||||
|
|
||||||
res = copy.copy(result_def)
|
res = copy.copy(result_def)
|
||||||
res['datas'] = actions_res
|
res['datas'] = actions_res
|
||||||
|
|
||||||
lang = context.get('lang', False)
|
lang = context.get('lang', False)
|
||||||
if result_def['type'] == 'action':
|
if result_def['type'] == 'action':
|
||||||
res['action'] = result_def['action'](self, cr, uid, data, context)
|
res['action'] = result_def['action'](self, cr, uid, data, context)
|
||||||
elif result_def['type'] == 'form':
|
elif result_def['type'] == 'form':
|
||||||
fields = copy.copy(result_def['fields'])
|
fields = copy.copy(result_def['fields'])
|
||||||
arch = copy.copy(result_def['arch'])
|
arch = copy.copy(result_def['arch'])
|
||||||
button_list = copy.copy(result_def['state'])
|
button_list = copy.copy(result_def['state'])
|
||||||
|
|
||||||
if isinstance(fields, UpdateableDict):
|
if isinstance(fields, UpdateableDict):
|
||||||
fields = fields.dict
|
fields = fields.dict
|
||||||
if isinstance(arch, UpdateableStr):
|
if isinstance(arch, UpdateableStr):
|
||||||
arch = arch.string
|
arch = arch.string
|
||||||
|
|
||||||
# fetch user-set defaut values for the field... shouldn't we pass it the uid?
|
# fetch user-set defaut values for the field... shouldn't we pass it the uid?
|
||||||
defaults = ir.ir_get(cr, uid, 'default', False, [('wizard.'+self.wiz_name, False)])
|
defaults = ir.ir_get(cr, uid, 'default', False, [('wizard.'+self.wiz_name, False)])
|
||||||
default_values = dict([(x[1], x[2]) for x in defaults])
|
default_values = dict([(x[1], x[2]) for x in defaults])
|
||||||
for val in fields.keys():
|
for val in fields.keys():
|
||||||
if 'default' in fields[val]:
|
if 'default' in fields[val]:
|
||||||
# execute default method for this field
|
# execute default method for this field
|
||||||
if callable(fields[val]['default']):
|
if callable(fields[val]['default']):
|
||||||
fields[val]['value'] = fields[val]['default'](uid, data, state)
|
fields[val]['value'] = fields[val]['default'](uid, data, state)
|
||||||
else:
|
else:
|
||||||
fields[val]['value'] = fields[val]['default']
|
fields[val]['value'] = fields[val]['default']
|
||||||
del fields[val]['default']
|
del fields[val]['default']
|
||||||
else:
|
else:
|
||||||
# if user has set a default value for the field, use it
|
# if user has set a default value for the field, use it
|
||||||
if val in default_values:
|
if val in default_values:
|
||||||
fields[val]['value'] = default_values[val]
|
fields[val]['value'] = default_values[val]
|
||||||
if 'selection' in fields[val]:
|
if 'selection' in fields[val]:
|
||||||
if not isinstance(fields[val]['selection'], (tuple, list)):
|
if not isinstance(fields[val]['selection'], (tuple, list)):
|
||||||
fields[val] = copy.copy(fields[val])
|
fields[val] = copy.copy(fields[val])
|
||||||
fields[val]['selection'] = fields[val]['selection'](self, cr, uid, context)
|
fields[val]['selection'] = fields[val]['selection'](self, cr, uid, context)
|
||||||
|
|
||||||
if lang:
|
if lang:
|
||||||
# translate fields
|
# translate fields
|
||||||
for field in fields:
|
for field in fields:
|
||||||
trans = translate(cr, self.wiz_name+','+state+','+field, 'wizard_field', lang)
|
trans = translate(cr, self.wiz_name+','+state+','+field, 'wizard_field', lang)
|
||||||
if trans:
|
if trans:
|
||||||
fields[field]['string'] = trans
|
fields[field]['string'] = trans
|
||||||
|
|
||||||
# translate arch
|
# translate arch
|
||||||
if not isinstance(arch, UpdateableStr):
|
if not isinstance(arch, UpdateableStr):
|
||||||
doc = dom.minidom.parseString(arch)
|
doc = dom.minidom.parseString(arch)
|
||||||
self.translate_view(cr, doc, state, lang)
|
self.translate_view(cr, doc, state, lang)
|
||||||
arch = doc.toxml()
|
arch = doc.toxml()
|
||||||
|
|
||||||
# translate buttons
|
# translate buttons
|
||||||
button_list = list(button_list)
|
button_list = list(button_list)
|
||||||
for i, aa in enumerate(button_list):
|
for i, aa in enumerate(button_list):
|
||||||
button_name = aa[0]
|
button_name = aa[0]
|
||||||
trans = translate(cr, self.wiz_name+','+state+','+button_name, 'wizard_button', lang)
|
trans = translate(cr, self.wiz_name+','+state+','+button_name, 'wizard_button', lang)
|
||||||
if trans:
|
if trans:
|
||||||
aa = list(aa)
|
aa = list(aa)
|
||||||
aa[1] = trans
|
aa[1] = trans
|
||||||
button_list[i] = aa
|
button_list[i] = aa
|
||||||
|
|
||||||
res['fields'] = fields
|
res['fields'] = fields
|
||||||
res['arch'] = arch
|
res['arch'] = arch
|
||||||
res['state'] = button_list
|
res['state'] = button_list
|
||||||
|
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
if isinstance(e, except_wizard) \
|
if isinstance(e, except_wizard) \
|
||||||
or isinstance(e, except_osv) \
|
or isinstance(e, except_osv) \
|
||||||
or isinstance(e, except_orm):
|
or isinstance(e, except_orm):
|
||||||
self.abortResponse(2, e.name, 'warning', e.value)
|
self.abortResponse(2, e.name, 'warning', e.value)
|
||||||
else:
|
else:
|
||||||
import traceback
|
import traceback
|
||||||
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
|
||||||
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||||
logger = Logger()
|
logger = Logger()
|
||||||
logger.notifyChannel("web-services", LOG_ERROR,
|
logger.notifyChannel("web-services", LOG_ERROR,
|
||||||
'Exception in call: ' + tb_s)
|
'Exception in call: ' + tb_s)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
if result_def['type'] == 'choice':
|
if result_def['type'] == 'choice':
|
||||||
next_state = result_def['next_state'](self, cr, uid, data, context)
|
next_state = result_def['next_state'](self, cr, uid, data, context)
|
||||||
return self.execute_cr(cr, uid, data, next_state, context)
|
return self.execute_cr(cr, uid, data, next_state, context)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def execute(self, db, uid, data, state='init', context=None):
|
def execute(self, db, uid, data, state='init', context=None):
|
||||||
if not context:
|
if not context:
|
||||||
context={}
|
context={}
|
||||||
cr = pooler.get_db(db).cursor()
|
cr = pooler.get_db(db).cursor()
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
res = self.execute_cr(cr, uid, data, state, context)
|
res = self.execute_cr(cr, uid, data, state, context)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
except Exception:
|
except Exception:
|
||||||
cr.rollback()
|
cr.rollback()
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
cr.close()
|
cr.close()
|
||||||
return res
|
return res
|
||||||
|
|
|
@ -26,6 +26,6 @@
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
class except_wkf(Exception):
|
class except_wkf(Exception):
|
||||||
def __init__(self, name, value):
|
def __init__(self, name, value):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.value = value
|
self.value = value
|
||||||
|
|
|
@ -33,54 +33,54 @@ import netsvc
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
def create(cr, ident, wkf_id):
|
def create(cr, ident, wkf_id):
|
||||||
(uid,res_type,res_id) = ident
|
(uid,res_type,res_id) = ident
|
||||||
cr.execute("select nextval('wkf_instance_id_seq')")
|
cr.execute("select nextval('wkf_instance_id_seq')")
|
||||||
id_new = cr.fetchone()[0]
|
id_new = cr.fetchone()[0]
|
||||||
cr.execute('insert into wkf_instance (id,res_type,res_id,uid,wkf_id) values (%d,%s,%s,%s,%s)', (id_new,res_type,res_id,uid,wkf_id))
|
cr.execute('insert into wkf_instance (id,res_type,res_id,uid,wkf_id) values (%d,%s,%s,%s,%s)', (id_new,res_type,res_id,uid,wkf_id))
|
||||||
cr.execute('select * from wkf_activity where flow_start=True and wkf_id=%d', (wkf_id,))
|
cr.execute('select * from wkf_activity where flow_start=True and wkf_id=%d', (wkf_id,))
|
||||||
res = cr.dictfetchall()
|
res = cr.dictfetchall()
|
||||||
stack = []
|
stack = []
|
||||||
workitem.create(cr, res, id_new, ident, stack=stack)
|
workitem.create(cr, res, id_new, ident, stack=stack)
|
||||||
update(cr, id_new, ident)
|
update(cr, id_new, ident)
|
||||||
return id_new
|
return id_new
|
||||||
|
|
||||||
def delete(cr, ident):
|
def delete(cr, ident):
|
||||||
(uid,res_type,res_id) = ident
|
(uid,res_type,res_id) = ident
|
||||||
cr.execute('delete from wkf_instance where res_id=%d and res_type=%s', (res_id,res_type))
|
cr.execute('delete from wkf_instance where res_id=%d and res_type=%s', (res_id,res_type))
|
||||||
|
|
||||||
def validate(cr, inst_id, ident, signal, force_running=False):
|
def validate(cr, inst_id, ident, signal, force_running=False):
|
||||||
cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
|
cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
|
||||||
for witem in cr.dictfetchall():
|
for witem in cr.dictfetchall():
|
||||||
stack = []
|
stack = []
|
||||||
workitem.process(cr, witem, ident, signal, force_running, stack=stack)
|
workitem.process(cr, witem, ident, signal, force_running, stack=stack)
|
||||||
# An action is returned
|
# An action is returned
|
||||||
_update_end(cr, inst_id, ident)
|
_update_end(cr, inst_id, ident)
|
||||||
return stack and stack[0] or False
|
return stack and stack[0] or False
|
||||||
|
|
||||||
def update(cr, inst_id, ident):
|
def update(cr, inst_id, ident):
|
||||||
cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
|
cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
|
||||||
for witem in cr.dictfetchall():
|
for witem in cr.dictfetchall():
|
||||||
stack = []
|
stack = []
|
||||||
workitem.process(cr, witem, ident, stack=stack)
|
workitem.process(cr, witem, ident, stack=stack)
|
||||||
return _update_end(cr, inst_id, ident)
|
return _update_end(cr, inst_id, ident)
|
||||||
|
|
||||||
def _update_end(cr, inst_id, ident):
|
def _update_end(cr, inst_id, ident):
|
||||||
cr.execute('select wkf_id from wkf_instance where id=%d', (inst_id,))
|
cr.execute('select wkf_id from wkf_instance where id=%d', (inst_id,))
|
||||||
wkf_id = cr.fetchone()[0]
|
wkf_id = cr.fetchone()[0]
|
||||||
cr.execute('select state,flow_stop from wkf_workitem w left join wkf_activity a on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
|
cr.execute('select state,flow_stop from wkf_workitem w left join wkf_activity a on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
|
||||||
ok=True
|
ok=True
|
||||||
for r in cr.fetchall():
|
for r in cr.fetchall():
|
||||||
if (r[0]<>'complete') or not r[1]:
|
if (r[0]<>'complete') or not r[1]:
|
||||||
ok=False
|
ok=False
|
||||||
break
|
break
|
||||||
if ok:
|
if ok:
|
||||||
cr.execute('select distinct a.name from wkf_activity a left join wkf_workitem w on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
|
cr.execute('select distinct a.name from wkf_activity a left join wkf_workitem w on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
|
||||||
act_names = cr.fetchall()
|
act_names = cr.fetchall()
|
||||||
cr.execute("update wkf_instance set state='complete' where id=%d", (inst_id,))
|
cr.execute("update wkf_instance set state='complete' where id=%d", (inst_id,))
|
||||||
cr.execute("update wkf_workitem set state='complete' where subflow_id=%d", (inst_id,))
|
cr.execute("update wkf_workitem set state='complete' where subflow_id=%d", (inst_id,))
|
||||||
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (inst_id,))
|
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (inst_id,))
|
||||||
for i in cr.fetchall():
|
for i in cr.fetchall():
|
||||||
for act_name in act_names:
|
for act_name in act_names:
|
||||||
validate(cr, i[0], (ident[0],i[1],i[2]), 'subflow.'+act_name[0])
|
validate(cr, i[0], (ident[0],i[1],i[2]), 'subflow.'+act_name[0])
|
||||||
return ok
|
return ok
|
||||||
|
|
||||||
|
|
|
@ -34,77 +34,77 @@ import pooler
|
||||||
|
|
||||||
class EnvCall(object):
|
class EnvCall(object):
|
||||||
|
|
||||||
def __init__(self,wf_service,d_arg):
|
def __init__(self,wf_service,d_arg):
|
||||||
self.wf_service=wf_service
|
self.wf_service=wf_service
|
||||||
self.d_arg=d_arg
|
self.d_arg=d_arg
|
||||||
|
|
||||||
def __call__(self,*args):
|
def __call__(self,*args):
|
||||||
arg=self.d_arg+args
|
arg=self.d_arg+args
|
||||||
return self.wf_service.execute_cr(*arg)
|
return self.wf_service.execute_cr(*arg)
|
||||||
|
|
||||||
|
|
||||||
class Env(dict):
|
class Env(dict):
|
||||||
|
|
||||||
def __init__(self, wf_service, cr, uid, model, ids):
|
def __init__(self, wf_service, cr, uid, model, ids):
|
||||||
self.wf_service = wf_service
|
self.wf_service = wf_service
|
||||||
self.cr = cr
|
self.cr = cr
|
||||||
self.uid = uid
|
self.uid = uid
|
||||||
self.model = model
|
self.model = model
|
||||||
self.ids = ids
|
self.ids = ids
|
||||||
self.obj = pooler.get_pool(cr.dbname).get(model)
|
self.obj = pooler.get_pool(cr.dbname).get(model)
|
||||||
self.columns = self.obj._columns.keys() + self.obj._inherit_fields.keys()
|
self.columns = self.obj._columns.keys() + self.obj._inherit_fields.keys()
|
||||||
|
|
||||||
def __getitem__(self, key):
|
def __getitem__(self, key):
|
||||||
if (key in self.columns) or (key in dir(self.obj)):
|
if (key in self.columns) or (key in dir(self.obj)):
|
||||||
res = self.obj.browse(self.cr, self.uid, self.ids[0])
|
res = self.obj.browse(self.cr, self.uid, self.ids[0])
|
||||||
return res[key]
|
return res[key]
|
||||||
#res=self.wf_service.execute_cr(self.cr, self.uid, self.model, 'read',\
|
#res=self.wf_service.execute_cr(self.cr, self.uid, self.model, 'read',\
|
||||||
# self.ids, [key])[0][key]
|
# self.ids, [key])[0][key]
|
||||||
#super(Env, self).__setitem__(key, res)
|
#super(Env, self).__setitem__(key, res)
|
||||||
#return res
|
#return res
|
||||||
#elif key in dir(self.obj):
|
#elif key in dir(self.obj):
|
||||||
# return EnvCall(self.wf_service, (self.cr, self.uid, self.model, key,\
|
# return EnvCall(self.wf_service, (self.cr, self.uid, self.model, key,\
|
||||||
# self.ids))
|
# self.ids))
|
||||||
else:
|
else:
|
||||||
return super(Env, self).__getitem__(key)
|
return super(Env, self).__getitem__(key)
|
||||||
|
|
||||||
def _eval_expr(cr, ident, workitem, action):
|
def _eval_expr(cr, ident, workitem, action):
|
||||||
ret=False
|
ret=False
|
||||||
assert action, 'You used a NULL action in a workflow, use dummy node instead.'
|
assert action, 'You used a NULL action in a workflow, use dummy node instead.'
|
||||||
for line in action.split('\n'):
|
for line in action.split('\n'):
|
||||||
uid=ident[0]
|
uid=ident[0]
|
||||||
model=ident[1]
|
model=ident[1]
|
||||||
ids=[ident[2]]
|
ids=[ident[2]]
|
||||||
if line =='True':
|
if line =='True':
|
||||||
ret=True
|
ret=True
|
||||||
elif line =='False':
|
elif line =='False':
|
||||||
ret=False
|
ret=False
|
||||||
else:
|
else:
|
||||||
wf_service = netsvc.LocalService("object_proxy")
|
wf_service = netsvc.LocalService("object_proxy")
|
||||||
env = Env(wf_service, cr, uid, model, ids)
|
env = Env(wf_service, cr, uid, model, ids)
|
||||||
ret = eval(line, env)
|
ret = eval(line, env)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def execute_action(cr, ident, workitem, activity):
|
def execute_action(cr, ident, workitem, activity):
|
||||||
wf_service = netsvc.LocalService("object_proxy")
|
wf_service = netsvc.LocalService("object_proxy")
|
||||||
obj = pooler.get_pool(cr.dbname).get('ir.actions.server')
|
obj = pooler.get_pool(cr.dbname).get('ir.actions.server')
|
||||||
ctx = {'active_id':ident[2], 'active_ids':[ident[2]]}
|
ctx = {'active_id':ident[2], 'active_ids':[ident[2]]}
|
||||||
result = obj.run(cr, ident[0], [activity['action_id']], ctx)
|
result = obj.run(cr, ident[0], [activity['action_id']], ctx)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def execute(cr, ident, workitem, activity):
|
def execute(cr, ident, workitem, activity):
|
||||||
return _eval_expr(cr, ident, workitem, activity['action'])
|
return _eval_expr(cr, ident, workitem, activity['action'])
|
||||||
|
|
||||||
def check(cr, workitem, ident, transition, signal):
|
def check(cr, workitem, ident, transition, signal):
|
||||||
ok = True
|
ok = True
|
||||||
if transition['signal']:
|
if transition['signal']:
|
||||||
ok = (signal==transition['signal'])
|
ok = (signal==transition['signal'])
|
||||||
|
|
||||||
if transition['role_id']:
|
if transition['role_id']:
|
||||||
uid = ident[0]
|
uid = ident[0]
|
||||||
serv = netsvc.LocalService('object_proxy')
|
serv = netsvc.LocalService('object_proxy')
|
||||||
user_roles = serv.execute_cr(cr, uid, 'res.users', 'read', [uid], ['roles_id'])[0]['roles_id']
|
user_roles = serv.execute_cr(cr, uid, 'res.users', 'read', [uid], ['roles_id'])[0]['roles_id']
|
||||||
ok = ok and serv.execute_cr(cr, uid, 'res.roles', 'check', user_roles, transition['role_id'])
|
ok = ok and serv.execute_cr(cr, uid, 'res.roles', 'check', user_roles, transition['role_id'])
|
||||||
ok = ok and _eval_expr(cr, ident, workitem, transition['condition'])
|
ok = ok and _eval_expr(cr, ident, workitem, transition['condition'])
|
||||||
return ok
|
return ok
|
||||||
|
|
||||||
|
|
|
@ -31,5 +31,5 @@
|
||||||
#
|
#
|
||||||
|
|
||||||
def log(cr,ident,act_id,info=''):
|
def log(cr,ident,act_id,info=''):
|
||||||
pass
|
pass
|
||||||
#cr.execute('insert into wkf_logs (res_type, res_id, uid, act_id, time, info) values (%s,%d,%d,%d,current_time,%s)', (ident[1],int(ident[2]),int(ident[0]),int(act_id),info))
|
#cr.execute('insert into wkf_logs (res_type, res_id, uid, act_id, time, info) values (%s,%d,%d,%d,current_time,%s)', (ident[1],int(ident[2]),int(ident[0]),int(act_id),info))
|
||||||
|
|
|
@ -34,79 +34,79 @@ import netsvc
|
||||||
import pooler
|
import pooler
|
||||||
|
|
||||||
class workflow_service(netsvc.Service):
|
class workflow_service(netsvc.Service):
|
||||||
def __init__(self, name='workflow', audience='*'):
|
def __init__(self, name='workflow', audience='*'):
|
||||||
netsvc.Service.__init__(self, name, audience)
|
netsvc.Service.__init__(self, name, audience)
|
||||||
self.exportMethod(self.trg_write)
|
self.exportMethod(self.trg_write)
|
||||||
self.exportMethod(self.trg_delete)
|
self.exportMethod(self.trg_delete)
|
||||||
self.exportMethod(self.trg_create)
|
self.exportMethod(self.trg_create)
|
||||||
self.exportMethod(self.trg_validate)
|
self.exportMethod(self.trg_validate)
|
||||||
self.exportMethod(self.trg_redirect)
|
self.exportMethod(self.trg_redirect)
|
||||||
self.exportMethod(self.trg_trigger)
|
self.exportMethod(self.trg_trigger)
|
||||||
self.exportMethod(self.clear_cache)
|
self.exportMethod(self.clear_cache)
|
||||||
self.wkf_on_create_cache={}
|
self.wkf_on_create_cache={}
|
||||||
|
|
||||||
def clear_cache(self, cr, uid):
|
def clear_cache(self, cr, uid):
|
||||||
self.wkf_on_create_cache[cr.dbname]={}
|
self.wkf_on_create_cache[cr.dbname]={}
|
||||||
|
|
||||||
def trg_write(self, uid, res_type, res_id, cr):
|
def trg_write(self, uid, res_type, res_id, cr):
|
||||||
ident = (uid,res_type,res_id)
|
ident = (uid,res_type,res_id)
|
||||||
cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id,res_type, 'active'))
|
cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id,res_type, 'active'))
|
||||||
for (id,) in cr.fetchall():
|
for (id,) in cr.fetchall():
|
||||||
instance.update(cr, id, ident)
|
instance.update(cr, id, ident)
|
||||||
|
|
||||||
def trg_trigger(self, uid, res_type, res_id, cr):
|
def trg_trigger(self, uid, res_type, res_id, cr):
|
||||||
cr.execute('select instance_id from wkf_triggers where res_id=%d and model=%s', (res_id,res_type))
|
cr.execute('select instance_id from wkf_triggers where res_id=%d and model=%s', (res_id,res_type))
|
||||||
res = cr.fetchall()
|
res = cr.fetchall()
|
||||||
for (instance_id,) in res:
|
for (instance_id,) in res:
|
||||||
cr.execute('select uid,res_type,res_id from wkf_instance where id=%d', (instance_id,))
|
cr.execute('select uid,res_type,res_id from wkf_instance where id=%d', (instance_id,))
|
||||||
ident = cr.fetchone()
|
ident = cr.fetchone()
|
||||||
instance.update(cr, instance_id, ident)
|
instance.update(cr, instance_id, ident)
|
||||||
|
|
||||||
def trg_delete(self, uid, res_type, res_id, cr):
|
def trg_delete(self, uid, res_type, res_id, cr):
|
||||||
ident = (uid,res_type,res_id)
|
ident = (uid,res_type,res_id)
|
||||||
instance.delete(cr, ident)
|
instance.delete(cr, ident)
|
||||||
|
|
||||||
def trg_create(self, uid, res_type, res_id, cr):
|
def trg_create(self, uid, res_type, res_id, cr):
|
||||||
ident = (uid,res_type,res_id)
|
ident = (uid,res_type,res_id)
|
||||||
self.wkf_on_create_cache.setdefault(cr.dbname, {})
|
self.wkf_on_create_cache.setdefault(cr.dbname, {})
|
||||||
if res_type in self.wkf_on_create_cache[cr.dbname]:
|
if res_type in self.wkf_on_create_cache[cr.dbname]:
|
||||||
wkf_ids = self.wkf_on_create_cache[cr.dbname][res_type]
|
wkf_ids = self.wkf_on_create_cache[cr.dbname][res_type]
|
||||||
else:
|
else:
|
||||||
cr.execute('select id from wkf where osv=%s and on_create=True', (res_type,))
|
cr.execute('select id from wkf where osv=%s and on_create=True', (res_type,))
|
||||||
wkf_ids = cr.fetchall()
|
wkf_ids = cr.fetchall()
|
||||||
self.wkf_on_create_cache[cr.dbname][res_type] = wkf_ids
|
self.wkf_on_create_cache[cr.dbname][res_type] = wkf_ids
|
||||||
for (wkf_id,) in wkf_ids:
|
for (wkf_id,) in wkf_ids:
|
||||||
instance.create(cr, ident, wkf_id)
|
instance.create(cr, ident, wkf_id)
|
||||||
|
|
||||||
def trg_validate(self, uid, res_type, res_id, signal, cr):
|
def trg_validate(self, uid, res_type, res_id, signal, cr):
|
||||||
result = False
|
result = False
|
||||||
ident = (uid,res_type,res_id)
|
ident = (uid,res_type,res_id)
|
||||||
# ids of all active workflow instances for a corresponding resource (id, model_nam)
|
# ids of all active workflow instances for a corresponding resource (id, model_nam)
|
||||||
cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id, res_type, 'active'))
|
cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id, res_type, 'active'))
|
||||||
for (id,) in cr.fetchall():
|
for (id,) in cr.fetchall():
|
||||||
res2 = instance.validate(cr, id, ident, signal)
|
res2 = instance.validate(cr, id, ident, signal)
|
||||||
result = result or res2
|
result = result or res2
|
||||||
return result
|
return result
|
||||||
|
|
||||||
# make all workitems which are waiting for a (subflow) workflow instance
|
# make all workitems which are waiting for a (subflow) workflow instance
|
||||||
# for the old resource point to the (first active) workflow instance for
|
# for the old resource point to the (first active) workflow instance for
|
||||||
# the new resource
|
# the new resource
|
||||||
def trg_redirect(self, uid, res_type, res_id, new_rid, cr):
|
def trg_redirect(self, uid, res_type, res_id, new_rid, cr):
|
||||||
# get ids of wkf instances for the old resource (res_id)
|
# get ids of wkf instances for the old resource (res_id)
|
||||||
#CHECKME: shouldn't we get only active instances?
|
#CHECKME: shouldn't we get only active instances?
|
||||||
cr.execute('select id, wkf_id from wkf_instance where res_id=%d and res_type=%s', (res_id, res_type))
|
cr.execute('select id, wkf_id from wkf_instance where res_id=%d and res_type=%s', (res_id, res_type))
|
||||||
for old_inst_id, wkf_id in cr.fetchall():
|
for old_inst_id, wkf_id in cr.fetchall():
|
||||||
# first active instance for new resource (new_rid), using same wkf
|
# first active instance for new resource (new_rid), using same wkf
|
||||||
cr.execute(
|
cr.execute(
|
||||||
'SELECT id '\
|
'SELECT id '\
|
||||||
'FROM wkf_instance '\
|
'FROM wkf_instance '\
|
||||||
'WHERE res_id=%d AND res_type=%s AND wkf_id=%d AND state=%s',
|
'WHERE res_id=%d AND res_type=%s AND wkf_id=%d AND state=%s',
|
||||||
(new_rid, res_type, wkf_id, 'active'))
|
(new_rid, res_type, wkf_id, 'active'))
|
||||||
new_id = cr.fetchone()
|
new_id = cr.fetchone()
|
||||||
if new_id:
|
if new_id:
|
||||||
# select all workitems which "wait" for the old instance
|
# select all workitems which "wait" for the old instance
|
||||||
cr.execute('select id from wkf_workitem where subflow_id=%d', (old_inst_id,))
|
cr.execute('select id from wkf_workitem where subflow_id=%d', (old_inst_id,))
|
||||||
for (item_id,) in cr.fetchall():
|
for (item_id,) in cr.fetchall():
|
||||||
# redirect all those workitems to the wkf instance of the new resource
|
# redirect all those workitems to the wkf instance of the new resource
|
||||||
cr.execute('update wkf_workitem set subflow_id=%d where id=%d', (new_id[0], item_id))
|
cr.execute('update wkf_workitem set subflow_id=%d where id=%d', (new_id[0], item_id))
|
||||||
workflow_service()
|
workflow_service()
|
||||||
|
|
|
@ -38,158 +38,158 @@ import wkf_expr
|
||||||
import wkf_logs
|
import wkf_logs
|
||||||
|
|
||||||
def create(cr, act_datas, inst_id, ident, stack):
|
def create(cr, act_datas, inst_id, ident, stack):
|
||||||
for act in act_datas:
|
for act in act_datas:
|
||||||
cr.execute("select nextval('wkf_workitem_id_seq')")
|
cr.execute("select nextval('wkf_workitem_id_seq')")
|
||||||
id_new = cr.fetchone()[0]
|
id_new = cr.fetchone()[0]
|
||||||
cr.execute("insert into wkf_workitem (id,act_id,inst_id,state) values (%d,%s,%s,'active')", (id_new, act['id'], inst_id))
|
cr.execute("insert into wkf_workitem (id,act_id,inst_id,state) values (%d,%s,%s,'active')", (id_new, act['id'], inst_id))
|
||||||
cr.execute('select * from wkf_workitem where id=%d',(id_new,))
|
cr.execute('select * from wkf_workitem where id=%d',(id_new,))
|
||||||
res = cr.dictfetchone()
|
res = cr.dictfetchone()
|
||||||
wkf_logs.log(cr,ident,act['id'],'active')
|
wkf_logs.log(cr,ident,act['id'],'active')
|
||||||
process(cr, res, ident, stack=stack)
|
process(cr, res, ident, stack=stack)
|
||||||
|
|
||||||
def process(cr, workitem, ident, signal=None, force_running=False, stack=None):
|
def process(cr, workitem, ident, signal=None, force_running=False, stack=None):
|
||||||
if stack is None:
|
if stack is None:
|
||||||
raise 'Error !!!'
|
raise 'Error !!!'
|
||||||
result = True
|
result = True
|
||||||
cr.execute('select * from wkf_activity where id=%d', (workitem['act_id'],))
|
cr.execute('select * from wkf_activity where id=%d', (workitem['act_id'],))
|
||||||
activity = cr.dictfetchone()
|
activity = cr.dictfetchone()
|
||||||
|
|
||||||
triggers = False
|
triggers = False
|
||||||
if workitem['state']=='active':
|
if workitem['state']=='active':
|
||||||
triggers = True
|
triggers = True
|
||||||
result = _execute(cr, workitem, activity, ident, stack)
|
result = _execute(cr, workitem, activity, ident, stack)
|
||||||
if not result:
|
if not result:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if workitem['state']=='running':
|
if workitem['state']=='running':
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if workitem['state']=='complete' or force_running:
|
if workitem['state']=='complete' or force_running:
|
||||||
ok = _split_test(cr, workitem, activity['split_mode'], ident, signal, stack)
|
ok = _split_test(cr, workitem, activity['split_mode'], ident, signal, stack)
|
||||||
triggers = triggers and not ok
|
triggers = triggers and not ok
|
||||||
|
|
||||||
if triggers:
|
if triggers:
|
||||||
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
|
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
|
||||||
alltrans = cr.dictfetchall()
|
alltrans = cr.dictfetchall()
|
||||||
for trans in alltrans:
|
for trans in alltrans:
|
||||||
if trans['trigger_model']:
|
if trans['trigger_model']:
|
||||||
ids = wkf_expr._eval_expr(cr,ident,workitem,trans['trigger_expr_id'])
|
ids = wkf_expr._eval_expr(cr,ident,workitem,trans['trigger_expr_id'])
|
||||||
for res_id in ids:
|
for res_id in ids:
|
||||||
cr.execute('select nextval(\'wkf_triggers_id_seq\')')
|
cr.execute('select nextval(\'wkf_triggers_id_seq\')')
|
||||||
id =cr.fetchone()[0]
|
id =cr.fetchone()[0]
|
||||||
cr.execute('insert into wkf_triggers (model,res_id,instance_id,workitem_id,id) values (%s,%d,%d,%d,%d)', (trans['trigger_model'],res_id,workitem['inst_id'], workitem['id'], id))
|
cr.execute('insert into wkf_triggers (model,res_id,instance_id,workitem_id,id) values (%s,%d,%d,%d,%d)', (trans['trigger_model'],res_id,workitem['inst_id'], workitem['id'], id))
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
# ---------------------- PRIVATE FUNCS --------------------------------
|
# ---------------------- PRIVATE FUNCS --------------------------------
|
||||||
|
|
||||||
def _state_set(cr, workitem, activity, state, ident):
|
def _state_set(cr, workitem, activity, state, ident):
|
||||||
cr.execute('update wkf_workitem set state=%s where id=%d', (state,workitem['id']))
|
cr.execute('update wkf_workitem set state=%s where id=%d', (state,workitem['id']))
|
||||||
workitem['state'] = state
|
workitem['state'] = state
|
||||||
wkf_logs.log(cr,ident,activity['id'],state)
|
wkf_logs.log(cr,ident,activity['id'],state)
|
||||||
|
|
||||||
def _execute(cr, workitem, activity, ident, stack):
|
def _execute(cr, workitem, activity, ident, stack):
|
||||||
result = True
|
result = True
|
||||||
#
|
#
|
||||||
# send a signal to parent workflow (signal: subflow.signal_name)
|
# send a signal to parent workflow (signal: subflow.signal_name)
|
||||||
#
|
#
|
||||||
if (workitem['state']=='active') and activity['signal_send']:
|
if (workitem['state']=='active') and activity['signal_send']:
|
||||||
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (workitem['inst_id'],))
|
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (workitem['inst_id'],))
|
||||||
for i in cr.fetchall():
|
for i in cr.fetchall():
|
||||||
instance.validate(cr, i[0], (ident[0],i[1],i[2]), activity['signal_send'], force_running=True)
|
instance.validate(cr, i[0], (ident[0],i[1],i[2]), activity['signal_send'], force_running=True)
|
||||||
|
|
||||||
if activity['kind']=='dummy':
|
if activity['kind']=='dummy':
|
||||||
if workitem['state']=='active':
|
if workitem['state']=='active':
|
||||||
_state_set(cr, workitem, activity, 'complete', ident)
|
_state_set(cr, workitem, activity, 'complete', ident)
|
||||||
elif activity['kind']=='function':
|
elif activity['kind']=='function':
|
||||||
if workitem['state']=='active':
|
if workitem['state']=='active':
|
||||||
_state_set(cr, workitem, activity, 'running', ident)
|
_state_set(cr, workitem, activity, 'running', ident)
|
||||||
wkf_expr.execute(cr, ident, workitem, activity)
|
wkf_expr.execute(cr, ident, workitem, activity)
|
||||||
if activity['action_id']:
|
if activity['action_id']:
|
||||||
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
|
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
|
||||||
# A client action has been returned
|
# A client action has been returned
|
||||||
if res2:
|
if res2:
|
||||||
stack.append(res2)
|
stack.append(res2)
|
||||||
result=res2
|
result=res2
|
||||||
_state_set(cr, workitem, activity, 'complete', ident)
|
_state_set(cr, workitem, activity, 'complete', ident)
|
||||||
elif activity['kind']=='stopall':
|
elif activity['kind']=='stopall':
|
||||||
if workitem['state']=='active':
|
if workitem['state']=='active':
|
||||||
_state_set(cr, workitem, activity, 'running', ident)
|
_state_set(cr, workitem, activity, 'running', ident)
|
||||||
cr.execute('delete from wkf_workitem where inst_id=%d and id<>%d', (workitem['inst_id'], workitem['id']))
|
cr.execute('delete from wkf_workitem where inst_id=%d and id<>%d', (workitem['inst_id'], workitem['id']))
|
||||||
if activity['action']:
|
if activity['action']:
|
||||||
wkf_expr.execute(cr, ident, workitem, activity)
|
wkf_expr.execute(cr, ident, workitem, activity)
|
||||||
_state_set(cr, workitem, activity, 'complete', ident)
|
_state_set(cr, workitem, activity, 'complete', ident)
|
||||||
elif activity['kind']=='subflow':
|
elif activity['kind']=='subflow':
|
||||||
if workitem['state']=='active':
|
if workitem['state']=='active':
|
||||||
_state_set(cr, workitem, activity, 'running', ident)
|
_state_set(cr, workitem, activity, 'running', ident)
|
||||||
if activity.get('action', False):
|
if activity.get('action', False):
|
||||||
id_new = wkf_expr.execute(cr, ident, workitem, activity)
|
id_new = wkf_expr.execute(cr, ident, workitem, activity)
|
||||||
if not (id_new):
|
if not (id_new):
|
||||||
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
|
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
|
||||||
return False
|
return False
|
||||||
assert type(id_new)==type(1) or type(id_new)==type(1L), 'Wrong return value: '+str(id_new)+' '+str(type(id_new))
|
assert type(id_new)==type(1) or type(id_new)==type(1L), 'Wrong return value: '+str(id_new)+' '+str(type(id_new))
|
||||||
cr.execute('select id from wkf_instance where res_id=%d and wkf_id=%d', (id_new,activity['subflow_id']))
|
cr.execute('select id from wkf_instance where res_id=%d and wkf_id=%d', (id_new,activity['subflow_id']))
|
||||||
id_new = cr.fetchone()[0]
|
id_new = cr.fetchone()[0]
|
||||||
else:
|
else:
|
||||||
id_new = instance.create(cr, ident, activity['subflow_id'])
|
id_new = instance.create(cr, ident, activity['subflow_id'])
|
||||||
cr.execute('update wkf_workitem set subflow_id=%d where id=%s', (id_new, workitem['id']))
|
cr.execute('update wkf_workitem set subflow_id=%d where id=%s', (id_new, workitem['id']))
|
||||||
workitem['subflow_id'] = id_new
|
workitem['subflow_id'] = id_new
|
||||||
if workitem['state']=='running':
|
if workitem['state']=='running':
|
||||||
cr.execute("select state from wkf_instance where id=%d", (workitem['subflow_id'],))
|
cr.execute("select state from wkf_instance where id=%d", (workitem['subflow_id'],))
|
||||||
state= cr.fetchone()[0]
|
state= cr.fetchone()[0]
|
||||||
if state=='complete':
|
if state=='complete':
|
||||||
_state_set(cr, workitem, activity, 'complete', ident)
|
_state_set(cr, workitem, activity, 'complete', ident)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def _split_test(cr, workitem, split_mode, ident, signal=None, stack=None):
|
def _split_test(cr, workitem, split_mode, ident, signal=None, stack=None):
|
||||||
if stack is None:
|
if stack is None:
|
||||||
raise 'Error !!!'
|
raise 'Error !!!'
|
||||||
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
|
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
|
||||||
test = False
|
test = False
|
||||||
transitions = []
|
transitions = []
|
||||||
alltrans = cr.dictfetchall()
|
alltrans = cr.dictfetchall()
|
||||||
if split_mode=='XOR' or split_mode=='OR':
|
if split_mode=='XOR' or split_mode=='OR':
|
||||||
for transition in alltrans:
|
for transition in alltrans:
|
||||||
if wkf_expr.check(cr, workitem, ident, transition,signal):
|
if wkf_expr.check(cr, workitem, ident, transition,signal):
|
||||||
test = True
|
test = True
|
||||||
transitions.append((transition['id'], workitem['inst_id']))
|
transitions.append((transition['id'], workitem['inst_id']))
|
||||||
if split_mode=='XOR':
|
if split_mode=='XOR':
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
test = True
|
test = True
|
||||||
for transition in alltrans:
|
for transition in alltrans:
|
||||||
if not wkf_expr.check(cr, workitem, ident, transition,signal):
|
if not wkf_expr.check(cr, workitem, ident, transition,signal):
|
||||||
test = False
|
test = False
|
||||||
break
|
break
|
||||||
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (transition['id'], workitem['inst_id']))
|
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (transition['id'], workitem['inst_id']))
|
||||||
if not cr.fetchone()[0]:
|
if not cr.fetchone()[0]:
|
||||||
transitions.append((transition['id'], workitem['inst_id']))
|
transitions.append((transition['id'], workitem['inst_id']))
|
||||||
if test and len(transitions):
|
if test and len(transitions):
|
||||||
cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%d,%d)', transitions)
|
cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%d,%d)', transitions)
|
||||||
cr.execute('delete from wkf_workitem where id=%d', (workitem['id'],))
|
cr.execute('delete from wkf_workitem where id=%d', (workitem['id'],))
|
||||||
for t in transitions:
|
for t in transitions:
|
||||||
_join_test(cr, t[0], t[1], ident, stack)
|
_join_test(cr, t[0], t[1], ident, stack)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _join_test(cr, trans_id, inst_id, ident, stack):
|
def _join_test(cr, trans_id, inst_id, ident, stack):
|
||||||
cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%d)', (trans_id,))
|
cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%d)', (trans_id,))
|
||||||
activity = cr.dictfetchone()
|
activity = cr.dictfetchone()
|
||||||
if activity['join_mode']=='XOR':
|
if activity['join_mode']=='XOR':
|
||||||
create(cr,[activity], inst_id, ident, stack)
|
create(cr,[activity], inst_id, ident, stack)
|
||||||
cr.execute('delete from wkf_witm_trans where inst_id=%d and trans_id=%d', (inst_id,trans_id))
|
cr.execute('delete from wkf_witm_trans where inst_id=%d and trans_id=%d', (inst_id,trans_id))
|
||||||
else:
|
else:
|
||||||
cr.execute('select id from wkf_transition where act_to=%d', (activity['id'],))
|
cr.execute('select id from wkf_transition where act_to=%d', (activity['id'],))
|
||||||
trans_ids = cr.fetchall()
|
trans_ids = cr.fetchall()
|
||||||
ok = True
|
ok = True
|
||||||
for (id,) in trans_ids:
|
for (id,) in trans_ids:
|
||||||
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
|
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
|
||||||
res = cr.fetchone()[0]
|
res = cr.fetchone()[0]
|
||||||
if not res:
|
if not res:
|
||||||
ok = False
|
ok = False
|
||||||
break
|
break
|
||||||
if ok:
|
if ok:
|
||||||
for (id,) in trans_ids:
|
for (id,) in trans_ids:
|
||||||
cr.execute('delete from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
|
cr.execute('delete from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
|
||||||
create(cr, [activity], inst_id, ident, stack)
|
create(cr, [activity], inst_id, ident, stack)
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -91,23 +91,23 @@ cr.execute("SELECT c.relname FROM pg_class c, pg_attribute a WHERE c.relname='re
|
||||||
partners=[]
|
partners=[]
|
||||||
drop_payment_term=False
|
drop_payment_term=False
|
||||||
if cr.rowcount:
|
if cr.rowcount:
|
||||||
drop_payment_term=True
|
drop_payment_term=True
|
||||||
cr.execute("select id, payment_term from res_partner where payment_term is not null")
|
cr.execute("select id, payment_term from res_partner where payment_term is not null")
|
||||||
partners = cr.dictfetchall()
|
partners = cr.dictfetchall()
|
||||||
|
|
||||||
# loop over them
|
# loop over them
|
||||||
|
|
||||||
for partner in partners:
|
for partner in partners:
|
||||||
value = 'account.payment.term,%d' % partner['payment_term']
|
value = 'account.payment.term,%d' % partner['payment_term']
|
||||||
res_id = 'res.partner,%d' % partner['id']
|
res_id = 'res.partner,%d' % partner['id']
|
||||||
cr.execute(
|
cr.execute(
|
||||||
"insert into ir_property(name, value, res_id, company_id, fields_id) "\
|
"insert into ir_property(name, value, res_id, company_id, fields_id) "\
|
||||||
"values(%s, %s, %s, %d, %d)",
|
"values(%s, %s, %s, %d, %d)",
|
||||||
('property_payment_term', value, res_id, company_id, fields_id))
|
('property_payment_term', value, res_id, company_id, fields_id))
|
||||||
|
|
||||||
# remove the field
|
# remove the field
|
||||||
if drop_payment_term:
|
if drop_payment_term:
|
||||||
cr.execute("alter table res_partner drop column payment_term")
|
cr.execute("alter table res_partner drop column payment_term")
|
||||||
cr.execute("delete from ir_model_fields where model = 'res.partner' and name = 'payment_term'")
|
cr.execute("delete from ir_model_fields where model = 'res.partner' and name = 'payment_term'")
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
@ -124,10 +124,10 @@ registered_reports = cr.fetchall()
|
||||||
reg_reports_ids = ','.join([str(id) for (id,) in registered_reports])
|
reg_reports_ids = ','.join([str(id) for (id,) in registered_reports])
|
||||||
|
|
||||||
for report in reports_wh_duplicates:
|
for report in reports_wh_duplicates:
|
||||||
cr.execute("select id from ir_act_report_xml where model=%s and report_name=%s and id not in ("+reg_reports_ids+")", (report['model'], report['report_name']))
|
cr.execute("select id from ir_act_report_xml where model=%s and report_name=%s and id not in ("+reg_reports_ids+")", (report['model'], report['report_name']))
|
||||||
(id,) = cr.fetchone()
|
(id,) = cr.fetchone()
|
||||||
cr.execute("delete from ir_act_report_xml where id=%d", (id,))
|
cr.execute("delete from ir_act_report_xml where id=%d", (id,))
|
||||||
cr.execute("delete from ir_values where value='ir.actions.report.xml,%d'", (id,))
|
cr.execute("delete from ir_values where value='ir.actions.report.xml,%d'", (id,))
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -79,18 +79,18 @@ cr = db.cursor()
|
||||||
# ------------------------- #
|
# ------------------------- #
|
||||||
|
|
||||||
def change_column(cr, table, column, new_type, copy):
|
def change_column(cr, table, column, new_type, copy):
|
||||||
commands = [
|
commands = [
|
||||||
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
||||||
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
||||||
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
||||||
]
|
]
|
||||||
if copy:
|
if copy:
|
||||||
commands.insert(
|
commands.insert(
|
||||||
2,
|
2,
|
||||||
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
||||||
|
|
||||||
for command in commands:
|
for command in commands:
|
||||||
cr.execute(command)
|
cr.execute(command)
|
||||||
|
|
||||||
change_column(cr, 'account_account_type', 'code_from', 'varchar(10)', False)
|
change_column(cr, 'account_account_type', 'code_from', 'varchar(10)', False)
|
||||||
change_column(cr, 'account_account_type', 'code_to', 'varchar(10)', False)
|
change_column(cr, 'account_account_type', 'code_to', 'varchar(10)', False)
|
||||||
|
@ -101,17 +101,17 @@ cr.commit()
|
||||||
# ----------------------------------------------------- #
|
# ----------------------------------------------------- #
|
||||||
|
|
||||||
for line in (
|
for line in (
|
||||||
"alter table ir_model_fields add group_name varchar(64)",
|
"alter table ir_model_fields add group_name varchar(64)",
|
||||||
"alter table ir_model_fields add view_load boolean",
|
"alter table ir_model_fields add view_load boolean",
|
||||||
"alter table ir_model_fields alter group_name set default ''",
|
"alter table ir_model_fields alter group_name set default ''",
|
||||||
"alter table ir_model_fields alter view_load set default False",
|
"alter table ir_model_fields alter view_load set default False",
|
||||||
"delete from ir_values where value like '%,False'",
|
"delete from ir_values where value like '%,False'",
|
||||||
):
|
):
|
||||||
try:
|
try:
|
||||||
cr.execute(line)
|
cr.execute(line)
|
||||||
except psycopg.ProgrammingError, e:
|
except psycopg.ProgrammingError, e:
|
||||||
cr.commit()
|
cr.commit()
|
||||||
print e
|
print e
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -86,10 +86,10 @@ cr.commit()
|
||||||
# --------------- #
|
# --------------- #
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
cr.execute("select id from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
|
cr.execute("select id from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
|
||||||
if not cr.rowcount:
|
if not cr.rowcount:
|
||||||
break
|
break
|
||||||
cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
|
cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ----------------------------------------- #
|
# ----------------------------------------- #
|
||||||
|
@ -114,7 +114,7 @@ It is not possible to migrate the data automatically so you need to create the o
|
||||||
And then update the field uos_id of the table account_invoice to match the new id of product_uom.
|
And then update the field uos_id of the table account_invoice to match the new id of product_uom.
|
||||||
|
|
||||||
EXAMPLE:
|
EXAMPLE:
|
||||||
UPDATE account_invoice SET uos_id = new_id WHERE uos_id = old_id;
|
UPDATE account_invoice SET uos_id = new_id WHERE uos_id = old_id;
|
||||||
"""
|
"""
|
||||||
|
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -79,18 +79,18 @@ cr = db.cursor()
|
||||||
# ------------------------- #
|
# ------------------------- #
|
||||||
|
|
||||||
def change_column(cr, table, column, new_type, copy):
|
def change_column(cr, table, column, new_type, copy):
|
||||||
commands = [
|
commands = [
|
||||||
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
||||||
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
||||||
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
||||||
]
|
]
|
||||||
if copy:
|
if copy:
|
||||||
commands.insert(
|
commands.insert(
|
||||||
2,
|
2,
|
||||||
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
||||||
|
|
||||||
for command in commands:
|
for command in commands:
|
||||||
cr.execute(command)
|
cr.execute(command)
|
||||||
|
|
||||||
#change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
|
#change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
@ -101,7 +101,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute("SELECT name FROM ir_module_module")
|
cr.execute("SELECT name FROM ir_module_module")
|
||||||
if not cr.rowcount:
|
if not cr.rowcount:
|
||||||
for module in set(['base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project',
|
for module in set(['base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project',
|
||||||
'account_followup',
|
'account_followup',
|
||||||
'account',
|
'account',
|
||||||
'audittrail',
|
'audittrail',
|
||||||
|
@ -131,8 +131,8 @@ if not cr.rowcount:
|
||||||
'sandwich',
|
'sandwich',
|
||||||
'scrum',
|
'scrum',
|
||||||
'stock']):
|
'stock']):
|
||||||
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
|
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
||||||
# ----------------------------------------------------- #
|
# ----------------------------------------------------- #
|
||||||
|
@ -140,11 +140,11 @@ if not cr.rowcount:
|
||||||
# ----------------------------------------------------- #
|
# ----------------------------------------------------- #
|
||||||
|
|
||||||
for line in (
|
for line in (
|
||||||
"ALTER TABLE ir_module_module ADD demo BOOLEAN DEFAULT False",
|
"ALTER TABLE ir_module_module ADD demo BOOLEAN DEFAULT False",
|
||||||
"delete from ir_values where value like '%,False'",
|
"delete from ir_values where value like '%,False'",
|
||||||
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
|
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
|
||||||
):
|
):
|
||||||
cr.execute(line)
|
cr.execute(line)
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
|
@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -79,18 +79,18 @@ cr = db.cursor()
|
||||||
# ------------------------- #
|
# ------------------------- #
|
||||||
|
|
||||||
def change_column(cr, table, column, new_type, copy):
|
def change_column(cr, table, column, new_type, copy):
|
||||||
commands = [
|
commands = [
|
||||||
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
|
||||||
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
|
||||||
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
"ALTER TABLE %s DROP COLUMN temp_column" % table
|
||||||
]
|
]
|
||||||
if copy:
|
if copy:
|
||||||
commands.insert(
|
commands.insert(
|
||||||
2,
|
2,
|
||||||
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
|
||||||
|
|
||||||
for command in commands:
|
for command in commands:
|
||||||
cr.execute(command)
|
cr.execute(command)
|
||||||
|
|
||||||
change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
|
change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
@ -101,19 +101,19 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute("SELECT name FROM ir_module_module")
|
cr.execute("SELECT name FROM ir_module_module")
|
||||||
if not cr.rowcount:
|
if not cr.rowcount:
|
||||||
for module in ('base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project'):
|
for module in ('base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project'):
|
||||||
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
|
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# --------------- #
|
# --------------- #
|
||||||
# remove old menu #
|
# remove old menu #
|
||||||
# --------------- #
|
# --------------- #
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
cr.execute("select id from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
|
cr.execute("select id from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
|
||||||
if not cr.rowcount:
|
if not cr.rowcount:
|
||||||
break
|
break
|
||||||
cr.execute("delete from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
|
cr.execute("delete from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ----------------------------------------------------- #
|
# ----------------------------------------------------- #
|
||||||
|
@ -121,12 +121,12 @@ cr.commit()
|
||||||
# ----------------------------------------------------- #
|
# ----------------------------------------------------- #
|
||||||
|
|
||||||
for line in (
|
for line in (
|
||||||
"ALTER TABLE ir_module_module ADD demo BOOLEAN",
|
"ALTER TABLE ir_module_module ADD demo BOOLEAN",
|
||||||
"ALTER TABLE ir_module_module SET demo DEFAULT False",
|
"ALTER TABLE ir_module_module SET demo DEFAULT False",
|
||||||
"DELETE FROM ir_values WHERE VALUE LIKE '%,False'",
|
"DELETE FROM ir_values WHERE VALUE LIKE '%,False'",
|
||||||
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
|
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
|
||||||
):
|
):
|
||||||
cr.execute(line)
|
cr.execute(line)
|
||||||
|
|
||||||
cr.commit()
|
cr.commit()
|
||||||
cr.close()
|
cr.close()
|
||||||
|
|
|
@ -54,15 +54,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
# -----
|
# -----
|
||||||
|
|
||||||
|
@ -82,13 +82,13 @@ cr = db.cursor()
|
||||||
cr.execute("""SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class c,pg_attribute a,pg_type t WHERE c.relname='res_currency' AND a.attname='rounding' AND c.oid=a.attrelid AND a.atttypid=t.oid""")
|
cr.execute("""SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class c,pg_attribute a,pg_type t WHERE c.relname='res_currency' AND a.attname='rounding' AND c.oid=a.attrelid AND a.atttypid=t.oid""")
|
||||||
res = cr.dictfetchall()
|
res = cr.dictfetchall()
|
||||||
if res[0]['typname'] != 'numeric':
|
if res[0]['typname'] != 'numeric':
|
||||||
for line in (
|
for line in (
|
||||||
"ALTER TABLE res_currency RENAME rounding TO rounding_bak",
|
"ALTER TABLE res_currency RENAME rounding TO rounding_bak",
|
||||||
"ALTER TABLE res_currency ADD rounding NUMERIC(12,6)",
|
"ALTER TABLE res_currency ADD rounding NUMERIC(12,6)",
|
||||||
"UPDATE res_currency SET rounding = power(10, - rounding_bak)",
|
"UPDATE res_currency SET rounding = power(10, - rounding_bak)",
|
||||||
"ALTER TABLE res_currency DROP rounding_bak",
|
"ALTER TABLE res_currency DROP rounding_bak",
|
||||||
):
|
):
|
||||||
cr.execute(line)
|
cr.execute(line)
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ----------------------------- #
|
# ----------------------------- #
|
||||||
|
@ -97,7 +97,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT conname FROM pg_constraint where conname = \'ir_ui_view_type\'')
|
cr.execute('SELECT conname FROM pg_constraint where conname = \'ir_ui_view_type\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_ui_view DROP CONSTRAINT ir_ui_view_type')
|
cr.execute('ALTER TABLE ir_ui_view DROP CONSTRAINT ir_ui_view_type')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------------------ #
|
# ------------------------ #
|
||||||
|
@ -106,7 +106,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_partner_bank\' AND a.attname = \'iban\' AND c.oid = a.attrelid')
|
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_partner_bank\' AND a.attname = \'iban\' AND c.oid = a.attrelid')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('ALTER TABLE res_partner_bank RENAME iban TO acc_number')
|
cr.execute('ALTER TABLE res_partner_bank RENAME iban TO acc_number')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------------------------------------- #
|
# ------------------------------------------- #
|
||||||
|
@ -115,12 +115,12 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
|
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute("ALTER TABLE ir_model ADD perm_id int references perm on delete set null")
|
cr.execute("ALTER TABLE ir_model ADD perm_id int references perm on delete set null")
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model_fields\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
|
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model_fields\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute("ALTER TABLE ir_model_fields ADD perm_id int references perm on delete set null")
|
cr.execute("ALTER TABLE ir_model_fields ADD perm_id int references perm on delete set null")
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@ -138,9 +138,9 @@ cr.commit()
|
||||||
cr.execute('SELECT model_id FROM ir_model_access')
|
cr.execute('SELECT model_id FROM ir_model_access')
|
||||||
res= cr.fetchall()
|
res= cr.fetchall()
|
||||||
for r in res:
|
for r in res:
|
||||||
cr.execute('SELECT id FROM ir_model_access WHERE model_id = %d AND group_id IS NULL', (r[0],))
|
cr.execute('SELECT id FROM ir_model_access WHERE model_id = %d AND group_id IS NULL', (r[0],))
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute("INSERT into ir_model_access (name,model_id,group_id) VALUES ('Auto-generated access by migration',%d,NULL)",(r[0],))
|
cr.execute("INSERT into ir_model_access (name,model_id,group_id) VALUES ('Auto-generated access by migration',%d,NULL)",(r[0],))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------------------------------------------- #
|
# ------------------------------------------------- #
|
||||||
|
@ -149,7 +149,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
|
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# --------------------------- #
|
# --------------------------- #
|
||||||
|
@ -158,7 +158,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT * FROM pg_class c, pg_attribute a WHERE c.relname=\'hr_employee\' AND a.attname=\'state\' AND c.oid=a.attrelid')
|
cr.execute('SELECT * FROM pg_class c, pg_attribute a WHERE c.relname=\'hr_employee\' AND a.attname=\'state\' AND c.oid=a.attrelid')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('ALTER TABLE hr_employee DROP state')
|
cr.execute('ALTER TABLE hr_employee DROP state')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------ #
|
# ------------ #
|
||||||
|
@ -167,10 +167,10 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT id FROM ir_values where model=\'res.users\' AND key=\'meta\' AND name=\'tz\'')
|
cr.execute('SELECT id FROM ir_values where model=\'res.users\' AND key=\'meta\' AND name=\'tz\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
import pytz, pickle
|
import pytz, pickle
|
||||||
meta = pickle.dumps({'type':'selection', 'string':'Timezone', 'selection': [(x, x) for x in pytz.all_timezones]})
|
meta = pickle.dumps({'type':'selection', 'string':'Timezone', 'selection': [(x, x) for x in pytz.all_timezones]})
|
||||||
value = pickle.dumps(False)
|
value = pickle.dumps(False)
|
||||||
cr.execute('INSERT INTO ir_values (name, key, model, meta, key2, object, value) VALUES (\'tz\', \'meta\', \'res.users\', %s, \'tz\', %s, %s)', (meta,False, value))
|
cr.execute('INSERT INTO ir_values (name, key, model, meta, key2, object, value) VALUES (\'tz\', \'meta\', \'res.users\', %s, \'tz\', %s, %s)', (meta,False, value))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------------------- #
|
# ------------------------- #
|
||||||
|
@ -179,36 +179,36 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a, pg_type t WHERE c.relname = \'product_uom\' AND a.attname = \'factor\' AND c.oid = a.attrelid AND a.atttypid = t.oid AND t.typname = \'float8\'')
|
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a, pg_type t WHERE c.relname = \'product_uom\' AND a.attname = \'factor\' AND c.oid = a.attrelid AND a.atttypid = t.oid AND t.typname = \'float8\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_account\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_account\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_planning_stat_account')
|
cr.execute('DROP VIEW report_account_analytic_planning_stat_account')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_planning_stat')
|
cr.execute('DROP VIEW report_account_analytic_planning_stat')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_user\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_user\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_planning_stat_user')
|
cr.execute('DROP VIEW report_account_analytic_planning_stat_user')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_purchase_order_product')
|
cr.execute('DROP VIEW report_purchase_order_product')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_purchase_order_category')
|
cr.execute('DROP VIEW report_purchase_order_category')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_sale_order_product')
|
cr.execute('DROP VIEW report_sale_order_product')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_sale_order_category')
|
cr.execute('DROP VIEW report_sale_order_category')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_hr_timesheet_invoice_journal\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_hr_timesheet_invoice_journal\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_hr_timesheet_invoice_journal')
|
cr.execute('DROP VIEW report_hr_timesheet_invoice_journal')
|
||||||
|
|
||||||
cr.execute('ALTER TABLE product_uom RENAME COLUMN factor to temp_column')
|
cr.execute('ALTER TABLE product_uom RENAME COLUMN factor to temp_column')
|
||||||
cr.execute('ALTER TABLE product_uom ADD COLUMN factor NUMERIC(12,6)')
|
cr.execute('ALTER TABLE product_uom ADD COLUMN factor NUMERIC(12,6)')
|
||||||
cr.execute('UPDATE product_uom SET factor = temp_column')
|
cr.execute('UPDATE product_uom SET factor = temp_column')
|
||||||
cr.execute('ALTER TABLE product_uom ALTER factor SET NOT NULL')
|
cr.execute('ALTER TABLE product_uom ALTER factor SET NOT NULL')
|
||||||
cr.execute('ALTER TABLE product_uom DROP COLUMN temp_column')
|
cr.execute('ALTER TABLE product_uom DROP COLUMN temp_column')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@ -218,7 +218,7 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT conname FROM pg_constraint where conname = \'stock_production_lot_name_uniq\'')
|
cr.execute('SELECT conname FROM pg_constraint where conname = \'stock_production_lot_name_uniq\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_name_uniq')
|
cr.execute('ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_name_uniq')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# ------------------------------------ #
|
# ------------------------------------ #
|
||||||
|
@ -235,19 +235,19 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_xml_pkey\' and tablename = \'ir_act_report_xml\'')
|
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_xml_pkey\' and tablename = \'ir_act_report_xml\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_act_report_xml ADD PRIMARY KEY (id)')
|
cr.execute('ALTER TABLE ir_act_report_xml ADD PRIMARY KEY (id)')
|
||||||
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_custom_pkey\' and tablename = \'ir_act_report_custom\'')
|
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_custom_pkey\' and tablename = \'ir_act_report_custom\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_act_report_custom ADD PRIMARY KEY (id)')
|
cr.execute('ALTER TABLE ir_act_report_custom ADD PRIMARY KEY (id)')
|
||||||
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_group_pkey\' and tablename = \'ir_act_group\'')
|
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_group_pkey\' and tablename = \'ir_act_group\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_act_group ADD PRIMARY KEY (id)')
|
cr.execute('ALTER TABLE ir_act_group ADD PRIMARY KEY (id)')
|
||||||
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_execute_pkey\' and tablename = \'ir_act_execute\'')
|
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_execute_pkey\' and tablename = \'ir_act_execute\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_act_execute ADD PRIMARY KEY (id)')
|
cr.execute('ALTER TABLE ir_act_execute ADD PRIMARY KEY (id)')
|
||||||
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_wizard_pkey\' and tablename = \'ir_act_wizard\'')
|
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_wizard_pkey\' and tablename = \'ir_act_wizard\'')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE ir_act_wizard ADD PRIMARY KEY (id)')
|
cr.execute('ALTER TABLE ir_act_wizard ADD PRIMARY KEY (id)')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
cr.close
|
cr.close
|
||||||
|
|
|
@ -54,15 +54,15 @@ options.db_name = 'terp' # default value
|
||||||
parser.parse_args(values=options)
|
parser.parse_args(values=options)
|
||||||
|
|
||||||
if hasattr(options, 'config'):
|
if hasattr(options, 'config'):
|
||||||
configparser = ConfigParser.ConfigParser()
|
configparser = ConfigParser.ConfigParser()
|
||||||
configparser.read([options.config])
|
configparser.read([options.config])
|
||||||
for name, value in configparser.items('options'):
|
for name, value in configparser.items('options'):
|
||||||
if not (hasattr(options, name) and getattr(options, name)):
|
if not (hasattr(options, name) and getattr(options, name)):
|
||||||
if value in ('true', 'True'):
|
if value in ('true', 'True'):
|
||||||
value = True
|
value = True
|
||||||
if value in ('false', 'False'):
|
if value in ('false', 'False'):
|
||||||
value = False
|
value = False
|
||||||
setattr(options, name, value)
|
setattr(options, name, value)
|
||||||
|
|
||||||
raise Exception('This script is provided as an example, you must custom it before')
|
raise Exception('This script is provided as an example, you must custom it before')
|
||||||
|
|
||||||
|
@ -84,18 +84,18 @@ cr.execute('SELECT code from res_country where code is not null group by code')
|
||||||
res = cr.fetchall()
|
res = cr.fetchall()
|
||||||
|
|
||||||
for c in res:
|
for c in res:
|
||||||
cr.execute('SELECT max(id) from res_country where code = %s group by code', (c[0],))
|
cr.execute('SELECT max(id) from res_country where code = %s group by code', (c[0],))
|
||||||
res2 = cr.fetchone()
|
res2 = cr.fetchone()
|
||||||
cr.execute('SELECT id from res_country where code = %s', (c[0],))
|
cr.execute('SELECT id from res_country where code = %s', (c[0],))
|
||||||
ids = ','.join(map(lambda x: str(x[0]), cr.fetchall()))
|
ids = ','.join(map(lambda x: str(x[0]), cr.fetchall()))
|
||||||
cr.execute('UPDATE res_partner_address set country_id = %d where country_id in ('+ids+')', (res2[0],))
|
cr.execute('UPDATE res_partner_address set country_id = %d where country_id in ('+ids+')', (res2[0],))
|
||||||
cr.execute('DELETE FROM res_country WHERE code = %s and id <> %d', (c[0], res2[0],))
|
cr.execute('DELETE FROM res_country WHERE code = %s and id <> %d', (c[0], res2[0],))
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_planning_stat')
|
cr.execute('DROP VIEW report_account_analytic_planning_stat')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@ -104,59 +104,59 @@ res = cr.fetchall()
|
||||||
|
|
||||||
|
|
||||||
for p in res:
|
for p in res:
|
||||||
cr.execute('SELECT max(id) FROM res_partner WHERE name = %s GROUP BY name', (p[0],))
|
cr.execute('SELECT max(id) FROM res_partner WHERE name = %s GROUP BY name', (p[0],))
|
||||||
res2 = cr.fetchone()
|
res2 = cr.fetchone()
|
||||||
cr.execute('UPDATE res_partner set active = False WHERE name = %s and id <> %d', (p[0], res2[0],))
|
cr.execute('UPDATE res_partner set active = False WHERE name = %s and id <> %d', (p[0], res2[0],))
|
||||||
cr.execute('SELECT id FROM res_partner WHERE name = %s AND id <> %d', (p[0], res2[0],))
|
cr.execute('SELECT id FROM res_partner WHERE name = %s AND id <> %d', (p[0], res2[0],))
|
||||||
res3 = cr.fetchall()
|
res3 = cr.fetchall()
|
||||||
i = 0
|
i = 0
|
||||||
for id in res3:
|
for id in res3:
|
||||||
name = p[0]+' old'
|
name = p[0]+' old'
|
||||||
if i:
|
if i:
|
||||||
name = name + ' ' + str(i)
|
name = name + ' ' + str(i)
|
||||||
cr.execute('UPDATE res_partner set name = %s WHERE id = %d', (name, id[0]))
|
cr.execute('UPDATE res_partner set name = %s WHERE id = %d', (name, id[0]))
|
||||||
i += 1
|
i += 1
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
|
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_invoice\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_invoice\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_timesheet_invoice')
|
cr.execute('drop VIEW report_timesheet_invoice')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_purchase_order_category')
|
cr.execute('drop VIEW report_purchase_order_category')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_purchase_order_product')
|
cr.execute('drop VIEW report_purchase_order_product')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_sale_order_category')
|
cr.execute('drop VIEW report_sale_order_category')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_sale_order_product')
|
cr.execute('drop VIEW report_sale_order_product')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_user\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_user\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_timesheet_user')
|
cr.execute('drop VIEW report_timesheet_user')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_task_user_pipeline_open\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_task_user_pipeline_open\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW report_task_user_pipeline_open')
|
cr.execute('drop VIEW report_task_user_pipeline_open')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_day\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_day\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW hr_timesheet_sheet_sheet_day')
|
cr.execute('drop VIEW hr_timesheet_sheet_sheet_day')
|
||||||
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_account\'')
|
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_account\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW hr_timesheet_sheet_sheet_account')
|
cr.execute('drop VIEW hr_timesheet_sheet_sheet_account')
|
||||||
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_sale_stats\'')
|
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_sale_stats\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW sale_journal_sale_stats')
|
cr.execute('drop VIEW sale_journal_sale_stats')
|
||||||
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_picking_stats\'')
|
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_picking_stats\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW sale_journal_picking_stats')
|
cr.execute('drop VIEW sale_journal_picking_stats')
|
||||||
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_invoice_type_stats\'')
|
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_invoice_type_stats\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('drop VIEW sale_journal_invoice_type_stats')
|
cr.execute('drop VIEW sale_journal_invoice_type_stats')
|
||||||
|
|
||||||
cr.execute('ALTER TABLE product_template ALTER list_price TYPE numeric(16,2)')
|
cr.execute('ALTER TABLE product_template ALTER list_price TYPE numeric(16,2)')
|
||||||
cr.execute('ALTER TABLE product_template ALTER standard_price TYPE numeric(16,2)')
|
cr.execute('ALTER TABLE product_template ALTER standard_price TYPE numeric(16,2)')
|
||||||
|
@ -171,13 +171,13 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document_fields\'')
|
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document_fields\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP TABLE subscription_document_fields')
|
cr.execute('DROP TABLE subscription_document_fields')
|
||||||
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document\'')
|
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP TABLE subscription_document')
|
cr.execute('DROP TABLE subscription_document')
|
||||||
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_subscription_history\'')
|
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_subscription_history\'')
|
||||||
if cr.fetchall():
|
if cr.fetchall():
|
||||||
cr.execute('DROP TABLE subscription_subscription_history')
|
cr.execute('DROP TABLE subscription_subscription_history')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
# -------------------- #
|
# -------------------- #
|
||||||
|
@ -186,9 +186,9 @@ cr.commit()
|
||||||
|
|
||||||
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_currency_rate\' AND a.attname = \'rate_old\' AND c.oid = a.attrelid')
|
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_currency_rate\' AND a.attname = \'rate_old\' AND c.oid = a.attrelid')
|
||||||
if not cr.fetchall():
|
if not cr.fetchall():
|
||||||
cr.execute('ALTER TABLE res_currency_rate ADD rate_old NUMERIC(12,6)')
|
cr.execute('ALTER TABLE res_currency_rate ADD rate_old NUMERIC(12,6)')
|
||||||
cr.execute('UPDATE res_currency_rate SET rate_old = rate')
|
cr.execute('UPDATE res_currency_rate SET rate_old = rate')
|
||||||
cr.execute('UPDATE res_currency_rate SET rate = (1 / rate_old)')
|
cr.execute('UPDATE res_currency_rate SET rate = (1 / rate_old)')
|
||||||
cr.commit()
|
cr.commit()
|
||||||
|
|
||||||
cr.close
|
cr.close
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue