Document: cleanup code for lint checking

After pyflakes output, cleanup some unused imports, old code, fix
names for undefined vars.

bzr revid: p_christ@hol.gr-20100810122957-v01nqfq9omzdcs6e
This commit is contained in:
P. Christeas 2010-08-10 15:29:57 +03:00
parent ceacb8c0ce
commit f2f6ec0423
7 changed files with 26 additions and 93 deletions

View File

@@ -18,16 +18,13 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import netsvc
import os
# import os
import nodes
import StringIO
# import StringIO
class document_directory_content_type(osv.osv):
_name = 'document.directory.content.type'
@@ -83,7 +80,7 @@ class document_directory_content(osv.osv):
tname = ''
if content.include_name:
content_name = node.displayname or ''
obj = node.context._dirobj.pool.get(model)
# obj = node.context._dirobj.pool.get(model)
if content_name:
tname = (content.prefix or '') + content_name + (content.suffix or '') + (content.extension or '')
else:

View File

@@ -20,13 +20,7 @@
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import os
class ir_action_report_xml(osv.osv):
_name="ir.actions.report.xml"

View File

@@ -22,16 +22,10 @@
import base64
from osv import osv, fields
import urlparse
import os
import pooler
import netsvc
from osv.orm import except_orm
#import StringIO
from psycopg2 import Binary
# from psycopg2 import Binary
#from tools import config
import tools
from tools.translate import _

View File

@@ -19,11 +19,8 @@
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import os
import nodes
@@ -141,30 +138,6 @@ class document_directory(osv.osv):
_parent(dir_id, path)
return path
def ol_get_resource_path(self, cr, uid, dir_id, res_model, res_id):
# this method will be used in process module
# to be need test and Improvement if resource dir has parent resource (link resource)
path=[]
def _parent(dir_id,path):
parent=self.browse(cr, uid, dir_id)
if parent.parent_id and not parent.ressource_parent_type_id:
_parent(parent.parent_id.id,path)
path.append(parent.name)
else:
path.append(parent.name)
return path
directory=self.browse(cr,uid,dir_id)
model_ids=self.pool.get('ir.model').search(cr, uid, [('model','=',res_model)])
if directory:
_parent(dir_id,path)
path.append(self.pool.get(directory.ressource_type_id.model).browse(cr, uid, res_id).name)
#user=self.pool.get('res.users').browse(cr,uid,uid)
#return "ftp://%s:%s@localhost:%s/%s/%s"%(user.login,user.password,config.get('ftp_server_port',8021),cr.dbname,'/'.join(path))
# No way we will return the password!
return "ftp://user:pass@host:port/test/this"
return False
def _check_recursion(self, cr, uid, ids):
level = 100
while len(ids):
@@ -178,8 +151,9 @@ class document_directory(osv.osv):
_constraints = [
(_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
]
def __init__(self, *args, **kwargs):
res = super(document_directory, self).__init__(*args, **kwargs)
super(document_directory, self).__init__(*args, **kwargs)
#self._cache = {}
def onchange_content_id(self, cr, uid, ids, ressource_type_id):
@@ -223,31 +197,8 @@ class document_directory(osv.osv):
""" try to locate the node in uri,
Return a tuple (node_dir, remaining_path)
"""
return (nodes.node_database(context=ncontext), uri)
return (nodes.node_database(context=ncontext), uri)
def old_code():
if not uri:
return node_database(cr, uid, context=context)
turi = tuple(uri)
node = node_class(cr, uid, '/', False, context=context, type='database')
for path in uri[:]:
if path:
node = node.child(path)
if not node:
return False
oo = node.object and (node.object._name, node.object.id) or False
oo2 = node.object2 and (node.object2._name, node.object2.id) or False
return node
def ol_get_childs(self, cr, uid, uri, context={}):
node = self.get_object(cr, uid, uri, context)
if uri:
children = node.children()
else:
children= [node]
result = map(lambda node: node.path_get(), children)
return result
def copy(self, cr, uid, id, default=None, context=None):
if not default:
default ={}

View File

@@ -37,7 +37,6 @@ from osv.orm import except_orm
import random
import string
import pooler
import netsvc
import nodes
from content_index import cntIndex
@@ -189,7 +188,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
par = self._get_parent()
uid = par.context.uid
# uid = par.context.uid
cr = pooler.get_db(par.context.dbname).cursor()
try:
if self.mode in ('w', 'w+', 'r+'):
@@ -227,7 +226,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
" WHERE id = %s",
(out, len(data), par.file_id))
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
@@ -262,7 +261,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
par = self._get_parent()
uid = par.context.uid
# uid = par.context.uid
cr = pooler.get_db(par.context.dbname).cursor()
try:
if self.mode in ('w', 'w+', 'r+'):
@@ -288,18 +287,18 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
cr.execute('UPDATE ir_attachment SET db_datas = %s::bytea, file_size=%s, ' \
'index_content = %s, file_type = %s ' \
'WHERE id = %s',
(base64.encodestring(out), len(out), icont_u, mime, par.file_id))
(base64.encodestring(data), len(data), icont_u, mime, par.file_id))
elif self.mode == 'a':
out = self.getvalue()
data = self.getvalue()
# Yes, we're obviously using the wrong representation for storing our
# data as base64-in-bytea
cr.execute("UPDATE ir_attachment " \
"SET db_datas = encode( (COALESCE(decode(encode(db_datas,'escape'),'base64'),'') || decode(%s, 'base64')),'base64')::bytea , " \
" file_size = COALESCE(file_size, 0) + %s " \
" WHERE id = %s",
(base64.encodestring(out), len(out), par.file_id))
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
@@ -500,7 +499,7 @@ class document_storage(osv.osv):
# try to fix their directory.
if ira.file_size:
self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
sfname = ira.name
# sfname = ira.name
fpath = os.path.join(boo.path,ira.store_fname or ira.name)
if os.path.exists(fpath):
return file(fpath,'rb').read()
@@ -695,7 +694,7 @@ class document_storage(osv.osv):
store_fname = os.path.join(*store_path)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage" % sbro.type)
def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
""" A preparation for a file move.
@@ -734,7 +733,7 @@ class document_storage(osv.osv):
fname = fil_bo.store_fname
if not fname:
return ValueError("Tried to rename a non-stored file")
path = storage_bo.path
path = sbro.path
oldpath = os.path.join(path, fname)
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?', '..'):
@@ -751,7 +750,7 @@ class document_storage(osv.osv):
os.rename(oldpath, newpath)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': new_name }
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage" % sbro.type)
document_storage()

View File

@@ -19,17 +19,14 @@
#
##############################################################################
# import base64
# import StringIO
from osv import osv, fields
from osv.orm import except_orm
# import urlparse
import pooler
from tools.safe_eval import safe_eval
import errno
import os
# import os
import time
import logging
from StringIO import StringIO
@@ -44,6 +41,8 @@ from StringIO import StringIO
# root: if we are at the first directory of a ressource
#
logger = logging.getLogger('doc2.nodes')
def _str2time(cre):
""" Convert a string with time representation (from db) into time (float)
@@ -902,9 +901,8 @@ class node_res_obj(node_class):
def get_dav_eprop(self, cr, ns, prop):
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
print "Who asked for %s:%s?" % (ns, prop)
logger.warning("Who asked for %s:%s?" % (ns, prop))
return None
res = {}
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
@@ -1374,11 +1372,11 @@ class nodefd_content(StringIO, node_descriptor):
if self.mode in ('w', 'w+', 'r+'):
data = self.getvalue()
cntobj = par.context._dirobj.pool.get('document.directory.content')
cntobj.process_write(cr, uid, parent, data, ctx)
cntobj.process_write(cr, uid, par, data, par.context.context)
elif self.mode == 'a':
raise NotImplementedError
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:

View File

@@ -35,7 +35,7 @@ for fname in args:
for line in res[:5]:
print line
except Exception,e:
import traceback,sys
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))