meta/classes: Various python whitespace fixes

It was pointed out we have a number of weird indentations in the python functions.
This patch cleans up 3-space, 7-space and other weird indentations for the core bbclass files.

It also fixes some weird (odd) shell function indentation that my searches picked up.

(From OE-Core rev: 8385d6d74624000d68814f4e3266d47bc8885942)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2012-08-20 16:52:21 +00:00
parent 7c40daab58
commit 48619958d5
16 changed files with 197 additions and 197 deletions

View File

@ -205,8 +205,8 @@ def preferred_ml_updates(d):
continue
virt = ""
if pkg.startswith("virtual/"):
pkg = pkg.replace("virtual/", "")
virt = "virtual/"
pkg = pkg.replace("virtual/", "")
virt = "virtual/"
for p in prefixes:
newname = "PREFERRED_PROVIDER_" + virt + p + "-" + pkg
if pkg != "kernel":
@ -353,13 +353,13 @@ python () {
appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
newappends = []
for a in appends:
if a.endswith("-native") or a.endswith("-cross"):
newappends.append(a)
elif a.startswith("virtual/"):
subs = a.split("/", 1)[1]
newappends.append("virtual/" + prefix + subs + extension)
else:
newappends.append(prefix + a + extension)
if a.endswith("-native") or a.endswith("-cross"):
newappends.append(a)
elif a.startswith("virtual/"):
subs = a.split("/", 1)[1]
newappends.append("virtual/" + prefix + subs + extension)
else:
newappends.append(prefix + a + extension)
return newappends
def appendVar(varname, appends):

View File

@ -105,8 +105,8 @@ python bugzilla_eventhandler() {
data = e.data
name = bb.event.getName(event)
if name == "MsgNote":
# avoid recursion
return
# avoid recursion
return
if name == "TaskFailed":
xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)

View File

@ -65,7 +65,7 @@ def set_device(e):
try:
for line in open("/proc/diskstats", "r"):
if majordev == int(line.split()[0]) and minordev == int(line.split()[1]):
rdev=line.split()[2]
rdev=line.split()[2]
except:
pass
file = open(e.data.getVar('DEVFILE', True), "w")
@ -100,10 +100,10 @@ def get_diskdata(var, dev, data):
olddiskdata = data.getVar(var, False)
diskdata = {}
if olddiskdata is None:
return
return
newdiskdata = get_diskstats(dev)
for key in olddiskdata.iterkeys():
diskdata["Start"+key] = str(int(olddiskdata[key]))
diskdata["Start"+key] = str(int(olddiskdata[key]))
diskdata["End"+key] = str(int(newdiskdata[key]))
return diskdata

View File

@ -54,12 +54,12 @@ python build_grub_cfg() {
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
labels = d.getVar('LABELS', True)
if not labels:
bb.debug(1, "LABELS not defined, nothing to do")
return
if labels == []:
bb.debug(1, "No labels, nothing to do")
return
@ -109,7 +109,7 @@ python build_grub_cfg() {
if append:
cfgfile.write('%s' % (append))
cfgfile.write('\n')
if initrd:
cfgfile.write('initrd /initrd')
cfgfile.write('\n}\n')

View File

@ -51,13 +51,13 @@ python() {
# and cross packages which aren't swabber-native or one of its dependencies
# I have ignored them for now...
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
deps.append('strace-native:do_populate_sysroot')
d.setVarFlag('do_setscene', 'depends', " ".join(deps))
logdir = d.expand("${TRACE_LOGDIR}")
bb.utils.mkdirhier(logdir)
deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
deps.append('strace-native:do_populate_sysroot')
d.setVarFlag('do_setscene', 'depends', " ".join(deps))
logdir = d.expand("${TRACE_LOGDIR}")
bb.utils.mkdirhier(logdir)
else:
d.setVar('STRACEFUNC', '')
d.setVar('STRACEFUNC', '')
}
STRACEPID = "${@os.getpid()}"
@ -76,23 +76,23 @@ imageswab_attachstrace () {
do_generate_swabber_report () {
update_distro ${HOST_DATA}
update_distro ${HOST_DATA}
# Swabber can't create the directory for us
mkdir -p ${SWABBER_REPORT}
# Swabber can't create the directory for us
mkdir -p ${SWABBER_REPORT}
REPORTSTAMP=${SWAB_ORIG_TASK}-`date +%2m%2d%2H%2M%Y`
REPORTSTAMP=${SWAB_ORIG_TASK}-`date +%2m%2d%2H%2M%Y`
if [ `which ccache` ] ; then
CCACHE_DIR=`( ccache -s | grep "cache directory" | grep -o '[^ ]*$' 2> /dev/null )`
fi
if [ `which ccache` ] ; then
CCACHE_DIR=`( ccache -s | grep "cache directory" | grep -o '[^ ]*$' 2> /dev/null )`
fi
if [ "$(ls -A ${HOST_DATA})" ]; then
echo "Generating swabber report"
swabber -d ${HOST_DATA} -l ${SWABBER_LOGS} -o ${SWABBER_REPORT}/report-${REPORTSTAMP}.txt -r ${SWABBER_REPORT}/extra_report-${REPORTSTAMP}.txt -c all -p ${TOPDIR} -f ${OEROOT}/meta/conf/swabber ${TOPDIR} ${OEROOT} ${CCACHE_DIR}
else
echo "No host data, cannot generate swabber report."
fi
if [ "$(ls -A ${HOST_DATA})" ]; then
echo "Generating swabber report"
swabber -d ${HOST_DATA} -l ${SWABBER_LOGS} -o ${SWABBER_REPORT}/report-${REPORTSTAMP}.txt -r ${SWABBER_REPORT}/extra_report-${REPORTSTAMP}.txt -c all -p ${TOPDIR} -f ${OEROOT}/meta/conf/swabber ${TOPDIR} ${OEROOT} ${CCACHE_DIR}
else
echo "No host data, cannot generate swabber report."
fi
}
addtask generate_swabber_report after do_${SWAB_ORIG_TASK}
do_generate_swabber_report[depends] = "swabber-native:do_populate_sysroot"

View File

@ -109,7 +109,7 @@ def package_qa_get_machine_dict():
"linux-gnux32" : {
"x86_64": (62, 0, 0, True, 32),
},
}
}
# Currently not being used by default "desktop"
@ -719,19 +719,19 @@ Rerun configure task after fixing this. The path was '%s'""" % root)
cnf = d.getVar('EXTRA_OECONF', True) or ""
if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
ml = d.getVar("MLPREFIX", True) or ""
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
gt = "gettext-native"
elif bb.data.inherits_class('cross-canadian', d):
gt = "gettext-nativesdk"
else:
gt = "virtual/" + ml + "gettext"
deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
if gt not in deps:
for config in configs:
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
if subprocess.call(gnu, shell=True) == 0:
bb.fatal("""%s required but not in DEPENDS for file %s.
ml = d.getVar("MLPREFIX", True) or ""
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
gt = "gettext-native"
elif bb.data.inherits_class('cross-canadian', d):
gt = "gettext-nativesdk"
else:
gt = "virtual/" + ml + "gettext"
deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
if gt not in deps:
for config in configs:
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
if subprocess.call(gnu, shell=True) == 0:
bb.fatal("""%s required but not in DEPENDS for file %s.
Missing inherit gettext?""" % (gt, config))
if not package_qa_check_license(workdir, d):

View File

@ -230,42 +230,42 @@ def splitfile2(debugsrcdir, d):
sourcefile = d.expand("${WORKDIR}/debugsources.list")
if debugsrcdir and os.path.isfile(sourcefile):
dvar = d.getVar('PKGD', True)
pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
strip = d.getVar("STRIP", True)
objcopy = d.getVar("OBJCOPY", True)
debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
workdir = d.getVar("WORKDIR", True)
workparentdir = os.path.dirname(workdir)
workbasedir = os.path.basename(workdir)
dvar = d.getVar('PKGD', True)
pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
strip = d.getVar("STRIP", True)
objcopy = d.getVar("OBJCOPY", True)
debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
workdir = d.getVar("WORKDIR", True)
workparentdir = os.path.dirname(workdir)
workbasedir = os.path.basename(workdir)
nosuchdir = []
basepath = dvar
for p in debugsrcdir.split("/"):
basepath = basepath + "/" + p
if not os.path.exists(basepath):
nosuchdir.append(basepath)
bb.mkdirhier(basepath)
nosuchdir = []
basepath = dvar
for p in debugsrcdir.split("/"):
basepath = basepath + "/" + p
if not os.path.exists(basepath):
nosuchdir.append(basepath)
bb.mkdirhier(basepath)
processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
# We need to ignore files that are not actually ours
# we do this by only paying attention to items from this package
processdebugsrc += "fgrep -z '%s' | "
processdebugsrc += "(cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)"
processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
# We need to ignore files that are not actually ours
# we do this by only paying attention to items from this package
processdebugsrc += "fgrep -z '%s' | "
processdebugsrc += "(cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)"
subprocess.call(processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir), shell=True)
subprocess.call(processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir), shell=True)
# The copy by cpio may have resulted in some empty directories! Remove these
for root, dirs, files in os.walk("%s%s" % (dvar, debugsrcdir)):
for d in dirs:
dir = os.path.join(root, d)
#bb.note("rmdir -p %s" % dir)
subprocess.call("rmdir -p %s 2>/dev/null" % dir, shell=True)
# The copy by cpio may have resulted in some empty directories! Remove these
for root, dirs, files in os.walk("%s%s" % (dvar, debugsrcdir)):
for d in dirs:
dir = os.path.join(root, d)
#bb.note("rmdir -p %s" % dir)
subprocess.call("rmdir -p %s 2>/dev/null" % dir, shell=True)
# Also remove debugsrcdir if its empty
for p in nosuchdir[::-1]:
if os.path.exists(p) and not os.listdir(p):
os.rmdir(p)
# Also remove debugsrcdir if its empty
for p in nosuchdir[::-1]:
if os.path.exists(p) and not os.listdir(p):
os.rmdir(p)
def runstrip(file, elftype, d):
# Function to strip a single file, called from split_and_strip_files below
@ -735,7 +735,7 @@ python split_and_strip_files () {
file_list = {}
file_links = {}
if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
(d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
(d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
for root, dirs, files in os.walk(dvar):
for f in files:
file = os.path.join(root, f)

View File

@ -172,23 +172,23 @@ package_install_internal_ipk() {
}
ipk_log_check() {
target="$1"
lf_path="$2"
target="$1"
lf_path="$2"
lf_txt="`cat $lf_path`"
for keyword_die in "exit 1" "Collected errors" ERR Fail
do
if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
then
echo "log_check: There were error messages in the logfile"
echo -e "log_check: Matched keyword: [$keyword_die]\n"
echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die"
echo ""
do_exit=1
fi
done
test "$do_exit" = 1 && exit 1
true
lf_txt="`cat $lf_path`"
for keyword_die in "exit 1" "Collected errors" ERR Fail
do
if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
then
echo "log_check: There were error messages in the logfile"
echo -e "log_check: Matched keyword: [$keyword_die]\n"
echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die"
echo ""
do_exit=1
fi
done
test "$do_exit" = 1 && exit 1
true
}
#

View File

@ -101,23 +101,23 @@ package_generate_rpm_conf_common() {
}
rpm_log_check() {
target="$1"
lf_path="$2"
target="$1"
lf_path="$2"
lf_txt="`cat $lf_path`"
for keyword_die in "Cannot find package" "exit 1" ERR Fail
do
if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
then
echo "log_check: There were error messages in the logfile"
echo -e "log_check: Matched keyword: [$keyword_die]\n"
echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die"
echo ""
do_exit=1
fi
done
test "$do_exit" = 1 && exit 1
true
lf_txt="`cat $lf_path`"
for keyword_die in "Cannot find package" "exit 1" ERR Fail
do
if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
then
echo "log_check: There were error messages in the logfile"
echo -e "log_check: Matched keyword: [$keyword_die]\n"
echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die"
echo ""
do_exit=1
fi
done
test "$do_exit" = 1 && exit 1
true
}

View File

@ -4,7 +4,7 @@ def __note(msg, d):
__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
def bad_runtime_vars(cfgdata, d):
if bb.data.inherits_class("native", d) or \
bb.data.inherits_class("cross", d):
bb.data.inherits_class("cross", d):
return
for var in d.getVar("__recipe_sanity_badruntimevars", True).split():
@ -42,7 +42,7 @@ def var_renames_overwrite(cfgdata, d):
def incorrect_nonempty_PACKAGES(cfgdata, d):
if bb.data.inherits_class("native", d) or \
bb.data.inherits_class("cross", d):
bb.data.inherits_class("cross", d):
if d.getVar("PACKAGES", True):
return True

View File

@ -280,10 +280,10 @@ def check_sanity_validmachine(sanity_data):
tunefound = True
if len(dups):
messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)
messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)
if tunefound == False:
messages = messages + "Error, the PACKAGE_ARCHS variable does not contain TUNE_PKGARCH (%s)." % tunepkg
messages = messages + "Error, the PACKAGE_ARCHS variable does not contain TUNE_PKGARCH (%s)." % tunepkg
return messages

View File

@ -174,7 +174,7 @@ def sstate_installpkg(ss, d):
sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz"
if not os.path.exists(sstatepkg):
pstaging_fetch(sstatefetch, sstatepkg, d)
pstaging_fetch(sstatefetch, sstatepkg, d)
if not os.path.isfile(sstatepkg):
bb.note("Staging package %s does not exist" % sstatepkg)
@ -259,10 +259,10 @@ def sstate_clean_manifest(manifest, d):
# so we ignore errors here.
try:
if entry.endswith("/"):
if os.path.islink(entry[:-1]):
os.remove(entry[:-1])
elif os.path.exists(entry) and len(os.listdir(entry)) == 0:
os.rmdir(entry[:-1])
if os.path.islink(entry[:-1]):
os.remove(entry[:-1])
elif os.path.exists(entry) and len(os.listdir(entry)) == 0:
os.rmdir(entry[:-1])
else:
oe.path.remove(entry)
except OSError:
@ -314,14 +314,14 @@ python sstate_cleanall() {
for manifest in (os.listdir(manifest_dir)):
if fnmatch.fnmatch(manifest, manifest_pattern):
name = manifest.replace(manifest_pattern[:-1], "")
namemap = d.getVar('SSTATETASKNAMES', True).split()
tasks = d.getVar('SSTATETASKS', True).split()
if name not in namemap:
continue
taskname = tasks[namemap.index(name)]
shared_state = sstate_state_fromvars(d, taskname[3:])
sstate_clean(shared_state, d)
name = manifest.replace(manifest_pattern[:-1], "")
namemap = d.getVar('SSTATETASKNAMES', True).split()
tasks = d.getVar('SSTATETASKS', True).split()
if name not in namemap:
continue
taskname = tasks[namemap.index(name)]
shared_state = sstate_state_fromvars(d, taskname[3:])
sstate_clean(shared_state, d)
}
def sstate_hardcode_path(d):

View File

@ -70,8 +70,8 @@ SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
# We clean out any existing sstate from the sysroot if we rerun configure
python sysroot_cleansstate () {
ss = sstate_state_fromvars(d, "populate_sysroot")
sstate_clean(ss, d)
ss = sstate_state_fromvars(d, "populate_sysroot")
sstate_clean(ss, d)
}
do_configure[prefuncs] += "sysroot_cleansstate"

View File

@ -2,20 +2,20 @@ def tinder_http_post(server, selector, content_type, body):
import httplib
# now post it
for i in range(0,5):
try:
h = httplib.HTTP(server)
h.putrequest('POST', selector)
h.putheader('content-type', content_type)
h.putheader('content-length', str(len(body)))
h.endheaders()
h.send(body)
errcode, errmsg, headers = h.getreply()
#print errcode, errmsg, headers
return (errcode,errmsg, headers, h.file)
except:
print "Error sending the report!"
# try again
pass
try:
h = httplib.HTTP(server)
h.putrequest('POST', selector)
h.putheader('content-type', content_type)
h.putheader('content-length', str(len(body)))
h.endheaders()
h.send(body)
errcode, errmsg, headers = h.getreply()
#print errcode, errmsg, headers
return (errcode,errmsg, headers, h.file)
except:
print "Error sending the report!"
# try again
pass
# return some garbage
return (-1, "unknown", "unknown", None)

View File

@ -150,22 +150,22 @@ def update_alternatives_after_parse(d):
UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY"
def gen_updatealternativesvardeps(d):
pkgs = (d.getVar("PACKAGES", True) or "").split()
vars = (d.getVar("UPDALTVARS", True) or "").split()
pkgs = (d.getVar("PACKAGES", True) or "").split()
vars = (d.getVar("UPDALTVARS", True) or "").split()
# First compute them for non_pkg versions
for v in vars:
for flag in (d.getVarFlags(v) or {}):
if flag == "doc" or flag == "vardeps" or flag == "vardepsexp":
continue
d.appendVar('%s_VARDEPS' % (v), ' %s:%s' % (flag, d.getVarFlag(v, flag, False)))
for p in pkgs:
for v in vars:
for flag in (d.getVarFlags("%s_%s" % (v,p)) or {}):
# First compute them for non_pkg versions
for v in vars:
for flag in (d.getVarFlags(v) or {}):
if flag == "doc" or flag == "vardeps" or flag == "vardepsexp":
continue
d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False)))
continue
d.appendVar('%s_VARDEPS' % (v), ' %s:%s' % (flag, d.getVarFlag(v, flag, False)))
for p in pkgs:
for v in vars:
for flag in (d.getVarFlags("%s_%s" % (v,p)) or {}):
if flag == "doc" or flag == "vardeps" or flag == "vardepsexp":
continue
d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False)))
python __anonymous() {
# deprecated stuff...
@ -176,18 +176,18 @@ python __anonymous() {
}
def gen_updatealternativesvars(d):
ret = []
pkgs = (d.getVar("PACKAGES", True) or "").split()
vars = (d.getVar("UPDALTVARS", True) or "").split()
ret = []
pkgs = (d.getVar("PACKAGES", True) or "").split()
vars = (d.getVar("UPDALTVARS", True) or "").split()
for v in vars:
ret.append(v + "_VARDEPS")
for v in vars:
ret.append(v + "_VARDEPS")
for p in pkgs:
for v in vars:
ret.append(v + "_" + p)
ret.append(v + "_VARDEPS_" + p)
return " ".join(ret)
for p in pkgs:
for v in vars:
ret.append(v + "_" + p)
ret.append(v + "_VARDEPS_" + p)
return " ".join(ret)
# First the deprecated items...
populate_packages[vardeps] += "ALTERNATIVE_LINKS ALTERNATIVE_NAME ALTERNATIVE_PATH"

View File

@ -246,49 +246,49 @@ oe_machinstall() {
}
create_cmdline_wrapper () {
# Create a wrapper script
#
# These are useful to work around relocation issues, by setting environment
# variables which point to paths in the filesystem.
#
# Usage: create_wrapper FILENAME [[VAR=VALUE]..]
# Create a wrapper script
#
# These are useful to work around relocation issues, by setting environment
# variables which point to paths in the filesystem.
#
# Usage: create_wrapper FILENAME [[VAR=VALUE]..]
cmd=$1
shift
cmd=$1
shift
echo "Generating wrapper script for $cmd"
echo "Generating wrapper script for $cmd"
mv $cmd $cmd.real
cmdname=`basename $cmd`.real
cat <<END >$cmd
mv $cmd $cmd.real
cmdname=`basename $cmd`.real
cat <<END >$cmd
#!/bin/sh
realpath=\`readlink -fn \$0\`
exec \`dirname \$realpath\`/$cmdname $@ "\$@"
END
chmod +x $cmd
chmod +x $cmd
}
create_wrapper () {
# Create a wrapper script
#
# These are useful to work around relocation issues, by setting environment
# variables which point to paths in the filesystem.
#
# Usage: create_wrapper FILENAME [[VAR=VALUE]..]
# Create a wrapper script
#
# These are useful to work around relocation issues, by setting environment
# variables which point to paths in the filesystem.
#
# Usage: create_wrapper FILENAME [[VAR=VALUE]..]
cmd=$1
shift
cmd=$1
shift
echo "Generating wrapper script for $cmd"
echo "Generating wrapper script for $cmd"
mv $cmd $cmd.real
cmdname=`basename $cmd`.real
cat <<END >$cmd
mv $cmd $cmd.real
cmdname=`basename $cmd`.real
cat <<END >$cmd
#!/bin/sh
realpath=\`readlink -fn \$0\`
exec env $@ \`dirname \$realpath\`/$cmdname "\$@"
END
chmod +x $cmd
chmod +x $cmd
}
def check_app_exists(app, d):