meta: Convert getVar/getVarFlag(xxx, 1) -> (xxx, True)

Using "1" with getVar is bad coding style and "True" is preferred.
This patch is a sed over the meta directory of the form:

sed \
 -e 's:\(\.getVar([^,()]*, \)1 *):\1True):g' \
 -e 's:\(\.getVarFlag([^,()]*, [^,()]*, \)1 *):\1True):g' \
 -i `grep -ril getVar *`

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2012-03-03 10:59:25 +00:00
parent d01dadfb87
commit 06f2f8ce0a
83 changed files with 290 additions and 290 deletions

View File

@@ -1,8 +1,8 @@
def autotools_dep_prepend(d): def autotools_dep_prepend(d):
if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1): if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
return '' return ''
pn = d.getVar('PN', 1) pn = d.getVar('PN', True)
deps = '' deps = ''
if pn in ['autoconf-native', 'automake-native', 'help2man-native']: if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d):
deps += 'libtool-native ' deps += 'libtool-native '
if not bb.data.inherits_class('native', d) \ if not bb.data.inherits_class('native', d) \
and not bb.data.inherits_class('cross', d) \ and not bb.data.inherits_class('cross', d) \
and not d.getVar('INHIBIT_DEFAULT_DEPS', 1): and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
deps += 'libtool-cross ' deps += 'libtool-cross '
return deps + 'gnu-config-native ' return deps + 'gnu-config-native '

View File

@@ -60,8 +60,8 @@ def base_dep_prepend(d):
# we need that built is the responsibility of the patch function / class, not # we need that built is the responsibility of the patch function / class, not
# the application. # the application.
if not d.getVar('INHIBIT_DEFAULT_DEPS'): if not d.getVar('INHIBIT_DEFAULT_DEPS'):
if (d.getVar('HOST_SYS', 1) != if (d.getVar('HOST_SYS', True) !=
d.getVar('BUILD_SYS', 1)): d.getVar('BUILD_SYS', True)):
deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
return deps return deps
@@ -203,7 +203,7 @@ def preferred_ml_updates(d):
def get_layers_branch_rev(d): def get_layers_branch_rev(d):
layers = (d.getVar("BBLAYERS", 1) or "").split() layers = (d.getVar("BBLAYERS", True) or "").split()
layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_branch(i, None).strip(), \
base_get_metadata_git_revision(i, None)) \ base_get_metadata_git_revision(i, None)) \
@@ -233,7 +233,7 @@ python base_eventhandler() {
if name.startswith("BuildStarted"): if name.startswith("BuildStarted"):
e.data.setVar( 'BB_VERSION', bb.__version__) e.data.setVar( 'BB_VERSION', bb.__version__)
statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU'] statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU']
statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars] statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, True) or '') for i in statusvars]
statuslines += get_layers_branch_rev(e.data) statuslines += get_layers_branch_rev(e.data)
statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines) statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
@@ -242,7 +242,7 @@ python base_eventhandler() {
needed_vars = [ "TARGET_ARCH", "TARGET_OS" ] needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
pesteruser = [] pesteruser = []
for v in needed_vars: for v in needed_vars:
val = e.data.getVar(v, 1) val = e.data.getVar(v, True)
if not val or val == 'INVALID': if not val or val == 'INVALID':
pesteruser.append(v) pesteruser.append(v)
if pesteruser: if pesteruser:
@@ -344,7 +344,7 @@ python () {
pr = pr_prefix.group(0) + str(nval) + pr[prval.end():] pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
d.setVar('PR', pr) d.setVar('PR', pr)
pn = d.getVar('PN', 1) pn = d.getVar('PN', True)
license = d.getVar('LICENSE', True) license = d.getVar('LICENSE', True)
if license == "INVALID": if license == "INVALID":
bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
@@ -370,36 +370,36 @@ python () {
d.setVarFlag('do_package_setscene', 'fakeroot', 1) d.setVarFlag('do_package_setscene', 'fakeroot', 1)
source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0) source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
if not source_mirror_fetch: if not source_mirror_fetch:
need_host = d.getVar('COMPATIBLE_HOST', 1) need_host = d.getVar('COMPATIBLE_HOST', True)
if need_host: if need_host:
import re import re
this_host = d.getVar('HOST_SYS', 1) this_host = d.getVar('HOST_SYS', True)
if not re.match(need_host, this_host): if not re.match(need_host, this_host):
raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
need_machine = d.getVar('COMPATIBLE_MACHINE', 1) need_machine = d.getVar('COMPATIBLE_MACHINE', True)
if need_machine: if need_machine:
import re import re
this_machine = d.getVar('MACHINE', 1) this_machine = d.getVar('MACHINE', True)
if this_machine and not re.match(need_machine, this_machine): if this_machine and not re.match(need_machine, this_machine):
this_soc_family = d.getVar('SOC_FAMILY', 1) this_soc_family = d.getVar('SOC_FAMILY', True)
if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family: if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % this_machine) raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % this_machine)
dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1) dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', True)
if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"): if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"):
hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split() hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, True) or "").split()
lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split() lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, True) or "").split()
dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split() dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, True) or "").split()
if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist: if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist:
this_license = d.getVar('LICENSE', 1) this_license = d.getVar('LICENSE', True)
if incompatible_license(d,dont_want_license): if incompatible_license(d,dont_want_license):
bb.note("SKIPPING %s because it's %s" % (pn, this_license)) bb.note("SKIPPING %s because it's %s" % (pn, this_license))
raise bb.parse.SkipPackage("incompatible with license %s" % this_license) raise bb.parse.SkipPackage("incompatible with license %s" % this_license)
srcuri = d.getVar('SRC_URI', 1) srcuri = d.getVar('SRC_URI', True)
# Svn packages should DEPEND on subversion-native # Svn packages should DEPEND on subversion-native
if "svn://" in srcuri: if "svn://" in srcuri:
d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot') d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
@@ -426,8 +426,8 @@ python () {
d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot') d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')
# 'multimachine' handling # 'multimachine' handling
mach_arch = d.getVar('MACHINE_ARCH', 1) mach_arch = d.getVar('MACHINE_ARCH', True)
pkg_arch = d.getVar('PACKAGE_ARCH', 1) pkg_arch = d.getVar('PACKAGE_ARCH', True)
if (pkg_arch == mach_arch): if (pkg_arch == mach_arch):
# Already machine specific - nothing further to do # Already machine specific - nothing further to do
@@ -458,9 +458,9 @@ python () {
d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
return return
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
for pkg in packages: for pkg in packages:
pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1) pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
# We could look for != PACKAGE_ARCH here but how to choose # We could look for != PACKAGE_ARCH here but how to choose
# if multiple differences are present? # if multiple differences are present?

View File

@@ -69,8 +69,8 @@ python do_prepare_copyleft_sources () {
else: else:
bb.debug(1, 'copyleft: %s is included' % p) bb.debug(1, 'copyleft: %s is included' % p)
sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', 1) sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True)
src_uri = d.getVar('SRC_URI', 1).split() src_uri = d.getVar('SRC_URI', True).split()
fetch = bb.fetch2.Fetch(src_uri, d) fetch = bb.fetch2.Fetch(src_uri, d)
ud = fetch.ud ud = fetch.ud

View File

@@ -28,7 +28,7 @@ def get_perl_version(d):
# Determine where the library directories are # Determine where the library directories are
def perl_get_libdirs(d): def perl_get_libdirs(d):
libdir = d.getVar('libdir', 1) libdir = d.getVar('libdir', True)
if is_target(d) == "no": if is_target(d) == "no":
libdir += '/perl-native' libdir += '/perl-native'
libdir += '/perl' libdir += '/perl'

View File

@@ -10,9 +10,9 @@ inherit cpan-base
# libmodule-build-perl) # libmodule-build-perl)
# #
def cpan_build_dep_prepend(d): def cpan_build_dep_prepend(d):
if d.getVar('CPAN_BUILD_DEPS', 1): if d.getVar('CPAN_BUILD_DEPS', True):
return '' return ''
pn = d.getVar('PN', 1) pn = d.getVar('PN', True)
if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']: if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']:
return '' return ''
return 'libmodule-build-perl-native ' return 'libmodule-build-perl-native '

View File

@@ -22,8 +22,8 @@ python () {
python debian_package_name_hook () { python debian_package_name_hook () {
import glob, copy, stat, errno, re import glob, copy, stat, errno, re
pkgdest = d.getVar('PKGDEST', 1) pkgdest = d.getVar('PKGDEST', True)
packages = d.getVar('PACKAGES', 1) packages = d.getVar('PACKAGES', True)
bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
so_re = re.compile("lib.*\.so") so_re = re.compile("lib.*\.so")
@@ -60,7 +60,7 @@ python debian_package_name_hook () {
for f in files: for f in files:
if so_re.match(f): if so_re.match(f):
fp = os.path.join(root, f) fp = os.path.join(root, f)
cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null" cmd = (d.getVar('BUILD_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
fd = os.popen(cmd) fd = os.popen(cmd)
lines = fd.readlines() lines = fd.readlines()
fd.close() fd.close()
@@ -74,7 +74,7 @@ python debian_package_name_hook () {
if len(sonames) == 1: if len(sonames) == 1:
soname = sonames[0] soname = sonames[0]
elif len(sonames) > 1: elif len(sonames) > 1:
lead = d.getVar('LEAD_SONAME', 1) lead = d.getVar('LEAD_SONAME', True)
if lead: if lead:
r = re.compile(lead) r = re.compile(lead)
filtered = [] filtered = []
@@ -117,7 +117,7 @@ python debian_package_name_hook () {
# and later # and later
# DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
# so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True): for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
auto_libname(packages, pkg) auto_libname(packages, pkg)
} }

View File

@@ -372,7 +372,7 @@ python do_checkpkg() {
f.close() f.close()
if status != "ErrHostNoDir" and re.match("Err", status): if status != "ErrHostNoDir" and re.match("Err", status):
logpath = d.getVar('LOG_DIR', 1) logpath = d.getVar('LOG_DIR', True)
os.system("cp %s %s/" % (f.name, logpath)) os.system("cp %s %s/" % (f.name, logpath))
os.unlink(f.name) os.unlink(f.name)
return status return status

View File

@@ -1,4 +1,4 @@
DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}" DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', True) == '')]}"
RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}" RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}"
inherit distutils-common-base inherit distutils-common-base

View File

@@ -1,3 +1,3 @@
DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}" DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', True) == '')]}"
inherit distutils-common-base inherit distutils-common-base

View File

@@ -32,8 +32,8 @@ done
python populate_packages_append () { python populate_packages_append () {
import re import re
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
pkgdest = d.getVar('PKGDEST', 1) pkgdest = d.getVar('PKGDEST', True)
for pkg in packages: for pkg in packages:
schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -46,15 +46,15 @@ python populate_packages_append () {
if schemas != []: if schemas != []:
bb.note("adding gconf postinst and prerm scripts to %s" % pkg) bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
d.setVar('SCHEMA_FILES', " ".join(schemas)) d.setVar('SCHEMA_FILES', " ".join(schemas))
postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += d.getVar('gconf_postinst', 1) postinst += d.getVar('gconf_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1) prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
if not prerm: if not prerm:
prerm = '#!/bin/sh\n' prerm = '#!/bin/sh\n'
prerm += d.getVar('gconf_prerm', 1) prerm += d.getVar('gconf_prerm', True)
d.setVar('pkg_prerm_%s' % pkg, prerm) d.setVar('pkg_prerm_%s' % pkg, prerm)
rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
rdepends += " gconf" rdepends += " gconf"

View File

@@ -28,31 +28,31 @@ done
} }
python populate_packages_append () { python populate_packages_append () {
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
pkgdest = d.getVar('PKGDEST', 1) pkgdest = d.getVar('PKGDEST', True)
for pkg in packages: for pkg in packages:
icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1)) icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
if not os.path.exists(icon_dir): if not os.path.exists(icon_dir):
continue continue
bb.note("adding hicolor-icon-theme dependency to %s" % pkg) bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
rdepends = d.getVar('RDEPENDS_%s' % pkg, 1) rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme" rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
d.setVar('RDEPENDS_%s' % pkg, rdepends) d.setVar('RDEPENDS_%s' % pkg, rdepends)
bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += d.getVar('gtk_icon_cache_postinst', 1) postinst += d.getVar('gtk_icon_cache_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += d.getVar('gtk_icon_cache_postrm', 1) postrm += d.getVar('gtk_icon_cache_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
} }

View File

@@ -98,15 +98,15 @@ python () {
# is searched for in the BBPATH (same as the old version.) # is searched for in the BBPATH (same as the old version.)
# #
def get_devtable_list(d): def get_devtable_list(d):
devtable = d.getVar('IMAGE_DEVICE_TABLE', 1) devtable = d.getVar('IMAGE_DEVICE_TABLE', True)
if devtable != None: if devtable != None:
return devtable return devtable
str = "" str = ""
devtables = d.getVar('IMAGE_DEVICE_TABLES', 1) devtables = d.getVar('IMAGE_DEVICE_TABLES', True)
if devtables == None: if devtables == None:
devtables = 'files/device_table-minimal.txt' devtables = 'files/device_table-minimal.txt'
for devtable in devtables.split(): for devtable in devtables.split():
str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable) str += " %s" % bb.which(d.getVar('BBPATH', True), devtable)
return str return str
IMAGE_CLASSES ?= "image_types" IMAGE_CLASSES ?= "image_types"
@@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= ""
# some default locales # some default locales
IMAGE_LINGUAS ?= "de-de fr-fr en-gb" IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}" LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"
PSEUDO_PASSWD = "${IMAGE_ROOTFS}" PSEUDO_PASSWD = "${IMAGE_ROOTFS}"

View File

@@ -35,12 +35,12 @@ def qemuimagetest_main(d):
casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)')
resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)')
machine = d.getVar('MACHINE', 1) machine = d.getVar('MACHINE', True)
pname = d.getVar('PN', 1) pname = d.getVar('PN', True)
"""function to save test cases running status""" """function to save test cases running status"""
def teststatus(test, status, index, length): def teststatus(test, status, index, length):
test_status = d.getVar('TEST_STATUS', 1) test_status = d.getVar('TEST_STATUS', True)
if not os.path.exists(test_status): if not os.path.exists(test_status):
raise bb.build.FuncFailed("No test status file existing under TEST_TMP") raise bb.build.FuncFailed("No test status file existing under TEST_TMP")
@@ -51,13 +51,13 @@ def qemuimagetest_main(d):
"""funtion to run each case under scenario""" """funtion to run each case under scenario"""
def runtest(scen, case, fulltestpath): def runtest(scen, case, fulltestpath):
resultpath = d.getVar('TEST_RESULT', 1) resultpath = d.getVar('TEST_RESULT', True)
tmppath = d.getVar('TEST_TMP', 1) tmppath = d.getVar('TEST_TMP', True)
"""initialize log file for testcase""" """initialize log file for testcase"""
logpath = d.getVar('TEST_LOG', 1) logpath = d.getVar('TEST_LOG', True)
bb.utils.mkdirhier("%s/%s" % (logpath, scen)) bb.utils.mkdirhier("%s/%s" % (logpath, scen))
caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1))) caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', True)))
os.system("touch %s" % caselog) os.system("touch %s" % caselog)
"""export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH"""
@@ -141,7 +141,7 @@ def qemuimagetest_main(d):
"""Clean tmp folder for testing""" """Clean tmp folder for testing"""
def clean_tmp(): def clean_tmp():
tmppath = d.getVar('TEST_TMP', 1) tmppath = d.getVar('TEST_TMP', True)
if os.path.isdir(tmppath): if os.path.isdir(tmppath):
for f in os.listdir(tmppath): for f in os.listdir(tmppath):
@@ -155,28 +155,28 @@ def qemuimagetest_main(d):
clean_tmp() clean_tmp()
"""check testcase folder and create test log folder""" """check testcase folder and create test log folder"""
testpath = d.getVar('TEST_DIR', 1) testpath = d.getVar('TEST_DIR', True)
bb.utils.mkdirhier(testpath) bb.utils.mkdirhier(testpath)
logpath = d.getVar('TEST_LOG', 1) logpath = d.getVar('TEST_LOG', True)
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
tmppath = d.getVar('TEST_TMP', 1) tmppath = d.getVar('TEST_TMP', True)
bb.utils.mkdirhier(tmppath) bb.utils.mkdirhier(tmppath)
"""initialize test status file""" """initialize test status file"""
test_status = d.getVar('TEST_STATUS', 1) test_status = d.getVar('TEST_STATUS', True)
if os.path.exists(test_status): if os.path.exists(test_status):
os.remove(test_status) os.remove(test_status)
os.system("touch %s" % test_status) os.system("touch %s" % test_status)
"""initialize result file""" """initialize result file"""
resultpath = d.getVar('TEST_RESULT', 1) resultpath = d.getVar('TEST_RESULT', True)
bb.utils.mkdirhier(resultpath) bb.utils.mkdirhier(resultpath)
resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1)) resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', True))
sresultfile = os.path.join(resultpath, "testresult.log") sresultfile = os.path.join(resultpath, "testresult.log")
machine = d.getVar('MACHINE', 1) machine = d.getVar('MACHINE', True)
if os.path.exists(sresultfile): if os.path.exists(sresultfile):
os.remove(sresultfile) os.remove(sresultfile)
@@ -188,7 +188,7 @@ def qemuimagetest_main(d):
f.close() f.close()
"""generate pre-defined testcase list""" """generate pre-defined testcase list"""
testlist = d.getVar('TEST_SCEN', 1) testlist = d.getVar('TEST_SCEN', True)
fulllist = generate_list(testlist) fulllist = generate_list(testlist)
"""Begin testing""" """Begin testing"""

View File

@@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \
def map_kernel_arch(a, d): def map_kernel_arch(a, d):
import re import re
valid_archs = d.getVar('valid_archs', 1).split() valid_archs = d.getVar('valid_archs', True).split()
if re.match('(i.86|athlon|x86.64)$', a): return 'x86' if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
elif re.match('arm26$', a): return 'arm26' elif re.match('arm26$', a): return 'arm26'
@@ -32,7 +32,7 @@ def map_kernel_arch(a, d):
else: else:
bb.error("cannot map '%s' to a linux kernel architecture" % a) bb.error("cannot map '%s' to a linux kernel architecture" % a)
export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}" export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"
def map_uboot_arch(a, d): def map_uboot_arch(a, d):
import re import re
@@ -41,5 +41,5 @@ def map_uboot_arch(a, d):
elif re.match('i.86$', a): return 'x86' elif re.match('i.86$', a): return 'x86'
return a return a
export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}" export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"

View File

@@ -11,9 +11,9 @@ INITRAMFS_IMAGE ?= ""
INITRAMFS_TASK ?= "" INITRAMFS_TASK ?= ""
python __anonymous () { python __anonymous () {
kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or '' kerneltype = d.getVar('KERNEL_IMAGETYPE', True) or ''
if kerneltype == 'uImage': if kerneltype == 'uImage':
depends = d.getVar("DEPENDS", 1) depends = d.getVar("DEPENDS", True)
depends = "%s u-boot-mkimage-native" % depends depends = "%s u-boot-mkimage-native" % depends
d.setVar("DEPENDS", depends) d.setVar("DEPENDS", depends)
@@ -75,7 +75,7 @@ EXTRA_OEMAKE = ""
KERNEL_ALT_IMAGETYPE ??= "" KERNEL_ALT_IMAGETYPE ??= ""
KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}" KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', True))}"
kernel_do_compile() { kernel_do_compile() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
@@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
python populate_packages_prepend () { python populate_packages_prepend () {
def extract_modinfo(file): def extract_modinfo(file):
import tempfile, re import tempfile, re
tempfile.tempdir = d.getVar("WORKDIR", 1) tempfile.tempdir = d.getVar("WORKDIR", True)
tf = tempfile.mkstemp() tf = tempfile.mkstemp()
tmpfile = tf[1] tmpfile = tf[1]
cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile) cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
os.system(cmd) os.system(cmd)
f = open(tmpfile) f = open(tmpfile)
l = f.read().split("\000") l = f.read().split("\000")
@@ -328,18 +328,18 @@ python populate_packages_prepend () {
def parse_depmod(): def parse_depmod():
import re import re
dvar = d.getVar('PKGD', 1) dvar = d.getVar('PKGD', True)
if not dvar: if not dvar:
bb.error("PKGD not defined") bb.error("PKGD not defined")
return return
kernelver = d.getVar('KERNEL_VERSION', 1) kernelver = d.getVar('KERNEL_VERSION', True)
kernelver_stripped = kernelver kernelver_stripped = kernelver
m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
if m: if m:
kernelver_stripped = m.group(1) kernelver_stripped = m.group(1)
path = d.getVar("PATH", 1) path = d.getVar("PATH", True)
host_prefix = d.getVar("HOST_PREFIX", 1) or "" host_prefix = d.getVar("HOST_PREFIX", True) or ""
cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped)
f = os.popen(cmd, 'r') f = os.popen(cmd, 'r')
@@ -377,9 +377,9 @@ python populate_packages_prepend () {
def get_dependencies(file, pattern, format): def get_dependencies(file, pattern, format):
# file no longer includes PKGD # file no longer includes PKGD
file = file.replace(d.getVar('PKGD', 1) or '', '', 1) file = file.replace(d.getVar('PKGD', True) or '', '', 1)
# instead is prefixed with /lib/modules/${KERNEL_VERSION} # instead is prefixed with /lib/modules/${KERNEL_VERSION}
file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1) file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)
if module_deps.has_key(file): if module_deps.has_key(file):
import re import re
@@ -398,40 +398,40 @@ python populate_packages_prepend () {
import re import re
vals = extract_modinfo(file) vals = extract_modinfo(file)
dvar = d.getVar('PKGD', 1) dvar = d.getVar('PKGD', True)
# If autoloading is requested, output /etc/modutils/<name> and append # If autoloading is requested, output /etc/modutils/<name> and append
# appropriate modprobe commands to the postinst # appropriate modprobe commands to the postinst
autoload = d.getVar('module_autoload_%s' % basename, 1) autoload = d.getVar('module_autoload_%s' % basename, True)
if autoload: if autoload:
name = '%s/etc/modutils/%s' % (dvar, basename) name = '%s/etc/modutils/%s' % (dvar, basename)
f = open(name, 'w') f = open(name, 'w')
for m in autoload.split(): for m in autoload.split():
f.write('%s\n' % m) f.write('%s\n' % m)
f.close() f.close()
postinst = d.getVar('pkg_postinst_%s' % pkg, 1) postinst = d.getVar('pkg_postinst_%s' % pkg, True)
if not postinst: if not postinst:
bb.fatal("pkg_postinst_%s not defined" % pkg) bb.fatal("pkg_postinst_%s not defined" % pkg)
postinst += d.getVar('autoload_postinst_fragment', 1) % autoload postinst += d.getVar('autoload_postinst_fragment', True) % autoload
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
# Write out any modconf fragment # Write out any modconf fragment
modconf = d.getVar('module_conf_%s' % basename, 1) modconf = d.getVar('module_conf_%s' % basename, True)
if modconf: if modconf:
name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
f = open(name, 'w') f = open(name, 'w')
f.write("%s\n" % modconf) f.write("%s\n" % modconf)
f.close() f.close()
files = d.getVar('FILES_%s' % pkg, 1) files = d.getVar('FILES_%s' % pkg, True)
files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename)
d.setVar('FILES_%s' % pkg, files) d.setVar('FILES_%s' % pkg, files)
if vals.has_key("description"): if vals.has_key("description"):
old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or "" old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
rdepends_str = d.getVar('RDEPENDS_' + pkg, 1) rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
if rdepends_str: if rdepends_str:
rdepends = rdepends_str.split() rdepends = rdepends_str.split()
else: else:
@@ -443,12 +443,12 @@ python populate_packages_prepend () {
module_regex = '^(.*)\.k?o$' module_regex = '^(.*)\.k?o$'
module_pattern = 'kernel-module-%s' module_pattern = 'kernel-module-%s'
postinst = d.getVar('pkg_postinst_modules', 1) postinst = d.getVar('pkg_postinst_modules', True)
postrm = d.getVar('pkg_postrm_modules', 1) postrm = d.getVar('pkg_postrm_modules', True)
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1)) do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))
import re import re
metapkg = "kernel-modules" metapkg = "kernel-modules"
@ -460,7 +460,7 @@ python populate_packages_prepend () {
pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
blacklist.append(pkg) blacklist.append(pkg)
metapkg_rdepends = [] metapkg_rdepends = []
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
for pkg in packages[1:]: for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends: if not pkg in blacklist and not pkg in metapkg_rdepends:
metapkg_rdepends.append(pkg) metapkg_rdepends.append(pkg)

View File

@ -18,13 +18,13 @@ do_install() {
} }
def get_libc_fpu_setting(bb, d): def get_libc_fpu_setting(bb, d):
if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
return "--without-fp" return "--without-fp"
return "" return ""
python populate_packages_prepend () { python populate_packages_prepend () {
if d.getVar('DEBIAN_NAMES', 1): if d.getVar('DEBIAN_NAMES', True):
bpn = d.getVar('BPN', 1) bpn = d.getVar('BPN', True)
d.setVar('PKG_'+bpn, 'libc6') d.setVar('PKG_'+bpn, 'libc6')
d.setVar('PKG_'+bpn+'-dev', 'libc6-dev') d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
} }

View File

@ -261,7 +261,7 @@ def incompatible_license(d,dont_want_license):
from fnmatch import fnmatchcase as fnmatch from fnmatch import fnmatchcase as fnmatch
dont_want_licenses = [] dont_want_licenses = []
dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', 1)) dont_want_licenses.append(d.getVar('INCOMPATIBLE_LICENSE', True))
if d.getVarFlag('SPDXLICENSEMAP', dont_want_license): if d.getVarFlag('SPDXLICENSEMAP', dont_want_license):
dont_want_licenses.append(d.getVarFlag('SPDXLICENSEMAP', dont_want_license)) dont_want_licenses.append(d.getVarFlag('SPDXLICENSEMAP', dont_want_license))

View File

@ -27,7 +27,7 @@ def base_detect_branch(d):
return "<unknown>" return "<unknown>"
def base_get_scmbasepath(d): def base_get_scmbasepath(d):
return d.getVar( 'COREBASE', 1 ) return d.getVar( 'COREBASE', True)
def base_get_metadata_monotone_branch(path, d): def base_get_metadata_monotone_branch(path, d):
monotone_branch = "<unknown>" monotone_branch = "<unknown>"

View File

@ -1067,7 +1067,7 @@ python emit_pkgdata() {
return size return size
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES', True)
pkgdest = d.getVar('PKGDEST', 1) pkgdest = d.getVar('PKGDEST', True)
pkgdatadir = d.getVar('PKGDESTWORK', True) pkgdatadir = d.getVar('PKGDESTWORK', True)
# Take shared lock since we're only reading, not writing # Take shared lock since we're only reading, not writing

View File

@ -15,12 +15,12 @@ python package_ipk_fn () {
} }
python package_ipk_install () { python package_ipk_install () {
pkg = d.getVar('PKG', 1) pkg = d.getVar('PKG', True)
pkgfn = d.getVar('PKGFN', 1) pkgfn = d.getVar('PKGFN', True)
rootfs = d.getVar('IMAGE_ROOTFS', 1) rootfs = d.getVar('IMAGE_ROOTFS', True)
ipkdir = d.getVar('DEPLOY_DIR_IPK', 1) ipkdir = d.getVar('DEPLOY_DIR_IPK', True)
stagingdir = d.getVar('STAGING_DIR', 1) stagingdir = d.getVar('STAGING_DIR', True)
tmpdir = d.getVar('TMPDIR', 1) tmpdir = d.getVar('TMPDIR', True)
if None in (pkg,pkgfn,rootfs): if None in (pkg,pkgfn,rootfs):
raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@ -289,7 +289,7 @@ python do_package_ipk () {
localdata.setVar('ROOT', '') localdata.setVar('ROOT', '')
localdata.setVar('ROOT_%s' % pkg, root) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = localdata.getVar('PKG_%s' % pkg, 1) pkgname = localdata.getVar('PKG_%s' % pkg, True)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
localdata.setVar('PKG', pkgname) localdata.setVar('PKG', pkgname)
@ -298,7 +298,7 @@ python do_package_ipk () {
bb.data.update_data(localdata) bb.data.update_data(localdata)
basedir = os.path.join(os.path.dirname(root)) basedir = os.path.join(os.path.dirname(root))
arch = localdata.getVar('PACKAGE_ARCH', 1) arch = localdata.getVar('PACKAGE_ARCH', True)
pkgoutdir = "%s/%s" % (outdir, arch) pkgoutdir = "%s/%s" % (outdir, arch)
bb.mkdirhier(pkgoutdir) bb.mkdirhier(pkgoutdir)
os.chdir(root) os.chdir(root)
@ -310,7 +310,7 @@ python do_package_ipk () {
except ValueError: except ValueError:
pass pass
if not g and localdata.getVar('ALLOW_EMPTY') != "1": if not g and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
continue continue
@ -323,7 +323,7 @@ python do_package_ipk () {
raise bb.build.FuncFailed("unable to open control file for writing.") raise bb.build.FuncFailed("unable to open control file for writing.")
fields = [] fields = []
pe = d.getVar('PKGE', 1) pe = d.getVar('PKGE', True)
if pe and int(pe) > 0: if pe and int(pe) > 0:
fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
else: else:
@ -340,7 +340,7 @@ python do_package_ipk () {
def pullData(l, d): def pullData(l, d):
l2 = [] l2 = []
for i in l: for i in l:
l2.append(d.getVar(i, 1)) l2.append(d.getVar(i, True))
return l2 return l2
ctrlfile.write("Package: %s\n" % pkgname) ctrlfile.write("Package: %s\n" % pkgname)
@ -369,12 +369,12 @@ python do_package_ipk () {
bb.build.exec_func("mapping_rename_hook", localdata) bb.build.exec_func("mapping_rename_hook", localdata)
rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "") rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "")
rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "") rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "")
rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "") rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "")
rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "") rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "")
rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "") rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "")
rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "") rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "")
if rdepends: if rdepends:
ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@ -388,14 +388,14 @@ python do_package_ipk () {
ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
if rconflicts: if rconflicts:
ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
src_uri = localdata.getVar("SRC_URI", 1) src_uri = localdata.getVar("SRC_URI", True)
if src_uri: if src_uri:
src_uri = re.sub("\s+", " ", src_uri) src_uri = re.sub("\s+", " ", src_uri)
ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
ctrlfile.close() ctrlfile.close()
for script in ["preinst", "postinst", "prerm", "postrm"]: for script in ["preinst", "postinst", "prerm", "postrm"]:
scriptvar = localdata.getVar('pkg_%s' % script, 1) scriptvar = localdata.getVar('pkg_%s' % script, True)
if not scriptvar: if not scriptvar:
continue continue
try: try:
@ -407,7 +407,7 @@ python do_package_ipk () {
scriptfile.close() scriptfile.close()
os.chmod(os.path.join(controldir, script), 0755) os.chmod(os.path.join(controldir, script), 0755)
conffiles_str = localdata.getVar("CONFFILES", 1) conffiles_str = localdata.getVar("CONFFILES", True)
if conffiles_str: if conffiles_str:
try: try:
conffiles = file(os.path.join(controldir, 'conffiles'), 'w') conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@ -419,7 +419,7 @@ python do_package_ipk () {
conffiles.close() conffiles.close()
os.chdir(basedir) os.chdir(basedir)
ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1), ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True),
d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir)) d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir))
if ret != 0: if ret != 0:
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)

View File

@ -619,7 +619,7 @@ python write_specfile () {
localdata.setVar('ROOT', '') localdata.setVar('ROOT', '')
localdata.setVar('ROOT_%s' % pkg, root) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = localdata.getVar('PKG_%s' % pkg, 1) pkgname = localdata.getVar('PKG_%s' % pkg, True)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
localdata.setVar('PKG', pkgname) localdata.setVar('PKG', pkgname)

View File

@ -9,9 +9,9 @@ python package_tar_fn () {
} }
python package_tar_install () { python package_tar_install () {
pkg = d.getVar('PKG', 1) pkg = d.getVar('PKG', True)
pkgfn = d.getVar('PKGFN', 1) pkgfn = d.getVar('PKGFN', True)
rootfs = d.getVar('IMAGE_ROOTFS', 1) rootfs = d.getVar('IMAGE_ROOTFS', True)
if None in (pkg,pkgfn,rootfs): if None in (pkg,pkgfn,rootfs):
bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@ -35,24 +35,24 @@ python package_tar_install () {
} }
python do_package_tar () { python do_package_tar () {
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
outdir = d.getVar('DEPLOY_DIR_TAR', 1) outdir = d.getVar('DEPLOY_DIR_TAR', True)
if not outdir: if not outdir:
bb.error("DEPLOY_DIR_TAR not defined, unable to package") bb.error("DEPLOY_DIR_TAR not defined, unable to package")
return return
bb.mkdirhier(outdir) bb.mkdirhier(outdir)
dvar = d.getVar('D', 1) dvar = d.getVar('D', True)
if not dvar: if not dvar:
bb.error("D not defined, unable to package") bb.error("D not defined, unable to package")
return return
bb.mkdirhier(dvar) bb.mkdirhier(dvar)
packages = d.getVar('PACKAGES', 1) packages = d.getVar('PACKAGES', True)
if not packages: if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package") bb.debug(1, "PACKAGES not defined, nothing to package")
return return
@ -79,11 +79,11 @@ python do_package_tar () {
pkgoutdir = outdir pkgoutdir = outdir
bb.mkdirhier(pkgoutdir) bb.mkdirhier(pkgoutdir)
bb.build.exec_func('package_tar_fn', localdata) bb.build.exec_func('package_tar_fn', localdata)
tarfn = localdata.getVar('PKGFN', 1) tarfn = localdata.getVar('PKGFN', True)
os.chdir(root) os.chdir(root)
from glob import glob from glob import glob
if not glob('*'): if not glob('*'):
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
continue continue
ret = os.system("tar -czf %s %s" % (tarfn, '.')) ret = os.system("tar -czf %s %s" % (tarfn, '.'))
if ret != 0: if ret != 0:

View File

@ -1,12 +1,12 @@
python read_subpackage_metadata () { python read_subpackage_metadata () {
import oe.packagedata import oe.packagedata
data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d) data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)
for key in data.keys(): for key in data.keys():
d.setVar(key, data[key]) d.setVar(key, data[key])
for pkg in d.getVar('PACKAGES', 1).split(): for pkg in d.getVar('PACKAGES', True).split():
sdata = oe.packagedata.read_subpkgdata(pkg, d) sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys(): for key in sdata.keys():
d.setVar(key, sdata[key]) d.setVar(key, sdata[key])

View File

@ -1,6 +1,6 @@
PKG_DISTRIBUTECOMMAND[func] = "1" PKG_DISTRIBUTECOMMAND[func] = "1"
python do_distribute_packages () { python do_distribute_packages () {
cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1) cmd = d.getVar('PKG_DISTRIBUTECOMMAND', True)
if not cmd: if not cmd:
raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)

View File

@ -1,5 +1,5 @@
python do_pkg_write_metainfo () { python do_pkg_write_metainfo () {
deploydir = d.getVar('DEPLOY_DIR', 1) deploydir = d.getVar('DEPLOY_DIR', True)
if not deploydir: if not deploydir:
bb.error("DEPLOY_DIR not defined, unable to write package info") bb.error("DEPLOY_DIR not defined, unable to write package info")
return return
@ -9,11 +9,11 @@ python do_pkg_write_metainfo () {
except OSError: except OSError:
raise bb.build.FuncFailed("unable to open package-info file for writing.") raise bb.build.FuncFailed("unable to open package-info file for writing.")
name = d.getVar('PN', 1) name = d.getVar('PN', True)
version = d.getVar('PV', 1) version = d.getVar('PV', True)
desc = d.getVar('DESCRIPTION', 1) desc = d.getVar('DESCRIPTION', True)
page = d.getVar('HOMEPAGE', 1) page = d.getVar('HOMEPAGE', True)
lic = d.getVar('LICENSE', 1) lic = d.getVar('LICENSE', True)
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
infofile.close() infofile.close()

View File

@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul
do_populate_sdk[recrdeptask] += "do_package_write_deb" do_populate_sdk[recrdeptask] += "do_package_write_deb"
DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\ DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', True), "i386"]\
[d.getVar('SDK_ARCH', 1) in \ [d.getVar('SDK_ARCH', True) in \
["x86", "i486", "i586", "i686", "pentium"]]}" ["x86", "i486", "i586", "i686", "pentium"]]}"
populate_sdk_post_deb () { populate_sdk_post_deb () {

View File

@ -6,7 +6,7 @@
def qemu_target_binary(data): def qemu_target_binary(data):
import bb import bb
target_arch = data.getVar("TARGET_ARCH", 1) target_arch = data.getVar("TARGET_ARCH", True)
if target_arch in ("i486", "i586", "i686"): if target_arch in ("i486", "i586", "i686"):
target_arch = "i386" target_arch = "i386"
elif target_arch == "powerpc": elif target_arch == "powerpc":

View File

@ -1,4 +1,4 @@
DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}" DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', True)[:12] == 'qt4-embedded')]}"
inherit qmake2 inherit qmake2

View File

@ -1,5 +1,5 @@
def __note(msg, d): def __note(msg, d):
bb.note("%s: recipe_sanity: %s" % (d.getVar("P", 1), msg)) bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg))
__recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
def bad_runtime_vars(cfgdata, d): def bad_runtime_vars(cfgdata, d):
@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d):
bb.data.inherits_class("cross", d): bb.data.inherits_class("cross", d):
return return
for var in d.getVar("__recipe_sanity_badruntimevars", 1).split(): for var in d.getVar("__recipe_sanity_badruntimevars", True).split():
val = d.getVar(var, 0) val = d.getVar(var, 0)
if val and val != cfgdata.get(var): if val and val != cfgdata.get(var):
__note("%s should be %s_${PN}" % (var, var), d) __note("%s should be %s_${PN}" % (var, var), d)
@ -15,17 +15,17 @@ def bad_runtime_vars(cfgdata, d):
__recipe_sanity_reqvars = "DESCRIPTION" __recipe_sanity_reqvars = "DESCRIPTION"
__recipe_sanity_reqdiffvars = "LICENSE" __recipe_sanity_reqdiffvars = "LICENSE"
def req_vars(cfgdata, d): def req_vars(cfgdata, d):
for var in d.getVar("__recipe_sanity_reqvars", 1).split(): for var in d.getVar("__recipe_sanity_reqvars", True).split():
if not d.getVar(var, 0): if not d.getVar(var, 0):
__note("%s should be set" % var, d) __note("%s should be set" % var, d)
for var in d.getVar("__recipe_sanity_reqdiffvars", 1).split(): for var in d.getVar("__recipe_sanity_reqdiffvars", True).split():
val = d.getVar(var, 0) val = d.getVar(var, 0)
cfgval = cfgdata.get(var) cfgval = cfgdata.get(var)
# Hardcoding is bad, but I'm lazy. We don't care about license being # Hardcoding is bad, but I'm lazy. We don't care about license being
# unset if the recipe has no sources! # unset if the recipe has no sources!
if var == "LICENSE" and d.getVar("SRC_URI", 1) == cfgdata.get("SRC_URI"): if var == "LICENSE" and d.getVar("SRC_URI", True) == cfgdata.get("SRC_URI"):
continue continue
if not val: if not val:
@ -43,11 +43,11 @@ def var_renames_overwrite(cfgdata, d):
def incorrect_nonempty_PACKAGES(cfgdata, d): def incorrect_nonempty_PACKAGES(cfgdata, d):
if bb.data.inherits_class("native", d) or \ if bb.data.inherits_class("native", d) or \
bb.data.inherits_class("cross", d): bb.data.inherits_class("cross", d):
if d.getVar("PACKAGES", 1): if d.getVar("PACKAGES", True):
return True return True
def can_use_autotools_base(cfgdata, d): def can_use_autotools_base(cfgdata, d):
cfg = d.getVar("do_configure", 1) cfg = d.getVar("do_configure", True)
if not bb.data.inherits_class("autotools", d): if not bb.data.inherits_class("autotools", d):
return False return False
@ -65,10 +65,10 @@ def can_use_autotools_base(cfgdata, d):
def can_remove_FILESPATH(cfgdata, d): def can_remove_FILESPATH(cfgdata, d):
expected = cfgdata.get("FILESPATH") expected = cfgdata.get("FILESPATH")
#expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', 1).split(':') for p in d.getVar('FILESPATHPKG', 1).split(':') for o in (d.getVar('OVERRIDES', 1) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}" #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', True).split(':') for p in d.getVar('FILESPATHPKG', True).split(':') for o in (d.getVar('OVERRIDES', True) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}"
expectedpaths = bb.data.expand(expected, d) expectedpaths = bb.data.expand(expected, d)
unexpanded = d.getVar("FILESPATH", 0) unexpanded = d.getVar("FILESPATH", 0)
filespath = d.getVar("FILESPATH", 1).split(":") filespath = d.getVar("FILESPATH", True).split(":")
filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
for fp in filespath: for fp in filespath:
if not fp in expectedpaths: if not fp in expectedpaths:
@ -79,13 +79,13 @@ def can_remove_FILESPATH(cfgdata, d):
def can_remove_FILESDIR(cfgdata, d): def can_remove_FILESDIR(cfgdata, d):
expected = cfgdata.get("FILESDIR") expected = cfgdata.get("FILESDIR")
#expected = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" #expected = "${@bb.which(d.getVar('FILESPATH', True), '.')}"
unexpanded = d.getVar("FILESDIR", 0) unexpanded = d.getVar("FILESDIR", 0)
if unexpanded is None: if unexpanded is None:
return False return False
expanded = os.path.normpath(d.getVar("FILESDIR", 1)) expanded = os.path.normpath(d.getVar("FILESDIR", True))
filespath = d.getVar("FILESPATH", 1).split(":") filespath = d.getVar("FILESPATH", True).split(":")
filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
return unexpanded != expected and \ return unexpanded != expected and \
@ -103,7 +103,7 @@ def can_remove_others(p, cfgdata, d):
continue continue
try: try:
expanded = d.getVar(k, 1) expanded = d.getVar(k, True)
cfgexpanded = bb.data.expand(cfgunexpanded, d) cfgexpanded = bb.data.expand(cfgunexpanded, d)
except bb.fetch.ParameterError: except bb.fetch.ParameterError:
continue continue
@ -115,8 +115,8 @@ def can_remove_others(p, cfgdata, d):
(p, cfgunexpanded, unexpanded, expanded)) (p, cfgunexpanded, unexpanded, expanded))
python do_recipe_sanity () { python do_recipe_sanity () {
p = d.getVar("P", 1) p = d.getVar("P", True)
p = "%s %s %s" % (d.getVar("PN", 1), d.getVar("PV", 1), d.getVar("PR", 1)) p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True))
sanitychecks = [ sanitychecks = [
(can_remove_FILESDIR, "candidate for removal of FILESDIR"), (can_remove_FILESDIR, "candidate for removal of FILESDIR"),

View File

@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
shared_state = sstate_state_fromvars(d) shared_state = sstate_state_fromvars(d)
if shared_state['name'] != 'populate-sysroot': if shared_state['name'] != 'populate-sysroot':
return return
if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')): if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')):
bb.debug(1, "No site_config directory, skipping do_siteconfig") bb.debug(1, "No site_config directory, skipping do_siteconfig")
return return
bb.build.exec_func('do_siteconfig_gencache', d) bb.build.exec_func('do_siteconfig_gencache', d)

View File

@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False):
if no_cache: return sitefiles if no_cache: return sitefiles
# Now check for siteconfig cache files # Now check for siteconfig cache files
path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1) path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', True)
if os.path.isdir(path_siteconfig): if os.path.isdir(path_siteconfig):
for i in os.listdir(path_siteconfig): for i in os.listdir(path_siteconfig):
filename = os.path.join(path_siteconfig, i) filename = os.path.join(path_siteconfig, i)

View File

@ -6,7 +6,7 @@ DISTRO ?= "openembedded"
def get_src_tree(d): def get_src_tree(d):
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to find source tree.") bb.error("WORKDIR not defined, unable to find source tree.")
return return
@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() {
python sourcepkg_do_dumpdata() { python sourcepkg_do_dumpdata() {
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
distro = d.getVar('DISTRO', 1) distro = d.getVar('DISTRO', True)
s_tree = get_src_tree(d) s_tree = get_src_tree(d)
openembeddeddir = os.path.join(workdir, s_tree, distro) openembeddeddir = os.path.join(workdir, s_tree, distro)
dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
@ -74,7 +74,7 @@ python sourcepkg_do_dumpdata() {
# emit the metadata which isnt valid shell # emit the metadata which isnt valid shell
for e in d.keys(): for e in d.keys():
if d.getVarFlag(e, 'python'): if d.getVarFlag(e, 'python'):
f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1))) f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, True)))
f.close() f.close()
} }

View File

@ -3,12 +3,12 @@ python do_distribute_sources () {
l = bb.data.createCopy(d) l = bb.data.createCopy(d)
bb.data.update_data(l) bb.data.update_data(l)
sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1) sources_dir = d.getVar('SRC_DISTRIBUTEDIR', True)
src_uri = d.getVar('SRC_URI', 1).split() src_uri = d.getVar('SRC_URI', True).split()
fetcher = bb.fetch2.Fetch(src_uri, d) fetcher = bb.fetch2.Fetch(src_uri, d)
ud = fetcher.ud ud = fetcher.ud
licenses = d.getVar('LICENSE', 1).replace('&', '|') licenses = d.getVar('LICENSE', True).replace('&', '|')
licenses = licenses.replace('(', '').replace(')', '') licenses = licenses.replace('(', '').replace(')', '')
clean_licenses = "" clean_licenses = ""
for x in licenses.split(): for x in licenses.split():
@ -20,7 +20,7 @@ python do_distribute_sources () {
for license in clean_licenses.split('|'): for license in clean_licenses.split('|'):
for url in ud.values(): for url in ud.values():
cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1) cmd = d.getVar('SRC_DISTRIBUTECOMMAND', True)
if not cmd: if not cmd:
raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
url.setup_localpath(d) url.setup_localpath(d)
@ -29,9 +29,9 @@ python do_distribute_sources () {
if url.basename == '*': if url.basename == '*':
import os.path import os.path
dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir)) d.setVar('DEST', "%s_%s/" % (d.getVar('PF', True), dest_dir))
else: else:
d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename)) d.setVar('DEST', "%s_%s" % (d.getVar('PF', True), url.basename))
else: else:
d.setVar('DEST', '') d.setVar('DEST', '')

View File

@ -57,12 +57,12 @@ python build_syslinux_menu () {
import copy import copy
import sys import sys
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR is not defined") bb.error("WORKDIR is not defined")
return return
labels = d.getVar('LABELS', 1) labels = d.getVar('LABELS', True)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
@ -71,7 +71,7 @@ python build_syslinux_menu () {
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = d.getVar('SYSLINUXMENU', 1) cfile = d.getVar('SYSLINUXMENU', True)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
@ -100,7 +100,7 @@ python build_syslinux_menu () {
localdata.setVar('OVERRIDES', label + ':' + overrides) localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
usage = localdata.getVar('USAGE', 1) usage = localdata.getVar('USAGE', True)
cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
cfgfile.write('%s\n' % (usage)) cfgfile.write('%s\n' % (usage))
@ -114,12 +114,12 @@ python build_syslinux_cfg () {
import copy import copy
import sys import sys
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
labels = d.getVar('LABELS', 1) labels = d.getVar('LABELS', True)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
@ -128,7 +128,7 @@ python build_syslinux_cfg () {
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = d.getVar('SYSLINUXCFG', 1) cfile = d.getVar('SYSLINUXCFG', True)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
@ -139,7 +139,7 @@ python build_syslinux_cfg () {
cfgfile.write('# Automatically created by OE\n') cfgfile.write('# Automatically created by OE\n')
opts = d.getVar('SYSLINUX_OPTS', 1) opts = d.getVar('SYSLINUX_OPTS', True)
if opts: if opts:
for opt in opts.split(';'): for opt in opts.split(';'):
@ -148,26 +148,26 @@ python build_syslinux_cfg () {
cfgfile.write('ALLOWOPTIONS 1\n'); cfgfile.write('ALLOWOPTIONS 1\n');
cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
timeout = d.getVar('SYSLINUX_TIMEOUT', 1) timeout = d.getVar('SYSLINUX_TIMEOUT', True)
if timeout: if timeout:
cfgfile.write('TIMEOUT %s\n' % timeout) cfgfile.write('TIMEOUT %s\n' % timeout)
else: else:
cfgfile.write('TIMEOUT 50\n') cfgfile.write('TIMEOUT 50\n')
prompt = d.getVar('SYSLINUX_PROMPT', 1) prompt = d.getVar('SYSLINUX_PROMPT', True)
if prompt: if prompt:
cfgfile.write('PROMPT %s\n' % prompt) cfgfile.write('PROMPT %s\n' % prompt)
else: else:
cfgfile.write('PROMPT 1\n') cfgfile.write('PROMPT 1\n')
menu = d.getVar('AUTO_SYSLINUXMENU', 1) menu = d.getVar('AUTO_SYSLINUXMENU', True)
# This is ugly. My bad. # This is ugly. My bad.
if menu: if menu:
bb.build.exec_func('build_syslinux_menu', d) bb.build.exec_func('build_syslinux_menu', d)
mfile = d.getVar('SYSLINUXMENU', 1) mfile = d.getVar('SYSLINUXMENU', True)
cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
for label in labels.split(): for label in labels.split():
@ -182,8 +182,8 @@ python build_syslinux_cfg () {
cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label)) cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
append = localdata.getVar('APPEND', 1) append = localdata.getVar('APPEND', True)
initrd = localdata.getVar('INITRD', 1) initrd = localdata.getVar('INITRD', True)
if append: if append:
cfgfile.write('APPEND ') cfgfile.write('APPEND ')

View File

@ -17,7 +17,7 @@ PACKAGE_ARCH = "all"
# to the list. Their dependencies (RRECOMMENDS) are handled as usual # to the list. Their dependencies (RRECOMMENDS) are handled as usual
# by package_depchains in a following step. # by package_depchains in a following step.
python () { python () {
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
genpackages = [] genpackages = []
for pkg in packages: for pkg in packages:
for postfix in ['-dbg', '-dev']: for postfix in ['-dbg', '-dev']:

View File

@ -94,22 +94,22 @@ python __anonymous() {
} }
python populate_packages_prepend () { python populate_packages_prepend () {
pkg = d.getVar('PN', 1) pkg = d.getVar('PN', True)
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
if d.getVar('ALTERNATIVE_LINKS') != None: if d.getVar('ALTERNATIVE_LINKS') != None:
postinst += d.getVar('update_alternatives_batch_postinst', 1) postinst += d.getVar('update_alternatives_batch_postinst', True)
else: else:
postinst += d.getVar('update_alternatives_postinst', 1) postinst += d.getVar('update_alternatives_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1) postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
if d.getVar('ALTERNATIVE_LINKS') != None: if d.getVar('ALTERNATIVE_LINKS') != None:
postrm += d.getVar('update_alternatives_batch_postrm', 1) postrm += d.getVar('update_alternatives_batch_postrm', True)
else: else:
postrm += d.getVar('update_alternatives_postrm', 1) postrm += d.getVar('update_alternatives_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
} }

View File

@ -47,7 +47,7 @@ python populate_packages_prepend () {
def update_rcd_package(pkg): def update_rcd_package(pkg):
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = localdata.getVar("OVERRIDES", 1) overrides = localdata.getVar("OVERRIDES", True)
localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
bb.data.update_data(localdata) bb.data.update_data(localdata)
@ -56,28 +56,28 @@ python populate_packages_prepend () {
execute on the target. Not doing so may cause update_rc.d postinst invoked execute on the target. Not doing so may cause update_rc.d postinst invoked
twice to cause unwanted warnings. twice to cause unwanted warnings.
""" """
postinst = localdata.getVar('pkg_postinst', 1) postinst = localdata.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += localdata.getVar('updatercd_postinst', 1) postinst += localdata.getVar('updatercd_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
prerm = localdata.getVar('pkg_prerm', 1) prerm = localdata.getVar('pkg_prerm', True)
if not prerm: if not prerm:
prerm = '#!/bin/sh\n' prerm = '#!/bin/sh\n'
prerm += localdata.getVar('updatercd_prerm', 1) prerm += localdata.getVar('updatercd_prerm', True)
d.setVar('pkg_prerm_%s' % pkg, prerm) d.setVar('pkg_prerm_%s' % pkg, prerm)
postrm = localdata.getVar('pkg_postrm', 1) postrm = localdata.getVar('pkg_postrm', True)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += localdata.getVar('updatercd_postrm', 1) postrm += localdata.getVar('updatercd_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
pkgs = d.getVar('INITSCRIPT_PACKAGES', 1) pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
if pkgs == None: if pkgs == None:
pkgs = d.getVar('UPDATERCPN', 1) pkgs = d.getVar('UPDATERCPN', True)
packages = (d.getVar('PACKAGES', 1) or "").split() packages = (d.getVar('PACKAGES', True) or "").split()
if not pkgs in packages and packages != []: if not pkgs in packages and packages != []:
pkgs = packages[0] pkgs = packages[0]
for pkg in pkgs.split(): for pkg in pkgs.split():

View File

@ -24,7 +24,7 @@ python do_clean() {
bb.note("Removing " + dir) bb.note("Removing " + dir)
oe.path.remove(dir) oe.path.remove(dir)
for f in (d.getVar('CLEANFUNCS', 1) or '').split(): for f in (d.getVar('CLEANFUNCS', True) or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
} }

View File

@ -336,7 +336,7 @@ def base_set_filespath(path, d):
if extrapaths != "": if extrapaths != "":
path = extrapaths.split(":") + path path = extrapaths.split(":") + path
# The ":" ensures we have an 'empty' override # The ":" ensures we have an 'empty' override
overrides = (d.getVar("OVERRIDES", 1) or "") + ":" overrides = (d.getVar("OVERRIDES", True) or "") + ":"
for p in path: for p in path:
if p != "": if p != "":
for o in overrides.split(":"): for o in overrides.split(":"):

View File

@ -102,7 +102,7 @@ ABIEXTENSION ??= ""
TARGET_ARCH = "${TUNE_ARCH}" TARGET_ARCH = "${TUNE_ARCH}"
TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}"
TARGET_VENDOR = "-oe" TARGET_VENDOR = "-oe"
TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', 1), ''][d.getVar('TARGET_OS', 1) == ('' or 'custom')]}" TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', True), ''][d.getVar('TARGET_OS', True) == ('' or 'custom')]}"
TARGET_PREFIX = "${TARGET_SYS}-" TARGET_PREFIX = "${TARGET_SYS}-"
TARGET_CC_ARCH = "${TUNE_CCARGS}" TARGET_CC_ARCH = "${TUNE_CCARGS}"
TARGET_LD_ARCH = "${TUNE_LDARGS}" TARGET_LD_ARCH = "${TUNE_LDARGS}"
@ -111,7 +111,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}"
SDK_ARCH = "${BUILD_ARCH}" SDK_ARCH = "${BUILD_ARCH}"
SDK_OS = "${BUILD_OS}" SDK_OS = "${BUILD_OS}"
SDK_VENDOR = "-oesdk" SDK_VENDOR = "-oesdk"
SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', 1), ''][d.getVar('SDK_OS', 1) == ('' or 'custom')]}" SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', True), ''][d.getVar('SDK_OS', True) == ('' or 'custom')]}"
SDK_PREFIX = "${SDK_SYS}-" SDK_PREFIX = "${SDK_SYS}-"
SDK_CC_ARCH = "${BUILD_CC_ARCH}" SDK_CC_ARCH = "${BUILD_CC_ARCH}"
SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk"
@ -119,7 +119,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}"
SDK_AS_ARCH = "${BUILD_AS_ARCH}" SDK_AS_ARCH = "${BUILD_AS_ARCH}"
PACKAGE_ARCH = "${TUNE_PKGARCH}" PACKAGE_ARCH = "${TUNE_PKGARCH}"
MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', 1), d.getVar('MACHINE', 1)][bool(d.getVar('MACHINE', 1))].replace('-', '_')}" MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', True), d.getVar('MACHINE', True)][bool(d.getVar('MACHINE', True))].replace('-', '_')}"
PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}"
PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}"
# MACHINE_ARCH shouldn't be included here as a variable dependency # MACHINE_ARCH shouldn't be included here as a variable dependency
@ -300,14 +300,14 @@ FILES_${PN}-locale = "${datadir}/locale"
FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}" FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}"
# FILESPATH is set in base.bbclass # FILESPATH is set in base.bbclass
#FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILESDIR = "${@bb.which(d.getVar('FILESPATH', 1), '.')}" FILESDIR = "${@bb.which(d.getVar('FILESPATH', True), '.')}"
################################################################## ##################################################################
# General work and output directories for the build system. # General work and output directories for the build system.
################################################################## ##################################################################
TMPDIR ?= "${TOPDIR}/tmp" TMPDIR ?= "${TOPDIR}/tmp"
CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}"
# The persistent cache should be shared by all builds # The persistent cache should be shared by all builds
PERSISTENT_DIR = "${TMPDIR}/cache" PERSISTENT_DIR = "${TMPDIR}/cache"
LOG_DIR = "${TMPDIR}/log" LOG_DIR = "${TMPDIR}/log"
@ -408,7 +408,7 @@ export PATH
# Build utility info. # Build utility info.
################################################################## ##################################################################
CCACHE = "${@bb.which(d.getVar('PATH', 1), 'ccache') and 'ccache '}" CCACHE = "${@bb.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}"
export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"

View File

@ -13,7 +13,7 @@ require conf/distro/include/tclibc-${TCLIBC}.inc
TCLIBCAPPEND ?= "-${TCLIBC}" TCLIBCAPPEND ?= "-${TCLIBC}"
TMPDIR .= "${TCLIBCAPPEND}" TMPDIR .= "${TCLIBCAPPEND}"
CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}" CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}"
USER_CLASSES ?= "" USER_CLASSES ?= ""
PACKAGE_CLASSES ?= "package_ipk" PACKAGE_CLASSES ?= "package_ipk"

View File

@ -5,7 +5,7 @@
# but requires more instructions (140% for 70% smaller code) so may be # but requires more instructions (140% for 70% smaller code) so may be
# slower. # slower.
TUNEVALID[thumb] = "Use thumb instructions instead of ARM" TUNEVALID[thumb] = "Use thumb instructions instead of ARM"
ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}"
TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}"
OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}"

View File

@ -16,15 +16,15 @@ THUMB_INTERWORK ?= "yes"
# arm system and vice versa. It is strongly recommended that DISTROs not # arm system and vice versa. It is strongly recommended that DISTROs not
# turn this off - the actual cost is very small. # turn this off - the actual cost is very small.
OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}"
OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', True) == 'yes']}"
OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}"
# Compiler and linker options for application code and kernel code. These # Compiler and linker options for application code and kernel code. These
# options ensure that the compiler has the correct settings for the selected # options ensure that the compiler has the correct settings for the selected
# instruction set and interworking. # instruction set and interworking.
ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}" ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', True) == 'yes']}"
ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}" ARM_THUMB_M_OPT = "${@['-marm', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}"
# #
TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}"

View File

@ -220,7 +220,7 @@ class GitApplyTree(PatchTree):
class QuiltTree(PatchSet): class QuiltTree(PatchSet):
def _runcmd(self, args, run = True): def _runcmd(self, args, run = True):
quiltrc = self.d.getVar('QUILTRCFILE', 1) quiltrc = self.d.getVar('QUILTRCFILE', True)
if not run: if not run:
return ["quilt"] + ["--quiltrc"] + [quiltrc] + args return ["quilt"] + ["--quiltrc"] + [quiltrc] + args
runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir)
@ -398,7 +398,7 @@ class UserResolver(Resolver):
# Patch application failed # Patch application failed
patchcmd = self.patchset.Push(True, False, False) patchcmd = self.patchset.Push(True, False, False)
t = self.patchset.d.getVar('T', 1) t = self.patchset.d.getVar('T', True)
if not t: if not t:
bb.msg.fatal("Build", "T not set") bb.msg.fatal("Build", "T not set")
bb.utils.mkdirhier(t) bb.utils.mkdirhier(t)

View File

@ -42,7 +42,7 @@ def relative(src, dest):
def format_display(path, metadata): def format_display(path, metadata):
""" Prepare a path for display to the user. """ """ Prepare a path for display to the user. """
rel = relative(metadata.getVar("TOPDIR", 1), path) rel = relative(metadata.getVar("TOPDIR", True), path)
if len(rel) > len(path): if len(rel) > len(path):
return path return path
else: else:

View File

@ -23,7 +23,7 @@ inherit autotools
python __anonymous () { python __anonymous () {
import re import re
host = d.getVar('HOST_SYS', 1) host = d.getVar('HOST_SYS', True)
if not re.match('i.86.*-linux', host): if not re.match('i.86.*-linux', host):
raise bb.parse.SkipPackage("incompatible with host %s" % host) raise bb.parse.SkipPackage("incompatible with host %s" % host)
} }

View File

@ -8,10 +8,10 @@
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', 1)) != None) uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', True)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
d.getVar('TARGET_OS', 1)) d.getVar('TARGET_OS', True))
} }
# Set this to zero if you don't want ldconfig in the output package # Set this to zero if you don't want ldconfig in the output package

View File

@ -52,10 +52,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', True)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
d.getVar('TARGET_OS', 1)) d.getVar('TARGET_OS', True))
} }
export libc_cv_slibdir = "${base_libdir}" export libc_cv_slibdir = "${base_libdir}"

View File

@ -55,10 +55,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None) uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', True)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
d.getVar('TARGET_OS', 1)) d.getVar('TARGET_OS', True))
} }
export libc_cv_slibdir = "${base_libdir}" export libc_cv_slibdir = "${base_libdir}"

View File

@ -33,7 +33,7 @@ export LDFLAGS += "-ldl"
python populate_packages_prepend () { python populate_packages_prepend () {
# autonamer would call this libxml2-2, but we don't want that # autonamer would call this libxml2-2, but we don't want that
if d.getVar('DEBIAN_NAMES', 1): if d.getVar('DEBIAN_NAMES', True):
d.setVar('PKG_libxml2', '${MLPREFIX}libxml2') d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
} }

View File

@ -126,8 +126,8 @@ python __anonymous () {
import bb import bb
distro_features = set(d.getVar("DISTRO_FEATURES", 1).split()) distro_features = set(d.getVar("DISTRO_FEATURES", True).split())
machine_features= set(d.getVar("MACHINE_FEATURES", 1).split()) machine_features= set(d.getVar("MACHINE_FEATURES", True).split())
if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
d.setVar("ADD_BT", "task-base-bluetooth") d.setVar("ADD_BT", "task-base-bluetooth")

View File

@ -50,7 +50,7 @@ RDEPENDS_task-core-sdk = "\
#python generate_sdk_pkgs () { #python generate_sdk_pkgs () {
# poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES']
# pkgs = d.getVar('PACKAGES', 1).split() # pkgs = d.getVar('PACKAGES', True).split()
# for pkg in poky_pkgs.split(): # for pkg in poky_pkgs.split():
# newpkg = pkg.replace('task-core', 'task-core-sdk') # newpkg = pkg.replace('task-core', 'task-core-sdk')
# #

View File

@ -35,7 +35,7 @@ def map_uclibc_arch(a, d):
"""Return the uClibc architecture for the given TARGET_ARCH.""" """Return the uClibc architecture for the given TARGET_ARCH."""
import re import re
valid_archs = d.getVar('valid_archs', 1).split() valid_archs = d.getVar('valid_archs', True).split()
if re.match('^(arm|sa110).*', a): return 'arm' if re.match('^(arm|sa110).*', a): return 'arm'
elif re.match('^(i.86|athlon)$', a): return 'i386' elif re.match('^(i.86|athlon)$', a): return 'i386'
@ -50,14 +50,14 @@ def map_uclibc_arch(a, d):
else: else:
bb.error("cannot map '%s' to a uClibc architecture" % a) bb.error("cannot map '%s' to a uClibc architecture" % a)
export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d)}" export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', True), d)}"
def map_uclibc_abi(o, d): def map_uclibc_abi(o, d):
"""Return the uClibc ABI for the given TARGET_OS.""" """Return the uClibc ABI for the given TARGET_OS."""
import re import re
arch = d.getVar('TARGET_ARCH', 1) arch = d.getVar('TARGET_ARCH', True)
if map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d) == "arm": if map_uclibc_arch(d.getVar('TARGET_ARCH', True), d) == "arm":
if re.match('.*eabi$', o): return 'ARM_EABI' if re.match('.*eabi$', o): return 'ARM_EABI'
else: return 'ARM_OABI' else: return 'ARM_OABI'
# FIXME: This is inaccurate! Handle o32, n32, n64 # FIXME: This is inaccurate! Handle o32, n32, n64
@ -65,7 +65,7 @@ def map_uclibc_abi(o, d):
elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' elif re.match('^mips.*', arch): return 'MIPS_O32_ABI'
return "" return ""
export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', 1), d)}" export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', True), d)}"
def map_uclibc_endian(a, d): def map_uclibc_endian(a, d):
"""Return the uClibc endianess for the given TARGET_ARCH.""" """Return the uClibc endianess for the given TARGET_ARCH."""
@ -79,7 +79,7 @@ def map_uclibc_endian(a, d):
return 'BIG' return 'BIG'
return 'LITTLE' return 'LITTLE'
export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', 1), d)}" export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', True), d)}"
# internal helper # internal helper
def uclibc_cfg(feature, features, tokens, cnf, rem): def uclibc_cfg(feature, features, tokens, cnf, rem):

View File

@ -124,9 +124,9 @@ configmangle = '/^KERNEL_HEADERS/d; \
/^SHARED_LIB_LOADER_PREFIX/d; \ /^SHARED_LIB_LOADER_PREFIX/d; \
/^UCLIBC_EXTRA_CFLAGS/d; \ /^UCLIBC_EXTRA_CFLAGS/d; \
s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \
${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", 1) != "arm"]} \ ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", True) != "arm"]} \
${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", 1) == "yes"]} \ ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", True) == "yes"]} \
${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", True) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \
/^CROSS/d; \ /^CROSS/d; \
/^TARGET_ARCH=/d; \ /^TARGET_ARCH=/d; \
/^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \

View File

@ -20,7 +20,7 @@ python do_install_config () {
data = bb.data.expand(data, d) data = bb.data.expand(data, d)
outdir = os.path.join(d.getVar('D', 1), d.getVar('sysconfdir', 1), 'apt') outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
if not os.path.exists(outdir): if not os.path.exists(outdir):
os.makedirs(outdir) os.makedirs(outdir)
outpath = os.path.join(outdir, 'apt.conf.sample') outpath = os.path.join(outdir, 'apt.conf.sample')

View File

@ -59,15 +59,15 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \
${localstatedir} ${sysconfdir} \ ${localstatedir} ${sysconfdir} \
${libdir}/dpkg" ${libdir}/dpkg"
FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates"
FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', 1))} \ FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', True))} \
${docdir}/apt" ${docdir}/apt"
FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}" FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))}"
FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}"
do_install () { do_install () {
set -x set -x
${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', 1))} ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', True))}
${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))} ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))}
install -d ${D}${bindir} install -d ${D}${bindir}
install -m 0755 bin/apt-cdrom ${D}${bindir}/ install -m 0755 bin/apt-cdrom ${D}${bindir}/
install -m 0755 bin/apt-get ${D}${bindir}/ install -m 0755 bin/apt-get ${D}${bindir}/

View File

@ -9,6 +9,6 @@ SRC_URI = "${GNU_MIRROR}/automake/automake-${PV}.tar.bz2 "
inherit autotools inherit autotools
export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', 1))}" export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', True))}"
FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*"

View File

@ -13,8 +13,8 @@ SRC_URI[sha256sum] = "5e18bff75f01656c64f553412a8905527e1b85efaf3163c6fb81ea5aac
# Strip ${prefix} from ${docdir}, set result into docdir_stripped # Strip ${prefix} from ${docdir}, set result into docdir_stripped
python () { python () {
prefix=d.getVar("prefix", 1) prefix=d.getVar("prefix", True)
docdir=d.getVar("docdir", 1) docdir=d.getVar("docdir", True)
if not docdir.startswith(prefix): if not docdir.startswith(prefix):
raise bb.build.FuncFailed('docdir must contain prefix as its prefix') raise bb.build.FuncFailed('docdir must contain prefix as its prefix')

View File

@ -10,14 +10,14 @@ inherit autotools gettext
FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}"
def get_gcc_fpu_setting(bb, d): def get_gcc_fpu_setting(bb, d):
if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
return "--with-float=soft" return "--with-float=soft"
if d.getVar('TARGET_FPU', 1) in [ 'ppc-efd' ]: if d.getVar('TARGET_FPU', True) in [ 'ppc-efd' ]:
return "--enable-e500_double" return "--enable-e500_double"
return "" return ""
def get_gcc_mips_plt_setting(bb, d): def get_gcc_mips_plt_setting(bb, d):
if d.getVar('TARGET_ARCH', 1) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() : if d.getVar('TARGET_ARCH', True) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() :
return "--with-mips-plt" return "--with-mips-plt"
return "" return ""

View File

@ -27,7 +27,7 @@ EXTRA_OECONF_INTERMEDIATE ?= ""
GCCMULTILIB = "--disable-multilib" GCCMULTILIB = "--disable-multilib"
EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', 1) != 'no']} \ EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', True) != 'no']} \
--with-gnu-ld \ --with-gnu-ld \
--enable-shared \ --enable-shared \
--enable-languages=${LANGUAGES} \ --enable-languages=${LANGUAGES} \

View File

@ -31,7 +31,7 @@ BBCLASSEXTEND = "native"
DEPENDS = "libunistring bdwgc gmp libtool libffi" DEPENDS = "libunistring bdwgc gmp libtool libffi"
# add guile-native only to the target recipe's DEPENDS # add guile-native only to the target recipe's DEPENDS
DEPENDS += "${@['guile-native', ''][d.getVar('PN', 1) != 'guile']}" DEPENDS += "${@['guile-native', ''][d.getVar('PN', True) != 'guile']}"
EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix', ''][bb.data.inherits_class('native',d)]}" EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix', ''][bb.data.inherits_class('native',d)]}"

View File

@ -287,7 +287,7 @@ FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore"
# packages (actually the non modules packages and not created too) # packages (actually the non modules packages and not created too)
ALLOW_EMPTY_perl-modules = "1" ALLOW_EMPTY_perl-modules = "1"
PACKAGES_append = " perl-modules " PACKAGES_append = " perl-modules "
RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
python populate_packages_prepend () { python populate_packages_prepend () {
libdir = bb.data.expand('${libdir}/perl/${PV}', d) libdir = bb.data.expand('${libdir}/perl/${PV}', d)

View File

@ -70,7 +70,7 @@ do_configure_prepend () {
python __anonymous () { python __anonymous () {
import re import re
pn = d.getVar("PN", 1) pn = d.getVar("PN", True)
if not pn.endswith('-native') and not pn.endswith('-nativesdk'): if not pn.endswith('-native') and not pn.endswith('-nativesdk'):
raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only") raise bb.parse.SkipPackage("unfs-server is intended for native/nativesdk builds only")
} }

View File

@ -58,7 +58,7 @@ fakeroot do_install () {
python do_package_append() { python do_package_append() {
# Change permissions back the way they were, they probably had a reason... # Change permissions back the way they were, they probably had a reason...
workdir = d.getVar('WORKDIR', 1) workdir = d.getVar('WORKDIR', True)
os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir) os.system('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir)
} }

View File

@ -58,7 +58,7 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*"
PACKAGES_DYNAMIC_virtclass-native = "" PACKAGES_DYNAMIC_virtclass-native = ""
python populate_packages_prepend () { python populate_packages_prepend () {
postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d) loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d)

View File

@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () { python populate_packages_prepend () {
import os.path import os.path
prologue = d.getVar("postinst_prologue", 1) prologue = d.getVar("postinst_prologue", True)
postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
loaders_root = os.path.join(gtk_libdir, 'loaders') loaders_root = os.path.join(gtk_libdir, 'loaders')
@ -46,6 +46,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
if (d.getVar('DEBIAN_NAMES', 1)): if (d.getVar('DEBIAN_NAMES', True)):
d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
} }

View File

@ -34,8 +34,8 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () { python populate_packages_prepend () {
import os.path import os.path
prologue = d.getVar("postinst_prologue", 1) prologue = d.getVar("postinst_prologue", True)
postinst_pixbufloader = d.getVar("postinst_pixbufloader", 1) postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
loaders_root = os.path.join(gtk_libdir, 'loaders') loaders_root = os.path.join(gtk_libdir, 'loaders')
@ -46,6 +46,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
if (d.getVar('DEBIAN_NAMES', 1)): if (d.getVar('DEBIAN_NAMES', True)):
d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
} }

View File

@ -41,7 +41,7 @@ PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () { python populate_packages_prepend () {
import os.path import os.path
prologue = d.getVar("postinst_prologue", 1) prologue = d.getVar("postinst_prologue", True)
gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d) gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
immodules_root = os.path.join(gtk_libdir, 'immodules') immodules_root = os.path.join(gtk_libdir, 'immodules')
@ -50,6 +50,6 @@ python populate_packages_prepend () {
do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules') do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s') do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
if (d.getVar('DEBIAN_NAMES', 1)): if (d.getVar('DEBIAN_NAMES', True)):
d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0') d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
} }

View File

@ -31,8 +31,8 @@ inherit gnome
python populate_packages_prepend() { python populate_packages_prepend() {
import os.path import os.path
engines_root = os.path.join(d.getVar('libdir', 1), "gtk-2.0/2.10.0/engines") engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
themes_root = os.path.join(d.getVar('datadir', 1), "themes") themes_root = os.path.join(d.getVar('datadir', True), "themes")
do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='') do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='') do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')

View File

@ -1,6 +1,6 @@
def get_cairo_fpu_setting(bb, d): def get_cairo_fpu_setting(bb, d):
if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
return "--disable-some-floating-point" return "--disable-some-floating-point"
return "" return ""

View File

@ -1,6 +1,6 @@
def get_clutter_fpu_setting(bb, d): def get_clutter_fpu_setting(bb, d):
if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
return "--without-fpu" return "--without-fpu"
return "" return ""

View File

@ -3,7 +3,7 @@ inherit native
DEPENDS = "freetype-native expat-native zlib-native" DEPENDS = "freetype-native expat-native zlib-native"
EXTRA_OEMAKE = "" EXTRA_OEMAKE = ""
EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', 1)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', 1))]}" EXTRA_OECONF = "${@[' --disable-docs',' --disable-docs --with-freetype-config=%s/freetype-config' % d.getVar('STAGING_BINDIR', True)][os.path.isfile('%s/freetype-config' % d.getVar('STAGING_BINDIR', True))]}"
do_install_append () { do_install_append () {
install -d ${D}${bindir}/ install -d ${D}${bindir}/

View File

@ -13,7 +13,7 @@ EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gal
python populate_packages_prepend() { python populate_packages_prepend() {
import os.path import os.path
dri_drivers_root = os.path.join(d.getVar('libdir', 1), "dri") dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri")
do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='') do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
} }

View File

@ -50,7 +50,7 @@ fi
} }
python populate_packages_prepend () { python populate_packages_prepend () {
prologue = d.getVar("postinst_prologue", 1) prologue = d.getVar("postinst_prologue", True)
modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d) modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d)

View File

@ -26,7 +26,7 @@ XORG_PN = "libXft"
BBCLASSEXTEND = "native" BBCLASSEXTEND = "native"
python () { python () {
if d.getVar('DEBIAN_NAMES', 1): if d.getVar('DEBIAN_NAMES', True):
d.setVar('PKG_${PN}', '${MLPREFIX}libxft2') d.setVar('PKG_${PN}', '${MLPREFIX}libxft2')
} }

View File

@ -5,11 +5,11 @@ KERNEL_DEVICETREE_FLAGS = "-R 8 -p 0x3000"
python __anonymous () { python __anonymous () {
import bb import bb
devicetree = d.getVar("KERNEL_DEVICETREE", 1) or '' devicetree = d.getVar("KERNEL_DEVICETREE", True) or ''
if devicetree: if devicetree:
depends = d.getVar("DEPENDS", 1) depends = d.getVar("DEPENDS", True)
d.setVar("DEPENDS", "%s dtc-native" % depends) d.setVar("DEPENDS", "%s dtc-native" % depends)
packages = d.getVar("PACKAGES", 1) packages = d.getVar("PACKAGES", True)
d.setVar("PACKAGES", "%s kernel-devicetree" % packages) d.setVar("PACKAGES", "%s kernel-devicetree" % packages)
} }

View File

@ -1,6 +1,6 @@
def get_alsa_fpu_setting(bb, d): def get_alsa_fpu_setting(bb, d):
if d.getVar('TARGET_FPU', 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
return "--with-softfloat" return "--with-softfloat"
return "" return ""

View File

@ -10,13 +10,13 @@ python populate_packages_prepend () {
do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d)) do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d))
do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', bb.data.expand('${PN}-%s-staticdev', d), 'GStreamer plugin for %s (static development files)', extra_depends=bb.data.expand('${PN}-staticdev',d)) do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', bb.data.expand('${PN}-%s-staticdev', d), 'GStreamer plugin for %s (static development files)', extra_depends=bb.data.expand('${PN}-staticdev',d))
pn = d.getVar('PN', 1) pn = d.getVar('PN', True)
metapkg = pn + '-meta' metapkg = pn + '-meta'
d.setVar('ALLOW_EMPTY_' + metapkg, "1") d.setVar('ALLOW_EMPTY_' + metapkg, "1")
d.setVar('FILES_' + metapkg, "") d.setVar('FILES_' + metapkg, "")
blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ] blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ]
metapkg_rdepends = [] metapkg_rdepends = []
packages = d.getVar('PACKAGES', 1).split() packages = d.getVar('PACKAGES', True).split()
for pkg in packages[1:]: for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'): if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
metapkg_rdepends.append(pkg) metapkg_rdepends.append(pkg)

View File

@ -9,7 +9,7 @@ QT_GRAPHICS_SYSTEM ?= "raster"
VIRTUAL-RUNTIME_xserver_common ?= "x11-common" VIRTUAL-RUNTIME_xserver_common ?= "x11-common"
def _get_extra_rdepends(d): def _get_extra_rdepends(d):
gs = d.getVar('QT_GRAPHICS_SYSTEM', 1) gs = d.getVar('QT_GRAPHICS_SYSTEM', True)
if gs == "opengl": if gs == "opengl":
return "qt4-plugin-graphicssystems-glgraphicssystem" return "qt4-plugin-graphicssystems-glgraphicssystem"

View File

@ -45,7 +45,7 @@ python __anonymous () {
lib_packages = [] lib_packages = []
dev_packages = [] dev_packages = []
dbg_packages = [] dbg_packages = []
for name in d.getVar("QT_LIB_NAMES", 1).split(): for name in d.getVar("QT_LIB_NAMES", True).split():
pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
# NOTE: the headers for QtAssistantClient are different # NOTE: the headers for QtAssistantClient are different
incname = name.replace("QtAssistantClient", "QtAssistant") incname = name.replace("QtAssistantClient", "QtAssistant")
@ -61,10 +61,10 @@ python __anonymous () {
lib_packages.append(pkg) lib_packages.append(pkg)
dev_packages.append("%s-dev" % pkg) dev_packages.append("%s-dev" % pkg)
dbg_packages.append("%s-dbg" % pkg) dbg_packages.append("%s-dbg" % pkg)
for name in d.getVar("OTHER_PACKAGES", 1).split(): for name in d.getVar("OTHER_PACKAGES", True).split():
dbg_packages.append("%s-dbg" % name) dbg_packages.append("%s-dbg" % name)
for name in d.getVar("QT_EXTRA_LIBS", 1).split(): for name in d.getVar("QT_EXTRA_LIBS", True).split():
pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals()) d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals())
d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl

View File

@ -4,7 +4,7 @@ ARM_INSTRUCTION_SET = "arm"
def qt_arch(d): def qt_arch(d):
import bb, re import bb, re
arch = d.getVar('TARGET_ARCH', 1) arch = d.getVar('TARGET_ARCH', True)
if re.match("^i.86$", arch): if re.match("^i.86$", arch):
arch = "i386" arch = "i386"
elif re.match("^arm.*", arch): elif re.match("^arm.*", arch):

View File

@ -61,7 +61,7 @@ FILES_${PN}-extra = "/usr/games/ /usr/share/applications /etc/gconf/schemas"
python __anonymous () { python __anonymous () {
import bb import bb
var = bb.data.expand("FILES_${PN}", d, 1) var = bb.data.expand("FILES_${PN}", d, 1)
data = d.getVar(var, 1) data = d.getVar(var, True)
for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"): for name in ("bridges", "fifteen", "inertia", "map", "samegame", "slant"):
data = data + " /usr/games/%s" % name data = data + " /usr/games/%s" % name
data = data + " /usr/share/applications/%s.desktop" % name data = data + " /usr/share/applications/%s.desktop" % name

View File

@ -51,11 +51,11 @@ python __anonymous () {
packages = [] packages = []
extras = [] extras = []
for lib in d.getVar('BOOST_LIBS', 1).split( ): for lib in d.getVar('BOOST_LIBS', True).split( ):
pkg = "boost-%s" % lib.replace("_", "-") pkg = "boost-%s" % lib.replace("_", "-")
extras.append("--with-%s" % lib) extras.append("--with-%s" % lib)
packages.append(pkg) packages.append(pkg)
if not d.getVar("FILES_%s" % pkg, 1): if not d.getVar("FILES_%s" % pkg, True):
d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib)
d.setVar("BOOST_PACKAGES", " ".join(packages)) d.setVar("BOOST_PACKAGES", " ".join(packages))
d.setVar("BJAM_EXTRA", " ".join(extras)) d.setVar("BJAM_EXTRA", " ".join(extras))