Convert to use direct access to the data store (instead of bb.data.*Var*())

This is the result of running the following over the metadata:

sed \
-e 's:bb.data.\(setVar([^,()]*,[^,()]*\), *\([^ )]*\) *):\2.\1):g' \
-e 's:bb.data.\(setVarFlag([^,()]*,[^,()]*,[^,()]*\), *\([^) ]*\) *):\2.\1):g' \
-e 's:bb.data.\(getVar([^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
-e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^(), ]*\) *,\([^)]*\)):\2.\1,\3):g' \
-e 's:bb.data.\(getVarFlag([^,()]*,[^,()]*\), *\([^() ]*\) *):\2.\1):g' \
-e 's:bb.data.\(getVar([^,()]*\), *\([^) ]*\) *):\2.\1):g' \
-i `grep -ril bb.data *`
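
For instance, these before/after pairs (taken from the converted files in the
diff below) illustrate what the expressions do:

    bb.data.getVar('PV', d, 1)                         ->  d.getVar('PV', 1)
    bb.data.setVar('PR', pr, d)                        ->  d.setVar('PR', pr)
    bb.data.getVarFlag('do_fetch', 'depends', d)       ->  d.getVarFlag('do_fetch', 'depends')
    bb.data.setVarFlag('do_install', 'umask', 022, d)  ->  d.setVarFlag('do_install', 'umask', 022)

Calls where an argument itself contains commas or parentheses, such as
bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d), do not match
the patterns and are left on the bb.data API, as the diff below shows.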

(From OE-Core rev: b22831fd63164c4db9c0b72934d7d734a6585251)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
commit c8dee9b92d
parent 5d3860f4a8
Richard Purdie 2011-11-09 15:00:01 +00:00
130 changed files with 1056 additions and 1056 deletions

@@ -9,7 +9,7 @@ RDEPENDS_${PN} = "glibc-gconv-ibm850 glibc-gconv-cp1252 \
 SRC_URI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz"
 #want 2.x from 2.x.y for the installation directory
-SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
 FILES_${PN} += " \
 ${datadir}/icons/* \

@@ -17,7 +17,7 @@ SVNURI = "svn://svn.abisource.com/abiword/trunk;module=abiword;proto=http"
 SVNSRC = "${WORKDIR}/abi"
 #want 2.x from 2.x.y for the installation directory
-SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}"
+SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
 FILES_${PN} += " \
 ${datadir}/icons/* \

@@ -1,6 +1,6 @@
 def get_poppler_fpu_setting(bb, d):
-    if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
+    if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
         return "--enable-fixedpoint"
     return ""

@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', 1):
         return ''
-    pn = bb.data.getVar('PN', d, 1)
+    pn = d.getVar('PN', 1)
     deps = ''
     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -13,7 +13,7 @@ def autotools_dep_prepend(d):
         deps += 'libtool-native '
         if not bb.data.inherits_class('native', d) \
                 and not bb.data.inherits_class('cross', d) \
-                and not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1):
+                and not d.getVar('INHIBIT_DEFAULT_DEPS', 1):
             deps += 'libtool-cross '
     return deps + 'gnu-config-native '

@@ -60,9 +60,9 @@ def base_dep_prepend(d):
     # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
     # we need that built is the responsibility of the patch function / class, not
     # the application.
-    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
-        if (bb.data.getVar('HOST_SYS', d, 1) !=
-            bb.data.getVar('BUILD_SYS', d, 1)):
+    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
+        if (d.getVar('HOST_SYS', 1) !=
+            d.getVar('BUILD_SYS', 1)):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
@@ -73,13 +73,13 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(bb.data.getVar('FILE', d, True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
 addtask fetch
 do_fetch[dirs] = "${DL_DIR}"
 python base_do_fetch() {
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return
@@ -96,14 +96,14 @@ python base_do_fetch() {
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 python base_do_unpack() {
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    rootdir = bb.data.getVar('WORKDIR', localdata, True)
+    rootdir = localdata.getVar('WORKDIR', True)
     try:
         fetcher = bb.fetch2.Fetch(src_uri, localdata)
@@ -118,7 +118,7 @@ def generate_git_config(e):
     from bb import data
     if data.getVar('GIT_CORE_CONFIG', e.data, True):
-        gitconfig_path = bb.data.getVar('GIT_CONFIG', e.data, True)
+        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
         proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True)
         bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data))
@@ -207,11 +207,11 @@ python base_eventhandler() {
     name = getName(e)
     if name.startswith("BuildStarted"):
-        bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
+        e.data.setVar( 'BB_VERSION', bb.__version__)
         statusvars = ['BB_VERSION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TUNE_FEATURES', 'TARGET_FPU']
-        statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
-        layers = (bb.data.getVar("BBLAYERS", e.data, 1) or "").split()
+        statuslines = ["%-17s = \"%s\"" % (i, e.data.getVar(i, 1) or '') for i in statusvars]
+        layers = (e.data.getVar("BBLAYERS", 1) or "").split()
         layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
             base_get_metadata_git_branch(i, None).strip(), \
             base_get_metadata_git_revision(i, None)) \
@@ -237,7 +237,7 @@ python base_eventhandler() {
     needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
     pesteruser = []
     for v in needed_vars:
-        val = bb.data.getVar(v, e.data, 1)
+        val = e.data.getVar(v, 1)
         if not val or val == 'INVALID':
             pesteruser.append(v)
     if pesteruser:
@@ -330,23 +330,23 @@ python () {
         appendVar('EXTRA_OECONF', extraconf)
     # If PRINC is set, try and increase the PR value by the amount specified
-    princ = bb.data.getVar('PRINC', d, True)
+    princ = d.getVar('PRINC', True)
     if princ:
-        pr = bb.data.getVar('PR', d, True)
+        pr = d.getVar('PR', True)
         pr_prefix = re.search("\D+",pr)
         prval = re.search("\d+",pr)
         if pr_prefix is None or prval is None:
            bb.error("Unable to analyse format of PR variable: %s" % pr)
        nval = int(prval.group(0)) + int(princ)
        pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
-        bb.data.setVar('PR', pr, d)
-    pn = bb.data.getVar('PN', d, 1)
-    license = bb.data.getVar('LICENSE', d, True)
+        d.setVar('PR', pr)
+    pn = d.getVar('PN', 1)
+    license = d.getVar('LICENSE', True)
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
-    commercial_license = " %s " % bb.data.getVar('COMMERCIAL_LICENSE', d, 1)
+    commercial_license = " %s " % d.getVar('COMMERCIAL_LICENSE', 1)
     import re
     pnr = "[ \t]%s[ \t]" % pn.replace('+', "\+")
     if commercial_license and re.search(pnr, commercial_license):
@@ -356,86 +356,86 @@ python () {
     # If we're building a target package we need to use fakeroot (pseudo)
     # in order to capture permissions, owners, groups and special files
     if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
-        bb.data.setVarFlag('do_configure', 'umask', 022, d)
-        bb.data.setVarFlag('do_compile', 'umask', 022, d)
-        deps = (bb.data.getVarFlag('do_install', 'depends', d) or "").split()
+        d.setVarFlag('do_configure', 'umask', 022)
+        d.setVarFlag('do_compile', 'umask', 022)
+        deps = (d.getVarFlag('do_install', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
         bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d)
-        bb.data.setVarFlag('do_install', 'fakeroot', 1, d)
-        bb.data.setVarFlag('do_install', 'umask', 022, d)
-        deps = (bb.data.getVarFlag('do_package', 'depends', d) or "").split()
+        d.setVarFlag('do_install', 'fakeroot', 1)
+        d.setVarFlag('do_install', 'umask', 022)
+        deps = (d.getVarFlag('do_package', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
         bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d)
-        bb.data.setVarFlag('do_package', 'fakeroot', 1, d)
-        bb.data.setVarFlag('do_package', 'umask', 022, d)
-        bb.data.setVarFlag('do_package_setscene', 'fakeroot', 1, d)
-    source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
+        d.setVarFlag('do_package', 'fakeroot', 1)
+        d.setVarFlag('do_package', 'umask', 022)
+        d.setVarFlag('do_package_setscene', 'fakeroot', 1)
+    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
     if not source_mirror_fetch:
-        need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
+        need_host = d.getVar('COMPATIBLE_HOST', 1)
         if need_host:
             import re
-            this_host = bb.data.getVar('HOST_SYS', d, 1)
+            this_host = d.getVar('HOST_SYS', 1)
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
-        need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
+        need_machine = d.getVar('COMPATIBLE_MACHINE', 1)
         if need_machine:
             import re
-            this_machine = bb.data.getVar('MACHINE', d, 1)
+            this_machine = d.getVar('MACHINE', 1)
             if this_machine and not re.match(need_machine, this_machine):
-                this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1)
+                this_soc_family = d.getVar('SOC_FAMILY', 1)
                 if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
                     raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
-        dont_want_license = bb.data.getVar('INCOMPATIBLE_LICENSE', d, 1)
+        dont_want_license = d.getVar('INCOMPATIBLE_LICENSE', 1)
         if dont_want_license and not pn.endswith("-native") and not pn.endswith("-cross") and not pn.endswith("-cross-initial") and not pn.endswith("-cross-intermediate") and not pn.endswith("-crosssdk-intermediate") and not pn.endswith("-crosssdk") and not pn.endswith("-crosssdk-initial"):
-            hosttools_whitelist = (bb.data.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
-            lgplv2_whitelist = (bb.data.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, d, 1) or "").split()
-            dont_want_whitelist = (bb.data.getVar('WHITELIST_%s' % dont_want_license, d, 1) or "").split()
+            hosttools_whitelist = (d.getVar('HOSTTOOLS_WHITELIST_%s' % dont_want_license, 1) or "").split()
+            lgplv2_whitelist = (d.getVar('LGPLv2_WHITELIST_%s' % dont_want_license, 1) or "").split()
+            dont_want_whitelist = (d.getVar('WHITELIST_%s' % dont_want_license, 1) or "").split()
             if pn not in hosttools_whitelist and pn not in lgplv2_whitelist and pn not in dont_want_whitelist:
                 import re
-                this_license = bb.data.getVar('LICENSE', d, 1)
+                this_license = d.getVar('LICENSE', 1)
                 if this_license and re.search(dont_want_license, this_license):
                     bb.note("SKIPPING %s because it's %s" % (pn, this_license))
                     raise bb.parse.SkipPackage("incompatible with license %s" % this_license)
     # Git packages should DEPEND on git-native
-    srcuri = bb.data.getVar('SRC_URI', d, 1)
+    srcuri = d.getVar('SRC_URI', 1)
     if "git://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
         depends = depends + " git-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
     # Mercurial packages should DEPEND on mercurial-native
     elif "hg://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
         depends = depends + " mercurial-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
     # OSC packages should DEPEND on osc-native
     elif "osc://" in srcuri:
-        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
+        depends = d.getVarFlag('do_fetch', 'depends') or ""
        depends = depends + " osc-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
+        d.setVarFlag('do_fetch', 'depends', depends)
     # *.xz should depends on xz-native for unpacking
     # Not endswith because of "*.patch.xz;patch=1". Need bb.decodeurl in future
     if '.xz' in srcuri:
-        depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+        depends = d.getVarFlag('do_unpack', 'depends') or ""
         depends = depends + " xz-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_unpack', 'depends', depends, d)
+        d.setVarFlag('do_unpack', 'depends', depends)
     # unzip-native should already be staged before unpacking ZIP recipes
     if ".zip" in srcuri:
-        depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
+        depends = d.getVarFlag('do_unpack', 'depends') or ""
         depends = depends + " unzip-native:do_populate_sysroot"
-        bb.data.setVarFlag('do_unpack', 'depends', depends, d)
     # 'multimachine' handling
-    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
-    pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
+        d.setVarFlag('do_unpack', 'depends', depends)
+    # 'multimachine' handling
+    mach_arch = d.getVar('MACHINE_ARCH', 1)
+    pkg_arch = d.getVar('PACKAGE_ARCH', 1)
     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -445,7 +445,7 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
     #
-    override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', 1)
     if override != '0':
         paths = []
         for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
@@ -461,18 +461,18 @@ python () {
         for mp in paths:
             if local.startswith(mp):
                 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
-                bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+                d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                 return
-    packages = bb.data.getVar('PACKAGES', d, 1).split()
+    packages = d.getVar('PACKAGES', 1).split()
     for pkg in packages:
-        pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, 1)
         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
-            bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
             bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
 }
@@ -483,7 +483,7 @@ python do_cleansstate() {
 addtask cleanall after do_cleansstate
 python do_cleanall() {
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
     if len(src_uri) == 0:
         return

@@ -109,14 +109,14 @@ python bugzilla_eventhandler() {
         return
     if name == "TaskFailed":
-        xmlrpc = bb.data.getVar("BUGZILLA_XMLRPC", data, True)
-        user = bb.data.getVar("BUGZILLA_USER", data, True)
-        passw = bb.data.getVar("BUGZILLA_PASS", data, True)
-        product = bb.data.getVar("BUGZILLA_PRODUCT", data, True)
-        compon = bb.data.getVar("BUGZILLA_COMPONENT", data, True)
-        version = bb.data.getVar("BUGZILLA_VERSION", data, True)
-        proxy = bb.data.getVar('http_proxy', data, True )
+        xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)
+        user = data.getVar("BUGZILLA_USER", True)
+        passw = data.getVar("BUGZILLA_PASS", True)
+        product = data.getVar("BUGZILLA_PRODUCT", True)
+        compon = data.getVar("BUGZILLA_COMPONENT", True)
+        version = data.getVar("BUGZILLA_VERSION", True)
+        proxy = data.getVar('http_proxy', True )
         if (proxy):
             import urllib2
             s, u, p, hostport = urllib2._parse_proxy(proxy)
@@ -132,14 +132,14 @@ python bugzilla_eventhandler() {
                 'component': compon}
         # evil hack to figure out what is going on
-        debug_file = open(os.path.join(bb.data.getVar("TMPDIR", data, True),"..","bugzilla-log"),"a")
+        debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
         file = None
-        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : bb.data.getVar("PN", data, True),
-                                                     "pv" : bb.data.getVar("PV", data, True),
+        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
+                                                     "pv" : data.getVar("PV", True),
                                                    }
-        log_file = glob.glob("%s/log.%s.*" % (bb.data.getVar('T', event.data, True), event.task))
-        text = "The %s step in %s failed at %s for machine %s" % (e.task, bb.data.getVar("PN", data, True), bb.data.getVar('DATETIME', data, True), bb.data.getVar( 'MACHINE', data, True ) )
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
+        text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
         if len(log_file) != 0:
             print >> debug_file, "Adding log file %s" % log_file[0]
             file = open(log_file[0], 'r')
@@ -167,7 +167,7 @@ python bugzilla_eventhandler() {
         if bug_number and log:
             print >> debug_file, "The bug is known as '%s'" % bug_number
-            desc = "Build log for machine %s" % (bb.data.getVar('MACHINE', data, True))
+            desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
             if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
                 print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
             else:
@@ -181,6 +181,6 @@ python bugzilla_eventhandler() {
         # store bug number for oestats-client
         if bug_number:
-            bb.data.setVar('OESTATS_BUG_NUMBER', bug_number, data)
+            data.setVar('OESTATS_BUG_NUMBER', bug_number)
 }

@@ -21,25 +21,25 @@ def get_cputime():
     return sum(int(field) for field in fields)
 def set_bn(e):
-    bn = e.getPkgs()[0] + "-" + bb.data.getVar('MACHINE',e.data, True)
+    bn = e.getPkgs()[0] + "-" + e.data.getVar('MACHINE', True)
     try:
-        os.remove(bb.data.getVar('BNFILE', e.data, True))
+        os.remove(e.data.getVar('BNFILE', True))
     except:
         pass
-    file = open(bb.data.getVar('BNFILE', e.data, True), "w")
-    file.write(os.path.join(bn, bb.data.getVar('BUILDNAME', e.data, True)))
+    file = open(e.data.getVar('BNFILE', True), "w")
+    file.write(os.path.join(bn, e.data.getVar('BUILDNAME', True)))
     file.close()
 def get_bn(e):
-    file = open(bb.data.getVar('BNFILE', e.data, True))
+    file = open(e.data.getVar('BNFILE', True))
     bn = file.readline()
     file.close()
     return bn
 def set_device(e):
-    tmpdir = bb.data.getVar('TMPDIR', e.data, True)
+    tmpdir = e.data.getVar('TMPDIR', True)
     try:
-        os.remove(bb.data.getVar('DEVFILE', e.data, True))
+        os.remove(e.data.getVar('DEVFILE', True))
     except:
         pass
 ############################################################################
@@ -66,12 +66,12 @@ def set_device(e):
             rdev=line.split()[2]
         else:
             rdev="NoLogicalDevice"
-    file = open(bb.data.getVar('DEVFILE', e.data, True), "w")
+    file = open(e.data.getVar('DEVFILE', True), "w")
     file.write(rdev)
     file.close()
 def get_device(e):
-    file = open(bb.data.getVar('DEVFILE', e.data, True))
+    file = open(e.data.getVar('DEVFILE', True))
     device = file.readline()
     file.close()
     return device
@@ -126,7 +126,7 @@ def get_timedata(var, data):
 def write_task_data(status, logfile, dev, e):
     bn = get_bn(e)
-    bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+    bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
     taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
     file = open(os.path.join(logfile), "a")
     timedata = get_timedata("__timedata_task", e.data)
@@ -168,7 +168,7 @@ python run_buildstats () {
         # set the buildname
         ########################################################################
         try:
-            bb.mkdirhier(bb.data.getVar('BUILDSTATS_BASE', e.data, True))
+            bb.mkdirhier(e.data.getVar('BUILDSTATS_BASE', True))
         except:
             pass
         set_bn(e)
@@ -176,7 +176,7 @@ python run_buildstats () {
         set_device(e)
         device = get_device(e)
-        bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         try:
             bb.mkdirhier(bsdir)
         except:
@@ -199,7 +199,7 @@ python run_buildstats () {
     elif isinstance(e, bb.event.BuildCompleted):
         bn = get_bn(e)
         device = get_device(e)
-        bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
         build_time = os.path.join(bsdir, "build_stats")
         file = open(build_time, "a")
@@ -224,7 +224,7 @@ python run_buildstats () {
     if isinstance(e, bb.build.TaskStarted):
         bn = get_bn(e)
         device = get_device(e)
-        bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
         if device != "NoLogicalDevice":
             set_diskdata("__diskdata_task", device, e.data)
@@ -242,14 +242,14 @@ python run_buildstats () {
     elif isinstance(e, bb.build.TaskSucceeded):
         bn = get_bn(e)
         device = get_device(e)
-        bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
         write_task_data("passed", os.path.join(taskdir, e.task), device, e)
         if e.task == "do_rootfs":
-            bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+            bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
             bs=os.path.join(bsdir, "build_stats")
             file = open(bs,"a")
-            rootfs = bb.data.getVar('IMAGE_ROOTFS', e.data, True)
+            rootfs = e.data.getVar('IMAGE_ROOTFS', True)
             rootfs_size = subprocess.Popen(["du", "-sh", rootfs], stdout=subprocess.PIPE).stdout.read()
             file.write("Uncompressed Rootfs size: %s" % rootfs_size)
             file.close()
@@ -257,7 +257,7 @@ python run_buildstats () {
     elif isinstance(e, bb.build.TaskFailed):
         bn = get_bn(e)
         device = get_device(e)
-        bsdir = os.path.join(bb.data.getVar('BUILDSTATS_BASE', e.data, True), bn)
+        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
         taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
         write_task_data("failed", os.path.join(taskdir, e.task), device, e)
     ########################################################################

@@ -28,7 +28,7 @@ def get_perl_version(d):
 # Determine where the library directories are
 def perl_get_libdirs(d):
-    libdir = bb.data.getVar('libdir', d, 1)
+    libdir = d.getVar('libdir', 1)
     if is_target(d) == "no":
         libdir += '/perl-native'
     libdir += '/perl'

@@ -10,9 +10,9 @@ inherit cpan-base
 # libmodule-build-perl)
 #
 def cpan_build_dep_prepend(d):
-    if bb.data.getVar('CPAN_BUILD_DEPS', d, 1):
+    if d.getVar('CPAN_BUILD_DEPS', 1):
         return ''
-    pn = bb.data.getVar('PN', d, 1)
+    pn = d.getVar('PN', 1)
     if pn in ['libmodule-build-perl', 'libmodule-build-perl-native']:
         return ''
     return 'libmodule-build-perl-native '

@@ -16,7 +16,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S
 #
 PACKAGE_ARCH = "${SDK_ARCH}-nativesdk"
 python () {
-    archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split()
+    archs = d.getVar('PACKAGE_ARCHS', True).split()
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-nativesdk')

@@ -22,8 +22,8 @@ python () {
 python debian_package_name_hook () {
     import glob, copy, stat, errno, re
-    pkgdest = bb.data.getVar('PKGDEST', d, 1)
-    packages = bb.data.getVar('PACKAGES', d, 1)
+    pkgdest = d.getVar('PKGDEST', 1)
+    packages = d.getVar('PACKAGES', 1)
     bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
     lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
     so_re = re.compile("lib.*\.so")
@@ -60,7 +60,7 @@ python debian_package_name_hook () {
             for f in files:
                 if so_re.match(f):
                     fp = os.path.join(root, f)
-                    cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
+                    cmd = (d.getVar('BUILD_PREFIX', 1) or "") + "objdump -p " + fp + " 2>/dev/null"
                     fd = os.popen(cmd)
                     lines = fd.readlines()
                     fd.close()
@@ -74,7 +74,7 @@ python debian_package_name_hook () {
         if len(sonames) == 1:
             soname = sonames[0]
         elif len(sonames) > 1:
-            lead = bb.data.getVar('LEAD_SONAME', d, 1)
+            lead = d.getVar('LEAD_SONAME', 1)
             if lead:
                 r = re.compile(lead)
                 filtered = []
@@ -95,21 +95,21 @@ python debian_package_name_hook () {
         if soname_result:
             (pkgname, devname) = soname_result
             for pkg in packages.split():
-                if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
+                if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
                     continue
-                debian_pn = bb.data.getVar('DEBIANNAME_' + pkg, d)
+                debian_pn = d.getVar('DEBIANNAME_' + pkg)
                 if debian_pn:
                     newpkg = debian_pn
                 elif pkg == orig_pkg:
                     newpkg = pkgname
                 else:
                     newpkg = pkg.replace(orig_pkg, devname, 1)
-                mlpre=bb.data.getVar('MLPREFIX', d, True)
+                mlpre=d.getVar('MLPREFIX', True)
                 if mlpre:
                     if not newpkg.find(mlpre) == 0:
                         newpkg = mlpre + newpkg
                 if newpkg != pkg:
-                    bb.data.setVar('PKG_' + pkg, newpkg, d)
+                    d.setVar('PKG_' + pkg, newpkg)
     # reversed sort is needed when some package is substring of another
     # ie in ncurses we get without reverse sort:
@@ -117,7 +117,7 @@ python debian_package_name_hook () {
     # and later
     # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
     # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
-    for pkg in sorted((bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split(), reverse=True):
+    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', 1) or "").split(), reverse=True):
         auto_libname(packages, pkg)
 }

@@ -19,87 +19,87 @@ addtask distrodata_np
 do_distrodata_np[nostamp] = "1"
 python do_distrodata_np() {
     localdata = bb.data.createCopy(d)
-    pn = bb.data.getVar("PN", d, True)
+    pn = d.getVar("PN", True)
     bb.note("Package Name: %s" % pn)
     import oe.distro_check as dist_check
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dist_check.update_distro_data(distro_check_dir, datetime)
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-nativesdk") != -1:
         pnstripped = pn.split("-nativesdk")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-crosssdk") != -1:
         pnstripped = pn.split("-crosssdk")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', localdata, True)
-    pcurver = bb.data.getVar('PV', localdata, True)
-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+    pname = localdata.getVar('PN', True)
+    pcurver = localdata.getVar('PV', True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')
-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
-    if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+    pgrp = localdata.getVar('SECTION', True)
+    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    if localdata.getVar('LIC_FILES_CHKSUM', True):
         pchksum="1"
     else:
         pchksum="0"
-    if bb.data.getVar('RECIPE_STATUS', localdata, True):
+    if localdata.getVar('RECIPE_STATUS', True):
         hasrstatus="1"
     else:
         hasrstatus="0"
-    rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+    rstatus = localdata.getVar('RECIPE_STATUS', True)
     if rstatus is not None:
         rstatus = rstatus.replace(',','')
-    pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+    pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
-    noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')
-    ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
-    rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
-    rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
-    dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
-    rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+    ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+    rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+    dc = localdata.getVar('DEPENDENCY_CHECK', True)
+    rc = localdata.getVar('RECIPE_COMMENTS', True)
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
     bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s\n" % \
@@ -113,81 +113,81 @@ python do_distrodata_np() {
 addtask distrodata
 do_distrodata[nostamp] = "1"
 python do_distrodata() {
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "distrodata.csv")
     import oe.distro_check as dist_check
     localdata = bb.data.createCopy(d)
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dist_check.update_distro_data(distro_check_dir, datetime)
-    pn = bb.data.getVar("PN", d, True)
+    pn = d.getVar("PN", True)
     bb.note("Package Name: %s" % pn)
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', localdata, True)
-    pcurver = bb.data.getVar('PV', localdata, True)
-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
+    pname = localdata.getVar('PN', True)
+    pcurver = localdata.getVar('PV', True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
     if pdesc is not None:
         pdesc = pdesc.replace(',','')
         pdesc = pdesc.replace('\n','')
-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True).replace(',','_')
-    if bb.data.getVar('LIC_FILES_CHKSUM', localdata, True):
+    pgrp = localdata.getVar('SECTION', True)
+    plicense = localdata.getVar('LICENSE', True).replace(',','_')
+    if localdata.getVar('LIC_FILES_CHKSUM', True):
         pchksum="1"
     else:
         pchksum="0"
-    if bb.data.getVar('RECIPE_STATUS', localdata, True):
+    if localdata.getVar('RECIPE_STATUS', True):
         hasrstatus="1"
     else:
         hasrstatus="0"
-    rstatus = bb.data.getVar('RECIPE_STATUS', localdata, True)
+    rstatus = localdata.getVar('RECIPE_STATUS', True)
     if rstatus is not None:
         rstatus = rstatus.replace(',','')
-    pupver = bb.data.getVar('RECIPE_LATEST_VERSION', localdata, True)
+    pupver = localdata.getVar('RECIPE_LATEST_VERSION', True)
     if pcurver == pupver:
         vermatch="1"
     else:
         vermatch="0"
-    noupdate_reason = bb.data.getVar('RECIPE_NO_UPDATE_REASON', localdata, True)
+    noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True)
     if noupdate_reason is None:
         noupdate="0"
     else:
         noupdate="1"
         noupdate_reason = noupdate_reason.replace(',','')
-    ris = bb.data.getVar('RECIPE_INTEL_SECTION', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
-    rttr = bb.data.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', localdata, True)
-    rlrd = bb.data.getVar('RECIPE_LATEST_RELEASE_DATE', localdata, True)
-    dc = bb.data.getVar('DEPENDENCY_CHECK', localdata, True)
-    rc = bb.data.getVar('RECIPE_COMMENTS', localdata, True)
+    ris = localdata.getVar('RECIPE_INTEL_SECTION', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    rttr = localdata.getVar('RECIPE_TIME_BETWEEN_LAST_TWO_RELEASES', True)
+    rlrd = localdata.getVar('RECIPE_LATEST_RELEASE_DATE', True)
+    dc = localdata.getVar('DEPENDENCY_CHECK', True)
+    rc = localdata.getVar('RECIPE_COMMENTS', True)
     # do the comparison
     result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
@@ -298,7 +298,7 @@ python do_checkpkg() {
     Clear internal url cache as it's a temporary check. Not doing so will have
     bitbake check url multiple times when looping through a single url
     """
-    fn = bb.data.getVar('FILE', d, True)
+    fn = d.getVar('FILE', True)
     bb.fetch2.urldata_cache[fn] = {}
     """
@@ -329,7 +329,7 @@ python do_checkpkg() {
     Return new version if success, or else error in "Errxxxx" style
     """
     def check_new_dir(url, curver, d):
-        pn = bb.data.getVar('PN', d, True)
+        pn = d.getVar('PN', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -372,7 +372,7 @@ python do_checkpkg() {
         f.close()
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, 1)
+            logpath = d.getVar('LOG_DIR', 1)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
@@ -388,7 +388,7 @@ python do_checkpkg() {
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", curname):
             return pcurver
-        pn = bb.data.getVar('PN', d, True)
+        pn = d.getVar('PN', True)
         f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
         status = internal_fetch_wget(url, d, f)
         fhtml = f.read()
@@ -431,55 +431,55 @@ python do_checkpkg() {
         f.close()
         """if host hasn't directory information, no need to save tmp file"""
         if status != "ErrHostNoDir" and re.match("Err", status):
-            logpath = bb.data.getVar('LOG_DIR', d, True)
+            logpath = d.getVar('LOG_DIR', True)
             os.system("cp %s %s/" % (f.name, logpath))
         os.unlink(f.name)
         return status
     """first check whether a uri is provided"""
-    src_uri = bb.data.getVar('SRC_URI', d, True)
+    src_uri = d.getVar('SRC_URI', True)
     if not src_uri:
         return
     """initialize log files."""
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     logfile = os.path.join(logpath, "checkpkg.csv")
     """generate package information from .bb file"""
-    pname = bb.data.getVar('PN', d, True)
+    pname = d.getVar('PN', True)
     if pname.find("-native") != -1:
         pnstripped = pname.split("-native")
         bb.note("Native Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pname.find("-cross") != -1:
         pnstripped = pname.split("-cross")
         bb.note("cross Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
     if pname.find("-initial") != -1:
         pnstripped = pname.split("-initial")
         bb.note("initial Split: %s" % pnstripped)
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata)
+        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
         bb.data.update_data(localdata)
-    pdesc = bb.data.getVar('DESCRIPTION', localdata, True)
-    pgrp = bb.data.getVar('SECTION', localdata, True)
-    pversion = bb.data.getVar('PV', localdata, True)
-    plicense = bb.data.getVar('LICENSE', localdata, True)
-    psection = bb.data.getVar('SECTION', localdata, True)
-    phome = bb.data.getVar('HOMEPAGE', localdata, True)
-    prelease = bb.data.getVar('PR', localdata, True)
-    ppriority = bb.data.getVar('PRIORITY', localdata, True)
-    pdepends = bb.data.getVar('DEPENDS', localdata, True)
-    pbugtracker = bb.data.getVar('BUGTRACKER', localdata, True)
-    ppe = bb.data.getVar('PE', localdata, True)
-    psrcuri = bb.data.getVar('SRC_URI', localdata, True)
-    maintainer = bb.data.getVar('RECIPE_MAINTAINER', localdata, True)
+    pdesc = localdata.getVar('DESCRIPTION', True)
+    pgrp = localdata.getVar('SECTION', True)
+    pversion = localdata.getVar('PV', True)
+    plicense = localdata.getVar('LICENSE', True)
+    psection = localdata.getVar('SECTION', True)
+    phome = localdata.getVar('HOMEPAGE', True)
+    prelease = localdata.getVar('PR', True)
+    ppriority = localdata.getVar('PRIORITY', True)
+    pdepends = localdata.getVar('DEPENDS', True)
+    pbugtracker = localdata.getVar('BUGTRACKER', True)
+    ppe = localdata.getVar('PE', True)
+    psrcuri = localdata.getVar('SRC_URI', True)
+    maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
     found = 0
     for uri in src_uri.split():
@@ -497,9 +497,9 @@ python do_checkpkg() {
         (type, host, path, user, pswd, parm) = bb.decodeurl(uri)
         if type in ['http', 'https', 'ftp']:
-            pcurver = bb.data.getVar('PV', d, True)
+            pcurver = d.getVar('PV', True)
         else:
-            pcurver = bb.data.getVar("SRCREV", d, True)
+            pcurver = d.getVar("SRCREV", True)
         if type in ['http', 'https', 'ftp']:
             newver = pcurver
@@ -639,7 +639,7 @@ python do_checkpkg() {
             pstatus += ":%s%s" % (host, path)
         """Read from manual distro tracking fields as alternative"""
-        pmver = bb.data.getVar("RECIPE_LATEST_VERSION", d, True)
+        pmver = d.getVar("RECIPE_LATEST_VERSION", True)
         if not pmver:
             pmver = "N/A"
             pmstatus = "ErrNoRecipeData"
@@ -688,12 +688,12 @@ python do_distro_check() {
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
-    tmpdir = bb.data.getVar('TMPDIR', d, True)
+    tmpdir = d.getVar('TMPDIR', True)
     distro_check_dir = os.path.join(tmpdir, "distro_check")
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
     result_file = os.path.join(logpath, "distrocheck.csv")
-    datetime = bb.data.getVar('DATETIME', localdata, True)
+    datetime = localdata.getVar('DATETIME', True)
     dc.update_distro_data(distro_check_dir, datetime)
     # do the comparison
@@ -734,12 +734,12 @@ python do_checklicense() {
     import os
     import bb
     import shutil
-    logpath = bb.data.getVar('LOG_DIR', d, True)
+    logpath = d.getVar('LOG_DIR', True)
     bb.utils.mkdirhier(logpath)
-    pn = bb.data.getVar('PN', d, True)
+    pn = d.getVar('PN', True)
     logfile = os.path.join(logpath, "missinglicense.csv")
-    generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True)
-    license_types = bb.data.getVar('LICENSE', d, True)
+    generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
+    license_types = d.getVar('LICENSE', True)
     for license_type in ((license_types.replace('+', '').replace('|', '&')
                           .replace('(', '').replace(')', '').replace(';', '')
                           .replace(',', '').replace(" ", "").split("&"))):

@@ -1,4 +1,4 @@
-DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
+DEPENDS += "${@["python-native python", ""][(d.getVar('PACKAGES', 1) == '')]}"
 RDEPENDS_${PN} += "${@['', 'python-core']['${PN}' == '${BPN}']}"
 inherit distutils-common-base

@@ -1,3 +1,3 @@
-DEPENDS += "${@["python-native", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
+DEPENDS += "${@["python-native", ""][(d.getVar('PACKAGES', 1) == '')]}"
 inherit distutils-common-base

@@ -27,8 +27,8 @@ done
 python populate_packages_append () {
     import re
-    packages = bb.data.getVar('PACKAGES', d, 1).split()
-    pkgdest = bb.data.getVar('PKGDEST', d, 1)
+    packages = d.getVar('PACKAGES', 1).split()
+    pkgdest = d.getVar('PKGDEST', 1)
     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -41,15 +41,15 @@ python populate_packages_append () {
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
             bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
-            postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+            postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += bb.data.getVar('gconf_postinst', d, 1)
-            bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
-            prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
+            postinst += d.getVar('gconf_postinst', 1)
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+            prerm = d.getVar('pkg_prerm_%s' % pkg, 1) or d.getVar('pkg_prerm', 1)
             if not prerm:
                 prerm = '#!/bin/sh\n'
-            prerm += bb.data.getVar('gconf_prerm', d, 1)
-            bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
+            prerm += d.getVar('gconf_prerm', 1)
+            d.setVar('pkg_prerm_%s' % pkg, prerm)
 }

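The gconf hunk above illustrates the read-modify-write idiom in its converted form: fetch a per-package script, fall back to the generic one, append a fragment, and store the result back. A sketch of that idiom with the new accessors (the helper name append_postinst and its arguments are illustrative, not part of the class):

    def append_postinst(d, pkg, fragment_var):
        # Prefer the package-specific script, else the generic one.
        postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += d.getVar(fragment_var, 1)
        # Write the combined script back for this package only.
        d.setVar('pkg_postinst_%s' % pkg, postinst)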

@ -28,31 +28,31 @@ done
} }
python populate_packages_append () { python populate_packages_append () {
packages = bb.data.getVar('PACKAGES', d, 1).split() packages = d.getVar('PACKAGES', 1).split()
pkgdest = bb.data.getVar('PKGDEST', d, 1) pkgdest = d.getVar('PKGDEST', 1)
for pkg in packages: for pkg in packages:
icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, bb.data.getVar('datadir', d, 1)) icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', 1))
if not os.path.exists(icon_dir): if not os.path.exists(icon_dir):
continue continue
bb.note("adding hicolor-icon-theme dependency to %s" % pkg) bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
rdepends = bb.data.getVar('RDEPENDS_%s' % pkg, d, 1) rdepends = d.getVar('RDEPENDS_%s' % pkg, 1)
rdepends = rdepends + ' ' + bb.data.getVar('MLPREFIX', d) + "hicolor-icon-theme" rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
bb.data.setVar('RDEPENDS_%s' % pkg, rdepends, d) d.setVar('RDEPENDS_%s' % pkg, rdepends)
bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1) postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += bb.data.getVar('gtk_icon_cache_postinst', d, 1) postinst += d.getVar('gtk_icon_cache_postinst', 1)
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1) postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += bb.data.getVar('gtk_icon_cache_postrm', d, 1) postrm += d.getVar('gtk_icon_cache_postrm', 1)
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) d.setVar('pkg_postrm_%s' % pkg, postrm)
} }


@ -32,7 +32,7 @@ def icecc_dep_prepend(d):
# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
# we need that built is the responsibility of the patch function / class, not # we need that built is the responsibility of the patch function / class, not
# the application. # the application.
if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d): if not d.getVar('INHIBIT_DEFAULT_DEPS'):
return "icecc-create-env-native" return "icecc-create-env-native"
return "" return ""
@ -54,7 +54,7 @@ def create_path(compilers, bb, d):
staging += "-kernel" staging += "-kernel"
#check if the icecc path is set by the user #check if the icecc path is set by the user
icecc = bb.data.getVar('ICECC_PATH', d) or os.popen("which icecc").read()[:-1] icecc = d.getVar('ICECC_PATH') or os.popen("which icecc").read()[:-1]
# Create the dir if necessary # Create the dir if necessary
try: try:
@ -81,7 +81,7 @@ def use_icc(bb,d):
package_tmp = bb.data.expand('${PN}', d) package_tmp = bb.data.expand('${PN}', d)
system_class_blacklist = [ "none" ] system_class_blacklist = [ "none" ]
user_class_blacklist = (bb.data.getVar('ICECC_USER_CLASS_BL', d) or "none").split() user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL') or "none").split()
package_class_blacklist = system_class_blacklist + user_class_blacklist package_class_blacklist = system_class_blacklist + user_class_blacklist
for black in package_class_blacklist: for black in package_class_blacklist:
@ -92,7 +92,7 @@ def use_icc(bb,d):
#"system" package blacklist contains a list of packages that can not distribute compile tasks #"system" package blacklist contains a list of packages that can not distribute compile tasks
#for one reason or the other #for one reason or the other
system_package_blacklist = [ "uclibc", "glibc", "gcc", "bind", "u-boot", "dhcp-forwarder", "enchant", "connman", "orbit2" ] system_package_blacklist = [ "uclibc", "glibc", "gcc", "bind", "u-boot", "dhcp-forwarder", "enchant", "connman", "orbit2" ]
user_package_blacklist = (bb.data.getVar('ICECC_USER_PACKAGE_BL', d) or "").split() user_package_blacklist = (d.getVar('ICECC_USER_PACKAGE_BL') or "").split()
package_blacklist = system_package_blacklist + user_package_blacklist package_blacklist = system_package_blacklist + user_package_blacklist
for black in package_blacklist: for black in package_blacklist:
@ -100,7 +100,7 @@ def use_icc(bb,d):
#bb.note(package_tmp, ' found in blacklist, disable icecc') #bb.note(package_tmp, ' found in blacklist, disable icecc')
return "no" return "no"
if bb.data.getVar('PARALLEL_MAKE', d) == "": if d.getVar('PARALLEL_MAKE') == "":
bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc") bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc")
return "no" return "no"
@ -119,8 +119,8 @@ def icc_version(bb, d):
if use_icc(bb, d) == "no": if use_icc(bb, d) == "no":
return "" return ""
parallel = bb.data.getVar('ICECC_PARALLEL_MAKE', d) or "" parallel = d.getVar('ICECC_PARALLEL_MAKE') or ""
bb.data.setVar("PARALLEL_MAKE", parallel, d) d.setVar("PARALLEL_MAKE", parallel)
if icc_is_native(bb, d): if icc_is_native(bb, d):
archive_name = "local-host-env" archive_name = "local-host-env"
@ -130,7 +130,7 @@ def icc_version(bb, d):
prefix = bb.data.expand('${HOST_PREFIX}' , d) prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d) distro = bb.data.expand('${DISTRO}', d)
target_sys = bb.data.expand('${TARGET_SYS}', d) target_sys = bb.data.expand('${TARGET_SYS}', d)
float = bb.data.getVar('TARGET_FPU', d) or "hard" float = d.getVar('TARGET_FPU') or "hard"
archive_name = prefix + distro + "-" + target_sys + "-" + float archive_name = prefix + distro + "-" + target_sys + "-" + float
if icc_is_kernel(bb, d): if icc_is_kernel(bb, d):
archive_name += "-kernel" archive_name += "-kernel"

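Several of the icecc reads above drop the expand flag altogether; that still works after conversion because getVar treats a missing flag as false and returns the unexpanded value. A sketch of the fall-back idiom used for ICECC_PATH (assuming icecc may or may not be configured by the user):

    import os

    def find_icecc(d):
        # Use the configured path if set; otherwise ask the shell,
        # trimming the trailing newline from the command output.
        return d.getVar('ICECC_PATH') or os.popen("which icecc").read()[:-1]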

@ -51,13 +51,13 @@ python() {
# and cross packages which aren't swabber-native or one of its dependencies # and cross packages which aren't swabber-native or one of its dependencies
# I have ignored them for now... # I have ignored them for now...
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
deps = (bb.data.getVarFlag('do_setscene', 'depends', d) or "").split() deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
deps.append('strace-native:do_populate_sysroot') deps.append('strace-native:do_populate_sysroot')
bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d) bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d)
logdir = bb.data.expand("${TRACE_LOGDIR}", d) logdir = bb.data.expand("${TRACE_LOGDIR}", d)
bb.utils.mkdirhier(logdir) bb.utils.mkdirhier(logdir)
else: else:
bb.data.setVar('STRACEFUNC', '', d) d.setVar('STRACEFUNC', '')
} }
STRACEPID = "${@os.getpid()}" STRACEPID = "${@os.getpid()}"


@ -74,17 +74,17 @@ IMAGE_TYPE = ${@base_contains("IMAGE_FSTYPES", "live", "live", "empty", d)}
inherit image-${IMAGE_TYPE} inherit image-${IMAGE_TYPE}
python () { python () {
deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or "" deps = d.getVarFlag('do_rootfs', 'depends') or ""
for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split(): for type in (d.getVar('IMAGE_FSTYPES', True) or "").split():
for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []): for dep in ((d.getVar('IMAGE_DEPENDS_%s' % type) or "").split() or []):
deps += " %s:do_populate_sysroot" % dep deps += " %s:do_populate_sysroot" % dep
for dep in (bb.data.getVar('EXTRA_IMAGEDEPENDS', d, True) or "").split(): for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split():
deps += " %s:do_populate_sysroot" % dep deps += " %s:do_populate_sysroot" % dep
bb.data.setVarFlag('do_rootfs', 'depends', deps, d) d.setVarFlag('do_rootfs', 'depends', deps)
# If we don't do this we try and run the mapping hooks while parsing which is slow # If we don't do this we try and run the mapping hooks while parsing which is slow
# bitbake should really provide something to let us know this... # bitbake should really provide something to let us know this...
if bb.data.getVar('BB_WORKERCONTEXT', d, True) is not None: if d.getVar('BB_WORKERCONTEXT', True) is not None:
runtime_mapping_rename("PACKAGE_INSTALL", d) runtime_mapping_rename("PACKAGE_INSTALL", d)
runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", d) runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", d)
} }
@ -98,15 +98,15 @@ python () {
# is searched for in the BBPATH (same as the old version.) # is searched for in the BBPATH (same as the old version.)
# #
def get_devtable_list(d): def get_devtable_list(d):
devtable = bb.data.getVar('IMAGE_DEVICE_TABLE', d, 1) devtable = d.getVar('IMAGE_DEVICE_TABLE', 1)
if devtable != None: if devtable != None:
return devtable return devtable
str = "" str = ""
devtables = bb.data.getVar('IMAGE_DEVICE_TABLES', d, 1) devtables = d.getVar('IMAGE_DEVICE_TABLES', 1)
if devtables == None: if devtables == None:
devtables = 'files/device_table-minimal.txt' devtables = 'files/device_table-minimal.txt'
for devtable in devtables.split(): for devtable in devtables.split():
str += " %s" % bb.which(bb.data.getVar('BBPATH', d, 1), devtable) str += " %s" % bb.which(d.getVar('BBPATH', 1), devtable)
return str return str
IMAGE_CLASSES ?= "image_types" IMAGE_CLASSES ?= "image_types"
@ -119,7 +119,7 @@ ROOTFS_POSTPROCESS_COMMAND ?= ""
# some default locales # some default locales
IMAGE_LINGUAS ?= "de-de fr-fr en-gb" IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}" LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', 1).split()))}"
do_rootfs[nostamp] = "1" do_rootfs[nostamp] = "1"
do_rootfs[dirs] = "${TOPDIR}" do_rootfs[dirs] = "${TOPDIR}"

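The image class above shows that varflags convert the same way as variables: bb.data.getVarFlag(var, flag, d) becomes d.getVarFlag(var, flag), and likewise for the setter. A generic sketch of the task-dependency accumulation seen in the do_rootfs hunk (the helper name and the example dependency are hypothetical):

    def add_task_depends(d, task, extra):
        # Varflags use the same accessor pattern as variables.
        deps = (d.getVarFlag(task, 'depends') or "").split()
        deps.append(extra)
        d.setVarFlag(task, 'depends', " ".join(deps))

    # e.g. add_task_depends(d, 'do_rootfs', 'genext2fs-native:do_populate_sysroot')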

@ -1,8 +1,8 @@
def get_imagecmds(d): def get_imagecmds(d):
cmds = "\n" cmds = "\n"
old_overrides = bb.data.getVar('OVERRIDES', d, 0) old_overrides = d.getVar('OVERRIDES', 0)
types = bb.data.getVar('IMAGE_FSTYPES', d, True).split() types = d.getVar('IMAGE_FSTYPES', True).split()
# Live images will be processed via inheriting bbclass and # Live images will be processed via inheriting bbclass and
# does not get processed here. # does not get processed here.
# live images also depend on ext3 so ensure its present # live images also depend on ext3 so ensure its present


@ -35,12 +35,12 @@ def qemuimagetest_main(d):
casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)') casestr = re.compile(r'(?P<scen>\w+\b):(?P<case>\S+$)')
resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)') resultstr = re.compile(r'\s*(?P<case>\w+)\s*(?P<pass>\d+)\s*(?P<fail>\d+)\s*(?P<noresult>\d+)')
machine = bb.data.getVar('MACHINE', d, 1) machine = d.getVar('MACHINE', 1)
pname = bb.data.getVar('PN', d, 1) pname = d.getVar('PN', 1)
"""function to save test cases running status""" """function to save test cases running status"""
def teststatus(test, status, index, length): def teststatus(test, status, index, length):
test_status = bb.data.getVar('TEST_STATUS', d, 1) test_status = d.getVar('TEST_STATUS', 1)
if not os.path.exists(test_status): if not os.path.exists(test_status):
raise bb.build.FuncFailed("No test status file existing under TEST_TMP") raise bb.build.FuncFailed("No test status file existing under TEST_TMP")
@ -51,30 +51,30 @@ def qemuimagetest_main(d):
"""funtion to run each case under scenario""" """funtion to run each case under scenario"""
def runtest(scen, case, fulltestpath): def runtest(scen, case, fulltestpath):
resultpath = bb.data.getVar('TEST_RESULT', d, 1) resultpath = d.getVar('TEST_RESULT', 1)
tmppath = bb.data.getVar('TEST_TMP', d, 1) tmppath = d.getVar('TEST_TMP', 1)
"""initialize log file for testcase""" """initialize log file for testcase"""
logpath = bb.data.getVar('TEST_LOG', d, 1) logpath = d.getVar('TEST_LOG', 1)
bb.utils.mkdirhier("%s/%s" % (logpath, scen)) bb.utils.mkdirhier("%s/%s" % (logpath, scen))
caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, bb.data.getVar('DATETIME', d, 1))) caselog = os.path.join(logpath, "%s/log_%s.%s" % (scen, case, d.getVar('DATETIME', 1)))
os.system("touch %s" % caselog) os.system("touch %s" % caselog)
"""export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH""" """export TEST_TMP, TEST_RESULT, DEPLOY_DIR and QEMUARCH"""
os.environ["PATH"] = bb.data.getVar("PATH", d, True) os.environ["PATH"] = d.getVar("PATH", True)
os.environ["TEST_TMP"] = tmppath os.environ["TEST_TMP"] = tmppath
os.environ["TEST_RESULT"] = resultpath os.environ["TEST_RESULT"] = resultpath
os.environ["DEPLOY_DIR"] = bb.data.getVar("DEPLOY_DIR", d, True) os.environ["DEPLOY_DIR"] = d.getVar("DEPLOY_DIR", True)
os.environ["QEMUARCH"] = machine os.environ["QEMUARCH"] = machine
os.environ["QEMUTARGET"] = pname os.environ["QEMUTARGET"] = pname
os.environ["DISPLAY"] = bb.data.getVar("DISPLAY", d, True) os.environ["DISPLAY"] = d.getVar("DISPLAY", True)
os.environ["COREBASE"] = bb.data.getVar("COREBASE", d, True) os.environ["COREBASE"] = d.getVar("COREBASE", True)
os.environ["TOPDIR"] = bb.data.getVar("TOPDIR", d, True) os.environ["TOPDIR"] = d.getVar("TOPDIR", True)
os.environ["OE_TMPDIR"] = bb.data.getVar("TMPDIR", d, True) os.environ["OE_TMPDIR"] = d.getVar("TMPDIR", True)
os.environ["TEST_STATUS"] = bb.data.getVar("TEST_STATUS", d, True) os.environ["TEST_STATUS"] = d.getVar("TEST_STATUS", True)
os.environ["TARGET_IPSAVE"] = bb.data.getVar("TARGET_IPSAVE", d, True) os.environ["TARGET_IPSAVE"] = d.getVar("TARGET_IPSAVE", True)
os.environ["TEST_SERIALIZE"] = bb.data.getVar("TEST_SERIALIZE", d, True) os.environ["TEST_SERIALIZE"] = d.getVar("TEST_SERIALIZE", True)
os.environ["SDK_NAME"] = bb.data.getVar("SDK_NAME", d, True) os.environ["SDK_NAME"] = d.getVar("SDK_NAME", True)
"""run Test Case""" """run Test Case"""
bb.note("Run %s test in scenario %s" % (case, scen)) bb.note("Run %s test in scenario %s" % (case, scen))
@ -92,13 +92,13 @@ def qemuimagetest_main(d):
if n: if n:
item = n.group('scen') item = n.group('scen')
casefile = n.group('case') casefile = n.group('case')
for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split(): for dir in d.getVar("QEMUIMAGETESTS", True).split():
fulltestcase = os.path.join(dir, item, casefile) fulltestcase = os.path.join(dir, item, casefile)
if not os.path.isfile(fulltestcase): if not os.path.isfile(fulltestcase):
raise bb.build.FuncFailed("Testcase %s not found" % fulltestcase) raise bb.build.FuncFailed("Testcase %s not found" % fulltestcase)
list.append((item, casefile, fulltestcase)) list.append((item, casefile, fulltestcase))
else: else:
for dir in bb.data.getVar("QEMUIMAGETESTS", d, True).split(): for dir in d.getVar("QEMUIMAGETESTS", True).split():
scenlist = os.path.join(dir, "scenario", machine, pname) scenlist = os.path.join(dir, "scenario", machine, pname)
if not os.path.isfile(scenlist): if not os.path.isfile(scenlist):
raise bb.build.FuncFailed("No scenario list file named %s found" % scenlist) raise bb.build.FuncFailed("No scenario list file named %s found" % scenlist)
@ -118,7 +118,7 @@ def qemuimagetest_main(d):
"""Clean tmp folder for testing""" """Clean tmp folder for testing"""
def clean_tmp(): def clean_tmp():
tmppath = bb.data.getVar('TEST_TMP', d, 1) tmppath = d.getVar('TEST_TMP', 1)
if os.path.isdir(tmppath): if os.path.isdir(tmppath):
for f in os.listdir(tmppath): for f in os.listdir(tmppath):
@ -132,28 +132,28 @@ def qemuimagetest_main(d):
clean_tmp() clean_tmp()
"""check testcase folder and create test log folder""" """check testcase folder and create test log folder"""
testpath = bb.data.getVar('TEST_DIR', d, 1) testpath = d.getVar('TEST_DIR', 1)
bb.utils.mkdirhier(testpath) bb.utils.mkdirhier(testpath)
logpath = bb.data.getVar('TEST_LOG', d, 1) logpath = d.getVar('TEST_LOG', 1)
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
tmppath = bb.data.getVar('TEST_TMP', d, 1) tmppath = d.getVar('TEST_TMP', 1)
bb.utils.mkdirhier(tmppath) bb.utils.mkdirhier(tmppath)
"""initialize test status file""" """initialize test status file"""
test_status = bb.data.getVar('TEST_STATUS', d, 1) test_status = d.getVar('TEST_STATUS', 1)
if os.path.exists(test_status): if os.path.exists(test_status):
os.remove(test_status) os.remove(test_status)
os.system("touch %s" % test_status) os.system("touch %s" % test_status)
"""initialize result file""" """initialize result file"""
resultpath = bb.data.getVar('TEST_RESULT', d, 1) resultpath = d.getVar('TEST_RESULT', 1)
bb.utils.mkdirhier(resultpath) bb.utils.mkdirhier(resultpath)
resultfile = os.path.join(resultpath, "testresult.%s" % bb.data.getVar('DATETIME', d, 1)) resultfile = os.path.join(resultpath, "testresult.%s" % d.getVar('DATETIME', 1))
sresultfile = os.path.join(resultpath, "testresult.log") sresultfile = os.path.join(resultpath, "testresult.log")
machine = bb.data.getVar('MACHINE', d, 1) machine = d.getVar('MACHINE', 1)
if os.path.exists(sresultfile): if os.path.exists(sresultfile):
os.remove(sresultfile) os.remove(sresultfile)
@ -165,7 +165,7 @@ def qemuimagetest_main(d):
f.close() f.close()
"""generate pre-defined testcase list""" """generate pre-defined testcase list"""
testlist = bb.data.getVar('TEST_SCEN', d, 1) testlist = d.getVar('TEST_SCEN', 1)
fulllist = generate_list(testlist) fulllist = generate_list(testlist)
"""Begin testing""" """Begin testing"""

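Most of the qemu image-test hunks simply export expanded datastore values into the environment of the spawned test harness. A compact sketch of that pattern (the variable names in the usage line are taken from the class above):

    import os

    def export_vars(d, names):
        # Copy expanded datastore values into the process environment
        # so the child test scripts can read them.
        for name in names:
            os.environ[name] = d.getVar(name, True)

    # e.g. export_vars(d, ["DEPLOY_DIR", "TOPDIR", "TEST_STATUS"])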

@ -105,7 +105,7 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch la2 pkgconfig la perms"
def package_qa_clean_path(path,d): def package_qa_clean_path(path,d):
""" Remove the common prefix from the path. In this case it is the TMPDIR""" """ Remove the common prefix from the path. In this case it is the TMPDIR"""
return path.replace(bb.data.getVar('TMPDIR',d,True),"") return path.replace(d.getVar('TMPDIR',True),"")
def package_qa_write_error(error, d): def package_qa_write_error(error, d):
logfile = d.getVar('QA_LOGFILE', True) logfile = d.getVar('QA_LOGFILE', True)
@ -132,13 +132,13 @@ def package_qa_check_rpath(file,name, d, elf, messages):
if not elf: if not elf:
return return
scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf') scanelf = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'scanelf')
bad_dirs = [bb.data.getVar('TMPDIR', d, True) + "/work", bb.data.getVar('STAGING_DIR_TARGET', d, True)] bad_dirs = [d.getVar('TMPDIR', True) + "/work", d.getVar('STAGING_DIR_TARGET', True)]
bad_dir_test = bb.data.getVar('TMPDIR', d, True) bad_dir_test = d.getVar('TMPDIR', True)
if not os.path.exists(scanelf): if not os.path.exists(scanelf):
bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found") bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found")
if not bad_dirs[0] in bb.data.getVar('WORKDIR', d, True): if not bad_dirs[0] in d.getVar('WORKDIR', True):
bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check") bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check")
output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file)) output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file))
@ -156,11 +156,11 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if not elf: if not elf:
return return
objdump = bb.data.getVar('OBJDUMP', d, True) objdump = d.getVar('OBJDUMP', True)
env_path = bb.data.getVar('PATH', d, True) env_path = d.getVar('PATH', True)
libdir = bb.data.getVar("libdir", d, True) libdir = d.getVar("libdir", True)
base_libdir = bb.data.getVar("base_libdir", d, True) base_libdir = d.getVar("base_libdir", True)
import re import re
rpath_re = re.compile("\s+RPATH\s+(.*)") rpath_re = re.compile("\s+RPATH\s+(.*)")
@ -209,8 +209,8 @@ def package_qa_check_arch(path,name,d, elf, messages):
if not elf: if not elf:
return return
target_os = bb.data.getVar('TARGET_OS', d, True) target_os = d.getVar('TARGET_OS', True)
target_arch = bb.data.getVar('TARGET_ARCH', d, True) target_arch = d.getVar('TARGET_ARCH', True)
# FIXME: Cross package confuse this check, so just skip them # FIXME: Cross package confuse this check, so just skip them
for s in ['cross', 'nativesdk', 'cross-canadian']: for s in ['cross', 'nativesdk', 'cross-canadian']:
@ -243,7 +243,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
Run all desktop files through desktop-file-validate. Run all desktop files through desktop-file-validate.
""" """
if path.endswith(".desktop"): if path.endswith(".desktop"):
desktop_file_validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'desktop-file-validate') desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate')
output = os.popen("%s %s" % (desktop_file_validate, path)) output = os.popen("%s %s" % (desktop_file_validate, path))
# This only produces output on errors # This only produces output on errors
for l in output: for l in output:
@ -261,14 +261,14 @@ def package_qa_hash_style(path, name, d, elf, messages):
if os.path.islink(path): if os.path.islink(path):
return return
gnu_hash = "--hash-style=gnu" in bb.data.getVar('LDFLAGS', d, True) gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True)
if not gnu_hash: if not gnu_hash:
gnu_hash = "--hash-style=both" in bb.data.getVar('LDFLAGS', d, True) gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True)
if not gnu_hash: if not gnu_hash:
return return
objdump = bb.data.getVar('OBJDUMP', d, True) objdump = d.getVar('OBJDUMP', True)
env_path = bb.data.getVar('PATH', d, True) env_path = d.getVar('PATH', True)
sane = False sane = False
has_syms = False has_syms = False
@ -299,7 +299,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
if os.path.islink(path): if os.path.islink(path):
return return
tmpdir = bb.data.getVar('TMPDIR', d, True) tmpdir = d.getVar('TMPDIR', True)
file_content = open(path).read() file_content = open(path).read()
if tmpdir in file_content: if tmpdir in file_content:
messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d)) messages.append("File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d))
@ -311,9 +311,9 @@ def package_qa_check_license(workdir, d):
import tempfile import tempfile
sane = True sane = True
lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True) lic_files = d.getVar('LIC_FILES_CHKSUM', True)
lic = bb.data.getVar('LICENSE', d, True) lic = d.getVar('LICENSE', True)
pn = bb.data.getVar('PN', d, True) pn = d.getVar('PN', True)
if lic == "CLOSED": if lic == "CLOSED":
return True return True
@ -324,7 +324,7 @@ def package_qa_check_license(workdir, d):
bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)") bb.error(pn + ": Recipe file does not have license file information (LIC_FILES_CHKSUM)")
return False return False
srcdir = bb.data.getVar('S', d, True) srcdir = d.getVar('S', True)
for url in lic_files.split(): for url in lic_files.split():
(type, host, path, user, pswd, parm) = bb.decodeurl(url) (type, host, path, user, pswd, parm) = bb.decodeurl(url)
@ -384,7 +384,7 @@ def package_qa_check_staged(path,d):
""" """
sane = True sane = True
tmpdir = bb.data.getVar('TMPDIR', d, True) tmpdir = d.getVar('TMPDIR', True)
workdir = os.path.join(tmpdir, "work") workdir = os.path.join(tmpdir, "work")
installed = "installed=yes" installed = "installed=yes"
@ -417,8 +417,8 @@ def package_qa_walk(path, warnfuncs, errorfuncs, skip, package, d):
import oe.qa import oe.qa
#if this will throw an exception, then fix the dict above #if this will throw an exception, then fix the dict above
target_os = bb.data.getVar('TARGET_OS', d, True) target_os = d.getVar('TARGET_OS', True)
target_arch = bb.data.getVar('TARGET_ARCH', d, True) target_arch = d.getVar('TARGET_ARCH', True)
warnings = [] warnings = []
errors = [] errors = []
@ -457,19 +457,19 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d):
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
root = "%s/%s" % (pkgdest, pkg) root = "%s/%s" % (pkgdest, pkg)
bb.data.setVar('ROOT', '', localdata) localdata.setVar('ROOT', '')
bb.data.setVar('ROOT_%s' % pkg, root, localdata) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True) pkgname = localdata.getVar('PKG_%s' % pkg, True)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
bb.data.setVar('PKG', pkgname, localdata) localdata.setVar('PKG', pkgname)
bb.data.setVar('OVERRIDES', pkg, localdata) localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata) bb.data.update_data(localdata)
# Now check the RDEPENDS # Now check the RDEPENDS
rdepends = bb.utils.explode_deps(bb.data.getVar('RDEPENDS', localdata, True) or "") rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
# Now do the sanity check!!! # Now do the sanity check!!!
@ -487,8 +487,8 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, d):
python do_package_qa () { python do_package_qa () {
bb.note("DO PACKAGE QA") bb.note("DO PACKAGE QA")
logdir = bb.data.getVar('T', d, True) logdir = d.getVar('T', True)
pkg = bb.data.getVar('PN', d, True) pkg = d.getVar('PN', True)
# Check the compile log for host contamination # Check the compile log for host contamination
compilelog = os.path.join(logdir,"log.do_compile") compilelog = os.path.join(logdir,"log.do_compile")
@ -508,8 +508,8 @@ python do_package_qa () {
(pkg, installlog)) (pkg, installlog))
# Scan the packages... # Scan the packages...
pkgdest = bb.data.getVar('PKGDEST', d, True) pkgdest = d.getVar('PKGDEST', True)
packages = bb.data.getVar('PACKAGES',d, True) packages = d.getVar('PACKAGES', True)
# no packages should be scanned # no packages should be scanned
if not packages: if not packages:
@ -521,7 +521,7 @@ python do_package_qa () {
walk_sane = True walk_sane = True
rdepends_sane = True rdepends_sane = True
for package in packages.split(): for package in packages.split():
skip = (bb.data.getVar('INSANE_SKIP_' + package, d, True) or "").split() skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
if skip: if skip:
bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
warnchecks = [] warnchecks = []
@ -560,7 +560,7 @@ python do_qa_staging() {
python do_qa_configure() { python do_qa_configure() {
configs = [] configs = []
workdir = bb.data.getVar('WORKDIR', d, True) workdir = d.getVar('WORKDIR', True)
bb.note("Checking autotools environment for common misconfiguration") bb.note("Checking autotools environment for common misconfiguration")
for root, dirs, files in os.walk(workdir): for root, dirs, files in os.walk(workdir):
statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \ statement = "grep -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s > /dev/null" % \
@ -575,8 +575,8 @@ Rerun configure task after fixing this. The path was '%s'""" % root)
if "configure.in" in files: if "configure.in" in files:
configs.append(os.path.join(root, "configure.in")) configs.append(os.path.join(root, "configure.in"))
cnf = bb.data.getVar('EXTRA_OECONF', d, True) or "" cnf = d.getVar('EXTRA_OECONF', True) or ""
if "gettext" not in bb.data.getVar('P', d, True) and "gcc-runtime" not in bb.data.getVar('P', d, True) and "--disable-nls" not in cnf: if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
ml = d.getVar("MLPREFIX", True) or "" ml = d.getVar("MLPREFIX", True) or ""
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
gt = "gettext-native" gt = "gettext-native"
@ -584,7 +584,7 @@ Rerun configure task after fixing this. The path was '%s'""" % root)
gt = "gettext-nativesdk" gt = "gettext-nativesdk"
else: else:
gt = "virtual/" + ml + "gettext" gt = "virtual/" + ml + "gettext"
deps = bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or "") deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
if gt not in deps: if gt not in deps:
for config in configs: for config in configs:
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config

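The rdepends check above covers the one case where a second datastore appears: a per-package copy. Only the copy and re-evaluation helpers remain in bb.data; every read and write moves onto the copy itself. A sketch (pkg is any package name):

    def per_package_rdepends(d, pkg):
        # Copy the datastore so per-package overrides do not leak back.
        localdata = bb.data.createCopy(d)
        localdata.setVar('OVERRIDES', pkg)
        # Re-apply overrides after changing OVERRIDES.
        bb.data.update_data(localdata)
        return localdata.getVar('RDEPENDS', True) or ""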

@ -18,7 +18,7 @@ valid_archs = "alpha cris ia64 \
def map_kernel_arch(a, d): def map_kernel_arch(a, d):
import re import re
valid_archs = bb.data.getVar('valid_archs', d, 1).split() valid_archs = d.getVar('valid_archs', 1).split()
if re.match('(i.86|athlon|x86.64)$', a): return 'x86' if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
elif re.match('arm26$', a): return 'arm26' elif re.match('arm26$', a): return 'arm26'
@ -32,7 +32,7 @@ def map_kernel_arch(a, d):
else: else:
bb.error("cannot map '%s' to a linux kernel architecture" % a) bb.error("cannot map '%s' to a linux kernel architecture" % a)
export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}" export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', 1), d)}"
def map_uboot_arch(a, d): def map_uboot_arch(a, d):
import re import re
@ -41,5 +41,5 @@ def map_uboot_arch(a, d):
elif re.match('i.86$', a): return 'x86' elif re.match('i.86$', a): return 'x86'
return a return a
export UBOOT_ARCH = "${@map_uboot_arch(bb.data.getVar('ARCH', d, 1), d)}" export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', 1), d)}"


@ -104,7 +104,7 @@ python do_kernel_configcheck() {
bb.plain("NOTE: validating kernel configuration") bb.plain("NOTE: validating kernel configuration")
pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True) pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d ) cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d )
ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd)) ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd))


@ -11,15 +11,15 @@ INITRAMFS_IMAGE ?= ""
INITRAMFS_TASK ?= "" INITRAMFS_TASK ?= ""
python __anonymous () { python __anonymous () {
kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or '' kerneltype = d.getVar('KERNEL_IMAGETYPE', 1) or ''
if kerneltype == 'uImage': if kerneltype == 'uImage':
depends = bb.data.getVar("DEPENDS", d, 1) depends = d.getVar("DEPENDS", 1)
depends = "%s u-boot-mkimage-native" % depends depends = "%s u-boot-mkimage-native" % depends
bb.data.setVar("DEPENDS", depends, d) d.setVar("DEPENDS", depends)
image = bb.data.getVar('INITRAMFS_IMAGE', d, True) image = d.getVar('INITRAMFS_IMAGE', True)
if image: if image:
bb.data.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs', d) d.setVar('INITRAMFS_TASK', '${INITRAMFS_IMAGE}:do_rootfs')
} }
inherit kernel-arch deploy inherit kernel-arch deploy
@ -31,7 +31,7 @@ PACKAGES_DYNAMIC += "kernel-firmware-*"
export OS = "${TARGET_OS}" export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}" export CROSS_COMPILE = "${TARGET_PREFIX}"
KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}" KERNEL_PRIORITY = "${@d.getVar('PV',1).split('-')[0].split('.')[-1]}"
KERNEL_RELEASE ?= "${KERNEL_VERSION}" KERNEL_RELEASE ?= "${KERNEL_VERSION}"
@ -56,7 +56,7 @@ KERNEL_IMAGEDEST = "boot"
# #
# configuration # configuration
# #
export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}" export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE",1) or "ttyS0"}"
KERNEL_VERSION = "${@get_kernelversion('${B}')}" KERNEL_VERSION = "${@get_kernelversion('${B}')}"
@ -75,7 +75,7 @@ EXTRA_OEMAKE = ""
KERNEL_ALT_IMAGETYPE ??= "" KERNEL_ALT_IMAGETYPE ??= ""
KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(bb.data.getVar('KERNEL_IMAGETYPE', d, 1))}" KERNEL_IMAGETYPE_FOR_MAKE = "${@(lambda s: s[:-3] if s[-3:] == ".gz" else s)(d.getVar('KERNEL_IMAGETYPE', 1))}"
kernel_do_compile() { kernel_do_compile() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
@ -306,10 +306,10 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
python populate_packages_prepend () { python populate_packages_prepend () {
def extract_modinfo(file): def extract_modinfo(file):
import tempfile, re import tempfile, re
tempfile.tempdir = bb.data.getVar("WORKDIR", d, 1) tempfile.tempdir = d.getVar("WORKDIR", 1)
tf = tempfile.mkstemp() tf = tempfile.mkstemp()
tmpfile = tf[1] tmpfile = tf[1]
cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile) cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", 1), d.getVar("HOST_PREFIX", 1) or "", file, tmpfile)
os.system(cmd) os.system(cmd)
f = open(tmpfile) f = open(tmpfile)
l = f.read().split("\000") l = f.read().split("\000")
@ -328,18 +328,18 @@ python populate_packages_prepend () {
def parse_depmod(): def parse_depmod():
import re import re
dvar = bb.data.getVar('PKGD', d, 1) dvar = d.getVar('PKGD', 1)
if not dvar: if not dvar:
bb.error("PKGD not defined") bb.error("PKGD not defined")
return return
kernelver = bb.data.getVar('KERNEL_VERSION', d, 1) kernelver = d.getVar('KERNEL_VERSION', 1)
kernelver_stripped = kernelver kernelver_stripped = kernelver
m = re.match('^(.*-hh.*)[\.\+].*$', kernelver) m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
if m: if m:
kernelver_stripped = m.group(1) kernelver_stripped = m.group(1)
path = bb.data.getVar("PATH", d, 1) path = d.getVar("PATH", 1)
host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or "" host_prefix = d.getVar("HOST_PREFIX", 1) or ""
cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped) cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped)
f = os.popen(cmd, 'r') f = os.popen(cmd, 'r')
@ -377,9 +377,9 @@ python populate_packages_prepend () {
def get_dependencies(file, pattern, format): def get_dependencies(file, pattern, format):
# file no longer includes PKGD # file no longer includes PKGD
file = file.replace(bb.data.getVar('PKGD', d, 1) or '', '', 1) file = file.replace(d.getVar('PKGD', 1) or '', '', 1)
# instead is prefixed with /lib/modules/${KERNEL_VERSION} # instead is prefixed with /lib/modules/${KERNEL_VERSION}
file = file.replace("/lib/modules/%s/" % bb.data.getVar('KERNEL_VERSION', d, 1) or '', '', 1) file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', 1) or '', '', 1)
if module_deps.has_key(file): if module_deps.has_key(file):
import re import re
@ -398,40 +398,40 @@ python populate_packages_prepend () {
import re import re
vals = extract_modinfo(file) vals = extract_modinfo(file)
dvar = bb.data.getVar('PKGD', d, 1) dvar = d.getVar('PKGD', 1)
# If autoloading is requested, output /etc/modutils/<name> and append # If autoloading is requested, output /etc/modutils/<name> and append
# appropriate modprobe commands to the postinst # appropriate modprobe commands to the postinst
autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1) autoload = d.getVar('module_autoload_%s' % basename, 1)
if autoload: if autoload:
name = '%s/etc/modutils/%s' % (dvar, basename) name = '%s/etc/modutils/%s' % (dvar, basename)
f = open(name, 'w') f = open(name, 'w')
for m in autoload.split(): for m in autoload.split():
f.write('%s\n' % m) f.write('%s\n' % m)
f.close() f.close()
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) postinst = d.getVar('pkg_postinst_%s' % pkg, 1)
if not postinst: if not postinst:
bb.fatal("pkg_postinst_%s not defined" % pkg) bb.fatal("pkg_postinst_%s not defined" % pkg)
postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload postinst += d.getVar('autoload_postinst_fragment', 1) % autoload
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) d.setVar('pkg_postinst_%s' % pkg, postinst)
# Write out any modconf fragment # Write out any modconf fragment
modconf = bb.data.getVar('module_conf_%s' % basename, d, 1) modconf = d.getVar('module_conf_%s' % basename, 1)
if modconf: if modconf:
name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
f = open(name, 'w') f = open(name, 'w')
f.write("%s\n" % modconf) f.write("%s\n" % modconf)
f.close() f.close()
files = bb.data.getVar('FILES_%s' % pkg, d, 1) files = d.getVar('FILES_%s' % pkg, 1)
files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename) files = "%s /etc/modutils/%s /etc/modutils/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename, basename)
bb.data.setVar('FILES_%s' % pkg, files, d) d.setVar('FILES_%s' % pkg, files)
if vals.has_key("description"): if vals.has_key("description"):
old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or "" old_desc = d.getVar('DESCRIPTION_' + pkg, 1) or ""
bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d) d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1) rdepends_str = d.getVar('RDEPENDS_' + pkg, 1)
if rdepends_str: if rdepends_str:
rdepends = rdepends_str.split() rdepends = rdepends_str.split()
else: else:
@ -443,29 +443,29 @@ python populate_packages_prepend () {
module_regex = '^(.*)\.k?o$' module_regex = '^(.*)\.k?o$'
module_pattern = 'kernel-module-%s' module_pattern = 'kernel-module-%s'
postinst = bb.data.getVar('pkg_postinst_modules', d, 1) postinst = d.getVar('pkg_postinst_modules', 1)
postrm = bb.data.getVar('pkg_postrm_modules', d, 1) postrm = d.getVar('pkg_postrm_modules', 1)
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % bb.data.getVar("KERNEL_VERSION", d, 1)) do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", 1))
import re import re
metapkg = "kernel-modules" metapkg = "kernel-modules"
bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d) d.setVar('ALLOW_EMPTY_' + metapkg, "1")
bb.data.setVar('FILES_' + metapkg, "", d) d.setVar('FILES_' + metapkg, "")
blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux', 'perf', 'perf-dbg', 'kernel-misc' ] blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux', 'perf', 'perf-dbg', 'kernel-misc' ]
for l in module_deps.values(): for l in module_deps.values():
for i in l: for i in l:
pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1)) pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
blacklist.append(pkg) blacklist.append(pkg)
metapkg_rdepends = [] metapkg_rdepends = []
packages = bb.data.getVar('PACKAGES', d, 1).split() packages = d.getVar('PACKAGES', 1).split()
for pkg in packages[1:]: for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends: if not pkg in blacklist and not pkg in metapkg_rdepends:
metapkg_rdepends.append(pkg) metapkg_rdepends.append(pkg)
bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d) d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
packages.append(metapkg) packages.append(metapkg)
bb.data.setVar('PACKAGES', ' '.join(packages), d) bb.data.setVar('PACKAGES', ' '.join(packages), d)
} }

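kernel.bbclass also converts the inline ${@...} Python expressions embedded in assignments; inside those, d is already in scope, so the rewrite is purely mechanical. Restating one before/after pair from the hunks above on separate lines, since the side-by-side rendering makes it hard to read:

    # Old:
    KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"
    # New:
    KERNEL_PRIORITY = "${@d.getVar('PV',1).split('-')[0].split('.')[-1]}"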

@ -18,13 +18,13 @@ do_install() {
} }
def get_libc_fpu_setting(bb, d): def get_libc_fpu_setting(bb, d):
if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--without-fp" return "--without-fp"
return "" return ""
python populate_packages_prepend () { python populate_packages_prepend () {
if bb.data.getVar('DEBIAN_NAMES', d, 1): if d.getVar('DEBIAN_NAMES', 1):
bpn = bb.data.getVar('BPN', d, 1) bpn = d.getVar('BPN', 1)
bb.data.setVar('PKG_'+bpn, 'libc6', d) d.setVar('PKG_'+bpn, 'libc6')
bb.data.setVar('PKG_'+bpn+'-dev', 'libc6-dev', d) d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
} }


@ -10,7 +10,7 @@
GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
python __anonymous () { python __anonymous () {
enabled = bb.data.getVar("ENABLE_BINARY_LOCALE_GENERATION", d, 1) enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1)
pn = d.getVar("PN", True) pn = d.getVar("PN", True)
if pn.endswith("-initial"): if pn.endswith("-initial"):
@ -19,21 +19,21 @@ python __anonymous () {
if enabled and int(enabled): if enabled and int(enabled):
import re import re
target_arch = bb.data.getVar("TARGET_ARCH", d, 1) target_arch = d.getVar("TARGET_ARCH", 1)
binary_arches = bb.data.getVar("BINARY_LOCALE_ARCHES", d, 1) or "" binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or ""
use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "" use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or ""
for regexp in binary_arches.split(" "): for regexp in binary_arches.split(" "):
r = re.compile(regexp) r = re.compile(regexp)
if r.match(target_arch): if r.match(target_arch):
depends = bb.data.getVar("DEPENDS", d, 1) depends = d.getVar("DEPENDS", 1)
if use_cross_localedef == "1" : if use_cross_localedef == "1" :
depends = "%s cross-localedef-native" % depends depends = "%s cross-localedef-native" % depends
else: else:
depends = "%s qemu-native" % depends depends = "%s qemu-native" % depends
bb.data.setVar("DEPENDS", depends, d) d.setVar("DEPENDS", depends)
bb.data.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile", d) d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
break break
} }
@ -109,19 +109,19 @@ inherit qemu
python package_do_split_gconvs () { python package_do_split_gconvs () {
import os, re import os, re
if (bb.data.getVar('PACKAGE_NO_GCONV', d, 1) == '1'): if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'):
bb.note("package requested not splitting gconvs") bb.note("package requested not splitting gconvs")
return return
if not bb.data.getVar('PACKAGES', d, 1): if not d.getVar('PACKAGES', 1):
return return
bpn = bb.data.getVar('BPN', d, 1) bpn = d.getVar('BPN', 1)
libdir = bb.data.getVar('libdir', d, 1) libdir = d.getVar('libdir', 1)
if not libdir: if not libdir:
bb.error("libdir not defined") bb.error("libdir not defined")
return return
datadir = bb.data.getVar('datadir', d, 1) datadir = d.getVar('datadir', 1)
if not datadir: if not datadir:
bb.error("datadir not defined") bb.error("datadir not defined")
return return
@ -191,17 +191,17 @@ python package_do_split_gconvs () {
do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
description='locale definition for %s', hook=calc_locale_deps, extra_depends='') description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
bb.data.setVar('PACKAGES', bb.data.getVar('PACKAGES', d) + ' ' + bb.data.getVar('MLPREFIX', d) + bpn + '-gconv', d) bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d)
use_bin = bb.data.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", d, 1) use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1)
dot_re = re.compile("(.*)\.(.*)") dot_re = re.compile("(.*)\.(.*)")
#GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales
if use_bin != "precompiled": if use_bin != "precompiled":
supported = bb.data.getVar('GLIBC_GENERATE_LOCALES', d, 1) supported = d.getVar('GLIBC_GENERATE_LOCALES', 1)
if not supported or supported == "all": if not supported or supported == "all":
f = open(base_path_join(bb.data.getVar('WORKDIR', d, 1), "SUPPORTED"), "r") f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r")
supported = f.readlines() supported = f.readlines()
f.close() f.close()
else: else:
@ -209,7 +209,7 @@ python package_do_split_gconvs () {
supported = map(lambda s:s.replace(".", " ") + "\n", supported) supported = map(lambda s:s.replace(".", " ") + "\n", supported)
else: else:
supported = [] supported = []
full_bin_path = bb.data.getVar('PKGD', d, True) + binary_locales_dir full_bin_path = d.getVar('PKGD', True) + binary_locales_dir
for dir in os.listdir(full_bin_path): for dir in os.listdir(full_bin_path):
dbase = dir.split(".") dbase = dir.split(".")
d2 = " " d2 = " "
@ -218,7 +218,7 @@ python package_do_split_gconvs () {
supported.append(dbase[0] + d2) supported.append(dbase[0] + d2)
# Collate the locales by base and encoding # Collate the locales by base and encoding
utf8_only = int(bb.data.getVar('LOCALE_UTF8_ONLY', d, 1) or 0) utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0)
encodings = {} encodings = {}
for l in supported: for l in supported:
l = l[:-1] l = l[:-1]
@ -235,9 +235,9 @@ python package_do_split_gconvs () {
def output_locale_source(name, pkgname, locale, encoding): def output_locale_source(name, pkgname, locale, encoding):
bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
(bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d) (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d)
bb.data.setVar('pkg_postinst_%s' % pkgname, bb.data.getVar('locale_base_postinst', d, 1) \ bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \
% (locale, encoding, locale), d) % (locale, encoding, locale), d)
bb.data.setVar('pkg_postrm_%s' % pkgname, bb.data.getVar('locale_base_postrm', d, 1) % \ bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \
(locale, encoding, locale), d) (locale, encoding, locale), d)
def output_locale_binary_rdepends(name, pkgname, locale, encoding): def output_locale_binary_rdepends(name, pkgname, locale, encoding):
@ -248,23 +248,23 @@ python package_do_split_gconvs () {
libc_name = name libc_name = name
bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
% (bpn, libc_name)), d) % (bpn, libc_name)), d)
rprovides = (bb.data.getVar('RPROVIDES_%s' % pkgname, d, True) or "").split() rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split()
rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d) bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
commands = {} commands = {}
def output_locale_binary(name, pkgname, locale, encoding): def output_locale_binary(name, pkgname, locale, encoding):
treedir = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree") treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree")
ldlibdir = base_path_join(treedir, bb.data.getVar("base_libdir", d, 1)) ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1))
path = bb.data.getVar("PATH", d, 1) path = d.getVar("PATH", 1)
i18npath = base_path_join(treedir, datadir, "i18n") i18npath = base_path_join(treedir, datadir, "i18n")
gconvpath = base_path_join(treedir, "iconvdata") gconvpath = base_path_join(treedir, "iconvdata")
outputpath = base_path_join(treedir, libdir, "locale") outputpath = base_path_join(treedir, libdir, "locale")
use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "0" use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0"
if use_cross_localedef == "1": if use_cross_localedef == "1":
target_arch = bb.data.getVar('TARGET_ARCH', d, True) target_arch = d.getVar('TARGET_ARCH', True)
locale_arch_options = { \ locale_arch_options = { \
"arm": " --uint32-align=4 --little-endian ", \ "arm": " --uint32-align=4 --little-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \ "powerpc": " --uint32-align=4 --big-endian ", \
@ -292,9 +292,9 @@ python package_do_split_gconvs () {
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \ --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name) % (treedir, datadir, locale, encoding, name)
qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % bb.data.getVar('PACKAGE_ARCH', d, 1), d, 1) qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1)
if not qemu_options: if not qemu_options:
qemu_options = bb.data.getVar('QEMU_OPTIONS', d, 1) qemu_options = d.getVar('QEMU_OPTIONS', 1)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@ -305,14 +305,14 @@ python package_do_split_gconvs () {
bb.note("generating locale %s (%s)" % (locale, encoding)) bb.note("generating locale %s (%s)" % (locale, encoding))
def output_locale(name, locale, encoding): def output_locale(name, locale, encoding):
pkgname = bb.data.getVar('MLPREFIX', d) + 'locale-base-' + legitimize_package_name(name) pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
bb.data.setVar('ALLOW_EMPTY_%s' % pkgname, '1', d) d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
bb.data.setVar('PACKAGES', '%s %s' % (pkgname, bb.data.getVar('PACKAGES', d, 1)), d) bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d)
rprovides = ' virtual-locale-%s' % legitimize_package_name(name) rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
m = re.match("(.*)_(.*)", name) m = re.match("(.*)_(.*)", name)
if m: if m:
rprovides += ' virtual-locale-%s' % m.group(1) rprovides += ' virtual-locale-%s' % m.group(1)
bb.data.setVar('RPROVIDES_%s' % pkgname, rprovides, d) d.setVar('RPROVIDES_%s' % pkgname, rprovides)
if use_bin == "compile": if use_bin == "compile":
output_locale_binary_rdepends(name, pkgname, locale, encoding) output_locale_binary_rdepends(name, pkgname, locale, encoding)
@ -347,7 +347,7 @@ python package_do_split_gconvs () {
bb.note(" " + " ".join(non_utf8)) bb.note(" " + " ".join(non_utf8))
if use_bin == "compile": if use_bin == "compile":
makefile = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree", "Makefile") makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile")
m = open(makefile, "w") m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys())) m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands: for cmd in commands:

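Note that not every call in the libc-package hunks was converted: where the value argument itself contains commas or parentheses (for example the setVar calls built from legitimize_package_name(...) or a string join), the line keeps its old bb.data form. That appears to be a limit of the mechanical rewrite rather than an error; converting such a call by hand would look like this (sketch, names as in the class above):

    # Left unconverted above:
    #   bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
    # Hand-converted equivalent:
    d.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides))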

@ -104,10 +104,10 @@ python do_populate_lic() {
# If the generic does not exist we need to check to see if there is an SPDX mapping to it # If the generic does not exist we need to check to see if there is an SPDX mapping to it
if not os.path.isfile(os.path.join(generic_directory, license_type)): if not os.path.isfile(os.path.join(generic_directory, license_type)):
if bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d) != None: if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
# Great, there is an SPDXLICENSEMAP. We can copy! # Great, there is an SPDXLICENSEMAP. We can copy!
bb.note("We need to use a SPDXLICENSEMAP for %s" % (license_type)) bb.note("We need to use a SPDXLICENSEMAP for %s" % (license_type))
spdx_generic = bb.data.getVarFlag('SPDXLICENSEMAP', license_type, d) spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
copy_license(generic_directory, gen_lic_dest, spdx_generic) copy_license(generic_directory, gen_lic_dest, spdx_generic)
link_license(gen_lic_dest, destdir, spdx_generic) link_license(gen_lic_dest, destdir, spdx_generic)
else: else:
@ -120,16 +120,16 @@ python do_populate_lic() {
link_license(gen_lic_dest, destdir, license_type) link_license(gen_lic_dest, destdir, license_type)
# All the license types for the package # All the license types for the package
license_types = bb.data.getVar('LICENSE', d, True) license_types = d.getVar('LICENSE', True)
# All the license files for the package # All the license files for the package
lic_files = bb.data.getVar('LIC_FILES_CHKSUM', d, True) lic_files = d.getVar('LIC_FILES_CHKSUM', True)
pn = bb.data.getVar('PN', d, True) pn = d.getVar('PN', True)
# The base directory we wrangle licenses to # The base directory we wrangle licenses to
destdir = os.path.join(bb.data.getVar('LICSSTATEDIR', d, True), pn) destdir = os.path.join(d.getVar('LICSSTATEDIR', True), pn)
# The license files are located in S/LIC_FILE_CHECKSUM. # The license files are located in S/LIC_FILE_CHECKSUM.
srcdir = bb.data.getVar('S', d, True) srcdir = d.getVar('S', True)
# Directory we store the generic licenses as set in the distro configuration # Directory we store the generic licenses as set in the distro configuration
generic_directory = bb.data.getVar('COMMON_LICENSE_DIR', d, True) generic_directory = d.getVar('COMMON_LICENSE_DIR', True)
try: try:
bb.mkdirhier(destdir) bb.mkdirhier(destdir)
@ -154,7 +154,7 @@ python do_populate_lic() {
if ret is False or ret == 0: if ret is False or ret == 0:
bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % srclicfile) bb.warn("%s could not be copied for some reason. It may not exist. WARN for now." % srclicfile)
gen_lic_dest = os.path.join(bb.data.getVar('LICENSE_DIRECTORY', d, True), "common-licenses") gen_lic_dest = os.path.join(d.getVar('LICENSE_DIRECTORY', True), "common-licenses")
clean_licenses = "" clean_licenses = ""


@@ -27,7 +27,7 @@ def base_detect_branch(d):
 return "<unknown>"
 def base_get_scmbasepath(d):
-return bb.data.getVar( 'COREBASE', d, 1 )
+return d.getVar( 'COREBASE', 1 )
 def base_get_metadata_monotone_branch(path, d):
 monotone_branch = "<unknown>"


@@ -98,18 +98,18 @@ python native_virtclass_handler () {
 if not isinstance(e, bb.event.RecipePreFinalise):
 return
-classextend = bb.data.getVar('BBCLASSEXTEND', e.data, True) or ""
+classextend = e.data.getVar('BBCLASSEXTEND', True) or ""
 if "native" not in classextend:
 return
-pn = bb.data.getVar("PN", e.data, True)
+pn = e.data.getVar("PN", True)
 if not pn.endswith("-native"):
 return
 def map_dependencies(varname, d, suffix = ""):
 if suffix:
 varname = varname + "_" + suffix
-deps = bb.data.getVar(varname, d, True)
+deps = d.getVar(varname, True)
 if not deps:
 return
 deps = bb.utils.explode_deps(deps)
@@ -131,15 +131,15 @@ python native_virtclass_handler () {
 map_dependencies("RPROVIDES", e.data, pkg)
 map_dependencies("RREPLACES", e.data, pkg)
-provides = bb.data.getVar("PROVIDES", e.data, True)
+provides = e.data.getVar("PROVIDES", True)
 for prov in provides.split():
 if prov.find(pn) != -1:
 continue
 if not prov.endswith("-native"):
 provides = provides.replace(prov, prov + "-native")
-bb.data.setVar("PROVIDES", provides, e.data)
-bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-native", e.data)
+e.data.setVar("PROVIDES", provides)
+bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data)
 }
 addhandler native_virtclass_handler
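
Event handlers get the recipe's datastore as e.data, so the direct accessors apply there too. A minimal sketch of the handler shape converted above; the handler name and the variable it sets are illustrative only:

    python example_virtclass_handler () {
        # e.data is the datastore of the recipe being finalised.
        if not isinstance(e, bb.event.RecipePreFinalise):
            return
        pn = e.data.getVar("PN", True)
        if pn and pn.endswith("-native"):
            e.data.setVar("EXAMPLE_MARKER", "1")   # placeholder variable
    }
    addhandler example_virtclass_handler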


@@ -11,7 +11,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S
 #
 PACKAGE_ARCH = "${SDK_ARCH}-nativesdk"
 python () {
-archs = bb.data.getVar('PACKAGE_ARCHS', d, True).split()
+archs = d.getVar('PACKAGE_ARCHS', True).split()
 sdkarchs = []
 for arch in archs:
 sdkarchs.append(arch + '-nativesdk')
@@ -62,22 +62,22 @@ python nativesdk_virtclass_handler () {
 if not isinstance(e, bb.event.RecipePreFinalise):
 return
-pn = bb.data.getVar("PN", e.data, True)
+pn = e.data.getVar("PN", True)
 if not pn.endswith("-nativesdk"):
 return
-bb.data.setVar("OVERRIDES", bb.data.getVar("OVERRIDES", e.data, False) + ":virtclass-nativesdk", e.data)
+bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data)
 }
 python () {
-pn = bb.data.getVar("PN", d, True)
+pn = d.getVar("PN", True)
 if not pn.endswith("-nativesdk"):
 return
 def map_dependencies(varname, d, suffix = ""):
 if suffix:
 varname = varname + "_" + suffix
-deps = bb.data.getVar(varname, d, True)
+deps = d.getVar(varname, True)
 if not deps:
 return
 deps = bb.utils.explode_deps(deps)
@@ -101,13 +101,13 @@ python () {
 # map_dependencies("RPROVIDES", d, pkg)
 # map_dependencies("RREPLACES", d, pkg)
-provides = bb.data.getVar("PROVIDES", d, True)
+provides = d.getVar("PROVIDES", True)
 for prov in provides.split():
 if prov.find(pn) != -1:
 continue
 if not prov.endswith("-nativesdk"):
 provides = provides.replace(prov, prov + "-nativesdk")
-bb.data.setVar("PROVIDES", provides, d)
+d.setVar("PROVIDES", provides)
 }
 addhandler nativesdk_virtclass_handler


@@ -88,9 +88,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 if newdeps:
 extra_depends = " ".join(newdeps)
-dvar = bb.data.getVar('PKGD', d, True)
-packages = bb.data.getVar('PACKAGES', d, True).split()
+dvar = d.getVar('PKGD', True)
+packages = d.getVar('PACKAGES', True).split()
 if postinst:
 postinst = '#!/bin/sh\n' + postinst + '\n'
@@ -136,7 +136,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 packages = [pkg] + packages
 else:
 packages.append(pkg)
-oldfiles = bb.data.getVar('FILES_' + pkg, d, True)
+oldfiles = d.getVar('FILES_' + pkg, True)
 if not oldfiles:
 the_files = [os.path.join(root, o)]
 if aux_files_pattern:
@@ -153,17 +153,17 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 the_files.append(aux_files_pattern_verbatim % m.group(1))
 bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
 if extra_depends != '':
-the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, True)
+the_depends = d.getVar('RDEPENDS_' + pkg, True)
 if the_depends:
 the_depends = '%s %s' % (the_depends, extra_depends)
 else:
 the_depends = extra_depends
-bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
-bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
+d.setVar('RDEPENDS_' + pkg, the_depends)
+d.setVar('DESCRIPTION_' + pkg, description % on)
 if postinst:
-bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
+d.setVar('pkg_postinst_' + pkg, postinst)
 if postrm:
-bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
+d.setVar('pkg_postrm_' + pkg, postrm)
 else:
 bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
 if callable(hook):
@@ -174,13 +174,13 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"
 python () {
-if bb.data.getVar('PACKAGES', d, True) != '':
-deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
-for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
+if d.getVar('PACKAGES', True) != '':
+deps = d.getVarFlag('do_package', 'depends') or ""
+for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
 deps += " %s:do_populate_sysroot" % dep
-bb.data.setVarFlag('do_package', 'depends', deps, d)
-deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split()
+d.setVarFlag('do_package', 'depends', deps)
+deps = (d.getVarFlag('do_package', 'deptask') or "").split()
 # shlibs requires any DEPENDS to have already packaged for the *.list files
 deps.append("do_package")
 bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
@@ -198,9 +198,9 @@ def splitfile(file, debugfile, debugsrcdir, d):
 import commands, stat
-dvar = bb.data.getVar('PKGD', d, True)
-pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
-objcopy = bb.data.getVar("OBJCOPY", d, True)
+dvar = d.getVar('PKGD', True)
+pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+objcopy = d.getVar("OBJCOPY", True)
 debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
 workdir = bb.data.expand("${WORKDIR}", d)
 workparentdir = os.path.dirname(workdir)
@@ -240,10 +240,10 @@ def splitfile2(debugsrcdir, d):
 import commands, stat
-dvar = bb.data.getVar('PKGD', d, True)
-pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
-strip = bb.data.getVar("STRIP", d, True)
-objcopy = bb.data.getVar("OBJCOPY", d, True)
+dvar = d.getVar('PKGD', True)
+pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+strip = d.getVar("STRIP", True)
+objcopy = d.getVar("OBJCOPY", True)
 debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
 workdir = bb.data.expand("${WORKDIR}", d)
 workparentdir = os.path.dirname(workdir)
@@ -279,8 +279,8 @@ def runstrip(file, elftype, d):
 import commands, stat
-pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
-strip = bb.data.getVar("STRIP", d, True)
+pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+strip = d.getVar("STRIP", True)
 # Handle kernel modules specifically - .debug directories here are pointless
 if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
@@ -329,10 +329,10 @@ def get_package_mapping (pkg, d):
 return pkg
 def runtime_mapping_rename (varname, d):
-#bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True)))
+#bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
 new_depends = []
-deps = bb.utils.explode_dep_versions(bb.data.getVar(varname, d, True) or "")
+deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "")
 for depend in deps:
 # Have to be careful with any version component of the depend
 new_depend = get_package_mapping(depend, d)
@@ -343,7 +343,7 @@ def runtime_mapping_rename (varname, d):
 bb.data.setVar(varname, " ".join(new_depends) or None, d)
-#bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, True)))
+#bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
 #
 # Package functions suitable for inclusion in PACKAGEFUNCS
@@ -359,19 +359,19 @@ python package_get_auto_pr() {
 }
 python package_do_split_locales() {
-if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'):
+if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
 bb.debug(1, "package requested not splitting locales")
 return
-packages = (bb.data.getVar('PACKAGES', d, True) or "").split()
-datadir = bb.data.getVar('datadir', d, True)
+packages = (d.getVar('PACKAGES', True) or "").split()
+datadir = d.getVar('datadir', True)
 if not datadir:
 bb.note("datadir not defined")
 return
-dvar = bb.data.getVar('PKGD', d, True)
-pn = bb.data.getVar('PN', d, True)
+dvar = d.getVar('PKGD', True)
+pn = d.getVar('PN', True)
 if pn + '-locale' in packages:
 packages.remove(pn + '-locale')
@@ -392,9 +392,9 @@ python package_do_split_locales() {
 if mainpkg.find('-dev'):
 mainpkg = mainpkg.replace('-dev', '')
-summary = bb.data.getVar('SUMMARY', d, True) or pn
-description = bb.data.getVar('DESCRIPTION', d, True) or ""
-locale_section = bb.data.getVar('LOCALE_SECTION', d, True)
+summary = d.getVar('SUMMARY', True) or pn
+description = d.getVar('DESCRIPTION', True) or ""
+locale_section = d.getVar('LOCALE_SECTION', True)
 for l in locales:
 ln = legitimize_package_name(l)
 pkg = pn + '-locale-' + ln
@@ -405,7 +405,7 @@ python package_do_split_locales() {
 bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d)
 bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d)
 if locale_section:
-bb.data.setVar('SECTION_' + pkg, locale_section, d)
+d.setVar('SECTION_' + pkg, locale_section)
 bb.data.setVar('PACKAGES', ' '.join(packages), d)
@@ -415,14 +415,14 @@ python package_do_split_locales() {
 # glibc-localedata-translit* won't install as a dependency
 # for some other package which breaks meta-toolchain
 # Probably breaks since virtual-locale- isn't provided anywhere
-#rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "").split()
+#rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split()
 #rdep.append('%s-locale*' % pn)
 #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
 }
 python perform_packagecopy () {
-dest = bb.data.getVar('D', d, True)
-dvar = bb.data.getVar('PKGD', d, True)
+dest = d.getVar('D', True)
+dvar = d.getVar('PKGD', True)
 bb.mkdirhier(dvar)
@@ -549,16 +549,16 @@ python fixup_perms () {
 # paths are resolved via BBPATH
 def get_fs_perms_list(d):
 str = ""
-fs_perms_tables = bb.data.getVar('FILESYSTEM_PERMS_TABLES', d, True)
+fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
 if not fs_perms_tables:
 fs_perms_tables = 'files/fs-perms.txt'
 for conf_file in fs_perms_tables.split():
-str += " %s" % bb.which(bb.data.getVar('BBPATH', d, True), conf_file)
+str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file)
 return str
-dvar = bb.data.getVar('PKGD', d, True)
+dvar = d.getVar('PKGD', True)
 fs_perms_table = {}
@@ -586,7 +586,7 @@ python fixup_perms () {
 'oldincludedir' ]
 for path in target_path_vars:
-dir = bb.data.getVar(path, d, True) or ""
+dir = d.getVar(path, True) or ""
 if dir == "":
 continue
 fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
@@ -664,11 +664,11 @@ python fixup_perms () {
 python split_and_strip_files () {
 import commands, stat, errno
-dvar = bb.data.getVar('PKGD', d, True)
-pn = bb.data.getVar('PN', d, True)
+dvar = d.getVar('PKGD', True)
+pn = d.getVar('PN', True)
 # We default to '.debug' style
-if bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, True) == 'debug-file-directory':
+if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
 # Single debug-file-directory style debug info
 debugappend = ".debug"
 debugdir = ""
@@ -691,7 +691,7 @@ python split_and_strip_files () {
 # 8 - shared library
 def isELF(path):
 type = 0
-pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
+pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
 ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))
 if ret:
@@ -715,8 +715,8 @@ python split_and_strip_files () {
 #
 file_list = {}
 file_links = {}
-if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1') and \
-(bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'):
+if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
+(d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
 for root, dirs, files in os.walk(dvar):
 for f in files:
 file = os.path.join(root, f)
@@ -764,7 +764,7 @@ python split_and_strip_files () {
 #
 # First lets process debug splitting
 #
-if (bb.data.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', d, True) != '1'):
+if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
 for file in file_list:
 src = file[len(dvar):]
 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@@ -842,7 +842,7 @@ python split_and_strip_files () {
 #
 # Now lets go back over things and strip them
 #
-if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, True) != '1'):
+if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
 for file in file_list:
 if file_list[file].startswith("ELF: "):
 elf_file = int(file_list[file][5:])
@@ -856,11 +856,11 @@ python split_and_strip_files () {
 python populate_packages () {
 import glob, stat, errno, re
-workdir = bb.data.getVar('WORKDIR', d, True)
-outdir = bb.data.getVar('DEPLOY_DIR', d, True)
-dvar = bb.data.getVar('PKGD', d, True)
-packages = bb.data.getVar('PACKAGES', d, True)
-pn = bb.data.getVar('PN', d, True)
+workdir = d.getVar('WORKDIR', True)
+outdir = d.getVar('DEPLOY_DIR', True)
+dvar = d.getVar('PKGD', True)
+packages = d.getVar('PACKAGES', True)
+pn = d.getVar('PN', True)
 bb.mkdirhier(outdir)
 os.chdir(dvar)
@@ -877,7 +877,7 @@ python populate_packages () {
 else:
 package_list.append(pkg)
-pkgdest = bb.data.getVar('PKGDEST', d, True)
+pkgdest = d.getVar('PKGDEST', True)
 os.system('rm -rf %s' % pkgdest)
 seen = []
@@ -887,14 +887,14 @@ python populate_packages () {
 root = os.path.join(pkgdest, pkg)
 bb.mkdirhier(root)
-bb.data.setVar('PKG', pkg, localdata)
-overrides = bb.data.getVar('OVERRIDES', localdata, True)
+localdata.setVar('PKG', pkg)
+overrides = localdata.getVar('OVERRIDES', True)
 if not overrides:
 raise bb.build.FuncFailed('OVERRIDES not defined')
-bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
+localdata.setVar('OVERRIDES', overrides + ':' + pkg)
 bb.data.update_data(localdata)
-filesvar = bb.data.getVar('FILES', localdata, True) or ""
+filesvar = localdata.getVar('FILES', True) or ""
 files = filesvar.split()
 file_links = {}
 for file in files:
@@ -973,9 +973,9 @@ python populate_packages () {
 bb.build.exec_func("package_name_hook", d)
 for pkg in package_list:
-pkgname = bb.data.getVar('PKG_%s' % pkg, d, True)
+pkgname = d.getVar('PKG_%s' % pkg, True)
 if pkgname is None:
-bb.data.setVar('PKG_%s' % pkg, pkg, d)
+d.setVar('PKG_%s' % pkg, pkg)
 dangling_links = {}
 pkg_files = {}
@@ -999,7 +999,7 @@ python populate_packages () {
 dangling_links[pkg].append(os.path.normpath(target))
 for pkg in package_list:
-rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, True) or bb.data.getVar('RDEPENDS', d, True) or "")
+rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")
 for l in dangling_links[pkg]:
 found = False
@@ -1040,9 +1040,9 @@ python emit_pkgdata() {
 f.write('%s: %s\n' % (var, encode(val)))
 return
-packages = bb.data.getVar('PACKAGES', d, True)
-pkgdest = bb.data.getVar('PKGDEST', d, 1)
-pkgdatadir = bb.data.getVar('PKGDESTWORK', d, True)
+packages = d.getVar('PACKAGES', True)
+pkgdest = d.getVar('PKGDEST', 1)
+pkgdatadir = d.getVar('PKGDESTWORK', True)
 # Take shared lock since we're only reading, not writing
 lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d), True)
@@ -1052,7 +1052,7 @@ python emit_pkgdata() {
 f.write("PACKAGES: %s\n" % packages)
 f.close()
-workdir = bb.data.getVar('WORKDIR', d, True)
+workdir = d.getVar('WORKDIR', True)
 for pkg in packages.split():
 subdata_file = pkgdatadir + "/runtime/%s" % pkg
@@ -1080,19 +1080,19 @@ python emit_pkgdata() {
 write_if_exists(sf, pkg, 'pkg_preinst')
 write_if_exists(sf, pkg, 'pkg_prerm')
 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
-for dfile in (bb.data.getVar('FILERPROVIDESFLIST_' + pkg, d, True) or "").split():
+for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
 write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-for dfile in (bb.data.getVar('FILERDEPENDSFLIST_' + pkg, d, True) or "").split():
+for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
 write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
 sf.close()
-allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, True)
+allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
 if not allow_empty:
-allow_empty = bb.data.getVar('ALLOW_EMPTY', d, True)
+allow_empty = d.getVar('ALLOW_EMPTY', True)
 root = "%s/%s" % (pkgdest, pkg)
 os.chdir(root)
 g = glob('*')
@@ -1123,8 +1123,8 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps"
 python package_do_filedeps() {
 import os, re
-pkgdest = bb.data.getVar('PKGDEST', d, True)
-packages = bb.data.getVar('PACKAGES', d, True)
+pkgdest = d.getVar('PKGDEST', True)
+packages = d.getVar('PACKAGES', True)
 rpmdeps = bb.data.expand("${RPMDEPS}", d)
 r = re.compile(r'[<>=]+ +[^ ]*')
@@ -1189,7 +1189,7 @@ SHLIBSWORKDIR = "${WORKDIR}/shlibs"
 python package_do_shlibs() {
 import re
-exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
+exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
 if exclude_shlibs:
 bb.note("not generating shlibs")
 return
@@ -1197,27 +1197,27 @@ python package_do_shlibs() {
 lib_re = re.compile("^.*\.so")
 libdir_re = re.compile(".*/lib$")
-packages = bb.data.getVar('PACKAGES', d, True)
-targetos = bb.data.getVar('TARGET_OS', d, True)
-workdir = bb.data.getVar('WORKDIR', d, True)
-ver = bb.data.getVar('PKGV', d, True)
+packages = d.getVar('PACKAGES', True)
+targetos = d.getVar('TARGET_OS', True)
+workdir = d.getVar('WORKDIR', True)
+ver = d.getVar('PKGV', True)
 if not ver:
 bb.error("PKGV not defined")
 return
-pkgdest = bb.data.getVar('PKGDEST', d, True)
-shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True)
-shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True)
+pkgdest = d.getVar('PKGDEST', True)
+shlibs_dir = d.getVar('SHLIBSDIR', True)
+shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
 # Take shared lock since we're only reading, not writing
 lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d))
 def linux_so(root, path, file):
-cmd = bb.data.getVar('OBJDUMP', d, True) + " -p " + os.path.join(root, file) + " 2>/dev/null"
-cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', d, True), cmd)
+cmd = d.getVar('OBJDUMP', True) + " -p " + os.path.join(root, file) + " 2>/dev/null"
+cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
 fd = os.popen(cmd)
 lines = fd.readlines()
 fd.close()
@@ -1264,7 +1264,7 @@ python package_do_shlibs() {
 if not combo in sonames:
 sonames.append(combo)
 if file.endswith('.dylib') or file.endswith('.so'):
-lafile = fullpath.replace(os.path.join(pkgdest, pkg), bb.data.getVar('PKGD', d, True))
+lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
 # Drop suffix
 lafile = lafile.rsplit(".",1)[0]
 lapath = os.path.dirname(lafile)
@@ -1299,26 +1299,26 @@ python package_do_shlibs() {
 needed[pkg].append(name)
 #bb.note("Adding %s for %s" % (name, pkg))
-if bb.data.getVar('PACKAGE_SNAP_LIB_SYMLINKS', d, True) == "1":
+if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
 snap_symlinks = True
 else:
 snap_symlinks = False
-if (bb.data.getVar('USE_LDCONFIG', d, True) or "1") == "1":
+if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
 use_ldconfig = True
 else:
 use_ldconfig = False
 needed = {}
 shlib_provider = {}
-private_libs = bb.data.getVar('PRIVATE_LIBS', d, True)
+private_libs = d.getVar('PRIVATE_LIBS', True)
 for pkg in packages.split():
 needs_ldconfig = False
 bb.debug(2, "calculating shlib provides for %s" % pkg)
-pkgver = bb.data.getVar('PKGV_' + pkg, d, True)
+pkgver = d.getVar('PKGV_' + pkg, True)
 if not pkgver:
-pkgver = bb.data.getVar('PV_' + pkg, d, True)
+pkgver = d.getVar('PV_' + pkg, True)
 if not pkgver:
 pkgver = ver
@@ -1352,11 +1352,11 @@ python package_do_shlibs() {
 fd.close()
 if needs_ldconfig and use_ldconfig:
 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, True) or bb.data.getVar('pkg_postinst', d, True)
+postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
 if not postinst:
 postinst = '#!/bin/sh\n'
-postinst += bb.data.getVar('ldconfig_postinst_fragment', d, True)
-bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+postinst += d.getVar('ldconfig_postinst_fragment', True)
+d.setVar('pkg_postinst_%s' % pkg, postinst)
 list_re = re.compile('^(.*)\.list$')
 for dir in [shlibs_dir]:
@@ -1380,7 +1380,7 @@ python package_do_shlibs() {
 bb.utils.unlockfile(lf)
-assumed_libs = bb.data.getVar('ASSUME_SHLIBS', d, True)
+assumed_libs = d.getVar('ASSUME_SHLIBS', True)
 if assumed_libs:
 for e in assumed_libs.split():
 l, dep_pkg = e.split(":")
@@ -1424,12 +1424,12 @@ python package_do_shlibs() {
 python package_do_pkgconfig () {
 import re
-packages = bb.data.getVar('PACKAGES', d, True)
-workdir = bb.data.getVar('WORKDIR', d, True)
-pkgdest = bb.data.getVar('PKGDEST', d, True)
-shlibs_dir = bb.data.getVar('SHLIBSDIR', d, True)
-shlibswork_dir = bb.data.getVar('SHLIBSWORKDIR', d, True)
+packages = d.getVar('PACKAGES', True)
+workdir = d.getVar('WORKDIR', True)
+pkgdest = d.getVar('PKGDEST', True)
+shlibs_dir = d.getVar('SHLIBSDIR', True)
+shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
 pc_re = re.compile('(.*)\.pc$')
 var_re = re.compile('(.*)=(.*)')
@@ -1515,9 +1515,9 @@ python package_do_pkgconfig () {
 }
 python read_shlibdeps () {
-packages = bb.data.getVar('PACKAGES', d, True).split()
+packages = d.getVar('PACKAGES', True).split()
 for pkg in packages:
-rdepends = bb.utils.explode_dep_versions(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
+rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "")
 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
 depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
@@ -1544,14 +1544,14 @@ python package_depchains() {
 package.
 """
-packages = bb.data.getVar('PACKAGES', d, True)
-postfixes = (bb.data.getVar('DEPCHAIN_POST', d, True) or '').split()
-prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, True) or '').split()
+packages = d.getVar('PACKAGES', True)
+postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
+prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
 #bb.note('depends for %s is %s' % (base, depends))
-rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "")
+rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
 for depend in depends:
 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@@ -1572,7 +1572,7 @@ python package_depchains() {
 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
 #bb.note('rdepends for %s is %s' % (base, rdepends))
-rreclist = bb.utils.explode_dep_versions(bb.data.getVar('RRECOMMENDS_' + pkg, d, True) or bb.data.getVar('RRECOMMENDS', d, True) or "")
+rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
 for depend in rdepends:
 if depend.find('virtual-locale-') != -1:
@@ -1596,15 +1596,15 @@ python package_depchains() {
 list.append(dep)
 depends = []
-for dep in bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or ""):
+for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
 add_dep(depends, dep)
 rdepends = []
-for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS', d, True) or ""):
+for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
 add_dep(rdepends, dep)
 for pkg in packages.split():
-for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, True) or ""):
+for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
 add_dep(rdepends, dep)
 #bb.note('rdepends is %s' % rdepends)
@@ -1630,7 +1630,7 @@ python package_depchains() {
 for suffix in pkgs:
 for pkg in pkgs[suffix]:
-if bb.data.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', d):
+if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
 continue
 (base, func) = pkgs[suffix][pkg]
 if suffix == "-dev":
@@ -1639,7 +1639,7 @@ python package_depchains() {
 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
 else:
 rdeps = []
-for dep in bb.utils.explode_deps(bb.data.getVar('RDEPENDS_' + base, d, True) or bb.data.getVar('RDEPENDS', d, True) or ""):
+for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
 add_dep(rdeps, dep)
 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
 }
@@ -1679,22 +1679,22 @@ python do_package () {
 # as any change to rpmdeps requires this to be rerun.
 # PACKAGE_BBCLASS_VERSION = "1"
-packages = (bb.data.getVar('PACKAGES', d, True) or "").split()
+packages = (d.getVar('PACKAGES', True) or "").split()
 if len(packages) < 1:
 bb.debug(1, "No packages to build, skipping do_package")
 return
-workdir = bb.data.getVar('WORKDIR', d, True)
-outdir = bb.data.getVar('DEPLOY_DIR', d, True)
-dest = bb.data.getVar('D', d, True)
-dvar = bb.data.getVar('PKGD', d, True)
-pn = bb.data.getVar('PN', d, True)
+workdir = d.getVar('WORKDIR', True)
+outdir = d.getVar('DEPLOY_DIR', True)
+dest = d.getVar('D', True)
+dvar = d.getVar('PKGD', True)
+pn = d.getVar('PN', True)
 if not workdir or not outdir or not dest or not dvar or not pn or not packages:
 bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
 return
-for f in (bb.data.getVar('PACKAGEFUNCS', d, True) or '').split():
+for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
 bb.build.exec_func(f, d)
 }
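
Throughout package.bbclass the per-package work happens on a copied datastore, so package-specific OVERRIDES can be applied without disturbing d. A sketch of that idiom with a placeholder package name:

    # Copy the store, scope it to one package, then re-evaluate overrides.
    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES', True) + ':mypkg')
    bb.data.update_data(localdata)
    files = localdata.getVar('FILES', True) or ""   # now the per-package value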


@@ -11,18 +11,18 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
 PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
 python package_deb_fn () {
-bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
+bb.data.setVar('PKGFN', d.getVar('PKG'), d)
 }
 addtask package_deb_install
 python do_package_deb_install () {
-pkg = bb.data.getVar('PKG', d, True)
-pkgfn = bb.data.getVar('PKGFN', d, True)
-rootfs = bb.data.getVar('IMAGE_ROOTFS', d, True)
-debdir = bb.data.getVar('DEPLOY_DIR_DEB', d, True)
+pkg = d.getVar('PKG', True)
+pkgfn = d.getVar('PKGFN', True)
+rootfs = d.getVar('IMAGE_ROOTFS', True)
+debdir = d.getVar('DEPLOY_DIR_DEB', True)
 apt_config = bb.data.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf', d)
-stagingbindir = bb.data.getVar('STAGING_BINDIR_NATIVE', d, True)
-tmpdir = bb.data.getVar('TMPDIR', d, True)
+stagingbindir = d.getVar('STAGING_BINDIR_NATIVE', True)
+tmpdir = d.getVar('TMPDIR', True)
 if None in (pkg,pkgfn,rootfs):
 raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGE_ROOTFS)")
@@ -206,22 +206,22 @@ python do_package_deb () {
 import re, copy
 import textwrap
-workdir = bb.data.getVar('WORKDIR', d, True)
+workdir = d.getVar('WORKDIR', True)
 if not workdir:
 bb.error("WORKDIR not defined, unable to package")
 return
-outdir = bb.data.getVar('PKGWRITEDIRDEB', d, True)
+outdir = d.getVar('PKGWRITEDIRDEB', True)
 if not outdir:
 bb.error("PKGWRITEDIRDEB not defined, unable to package")
 return
-packages = bb.data.getVar('PACKAGES', d, True)
+packages = d.getVar('PACKAGES', True)
 if not packages:
 bb.debug(1, "PACKAGES not defined, nothing to package")
 return
-tmpdir = bb.data.getVar('TMPDIR', d, True)
+tmpdir = d.getVar('TMPDIR', True)
 if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
 os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@@ -230,7 +230,7 @@ python do_package_deb () {
 bb.debug(1, "No packages; nothing to do")
 return
-pkgdest = bb.data.getVar('PKGDEST', d, True)
+pkgdest = d.getVar('PKGDEST', True)
 for pkg in packages.split():
 localdata = bb.data.createCopy(d)
@@ -238,19 +238,19 @@ python do_package_deb () {
 lf = bb.utils.lockfile(root + ".lock")
-bb.data.setVar('ROOT', '', localdata)
-bb.data.setVar('ROOT_%s' % pkg, root, localdata)
-pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True)
+localdata.setVar('ROOT', '')
+localdata.setVar('ROOT_%s' % pkg, root)
+pkgname = localdata.getVar('PKG_%s' % pkg, True)
 if not pkgname:
 pkgname = pkg
-bb.data.setVar('PKG', pkgname, localdata)
-bb.data.setVar('OVERRIDES', pkg, localdata)
+localdata.setVar('PKG', pkgname)
+localdata.setVar('OVERRIDES', pkg)
 bb.data.update_data(localdata)
 basedir = os.path.join(os.path.dirname(root))
-pkgoutdir = os.path.join(outdir, bb.data.getVar('PACKAGE_ARCH', localdata, True))
+pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True))
 bb.mkdirhier(pkgoutdir)
 os.chdir(root)
@@ -261,8 +261,8 @@ python do_package_deb () {
 del g[g.index('./DEBIAN')]
 except ValueError:
 pass
-if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, True), bb.data.getVar('PKGR', localdata, True)))
+if not g and localdata.getVar('ALLOW_EMPTY') != "1":
+bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True)))
 bb.utils.unlockfile(lf)
 continue
@@ -278,7 +278,7 @@ python do_package_deb () {
 raise bb.build.FuncFailed("unable to open control file for writing.")
 fields = []
-pe = bb.data.getVar('PKGE', d, True)
+pe = d.getVar('PKGE', True)
 if pe and int(pe) > 0:
 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
 else:
@@ -298,10 +298,10 @@ python do_package_deb () {
 def pullData(l, d):
 l2 = []
 for i in l:
-data = bb.data.getVar(i, d, True)
+data = d.getVar(i, True)
 if data is None:
 raise KeyError(f)
-if i == 'DPKG_ARCH' and bb.data.getVar('PACKAGE_ARCH', d, True) == 'all':
+if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all':
 data = 'all'
 l2.append(data)
 return l2
@@ -311,12 +311,12 @@ python do_package_deb () {
 try:
 for (c, fs) in fields:
 for f in fs:
-if bb.data.getVar(f, localdata) is None:
+if localdata.getVar(f) is None:
 raise KeyError(f)
 # Special behavior for description...
 if 'DESCRIPTION' in fs:
-summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "."
-description = bb.data.getVar('DESCRIPTION', localdata, True) or "."
+summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+description = localdata.getVar('DESCRIPTION', True) or "."
 description = textwrap.dedent(description).strip()
 ctrlfile.write('Description: %s\n' % unicode(summary))
 ctrlfile.write('%s\n' % unicode(textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')))
@@ -332,18 +332,18 @@ python do_package_deb () {
 bb.build.exec_func("mapping_rename_hook", localdata)
-rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, True) or "")
+rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", True) or "")
 for dep in rdepends:
 if '*' in dep:
 del rdepends[dep]
-rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, True) or "")
+rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", True) or "")
 for dep in rrecommends:
 if '*' in dep:
 del rrecommends[dep]
-rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, True) or "")
-rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, True) or "")
-rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, True) or "")
-rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, True) or "")
+rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", True) or "")
+rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", True) or "")
+rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", True) or "")
+rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", True) or "")
 if rdepends:
 ctrlfile.write("Depends: %s\n" % unicode(bb.utils.join_deps(rdepends)))
 if rsuggests:
@@ -359,7 +359,7 @@ python do_package_deb () {
 ctrlfile.close()
 for script in ["preinst", "postinst", "prerm", "postrm"]:
-scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True)
+scriptvar = localdata.getVar('pkg_%s' % script, True)
 if not scriptvar:
 continue
 try:
@@ -372,7 +372,7 @@ python do_package_deb () {
 scriptfile.close()
 os.chmod(os.path.join(controldir, script), 0755)
-conffiles_str = bb.data.getVar("CONFFILES", localdata, True)
+conffiles_str = localdata.getVar("CONFFILES", True)
 if conffiles_str:
 try:
 conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@@ -384,7 +384,7 @@ python do_package_deb () {
 conffiles.close()
 os.chdir(basedir)
-ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (bb.data.getVar("PATH", localdata, True), root, pkgoutdir))
+ret = os.system("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir))
 if ret != 0:
 bb.utils.prunedir(controldir)
 bb.utils.unlockfile(lf)
@@ -405,17 +405,17 @@ python do_package_write_deb_setscene () {
 addtask do_package_write_deb_setscene
 python () {
-if bb.data.getVar('PACKAGES', d, True) != '':
-deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
+if d.getVar('PACKAGES', True) != '':
+deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
 deps.append('dpkg-native:do_populate_sysroot')
 deps.append('virtual/fakeroot-native:do_populate_sysroot')
 bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
-bb.data.setVarFlag('do_package_write_deb', 'fakeroot', "1", d)
-bb.data.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1", d)
+d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
+d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1")
 # Map TARGET_ARCH to Debian's ideas about architectures
-if bb.data.getVar('DPKG_ARCH', d, True) in ["x86", "i486", "i586", "i686", "pentium"]:
-bb.data.setVar('DPKG_ARCH', 'i386', d)
+if d.getVar('DPKG_ARCH', True) in ["x86", "i486", "i586", "i686", "pentium"]:
+d.setVar('DPKG_ARCH', 'i386')
 }
 python do_package_write_deb () {
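
Task flags are handled identically; a standalone sketch of the anonymous python () block above, appending a task dependency directly on the store:

    deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
    deps.append('dpkg-native:do_populate_sysroot')
    d.setVarFlag('do_package_write_deb', 'depends', " ".join(deps))
    d.setVarFlag('do_package_write_deb', 'fakeroot', "1")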


@@ -11,16 +11,16 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
 OPKGBUILDCMD ??= "opkg-build"
 python package_ipk_fn () {
-bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
+bb.data.setVar('PKGFN', d.getVar('PKG'), d)
 }
 python package_ipk_install () {
-pkg = bb.data.getVar('PKG', d, 1)
-pkgfn = bb.data.getVar('PKGFN', d, 1)
-rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
-ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
-stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
-tmpdir = bb.data.getVar('TMPDIR', d, 1)
+pkg = d.getVar('PKG', 1)
+pkgfn = d.getVar('PKGFN', 1)
+rootfs = d.getVar('IMAGE_ROOTFS', 1)
+ipkdir = d.getVar('DEPLOY_DIR_IPK', 1)
+stagingdir = d.getVar('STAGING_DIR', 1)
+tmpdir = d.getVar('TMPDIR', 1)
 if None in (pkg,pkgfn,rootfs):
 raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@@ -36,7 +36,7 @@ python package_ipk_install () {
 # Generate ipk.conf if it or the stamp doesnt exist
 conffile = os.path.join(stagingdir,"ipkg.conf")
 if not os.access(conffile, os.R_OK):
-ipkg_archs = bb.data.getVar('PACKAGE_ARCHS',d)
+ipkg_archs = d.getVar('PACKAGE_ARCHS')
 if ipkg_archs is None:
 bb.error("PACKAGE_ARCHS missing")
 raise FuncFailed
@@ -259,15 +259,15 @@ python do_package_ipk () {
 import re, copy
 import textwrap
-workdir = bb.data.getVar('WORKDIR', d, True)
-outdir = bb.data.getVar('PKGWRITEDIRIPK', d, True)
-tmpdir = bb.data.getVar('TMPDIR', d, True)
-pkgdest = bb.data.getVar('PKGDEST', d, True)
+workdir = d.getVar('WORKDIR', True)
+outdir = d.getVar('PKGWRITEDIRIPK', True)
+tmpdir = d.getVar('TMPDIR', True)
+pkgdest = d.getVar('PKGDEST', True)
 if not workdir or not outdir or not tmpdir:
 bb.error("Variables incorrectly set, unable to package")
 return
-packages = bb.data.getVar('PACKAGES', d, True)
+packages = d.getVar('PACKAGES', True)
 if not packages or packages == '':
 bb.debug(1, "No packages; nothing to do")
 return
@@ -283,18 +283,18 @@ python do_package_ipk () {
 lf = bb.utils.lockfile(root + ".lock")
-bb.data.setVar('ROOT', '', localdata)
-bb.data.setVar('ROOT_%s' % pkg, root, localdata)
-pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
+localdata.setVar('ROOT', '')
+localdata.setVar('ROOT_%s' % pkg, root)
+pkgname = localdata.getVar('PKG_%s' % pkg, 1)
 if not pkgname:
 pkgname = pkg
-bb.data.setVar('PKG', pkgname, localdata)
-bb.data.setVar('OVERRIDES', pkg, localdata)
+localdata.setVar('PKG', pkgname)
+localdata.setVar('OVERRIDES', pkg)
 bb.data.update_data(localdata)
 basedir = os.path.join(os.path.dirname(root))
-arch = bb.data.getVar('PACKAGE_ARCH', localdata, 1)
+arch = localdata.getVar('PACKAGE_ARCH', 1)
 pkgoutdir = "%s/%s" % (outdir, arch)
 bb.mkdirhier(pkgoutdir)
 os.chdir(root)
@@ -305,8 +305,8 @@ python do_package_ipk () {
 del g[g.index('./CONTROL')]
 except ValueError:
 pass
-if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1)))
+if not g and localdata.getVar('ALLOW_EMPTY') != "1":
+bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
 bb.utils.unlockfile(lf)
 continue
@@ -319,7 +319,7 @@ python do_package_ipk () {
 raise bb.build.FuncFailed("unable to open control file for writing.")
 fields = []
-pe = bb.data.getVar('PKGE', d, 1)
+pe = d.getVar('PKGE', 1)
 if pe and int(pe) > 0:
 fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
 else:
@@ -336,7 +336,7 @@ python do_package_ipk () {
 def pullData(l, d):
 l2 = []
 for i in l:
-l2.append(bb.data.getVar(i, d, 1))
+l2.append(d.getVar(i, 1))
 return l2
 ctrlfile.write("Package: %s\n" % pkgname)
@@ -344,12 +344,12 @@ python do_package_ipk () {
 try:
 for (c, fs) in fields:
 for f in fs:
-if bb.data.getVar(f, localdata) is None:
+if localdata.getVar(f) is None:
 raise KeyError(f)
 # Special behavior for description...
 if 'DESCRIPTION' in fs:
-summary = bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or "."
-description = bb.data.getVar('DESCRIPTION', localdata, True) or "."
+summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "."
+description = localdata.getVar('DESCRIPTION', True) or "."
 description = textwrap.dedent(description).strip()
 ctrlfile.write('Description: %s\n' % summary)
 ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' '))
@@ -365,12 +365,12 @@ python do_package_ipk () {
 bb.build.exec_func("mapping_rename_hook", localdata)
-rdepends = bb.utils.explode_dep_versions(bb.data.getVar("RDEPENDS", localdata, 1) or "")
-rrecommends = bb.utils.explode_dep_versions(bb.data.getVar("RRECOMMENDS", localdata, 1) or "")
-rsuggests = bb.utils.explode_dep_versions(bb.data.getVar("RSUGGESTS", localdata, 1) or "")
-rprovides = bb.utils.explode_dep_versions(bb.data.getVar("RPROVIDES", localdata, 1) or "")
-rreplaces = bb.utils.explode_dep_versions(bb.data.getVar("RREPLACES", localdata, 1) or "")
-rconflicts = bb.utils.explode_dep_versions(bb.data.getVar("RCONFLICTS", localdata, 1) or "")
+rdepends = bb.utils.explode_dep_versions(localdata.getVar("RDEPENDS", 1) or "")
+rrecommends = bb.utils.explode_dep_versions(localdata.getVar("RRECOMMENDS", 1) or "")
+rsuggests = bb.utils.explode_dep_versions(localdata.getVar("RSUGGESTS", 1) or "")
+rprovides = bb.utils.explode_dep_versions(localdata.getVar("RPROVIDES", 1) or "")
+rreplaces = bb.utils.explode_dep_versions(localdata.getVar("RREPLACES", 1) or "")
+rconflicts = bb.utils.explode_dep_versions(localdata.getVar("RCONFLICTS", 1) or "")
 if rdepends:
 ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@@ -384,14 +384,14 @@ python do_package_ipk () {
 ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
 if rconflicts:
 ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
-src_uri = bb.data.getVar("SRC_URI", localdata, 1)
+src_uri = localdata.getVar("SRC_URI", 1)
 if src_uri:
 src_uri = re.sub("\s+", " ", src_uri)
 ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
 ctrlfile.close()
 for script in ["preinst", "postinst", "prerm", "postrm"]:
-scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1)
+scriptvar = localdata.getVar('pkg_%s' % script, 1)
 if not scriptvar:
 continue
 try:
@@ -403,7 +403,7 @@ python do_package_ipk () {
 scriptfile.close()
 os.chmod(os.path.join(controldir, script), 0755)
-conffiles_str = bb.data.getVar("CONFFILES", localdata, 1)
+conffiles_str = localdata.getVar("CONFFILES", 1)
 if conffiles_str:
 try:
 conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
@@ -415,8 +415,8 @@ python do_package_ipk () {
 conffiles.close()
 os.chdir(basedir)
-ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
-bb.data.getVar("OPKGBUILDCMD",d,1), pkg, pkgoutdir))
+ret = os.system("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", 1),
+d.getVar("OPKGBUILDCMD",1), pkg, pkgoutdir))
 if ret != 0:
 bb.utils.unlockfile(lf)
 raise bb.build.FuncFailed("opkg-build execution failed")
@@ -437,13 +437,13 @@ python do_package_write_ipk_setscene () {
 addtask do_package_write_ipk_setscene
 python () {
-if bb.data.getVar('PACKAGES', d, True) != '':
-deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
+if d.getVar('PACKAGES', True) != '':
+deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split()
 deps.append('opkg-utils-native:do_populate_sysroot')
 deps.append('virtual/fakeroot-native:do_populate_sysroot')
 bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
-bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d)
-bb.data.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1", d)
+d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
+d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1")
 }
 python do_package_write_ipk () {


@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
python package_rpm_fn () { python package_rpm_fn () {
bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d) bb.data.setVar('PKGFN', d.getVar('PKG'), d)
} }
python package_rpm_install () { python package_rpm_install () {
@ -406,7 +406,7 @@ python write_specfile () {
name = "".join(name.split(eext[1] + '-')) name = "".join(name.split(eext[1] + '-'))
return name return name
# ml = bb.data.getVar("MLPREFIX", d, True) # ml = d.getVar("MLPREFIX", True)
# if ml and name and len(ml) != 0 and name.find(ml) == 0: # if ml and name and len(ml) != 0 and name.find(ml) == 0:
# return ml.join(name.split(ml, 1)[1:]) # return ml.join(name.split(ml, 1)[1:])
# return name # return name
@ -426,7 +426,7 @@ python write_specfile () {
# after renaming we cannot look up the dependencies in the packagedata # after renaming we cannot look up the dependencies in the packagedata
# store. # store.
def translate_vers(varname, d): def translate_vers(varname, d):
depends = bb.data.getVar(varname, d, True) depends = d.getVar(varname, True)
if depends: if depends:
depends_dict = bb.utils.explode_dep_versions(depends) depends_dict = bb.utils.explode_dep_versions(depends)
newdeps_dict = {} newdeps_dict = {}
@ -481,34 +481,34 @@ python write_specfile () {
scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi' scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi'
return scr return scr
packages = bb.data.getVar('PACKAGES', d, True) packages = d.getVar('PACKAGES', True)
if not packages or packages == '': if not packages or packages == '':
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
pkgdest = bb.data.getVar('PKGDEST', d, True) pkgdest = d.getVar('PKGDEST', True)
if not pkgdest: if not pkgdest:
bb.fatal("No PKGDEST") bb.fatal("No PKGDEST")
return return
outspecfile = bb.data.getVar('OUTSPECFILE', d, True) outspecfile = d.getVar('OUTSPECFILE', True)
if not outspecfile: if not outspecfile:
bb.fatal("No OUTSPECFILE") bb.fatal("No OUTSPECFILE")
return return
# Construct the SPEC file... # Construct the SPEC file...
srcname = strip_multilib(bb.data.getVar('PN', d, True), d) srcname = strip_multilib(d.getVar('PN', True), d)
srcsummary = (bb.data.getVar('SUMMARY', d, True) or bb.data.getVar('DESCRIPTION', d, True) or ".") srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".")
srcversion = bb.data.getVar('PKGV', d, True).replace('-', '+') srcversion = d.getVar('PKGV', True).replace('-', '+')
srcrelease = bb.data.getVar('PKGR', d, True) srcrelease = d.getVar('PKGR', True)
srcepoch = (bb.data.getVar('PKGE', d, True) or "") srcepoch = (d.getVar('PKGE', True) or "")
srclicense = bb.data.getVar('LICENSE', d, True) srclicense = d.getVar('LICENSE', True)
srcsection = bb.data.getVar('SECTION', d, True) srcsection = d.getVar('SECTION', True)
srcmaintainer = bb.data.getVar('MAINTAINER', d, True) srcmaintainer = d.getVar('MAINTAINER', True)
srchomepage = bb.data.getVar('HOMEPAGE', d, True) srchomepage = d.getVar('HOMEPAGE', True)
srcdescription = bb.data.getVar('DESCRIPTION', d, True) or "." srcdescription = d.getVar('DESCRIPTION', True) or "."
srcdepends = strip_multilib(bb.data.getVar('DEPENDS', d, True), d) srcdepends = strip_multilib(d.getVar('DEPENDS', True), d)
srcrdepends = [] srcrdepends = []
srcrrecommends = [] srcrrecommends = []
srcrsuggests = [] srcrsuggests = []
@ -538,28 +538,28 @@ python write_specfile () {
lf = bb.utils.lockfile(root + ".lock") lf = bb.utils.lockfile(root + ".lock")
bb.data.setVar('ROOT', '', localdata) localdata.setVar('ROOT', '')
bb.data.setVar('ROOT_%s' % pkg, root, localdata) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) pkgname = localdata.getVar('PKG_%s' % pkg, 1)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
bb.data.setVar('PKG', pkgname, localdata) localdata.setVar('PKG', pkgname)
bb.data.setVar('OVERRIDES', pkg, localdata) localdata.setVar('OVERRIDES', pkg)
bb.data.update_data(localdata) bb.data.update_data(localdata)
conffiles = (bb.data.getVar('CONFFILES', localdata, True) or "").split() conffiles = (localdata.getVar('CONFFILES', True) or "").split()
splitname = strip_multilib(pkgname, d) splitname = strip_multilib(pkgname, d)
splitsummary = (bb.data.getVar('SUMMARY', localdata, True) or bb.data.getVar('DESCRIPTION', localdata, True) or ".") splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".")
splitversion = (bb.data.getVar('PKGV', localdata, True) or "").replace('-', '+') splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+')
splitrelease = (bb.data.getVar('PKGR', localdata, True) or "") splitrelease = (localdata.getVar('PKGR', True) or "")
splitepoch = (bb.data.getVar('PKGE', localdata, True) or "") splitepoch = (localdata.getVar('PKGE', True) or "")
splitlicense = (bb.data.getVar('LICENSE', localdata, True) or "") splitlicense = (localdata.getVar('LICENSE', True) or "")
splitsection = (bb.data.getVar('SECTION', localdata, True) or "") splitsection = (localdata.getVar('SECTION', True) or "")
splitdescription = (bb.data.getVar('DESCRIPTION', localdata, True) or ".") splitdescription = (localdata.getVar('DESCRIPTION', True) or ".")
translate_vers('RDEPENDS', localdata) translate_vers('RDEPENDS', localdata)
translate_vers('RRECOMMENDS', localdata) translate_vers('RRECOMMENDS', localdata)
@ -571,12 +571,12 @@ python write_specfile () {
# Map the dependencies into their final form # Map the dependencies into their final form
bb.build.exec_func("mapping_rename_hook", localdata) bb.build.exec_func("mapping_rename_hook", localdata)
splitrdepends = strip_multilib(bb.data.getVar('RDEPENDS', localdata, True), d) or "" splitrdepends = strip_multilib(localdata.getVar('RDEPENDS', True), d) or ""
splitrrecommends = strip_multilib(bb.data.getVar('RRECOMMENDS', localdata, True), d) or "" splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or ""
splitrsuggests = strip_multilib(bb.data.getVar('RSUGGESTS', localdata, True), d) or "" splitrsuggests = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or ""
splitrprovides = strip_multilib(bb.data.getVar('RPROVIDES', localdata, True), d) or "" splitrprovides = strip_multilib(localdata.getVar('RPROVIDES', True), d) or ""
splitrreplaces = strip_multilib(bb.data.getVar('RREPLACES', localdata, True), d) or "" splitrreplaces = strip_multilib(localdata.getVar('RREPLACES', True), d) or ""
splitrconflicts = strip_multilib(bb.data.getVar('RCONFLICTS', localdata, True), d) or "" splitrconflicts = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or ""
splitrobsoletes = [] splitrobsoletes = []
# For now we need to manually supplement RPROVIDES with any update-alternatives links # For now we need to manually supplement RPROVIDES with any update-alternatives links
@ -592,14 +592,14 @@ python write_specfile () {
srcrreplaces = splitrreplaces srcrreplaces = splitrreplaces
srcrconflicts = splitrconflicts srcrconflicts = splitrconflicts
srcpreinst = bb.data.getVar('pkg_preinst', localdata, True) srcpreinst = localdata.getVar('pkg_preinst', True)
srcpostinst = bb.data.getVar('pkg_postinst', localdata, True) srcpostinst = localdata.getVar('pkg_postinst', True)
srcprerm = bb.data.getVar('pkg_prerm', localdata, True) srcprerm = localdata.getVar('pkg_prerm', True)
srcpostrm = bb.data.getVar('pkg_postrm', localdata, True) srcpostrm = localdata.getVar('pkg_postrm', True)
file_list = [] file_list = []
walk_files(root, file_list, conffiles) walk_files(root, file_list, conffiles)
if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname) bb.note("Not creating empty RPM package for %s" % splitname)
else: else:
bb.note("Creating RPM package for %s" % splitname) bb.note("Creating RPM package for %s" % splitname)
@ -672,7 +672,7 @@ python write_specfile () {
# Now process scriptlets # Now process scriptlets
for script in ["preinst", "postinst", "prerm", "postrm"]: for script in ["preinst", "postinst", "prerm", "postrm"]:
scriptvar = bb.data.getVar('pkg_%s' % script, localdata, True) scriptvar = localdata.getVar('pkg_%s' % script, True)
if not scriptvar: if not scriptvar:
continue continue
if script == 'preinst': if script == 'preinst':
@ -691,7 +691,7 @@ python write_specfile () {
# Now process files # Now process files
file_list = [] file_list = []
walk_files(root, file_list, conffiles) walk_files(root, file_list, conffiles)
if not file_list and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname) bb.note("Not creating empty RPM package for %s" % splitname)
else: else:
spec_files_bottom.append('%%files -n %s' % splitname) spec_files_bottom.append('%%files -n %s' % splitname)
@ -813,29 +813,29 @@ python do_package_rpm () {
# We need a simple way to remove the MLPREFIX from the package name, # We need a simple way to remove the MLPREFIX from the package name,
# and dependency information... # and dependency information...
def strip_multilib(name, d): def strip_multilib(name, d):
ml = bb.data.getVar("MLPREFIX", d, True) ml = d.getVar("MLPREFIX", True)
if ml and name and len(ml) != 0 and name.find(ml) >= 0: if ml and name and len(ml) != 0 and name.find(ml) >= 0:
return "".join(name.split(ml)) return "".join(name.split(ml))
return name return name
workdir = bb.data.getVar('WORKDIR', d, True) workdir = d.getVar('WORKDIR', True)
outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, True) outdir = d.getVar('DEPLOY_DIR_IPK', True)
tmpdir = bb.data.getVar('TMPDIR', d, True) tmpdir = d.getVar('TMPDIR', True)
pkgd = bb.data.getVar('PKGD', d, True) pkgd = d.getVar('PKGD', True)
pkgdest = bb.data.getVar('PKGDEST', d, True) pkgdest = d.getVar('PKGDEST', True)
if not workdir or not outdir or not pkgd or not tmpdir: if not workdir or not outdir or not pkgd or not tmpdir:
bb.error("Variables incorrectly set, unable to package") bb.error("Variables incorrectly set, unable to package")
return return
packages = bb.data.getVar('PACKAGES', d, True) packages = d.getVar('PACKAGES', True)
if not packages or packages == '': if not packages or packages == '':
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
# Construct the spec file... # Construct the spec file...
srcname = strip_multilib(bb.data.getVar('PN', d, True), d) srcname = strip_multilib(d.getVar('PN', True), d)
outspecfile = workdir + "/" + srcname + ".spec" outspecfile = workdir + "/" + srcname + ".spec"
bb.data.setVar('OUTSPECFILE', outspecfile, d) d.setVar('OUTSPECFILE', outspecfile)
bb.build.exec_func('write_specfile', d) bb.build.exec_func('write_specfile', d)
# Construct per file dependencies file # Construct per file dependencies file
@ -844,10 +844,10 @@ python do_package_rpm () {
outfile.write("\n# Dependency table\n") outfile.write("\n# Dependency table\n")
for pkg in packages.split(): for pkg in packages.split():
dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
dependsflist = (bb.data.getVar(dependsflist_key, d, True) or "") dependsflist = (d.getVar(dependsflist_key, True) or "")
for dfile in dependsflist.split(): for dfile in dependsflist.split():
key = "FILE" + varname + "_" + dfile + "_" + pkg key = "FILE" + varname + "_" + dfile + "_" + pkg
depends_dict = bb.utils.explode_dep_versions(bb.data.getVar(key, d, True) or "") depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "")
file = dfile.replace("@underscore@", "_") file = dfile.replace("@underscore@", "_")
file = file.replace("@closebrace@", "]") file = file.replace("@closebrace@", "]")
file = file.replace("@openbrace@", "[") file = file.replace("@openbrace@", "[")
@ -899,15 +899,15 @@ python do_package_rpm () {
os.chmod(outprovides, 0755) os.chmod(outprovides, 0755)
# Setup the rpmbuild arguments... # Setup the rpmbuild arguments...
rpmbuild = bb.data.getVar('RPMBUILD', d, True) rpmbuild = d.getVar('RPMBUILD', True)
targetsys = bb.data.getVar('TARGET_SYS', d, True) targetsys = d.getVar('TARGET_SYS', True)
targetvendor = bb.data.getVar('TARGET_VENDOR', d, True) targetvendor = d.getVar('TARGET_VENDOR', True)
package_arch = bb.data.getVar('PACKAGE_ARCH', d, True) or "" package_arch = d.getVar('PACKAGE_ARCH', True) or ""
if package_arch not in "all any noarch".split(): if package_arch not in "all any noarch".split():
ml_prefix = (bb.data.getVar('MLPREFIX', d, True) or "").replace("-", "_") ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_")
bb.data.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch, d) d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
else: else:
bb.data.setVar('PACKAGE_ARCH_EXTEND', package_arch, d) d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d) pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d)
pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d) pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d)
magicfile = bb.data.expand('${STAGING_DIR_NATIVE}/usr/share/misc/magic.mgc', d) magicfile = bb.data.expand('${STAGING_DIR_NATIVE}/usr/share/misc/magic.mgc', d)
@ -927,19 +927,19 @@ python do_package_rpm () {
cmd = cmd + " -bb " + outspecfile cmd = cmd + " -bb " + outspecfile
# Build the rpm package! # Build the rpm package!
bb.data.setVar('BUILDSPEC', cmd + "\n", d) d.setVar('BUILDSPEC', cmd + "\n")
bb.data.setVarFlag('BUILDSPEC', 'func', '1', d) d.setVarFlag('BUILDSPEC', 'func', '1')
bb.build.exec_func('BUILDSPEC', d) bb.build.exec_func('BUILDSPEC', d)
} }
python () { python () {
if bb.data.getVar('PACKAGES', d, True) != '': if d.getVar('PACKAGES', True) != '':
deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split() deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
deps.append('rpm-native:do_populate_sysroot') deps.append('rpm-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
bb.data.setVarFlag('do_package_write_rpm', 'fakeroot', 1, d) d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
bb.data.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1, d) d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
} }
SSTATETASKS += "do_package_write_rpm" SSTATETASKS += "do_package_write_rpm"
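The tail of do_package_rpm above uses an idiom worth calling out: the assembled rpmbuild command line is stored in a variable, the variable is marked executable via its 'func' flag, and bb.build.exec_func runs it like any other shell task. A hedged sketch of the idiom with a stand-in command (the echo is illustrative, not the real invocation):

    cmd = "echo rpmbuild would run here"     # stand-in for the assembled command
    d.setVar('BUILDSPEC', cmd + "\n")
    d.setVarFlag('BUILDSPEC', 'func', '1')   # mark BUILDSPEC as an executable function
    bb.build.exec_func('BUILDSPEC', d)       # run it like any other task function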


@ -3,15 +3,15 @@ inherit package
IMAGE_PKGTYPE ?= "tar" IMAGE_PKGTYPE ?= "tar"
python package_tar_fn () { python package_tar_fn () {
fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PKGV', d), bb.data.getVar('PKGR', d))) fn = os.path.join(d.getVar('DEPLOY_DIR_TAR'), "%s-%s-%s.tar.gz" % (d.getVar('PKG'), d.getVar('PKGV'), d.getVar('PKGR')))
fn = bb.data.expand(fn, d) fn = bb.data.expand(fn, d)
bb.data.setVar('PKGFN', fn, d) d.setVar('PKGFN', fn)
} }
python package_tar_install () { python package_tar_install () {
pkg = bb.data.getVar('PKG', d, 1) pkg = d.getVar('PKG', 1)
pkgfn = bb.data.getVar('PKGFN', d, 1) pkgfn = d.getVar('PKGFN', 1)
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1) rootfs = d.getVar('IMAGE_ROOTFS', 1)
if None in (pkg,pkgfn,rootfs): if None in (pkg,pkgfn,rootfs):
bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)") bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
@ -35,24 +35,24 @@ python package_tar_install () {
} }
python do_package_tar () { python do_package_tar () {
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1) outdir = d.getVar('DEPLOY_DIR_TAR', 1)
if not outdir: if not outdir:
bb.error("DEPLOY_DIR_TAR not defined, unable to package") bb.error("DEPLOY_DIR_TAR not defined, unable to package")
return return
bb.mkdirhier(outdir) bb.mkdirhier(outdir)
dvar = bb.data.getVar('D', d, 1) dvar = d.getVar('D', 1)
if not dvar: if not dvar:
bb.error("D not defined, unable to package") bb.error("D not defined, unable to package")
return return
bb.mkdirhier(dvar) bb.mkdirhier(dvar)
packages = bb.data.getVar('PACKAGES', d, 1) packages = d.getVar('PACKAGES', 1)
if not packages: if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package") bb.debug(1, "PACKAGES not defined, nothing to package")
return return
@ -61,11 +61,11 @@ python do_package_tar () {
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
root = "%s/install/%s" % (workdir, pkg) root = "%s/install/%s" % (workdir, pkg)
bb.data.setVar('ROOT', '', localdata) localdata.setVar('ROOT', '')
bb.data.setVar('ROOT_%s' % pkg, root, localdata) localdata.setVar('ROOT_%s' % pkg, root)
bb.data.setVar('PKG', pkg, localdata) localdata.setVar('PKG', pkg)
overrides = bb.data.getVar('OVERRIDES', localdata) overrides = localdata.getVar('OVERRIDES')
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = bb.data.expand(overrides, localdata) overrides = bb.data.expand(overrides, localdata)
@ -73,17 +73,17 @@ python do_package_tar () {
bb.data.update_data(localdata) bb.data.update_data(localdata)
root = bb.data.getVar('ROOT', localdata) root = localdata.getVar('ROOT')
bb.mkdirhier(root) bb.mkdirhier(root)
basedir = os.path.dirname(root) basedir = os.path.dirname(root)
pkgoutdir = outdir pkgoutdir = outdir
bb.mkdirhier(pkgoutdir) bb.mkdirhier(pkgoutdir)
bb.build.exec_func('package_tar_fn', localdata) bb.build.exec_func('package_tar_fn', localdata)
tarfn = bb.data.getVar('PKGFN', localdata, 1) tarfn = localdata.getVar('PKGFN', 1)
os.chdir(root) os.chdir(root)
from glob import glob from glob import glob
if not glob('*'): if not glob('*'):
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PKGV', localdata, 1), bb.data.getVar('PKGR', localdata, 1))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', 1), localdata.getVar('PKGR', 1)))
continue continue
ret = os.system("tar -czf %s %s" % (tarfn, '.')) ret = os.system("tar -czf %s %s" % (tarfn, '.'))
if ret != 0: if ret != 0:
@ -91,12 +91,12 @@ python do_package_tar () {
} }
python () { python () {
if bb.data.getVar('PACKAGES', d, True) != '': if d.getVar('PACKAGES', True) != '':
deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split() deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
deps.append('tar-native:do_populate_sysroot') deps.append('tar-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
bb.data.setVarFlag('do_package_write_ipk', 'fakeroot', "1", d) d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
} }
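The per-package loop in do_package_tar follows the copy-override-update pattern that recurs throughout this patch: take a private copy of the datastore, set a few variables on the copy, then re-expand it so overrides apply. Only helpers with no datastore-method equivalent (createCopy, update_data, expand) remain on bb.data after the conversion. A sketch under those assumptions, with an illustrative package name:

    localdata = bb.data.createCopy(d)        # private copy; d itself is untouched
    localdata.setVar('PKG', 'mypkg')         # 'mypkg' is illustrative
    localdata.setVar('OVERRIDES', 'mypkg:' + (localdata.getVar('OVERRIDES', True) or ''))
    bb.data.update_data(localdata)           # re-apply overrides on the copy
    root = localdata.getVar('ROOT')          # reads now see the per-package values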


@ -1,13 +1,13 @@
python read_subpackage_metadata () { python read_subpackage_metadata () {
import oe.packagedata import oe.packagedata
data = oe.packagedata.read_pkgdata(bb.data.getVar('PN', d, 1), d) data = oe.packagedata.read_pkgdata(d.getVar('PN', 1), d)
for key in data.keys(): for key in data.keys():
bb.data.setVar(key, data[key], d) d.setVar(key, data[key])
for pkg in bb.data.getVar('PACKAGES', d, 1).split(): for pkg in d.getVar('PACKAGES', 1).split():
sdata = oe.packagedata.read_subpkgdata(pkg, d) sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys(): for key in sdata.keys():
bb.data.setVar(key, sdata[key], d) d.setVar(key, sdata[key])
} }
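Note what the second getVar argument carried through this patch means: it is the expand flag, so d.getVar('PN', 1) and d.getVar('PN', True) both return the value with ${...} references resolved, while 0 (or omitting it) returns the raw assignment. A tiny illustrative sketch with hypothetical values:

    d.setVar('PN', 'abiword')
    d.setVar('PV', '2.4')
    d.setVar('P', '${PN}-${PV}')
    d.getVar('P', 0)     # -> '${PN}-${PV}'   raw, unexpanded
    d.getVar('P', 1)     # -> 'abiword-2.4'   expanded; True behaves identically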


@ -10,8 +10,8 @@ PKGHIST_DIR = "${TMPDIR}/pkghistory/${BASEPKG_TARGET_SYS}/"
# for comparision when writing future packages # for comparision when writing future packages
# #
python emit_pkghistory() { python emit_pkghistory() {
packages = bb.data.getVar('PACKAGES', d, True) packages = d.getVar('PACKAGES', True)
pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) pkghistdir = d.getVar('PKGHIST_DIR', True)
# Should check PACKAGES here to see if anything removed # Should check PACKAGES here to see if anything removed
@ -72,14 +72,14 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion):
def write_pkghistory(pkg, pe, pv, pr, d): def write_pkghistory(pkg, pe, pv, pr, d):
bb.debug(2, "Writing package history") bb.debug(2, "Writing package history")
pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) pkghistdir = d.getVar('PKGHIST_DIR', True)
verpath = os.path.join(pkghistdir, pkg, pe, pv, pr) verpath = os.path.join(pkghistdir, pkg, pe, pv, pr)
if not os.path.exists(verpath): if not os.path.exists(verpath):
os.makedirs(verpath) os.makedirs(verpath)
def write_latestlink(pkg, pe, pv, pr, d): def write_latestlink(pkg, pe, pv, pr, d):
pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True) pkghistdir = d.getVar('PKGHIST_DIR', True)
def rm_link(path): def rm_link(path):
try: try:


@ -10,7 +10,7 @@ inherit terminal
python patch_do_patch() { python patch_do_patch() {
import oe.patch import oe.patch
src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split() src_uri = (d.getVar('SRC_URI', 1) or '').split()
if not src_uri: if not src_uri:
return return
@ -20,23 +20,23 @@ python patch_do_patch() {
"git": oe.patch.GitApplyTree, "git": oe.patch.GitApplyTree,
} }
cls = patchsetmap[bb.data.getVar('PATCHTOOL', d, 1) or 'quilt'] cls = patchsetmap[d.getVar('PATCHTOOL', 1) or 'quilt']
resolvermap = { resolvermap = {
"noop": oe.patch.NOOPResolver, "noop": oe.patch.NOOPResolver,
"user": oe.patch.UserResolver, "user": oe.patch.UserResolver,
} }
rcls = resolvermap[bb.data.getVar('PATCHRESOLVE', d, 1) or 'user'] rcls = resolvermap[d.getVar('PATCHRESOLVE', 1) or 'user']
s = bb.data.getVar('S', d, 1) s = d.getVar('S', 1)
path = os.getenv('PATH') path = os.getenv('PATH')
os.putenv('PATH', bb.data.getVar('PATH', d, 1)) os.putenv('PATH', d.getVar('PATH', 1))
classes = {} classes = {}
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
for url in src_uri: for url in src_uri:
(type, host, path, user, pswd, parm) = bb.decodeurl(url) (type, host, path, user, pswd, parm) = bb.decodeurl(url)
@ -76,13 +76,13 @@ python patch_do_patch() {
pname = os.path.basename(local) pname = os.path.basename(local)
if "mindate" in parm or "maxdate" in parm: if "mindate" in parm or "maxdate" in parm:
pn = bb.data.getVar('PN', d, 1) pn = d.getVar('PN', 1)
srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1) srcdate = d.getVar('SRCDATE_%s' % pn, 1)
if not srcdate: if not srcdate:
srcdate = bb.data.getVar('SRCDATE', d, 1) srcdate = d.getVar('SRCDATE', 1)
if srcdate == "now": if srcdate == "now":
srcdate = bb.data.getVar('DATE', d, 1) srcdate = d.getVar('DATE', 1)
if "maxdate" in parm and parm["maxdate"] < srcdate: if "maxdate" in parm and parm["maxdate"] < srcdate:
bb.note("Patch '%s' is outdated" % pname) bb.note("Patch '%s' is outdated" % pname)
@ -94,25 +94,25 @@ python patch_do_patch() {
if "minrev" in parm: if "minrev" in parm:
srcrev = bb.data.getVar('SRCREV', d, 1) srcrev = d.getVar('SRCREV', 1)
if srcrev and srcrev < parm["minrev"]: if srcrev and srcrev < parm["minrev"]:
bb.note("Patch '%s' applies to later revisions" % pname) bb.note("Patch '%s' applies to later revisions" % pname)
continue continue
if "maxrev" in parm: if "maxrev" in parm:
srcrev = bb.data.getVar('SRCREV', d, 1) srcrev = d.getVar('SRCREV', 1)
if srcrev and srcrev > parm["maxrev"]: if srcrev and srcrev > parm["maxrev"]:
bb.note("Patch '%s' applies to earlier revisions" % pname) bb.note("Patch '%s' applies to earlier revisions" % pname)
continue continue
if "rev" in parm: if "rev" in parm:
srcrev = bb.data.getVar('SRCREV', d, 1) srcrev = d.getVar('SRCREV', 1)
if srcrev and parm["rev"] not in srcrev: if srcrev and parm["rev"] not in srcrev:
bb.note("Patch '%s' doesn't apply to revision" % pname) bb.note("Patch '%s' doesn't apply to revision" % pname)
continue continue
if "notrev" in parm: if "notrev" in parm:
srcrev = bb.data.getVar('SRCREV', d, 1) srcrev = d.getVar('SRCREV', 1)
if srcrev and parm["notrev"] in srcrev: if srcrev and parm["notrev"] in srcrev:
bb.note("Patch '%s' doesn't apply to revision" % pname) bb.note("Patch '%s' doesn't apply to revision" % pname)
continue continue


@ -1,6 +1,6 @@
PKG_DISTRIBUTECOMMAND[func] = "1" PKG_DISTRIBUTECOMMAND[func] = "1"
python do_distribute_packages () { python do_distribute_packages () {
cmd = bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1) cmd = d.getVar('PKG_DISTRIBUTECOMMAND', 1)
if not cmd: if not cmd:
raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined") raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d) bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)


@ -1,5 +1,5 @@
python do_pkg_write_metainfo () { python do_pkg_write_metainfo () {
deploydir = bb.data.getVar('DEPLOY_DIR', d, 1) deploydir = d.getVar('DEPLOY_DIR', 1)
if not deploydir: if not deploydir:
bb.error("DEPLOY_DIR not defined, unable to write package info") bb.error("DEPLOY_DIR not defined, unable to write package info")
return return
@ -9,11 +9,11 @@ python do_pkg_write_metainfo () {
except OSError: except OSError:
raise bb.build.FuncFailed("unable to open package-info file for writing.") raise bb.build.FuncFailed("unable to open package-info file for writing.")
name = bb.data.getVar('PN', d, 1) name = d.getVar('PN', 1)
version = bb.data.getVar('PV', d, 1) version = d.getVar('PV', 1)
desc = bb.data.getVar('DESCRIPTION', d, 1) desc = d.getVar('DESCRIPTION', 1)
page = bb.data.getVar('HOMEPAGE', d, 1) page = d.getVar('HOMEPAGE', 1)
lic = bb.data.getVar('LICENSE', d, 1) lic = d.getVar('LICENSE', 1)
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
infofile.close() infofile.close()


@ -2,8 +2,8 @@ do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_popul
do_populate_sdk[recrdeptask] += "do_package_write_deb" do_populate_sdk[recrdeptask] += "do_package_write_deb"
DEB_SDK_ARCH = "${@[bb.data.getVar('SDK_ARCH', d, 1), "i386"]\ DEB_SDK_ARCH = "${@[d.getVar('SDK_ARCH', 1), "i386"]\
[bb.data.getVar('SDK_ARCH', d, 1) in \ [d.getVar('SDK_ARCH', 1) in \
["x86", "i486", "i586", "i686", "pentium"]]}" ["x86", "i486", "i586", "i686", "pentium"]]}"
populate_sdk_post_deb () { populate_sdk_post_deb () {


@ -127,6 +127,6 @@ python () {
localdata.setVar("DEFAULTTUNE", localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + eext[1], False) or "") localdata.setVar("DEFAULTTUNE", localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + eext[1], False) or "")
ml_package_archs += localdata.getVar("PACKAGE_ARCHS", True) or "" ml_package_archs += localdata.getVar("PACKAGE_ARCHS", True) or ""
#bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides))
bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d) d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs)
} }


@ -6,7 +6,7 @@
def qemu_target_binary(data): def qemu_target_binary(data):
import bb import bb
target_arch = bb.data.getVar("TARGET_ARCH", data, 1) target_arch = data.getVar("TARGET_ARCH", 1)
if target_arch in ("i486", "i586", "i686"): if target_arch in ("i486", "i586", "i686"):
target_arch = "i386" target_arch = "i386"
elif target_arch == "powerpc": elif target_arch == "powerpc":


@ -1,4 +1,4 @@
DEPENDS_prepend = "${@["qt4-embedded ", ""][(bb.data.getVar('PN', d, 1)[:12] == 'qt4-embedded')]}" DEPENDS_prepend = "${@["qt4-embedded ", ""][(d.getVar('PN', 1)[:12] == 'qt4-embedded')]}"
inherit qmake2 inherit qmake2


@ -1,4 +1,4 @@
DEPENDS_prepend = "${@["qt4-x11-free ", ""][(bb.data.getVar('BPN', d, True)[:12] == 'qt4-x11-free')]}" DEPENDS_prepend = "${@["qt4-x11-free ", ""][(d.getVar('BPN', True)[:12] == 'qt4-x11-free')]}"
inherit qmake2 inherit qmake2


@ -8,7 +8,7 @@ def process_dir (directory, d):
import stat import stat
cmd = bb.data.expand('${CHRPATH_BIN}', d) cmd = bb.data.expand('${CHRPATH_BIN}', d)
tmpdir = bb.data.getVar('TMPDIR', d) tmpdir = d.getVar('TMPDIR')
basedir = bb.data.expand('${base_prefix}', d) basedir = bb.data.expand('${base_prefix}', d)
#bb.debug("Checking %s for binaries to process" % directory) #bb.debug("Checking %s for binaries to process" % directory)


@ -185,13 +185,13 @@ ipk_insert_feed_uris () {
python () { python () {
if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True): if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d) flags = d.getVarFlag('do_rootfs', 'recrdeptask')
flags = flags.replace("do_package_write_ipk", "") flags = flags.replace("do_package_write_ipk", "")
flags = flags.replace("do_deploy", "") flags = flags.replace("do_deploy", "")
flags = flags.replace("do_populate_sysroot", "") flags = flags.replace("do_populate_sysroot", "")
bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d) d.setVarFlag('do_rootfs', 'recrdeptask', flags)
bb.data.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris", d) d.setVar('OPKG_PREPROCESS_COMMANDS', "package_generate_archlist\nipk_insert_feed_uris")
bb.data.setVar('OPKG_POSTPROCESS_COMMANDS', '', d) d.setVar('OPKG_POSTPROCESS_COMMANDS', '')
} }


@ -200,14 +200,14 @@ install_all_locales() {
} }
python () { python () {
if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True): if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d) flags = d.getVarFlag('do_rootfs', 'recrdeptask')
flags = flags.replace("do_package_write_rpm", "") flags = flags.replace("do_package_write_rpm", "")
flags = flags.replace("do_deploy", "") flags = flags.replace("do_deploy", "")
flags = flags.replace("do_populate_sysroot", "") flags = flags.replace("do_populate_sysroot", "")
bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d) d.setVarFlag('do_rootfs', 'recrdeptask', flags)
bb.data.setVar('RPM_PREPROCESS_COMMANDS', '', d) d.setVar('RPM_PREPROCESS_COMMANDS', '')
bb.data.setVar('RPM_POSTPROCESS_COMMANDS', '', d) d.setVar('RPM_POSTPROCESS_COMMANDS', '')
ml_package_archs = "" ml_package_archs = ""
ml_prefix_list = "" ml_prefix_list = ""
@ -224,6 +224,6 @@ python () {
ml_package_archs += " " + package_archs ml_package_archs += " " + package_archs
ml_prefix_list += " " + eext[1] ml_prefix_list += " " + eext[1]
#bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides)) #bb.note("ML_PACKAGE_ARCHS %s %s %s" % (eext[1], localdata.getVar("PACKAGE_ARCHS", True) or "(none)", overrides))
bb.data.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs, d) d.setVar('MULTILIB_PACKAGE_ARCHS', ml_package_archs)
bb.data.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list, d) d.setVar('MULTILIB_PREFIX_LIST', ml_prefix_list)
} }


@ -14,7 +14,7 @@ def raise_sanity_error(msg):
def check_conf_exists(fn, data): def check_conf_exists(fn, data):
bbpath = [] bbpath = []
fn = bb.data.expand(fn, data) fn = bb.data.expand(fn, data)
vbbpath = bb.data.getVar("BBPATH", data) vbbpath = data.getVar("BBPATH")
if vbbpath: if vbbpath:
bbpath += vbbpath.split(":") bbpath += vbbpath.split(":")
for p in bbpath: for p in bbpath:
@ -87,12 +87,12 @@ def check_connectivity(d):
# URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
# using the same syntax as for SRC_URI. If the variable is not set # using the same syntax as for SRC_URI. If the variable is not set
# the check is skipped # the check is skipped
test_uris = (bb.data.getVar('CONNECTIVITY_CHECK_URIS', d, True) or "").split() test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split()
retval = "" retval = ""
# Only check connectivity if network enabled and the # Only check connectivity if network enabled and the
# CONNECTIVITY_CHECK_URIS are set # CONNECTIVITY_CHECK_URIS are set
network_enabled = not bb.data.getVar('BB_NO_NETWORK', d, True) network_enabled = not d.getVar('BB_NO_NETWORK', True)
check_enabled = len(test_uris) check_enabled = len(test_uris)
# Take a copy of the data store and unset MIRRORS and PREMIRROS # Take a copy of the data store and unset MIRRORS and PREMIRROS
data = bb.data.createCopy(d) data = bb.data.createCopy(d)
@ -105,7 +105,7 @@ def check_connectivity(d):
except Exception: except Exception:
# Allow the message to be configured so that users can be # Allow the message to be configured so that users can be
# pointed to a support mechanism. # pointed to a support mechanism.
msg = bb.data.getVar('CONNECTIVITY_CHECK_MSG', data, True) or "" msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or ""
if len(msg) == 0: if len(msg) == 0:
msg = "Failed to fetch test data from the network. Please ensure your network is configured correctly.\n" msg = "Failed to fetch test data from the network. Please ensure your network is configured correctly.\n"
retval = msg retval = msg
@ -450,7 +450,7 @@ def check_sanity(e):
addhandler check_sanity_eventhandler addhandler check_sanity_eventhandler
python check_sanity_eventhandler() { python check_sanity_eventhandler() {
if bb.event.getName(e) == "ConfigParsed" and bb.data.getVar("BB_WORKERCONTEXT", e.data, True) != "1": if bb.event.getName(e) == "ConfigParsed" and e.data.getVar("BB_WORKERCONTEXT", True) != "1":
check_sanity(e) check_sanity(e)
return return
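Event handlers are the one spot in this patch where the datastore is not simply d: an addhandler function receives the event as e and reaches the datastore through e.data, as the converted guard above shows. A minimal sketch (handler name and event choice are illustrative):

    addhandler my_handler
    python my_handler() {
        if bb.event.getName(e) == "BuildStarted":
            bb.note("building %s" % e.data.getVar('BUILDNAME', True))
    }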


@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
shared_state = sstate_state_fromvars(d) shared_state = sstate_state_fromvars(d)
if shared_state['name'] != 'populate-sysroot': if shared_state['name'] != 'populate-sysroot':
return return
if not os.path.isdir(os.path.join(bb.data.getVar('FILE_DIRNAME', d, 1), 'site_config')): if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', 1), 'site_config')):
bb.debug(1, "No site_config directory, skipping do_siteconfig") bb.debug(1, "No site_config directory, skipping do_siteconfig")
return return
bb.build.exec_func('do_siteconfig_gencache', d) bb.build.exec_func('do_siteconfig_gencache', d)


@ -130,7 +130,7 @@ def siteinfo_get_files(d, no_cache = False):
if no_cache: return sitefiles if no_cache: return sitefiles
# Now check for siteconfig cache files # Now check for siteconfig cache files
path_siteconfig = bb.data.getVar('SITECONFIG_SYSROOTCACHE', d, 1) path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE', 1)
if os.path.isdir(path_siteconfig): if os.path.isdir(path_siteconfig):
for i in os.listdir(path_siteconfig): for i in os.listdir(path_siteconfig):
filename = os.path.join(path_siteconfig, i) filename = os.path.join(path_siteconfig, i)


@ -6,12 +6,12 @@ DISTRO ?= "openembedded"
def get_src_tree(d): def get_src_tree(d):
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to find source tree.") bb.error("WORKDIR not defined, unable to find source tree.")
return return
s = bb.data.getVar('S', d, 0) s = d.getVar('S', 0)
if not s: if not s:
bb.error("S not defined, unable to find source tree.") bb.error("S not defined, unable to find source tree.")
return return
@ -55,8 +55,8 @@ sourcepkg_do_archive_bb() {
python sourcepkg_do_dumpdata() { python sourcepkg_do_dumpdata() {
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
distro = bb.data.getVar('DISTRO', d, 1) distro = d.getVar('DISTRO', 1)
s_tree = get_src_tree(d) s_tree = get_src_tree(d)
openembeddeddir = os.path.join(workdir, s_tree, distro) openembeddeddir = os.path.join(workdir, s_tree, distro)
dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d)) dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
@ -73,8 +73,8 @@ python sourcepkg_do_dumpdata() {
bb.data.emit_env(f, d, True) bb.data.emit_env(f, d, True)
# emit the metadata which isnt valid shell # emit the metadata which isnt valid shell
for e in d.keys(): for e in d.keys():
if bb.data.getVarFlag(e, 'python', d): if d.getVarFlag(e, 'python'):
f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1))) f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, 1)))
f.close() f.close()
} }


@ -3,12 +3,12 @@ python do_distribute_sources () {
l = bb.data.createCopy(d) l = bb.data.createCopy(d)
bb.data.update_data(l) bb.data.update_data(l)
sources_dir = bb.data.getVar('SRC_DISTRIBUTEDIR', d, 1) sources_dir = d.getVar('SRC_DISTRIBUTEDIR', 1)
src_uri = bb.data.getVar('SRC_URI', d, 1).split() src_uri = d.getVar('SRC_URI', 1).split()
fetcher = bb.fetch2.Fetch(src_uri, d) fetcher = bb.fetch2.Fetch(src_uri, d)
ud = fetcher.ud ud = fetcher.ud
licenses = bb.data.getVar('LICENSE', d, 1).replace('&', '|') licenses = d.getVar('LICENSE', 1).replace('&', '|')
licenses = licenses.replace('(', '').replace(')', '') licenses = licenses.replace('(', '').replace(')', '')
clean_licenses = "" clean_licenses = ""
for x in licenses.split(): for x in licenses.split():
@ -20,20 +20,20 @@ python do_distribute_sources () {
for license in clean_licenses.split('|'): for license in clean_licenses.split('|'):
for url in ud.values(): for url in ud.values():
cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1) cmd = d.getVar('SRC_DISTRIBUTECOMMAND', 1)
if not cmd: if not cmd:
raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined") raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
url.setup_localpath(d) url.setup_localpath(d)
bb.data.setVar('SRC', url.localpath, d) d.setVar('SRC', url.localpath)
if url.type == 'file': if url.type == 'file':
if url.basename == '*': if url.basename == '*':
import os.path import os.path
dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
bb.data.setVar('DEST', "%s_%s/" % (bb.data.getVar('PF', d, 1), dest_dir), d) bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d)
else: else:
bb.data.setVar('DEST', "%s_%s" % (bb.data.getVar('PF', d, 1), url.basename), d) bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d)
else: else:
bb.data.setVar('DEST', '', d) d.setVar('DEST', '')
bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d) bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d)
bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)


@ -20,7 +20,7 @@ SSTATEPOSTINSTFUNCS ?= ""
python () { python () {
if bb.data.inherits_class('native', d): if bb.data.inherits_class('native', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.getVar('BUILD_ARCH', d), d) bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d)
elif bb.data.inherits_class('cross', d): elif bb.data.inherits_class('cross', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d) bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d)
bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d) bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d)
@ -37,19 +37,19 @@ python () {
# reused if we manipulate the paths # reused if we manipulate the paths
if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d):
scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
bb.data.setVar('SSTATE_SCAN_CMD', scan_cmd, d) d.setVar('SSTATE_SCAN_CMD', scan_cmd)
unique_tasks = set((bb.data.getVar('SSTATETASKS', d, True) or "").split()) unique_tasks = set((d.getVar('SSTATETASKS', True) or "").split())
d.setVar('SSTATETASKS', " ".join(unique_tasks)) d.setVar('SSTATETASKS', " ".join(unique_tasks))
namemap = [] namemap = []
for task in unique_tasks: for task in unique_tasks:
namemap.append(bb.data.getVarFlag(task, 'sstate-name', d)) namemap.append(d.getVarFlag(task, 'sstate-name'))
funcs = bb.data.getVarFlag(task, 'prefuncs', d) or "" funcs = d.getVarFlag(task, 'prefuncs') or ""
funcs = "sstate_task_prefunc " + funcs funcs = "sstate_task_prefunc " + funcs
bb.data.setVarFlag(task, 'prefuncs', funcs, d) d.setVarFlag(task, 'prefuncs', funcs)
funcs = bb.data.getVarFlag(task, 'postfuncs', d) or "" funcs = d.getVarFlag(task, 'postfuncs') or ""
funcs = funcs + " sstate_task_postfunc" funcs = funcs + " sstate_task_postfunc"
bb.data.setVarFlag(task, 'postfuncs', funcs, d) d.setVarFlag(task, 'postfuncs', funcs)
d.setVar('SSTATETASKNAMES', " ".join(namemap)) d.setVar('SSTATETASKNAMES', " ".join(namemap))
} }
@ -65,18 +65,18 @@ def sstate_init(name, task, d):
def sstate_state_fromvars(d, task = None): def sstate_state_fromvars(d, task = None):
if task is None: if task is None:
task = bb.data.getVar('BB_CURRENTTASK', d, True) task = d.getVar('BB_CURRENTTASK', True)
if not task: if not task:
bb.fatal("sstate code running without task context?!") bb.fatal("sstate code running without task context?!")
task = task.replace("_setscene", "") task = task.replace("_setscene", "")
name = bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-name', d), d) name = bb.data.expand(d.getVarFlag("do_" + task, 'sstate-name'), d)
inputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-inputdirs', d) or "", d)).split() inputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-inputdirs') or "", d)).split()
outputs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-outputdirs', d) or "", d)).split() outputs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-outputdirs') or "", d)).split()
plaindirs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-plaindirs', d) or "", d)).split() plaindirs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-plaindirs') or "", d)).split()
lockfiles = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile', d) or "", d)).split() lockfiles = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile') or "", d)).split()
lockfilesshared = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-lockfile-shared', d) or "", d)).split() lockfilesshared = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "", d)).split()
interceptfuncs = (bb.data.expand(bb.data.getVarFlag("do_" + task, 'sstate-interceptfuncs', d) or "", d)).split() interceptfuncs = (bb.data.expand(d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "", d)).split()
if not name or len(inputs) != len(outputs): if not name or len(inputs) != len(outputs):
bb.fatal("sstate variables not setup correctly?!") bb.fatal("sstate variables not setup correctly?!")
@ -139,7 +139,7 @@ def sstate_install(ss, d):
f.write(di + "\n") f.write(di + "\n")
f.close() f.close()
for postinst in (bb.data.getVar('SSTATEPOSTINSTFUNCS', d, True) or '').split(): for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split():
bb.build.exec_func(postinst, d) bb.build.exec_func(postinst, d)
for lock in locks: for lock in locks:
@ -156,7 +156,7 @@ def sstate_installpkg(ss, d):
oe.path.remove(dir) oe.path.remove(dir)
sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d) sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d)
sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_' + ss['name'] + ".tgz" sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz"
if not os.path.exists(sstatepkg): if not os.path.exists(sstatepkg):
pstaging_fetch(sstatepkg, d) pstaging_fetch(sstatepkg, d)
@ -167,16 +167,16 @@ def sstate_installpkg(ss, d):
sstate_clean(ss, d) sstate_clean(ss, d)
bb.data.setVar('SSTATE_INSTDIR', sstateinst, d) d.setVar('SSTATE_INSTDIR', sstateinst)
bb.data.setVar('SSTATE_PKG', sstatepkg, d) d.setVar('SSTATE_PKG', sstatepkg)
bb.build.exec_func('sstate_unpack_package', d) bb.build.exec_func('sstate_unpack_package', d)
# Fixup hardcoded paths # Fixup hardcoded paths
fixmefn = sstateinst + "fixmepath" fixmefn = sstateinst + "fixmepath"
if os.path.isfile(fixmefn): if os.path.isfile(fixmefn):
staging = bb.data.getVar('STAGING_DIR', d, True) staging = d.getVar('STAGING_DIR', True)
staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True) staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True) staging_host = d.getVar('STAGING_DIR_HOST', True)
fixmefd = open(fixmefn, "r") fixmefd = open(fixmefn, "r")
fixmefiles = fixmefd.readlines() fixmefiles = fixmefd.readlines()
fixmefd.close() fixmefd.close()
@ -206,13 +206,13 @@ def sstate_installpkg(ss, d):
def sstate_clean_cachefile(ss, d): def sstate_clean_cachefile(ss, d):
import oe.path import oe.path
sstatepkgdir = bb.data.getVar('SSTATE_DIR', d, True) sstatepkgdir = d.getVar('SSTATE_DIR', True)
sstatepkgfile = sstatepkgdir + '/' + bb.data.getVar('SSTATE_PKGSPEC', d, True) + "*_" + ss['name'] + ".tgz*" sstatepkgfile = sstatepkgdir + '/' + d.getVar('SSTATE_PKGSPEC', True) + "*_" + ss['name'] + ".tgz*"
bb.note("Removing %s" % sstatepkgfile) bb.note("Removing %s" % sstatepkgfile)
oe.path.remove(sstatepkgfile) oe.path.remove(sstatepkgfile)
def sstate_clean_cachefiles(d): def sstate_clean_cachefiles(d):
for task in (bb.data.getVar('SSTATETASKS', d, True) or "").split(): for task in (d.getVar('SSTATETASKS', True) or "").split():
ss = sstate_state_fromvars(d, task[3:]) ss = sstate_state_fromvars(d, task[3:])
sstate_clean_cachefile(ss, d) sstate_clean_cachefile(ss, d)
@ -274,10 +274,10 @@ CLEANFUNCS += "sstate_cleanall"
python sstate_cleanall() { python sstate_cleanall() {
import fnmatch import fnmatch
bb.note("Removing shared state for package %s" % bb.data.getVar('PN', d, True)) bb.note("Removing shared state for package %s" % d.getVar('PN', True))
manifest_dir = bb.data.getVar('SSTATE_MANIFESTS', d, True) manifest_dir = d.getVar('SSTATE_MANIFESTS', True)
manifest_prefix = bb.data.getVar("SSTATE_MANFILEPREFIX", d, True) manifest_prefix = d.getVar("SSTATE_MANFILEPREFIX", True)
manifest_pattern = os.path.basename(manifest_prefix) + ".*" manifest_pattern = os.path.basename(manifest_prefix) + ".*"
if not os.path.exists(manifest_dir): if not os.path.exists(manifest_dir):
@ -298,7 +298,7 @@ python sstate_cleanall() {
def sstate_hardcode_path(d): def sstate_hardcode_path(d):
# Need to remove hardcoded paths and fix these when we install the # Need to remove hardcoded paths and fix these when we install the
# staging packages. # staging packages.
sstate_scan_cmd = bb.data.getVar('SSTATE_SCAN_CMD', d, True) sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
p = os.popen("%s" % sstate_scan_cmd) p = os.popen("%s" % sstate_scan_cmd)
file_list = p.read() file_list = p.read()
@ -306,10 +306,10 @@ def sstate_hardcode_path(d):
p.close() p.close()
return return
staging = bb.data.getVar('STAGING_DIR', d, True) staging = d.getVar('STAGING_DIR', True)
staging_target = bb.data.getVar('STAGING_DIR_TARGET', d, True) staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = bb.data.getVar('STAGING_DIR_HOST', d, True) staging_host = d.getVar('STAGING_DIR_HOST', True)
sstate_builddir = bb.data.getVar('SSTATE_BUILDDIR', d, True) sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
for i in file_list.split('\n'): for i in file_list.split('\n'):
if not i: if not i:
@ -349,10 +349,10 @@ def sstate_package(ss, d):
os.remove(path) os.remove(path)
os.symlink(base, path) os.symlink(base, path)
tmpdir = bb.data.getVar('TMPDIR', d, True) tmpdir = d.getVar('TMPDIR', True)
sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d) sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d)
sstatepkg = bb.data.getVar('SSTATE_PKG', d, True) + '_'+ ss['name'] + ".tgz" sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['name'] + ".tgz"
bb.mkdirhier(sstatebuild) bb.mkdirhier(sstatebuild)
bb.mkdirhier(os.path.dirname(sstatepkg)) bb.mkdirhier(os.path.dirname(sstatepkg))
for state in ss['dirs']: for state in ss['dirs']:
@ -369,15 +369,15 @@ def sstate_package(ss, d):
bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
oe.path.copytree(state[1], sstatebuild + state[0]) oe.path.copytree(state[1], sstatebuild + state[0])
workdir = bb.data.getVar('WORKDIR', d, True) workdir = d.getVar('WORKDIR', True)
for plain in ss['plaindirs']: for plain in ss['plaindirs']:
pdir = plain.replace(workdir, sstatebuild) pdir = plain.replace(workdir, sstatebuild)
bb.mkdirhier(plain) bb.mkdirhier(plain)
bb.mkdirhier(pdir) bb.mkdirhier(pdir)
oe.path.copytree(plain, pdir) oe.path.copytree(plain, pdir)
bb.data.setVar('SSTATE_BUILDDIR', sstatebuild, d) d.setVar('SSTATE_BUILDDIR', sstatebuild)
bb.data.setVar('SSTATE_PKG', sstatepkg, d) d.setVar('SSTATE_PKG', sstatepkg)
sstate_hardcode_path(d) sstate_hardcode_path(d)
bb.build.exec_func('sstate_create_package', d) bb.build.exec_func('sstate_create_package', d)
@ -389,7 +389,7 @@ def pstaging_fetch(sstatepkg, d):
import bb.fetch2 import bb.fetch2
# Only try and fetch if the user has configured a mirror # Only try and fetch if the user has configured a mirror
mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True) mirrors = d.getVar('SSTATE_MIRRORS', True)
if not mirrors: if not mirrors:
return return
@ -402,9 +402,9 @@ def pstaging_fetch(sstatepkg, d):
bb.mkdirhier(dldir) bb.mkdirhier(dldir)
bb.data.setVar('DL_DIR', dldir, localdata) localdata.setVar('DL_DIR', dldir)
bb.data.setVar('PREMIRRORS', mirrors, localdata) localdata.setVar('PREMIRRORS', mirrors)
bb.data.setVar('SRC_URI', srcuri, localdata) localdata.setVar('SRC_URI', srcuri)
# Try a fetch from the sstate mirror, if it fails just return and # Try a fetch from the sstate mirror, if it fails just return and
# we will build the package # we will build the package
@ -493,15 +493,15 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
else: else:
bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile)
mirrors = bb.data.getVar("SSTATE_MIRRORS", d, True) mirrors = d.getVar("SSTATE_MIRRORS", True)
if mirrors: if mirrors:
# Copy the data object and override DL_DIR and SRC_URI # Copy the data object and override DL_DIR and SRC_URI
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
bb.data.update_data(localdata) bb.data.update_data(localdata)
dldir = bb.data.expand("${SSTATE_DIR}", localdata) dldir = bb.data.expand("${SSTATE_DIR}", localdata)
bb.data.setVar('DL_DIR', dldir, localdata) localdata.setVar('DL_DIR', dldir)
bb.data.setVar('PREMIRRORS', mirrors, localdata) localdata.setVar('PREMIRRORS', mirrors)
bb.debug(2, "SState using premirror of: %s" % mirrors) bb.debug(2, "SState using premirror of: %s" % mirrors)
@ -513,7 +513,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task]) sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task])
srcuri = "file://" + os.path.basename(sstatefile) srcuri = "file://" + os.path.basename(sstatefile)
bb.data.setVar('SRC_URI', srcuri, localdata) localdata.setVar('SRC_URI', srcuri)
bb.debug(2, "SState: Attempting to fetch %s" % srcuri) bb.debug(2, "SState: Attempting to fetch %s" % srcuri)
try: try:


@ -84,7 +84,7 @@ python do_populate_sysroot () {
# #
bb.build.exec_func("sysroot_stage_all", d) bb.build.exec_func("sysroot_stage_all", d)
for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split(): for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
} }
@ -100,8 +100,8 @@ python do_populate_sysroot_setscene () {
addtask do_populate_sysroot_setscene addtask do_populate_sysroot_setscene
python () { python () {
if bb.data.getVar('do_stage', d, True) is not None: if d.getVar('do_stage', True) is not None:
bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % bb.data.getVar("FILE", d, True)) bb.fatal("Legacy staging found for %s as it has a do_stage function. This will need conversion to a do_install or often simply removal to work with Poky" % d.getVar("FILE", True))
} }


@ -8,12 +8,12 @@ python build_syslinux_menu () {
import copy import copy
import sys import sys
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
if not workdir: if not workdir:
bb.error("WORKDIR is not defined") bb.error("WORKDIR is not defined")
return return
labels = bb.data.getVar('LABELS', d, 1) labels = d.getVar('LABELS', 1)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
@ -22,7 +22,7 @@ python build_syslinux_menu () {
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = bb.data.getVar('SYSLINUXMENU', d, 1) cfile = d.getVar('SYSLINUXMENU', 1)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
@ -45,15 +45,15 @@ python build_syslinux_menu () {
from copy import deepcopy from copy import deepcopy
localdata = deepcopy(d) localdata = deepcopy(d)
overrides = bb.data.getVar('OVERRIDES', localdata) overrides = localdata.getVar('OVERRIDES')
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = bb.data.expand(overrides, localdata) overrides = bb.data.expand(overrides, localdata)
bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata) localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
usage = bb.data.getVar('USAGE', localdata, 1) usage = localdata.getVar('USAGE', 1)
cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
cfgfile.write('%s\n' % (usage)) cfgfile.write('%s\n' % (usage))
@ -67,12 +67,12 @@ python build_syslinux_cfg () {
import copy import copy
import sys import sys
workdir = bb.data.getVar('WORKDIR', d, 1) workdir = d.getVar('WORKDIR', 1)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
labels = bb.data.getVar('LABELS', d, 1) labels = d.getVar('LABELS', 1)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
@ -81,7 +81,7 @@ python build_syslinux_cfg () {
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = bb.data.getVar('SYSLINUXCFG', d, 1) cfile = d.getVar('SYSLINUXCFG', 1)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
@ -98,7 +98,7 @@ python build_syslinux_cfg () {
cfgfile.write('# Automatically created by OE\n') cfgfile.write('# Automatically created by OE\n')
opts = bb.data.getVar('SYSLINUX_OPTS', d, 1) opts = d.getVar('SYSLINUX_OPTS', 1)
if opts: if opts:
for opt in opts.split(';'): for opt in opts.split(';'):
@ -107,7 +107,7 @@ python build_syslinux_cfg () {
cfgfile.write('ALLOWOPTIONS 1\n'); cfgfile.write('ALLOWOPTIONS 1\n');
cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
timeout = bb.data.getVar('SYSLINUX_TIMEOUT', d, 1) timeout = d.getVar('SYSLINUX_TIMEOUT', 1)
if timeout: if timeout:
cfgfile.write('TIMEOUT %s\n' % timeout) cfgfile.write('TIMEOUT %s\n' % timeout)
@ -116,29 +116,29 @@ python build_syslinux_cfg () {
cfgfile.write('PROMPT 1\n') cfgfile.write('PROMPT 1\n')
menu = bb.data.getVar('AUTO_SYSLINUXMENU', d, 1) menu = d.getVar('AUTO_SYSLINUXMENU', 1)
# This is ugly. My bad. # This is ugly. My bad.
if menu: if menu:
bb.build.exec_func('build_syslinux_menu', d) bb.build.exec_func('build_syslinux_menu', d)
mfile = bb.data.getVar('SYSLINUXMENU', d, 1) mfile = d.getVar('SYSLINUXMENU', 1)
cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
for label in labels.split(): for label in labels.split():
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = bb.data.getVar('OVERRIDES', localdata, True) overrides = localdata.getVar('OVERRIDES', True)
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
bb.data.setVar('OVERRIDES', label + ':' + overrides, localdata) localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
cfgfile.write('LABEL %s\nKERNEL vmlinuz\n' % (label)) cfgfile.write('LABEL %s\nKERNEL vmlinuz\n' % (label))
append = bb.data.getVar('APPEND', localdata, 1) append = localdata.getVar('APPEND', 1)
initrd = bb.data.getVar('INITRD', localdata, 1) initrd = localdata.getVar('INITRD', 1)
if append: if append:
cfgfile.write('APPEND ') cfgfile.write('APPEND ')
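
For reference, the per-label pattern in the hunk above reads as follows once converted. This is a minimal sketch, assuming BitBake's datastore d and a label string taken from LABELS:

    # Localize OVERRIDES so label-specific values do not leak back into d.
    localdata = bb.data.createCopy(d)
    overrides = localdata.getVar('OVERRIDES', True)
    if not overrides:
        raise bb.build.FuncFailed('OVERRIDES not defined')
    localdata.setVar('OVERRIDES', label + ':' + overrides)
    bb.data.update_data(localdata)          # re-resolve overrides on the copy
    append = localdata.getVar('APPEND', 1)  # now sees the label-specific value
    initrd = localdata.getVar('INITRD', 1)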

View File

@ -17,7 +17,7 @@ PACKAGE_ARCH = "all"
# to the list. Their dependencies (RRECOMMENDS) are handled as usual # to the list. Their dependencies (RRECOMMENDS) are handled as usual
# by package_depchains in a following step. # by package_depchains in a following step.
python () { python () {
packages = bb.data.getVar('PACKAGES', d, 1).split() packages = d.getVar('PACKAGES', 1).split()
genpackages = [] genpackages = []
for pkg in packages: for pkg in packages:
for postfix in ['-dbg', '-dev']: for postfix in ['-dbg', '-dev']:

View File

@ -137,8 +137,8 @@ toolchain_create_sdk_version () {
} }
python __anonymous () { python __anonymous () {
deps = bb.data.getVarFlag('do_configure', 'depends', d) or "" deps = d.getVarFlag('do_configure', 'depends') or ""
for dep in (bb.data.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', d, True) or "").split(): for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split():
deps += " %s:do_populate_sysroot" % dep deps += " %s:do_populate_sysroot" % dep
bb.data.setVarFlag('do_configure', 'depends', deps, d) d.setVarFlag('do_configure', 'depends', deps)
} }
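
The flag accessors follow the same mapping as the plain get/set calls. A sketch of the converted idiom, with an illustrative extra dependency:

    # Old indirect form:
    #   deps = bb.data.getVarFlag('do_configure', 'depends', d) or ""
    #   bb.data.setVarFlag('do_configure', 'depends', deps, d)
    # Direct form:
    deps = d.getVarFlag('do_configure', 'depends') or ""
    deps += " quilt-native:do_populate_sysroot"   # illustrative dependency
    d.setVarFlag('do_configure', 'depends', deps)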

View File

@ -78,38 +78,38 @@ fi
} }
def update_alternatives_after_parse(d): def update_alternatives_after_parse(d):
if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: if d.getVar('ALTERNATIVE_LINKS') != None:
doinstall = bb.data.getVar('do_install', d, 0) doinstall = d.getVar('do_install', 0)
doinstall += bb.data.getVar('update_alternatives_batch_doinstall', d, 0) doinstall += d.getVar('update_alternatives_batch_doinstall', 0)
bb.data.setVar('do_install', doinstall, d) d.setVar('do_install', doinstall)
return return
if bb.data.getVar('ALTERNATIVE_NAME', d) == None: if d.getVar('ALTERNATIVE_NAME') == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d) raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
if bb.data.getVar('ALTERNATIVE_PATH', d) == None: if d.getVar('ALTERNATIVE_PATH') == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d) raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
python __anonymous() { python __anonymous() {
update_alternatives_after_parse(d) update_alternatives_after_parse(d)
} }
python populate_packages_prepend () { python populate_packages_prepend () {
pkg = bb.data.getVar('PN', d, 1) pkg = d.getVar('PN', 1)
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1) postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: if d.getVar('ALTERNATIVE_LINKS') != None:
postinst += bb.data.getVar('update_alternatives_batch_postinst', d, 1) postinst += d.getVar('update_alternatives_batch_postinst', 1)
else: else:
postinst += bb.data.getVar('update_alternatives_postinst', d, 1) postinst += d.getVar('update_alternatives_postinst', 1)
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1) postrm = d.getVar('pkg_postrm_%s' % pkg, 1) or d.getVar('pkg_postrm', 1)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
if bb.data.getVar('ALTERNATIVE_LINKS', d) != None: if d.getVar('ALTERNATIVE_LINKS') != None:
postrm += bb.data.getVar('update_alternatives_batch_postrm', d, 1) postrm += d.getVar('update_alternatives_batch_postrm', 1)
else: else:
postrm += bb.data.getVar('update_alternatives_postrm', d, 1) postrm += d.getVar('update_alternatives_postrm', 1)
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) d.setVar('pkg_postrm_%s' % pkg, postrm)
} }
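
The postinst/postrm handling above is one instance of a recurring pattern: look up the per-package script, fall back to the generic one, append the class fragment, and store the result back. A condensed sketch in the direct-access style, using the same variable names as the class above:

    pkg = d.getVar('PN', 1)
    postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
    if not postinst:
        postinst = '#!/bin/sh\n'    # start a fresh script if none exists yet
    postinst += d.getVar('update_alternatives_postinst', 1)
    d.setVar('pkg_postinst_%s' % pkg, postinst)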

View File

@ -30,11 +30,11 @@ update-rc.d $D ${INITSCRIPT_NAME} remove
def update_rc_after_parse(d): def update_rc_after_parse(d):
if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None: if d.getVar('INITSCRIPT_PACKAGES') == None:
if bb.data.getVar('INITSCRIPT_NAME', d) == None: if d.getVar('INITSCRIPT_NAME') == None:
raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d) raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % d.getVar('FILE')
if bb.data.getVar('INITSCRIPT_PARAMS', d) == None: if d.getVar('INITSCRIPT_PARAMS') == None:
raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d) raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % d.getVar('FILE')
python __anonymous() { python __anonymous() {
update_rc_after_parse(d) update_rc_after_parse(d)
@ -44,7 +44,7 @@ python populate_packages_prepend () {
def update_rcd_package(pkg): def update_rcd_package(pkg):
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = bb.data.getVar("OVERRIDES", localdata, 1) overrides = localdata.getVar("OVERRIDES", 1)
bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata) bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
bb.data.update_data(localdata) bb.data.update_data(localdata)
@ -53,28 +53,28 @@ python populate_packages_prepend () {
execute on the target. Not doing so may cause the update_rc.d postinst to be execute on the target. Not doing so may cause the update_rc.d postinst to be
invoked twice, causing unwanted warnings. invoked twice, causing unwanted warnings.
""" """
postinst = bb.data.getVar('pkg_postinst', localdata, 1) postinst = localdata.getVar('pkg_postinst', 1)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += bb.data.getVar('updatercd_postinst', localdata, 1) postinst += localdata.getVar('updatercd_postinst', 1)
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d) d.setVar('pkg_postinst_%s' % pkg, postinst)
prerm = bb.data.getVar('pkg_prerm', localdata, 1) prerm = localdata.getVar('pkg_prerm', 1)
if not prerm: if not prerm:
prerm = '#!/bin/sh\n' prerm = '#!/bin/sh\n'
prerm += bb.data.getVar('updatercd_prerm', localdata, 1) prerm += localdata.getVar('updatercd_prerm', 1)
bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d) d.setVar('pkg_prerm_%s' % pkg, prerm)
postrm = bb.data.getVar('pkg_postrm', localdata, 1) postrm = localdata.getVar('pkg_postrm', 1)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += bb.data.getVar('updatercd_postrm', localdata, 1) postrm += localdata.getVar('updatercd_postrm', 1)
bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d) d.setVar('pkg_postrm_%s' % pkg, postrm)
pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1) pkgs = d.getVar('INITSCRIPT_PACKAGES', 1)
if pkgs == None: if pkgs == None:
pkgs = bb.data.getVar('UPDATERCPN', d, 1) pkgs = d.getVar('UPDATERCPN', 1)
packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() packages = (d.getVar('PACKAGES', 1) or "").split()
if not pkgs in packages and packages != []: if not pkgs in packages and packages != []:
pkgs = packages[0] pkgs = packages[0]
for pkg in pkgs.split(): for pkg in pkgs.split():

View File

@ -107,11 +107,11 @@ def update_useradd_after_parse(d):
useradd_packages = d.getVar('USERADD_PACKAGES', True) useradd_packages = d.getVar('USERADD_PACKAGES', True)
if not useradd_packages: if not useradd_packages:
raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % bb.data.getVar('FILE', d) raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')
for pkg in useradd_packages.split(): for pkg in useradd_packages.split():
if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True): if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (bb.data.getVar('FILE', d), pkg) raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)
python __anonymous() { python __anonymous() {
update_useradd_after_parse(d) update_useradd_after_parse(d)
@ -147,12 +147,12 @@ fakeroot python populate_packages_prepend () {
if not preinst: if not preinst:
preinst = '#!/bin/sh\n' preinst = '#!/bin/sh\n'
preinst += d.getVar('useradd_preinst', True) preinst += d.getVar('useradd_preinst', True)
bb.data.setVar('pkg_preinst_%s' % pkg, preinst, d) d.setVar('pkg_preinst_%s' % pkg, preinst)
# RDEPENDS setup # RDEPENDS setup
rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
rdepends += " base-passwd shadow" rdepends += " base-passwd shadow"
bb.data.setVar("RDEPENDS_%s" % pkg, rdepends, d) d.setVar("RDEPENDS_%s" % pkg, rdepends)
# Add the user/group preinstall scripts and RDEPENDS requirements # Add the user/group preinstall scripts and RDEPENDS requirements
# to packages specified by USERADD_PACKAGES # to packages specified by USERADD_PACKAGES

View File

@ -6,7 +6,7 @@ python do_listtasks() {
#bb.data.emit_env(sys.__stdout__, d) #bb.data.emit_env(sys.__stdout__, d)
# emit the metadata which isn't valid shell # emit the metadata which isn't valid shell
for e in d.keys(): for e in d.keys():
if bb.data.getVarFlag(e, 'task', d): if d.getVarFlag(e, 'task'):
bb.plain("%s" % e) bb.plain("%s" % e)
} }
@ -20,18 +20,18 @@ python do_clean() {
bb.note("Removing " + dir) bb.note("Removing " + dir)
oe.path.remove(dir) oe.path.remove(dir)
dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d) dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
bb.note("Removing " + dir) bb.note("Removing " + dir)
oe.path.remove(dir) oe.path.remove(dir)
for f in (bb.data.getVar('CLEANFUNCS', d, 1) or '').split(): for f in (d.getVar('CLEANFUNCS', 1) or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
} }
addtask checkuri addtask checkuri
do_checkuri[nostamp] = "1" do_checkuri[nostamp] = "1"
python do_checkuri() { python do_checkuri() {
src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split() src_uri = (d.getVar('SRC_URI', True) or "").split()
if len(src_uri) == 0: if len(src_uri) == 0:
return return
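
Two details of the direct API show up throughout this diff: the second argument to getVar is the expansion flag, and an unset variable comes back as None, hence the 'or' guards before split(). A short sketch:

    workdir = d.getVar('WORKDIR', 1)      # 1/True: return the expanded value
    raw     = d.getVar('do_install', 0)   # 0: return the raw, unexpanded value
    # Guard against unset variables (None) before splitting:
    for f in (d.getVar('CLEANFUNCS', 1) or '').split():
        bb.build.exec_func(f, d)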

View File

@ -331,12 +331,12 @@ def explode_deps(s):
def base_set_filespath(path, d): def base_set_filespath(path, d):
filespath = [] filespath = []
extrapaths = (bb.data.getVar("FILESEXTRAPATHS", d, True) or "") extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
# Don't prepend empty strings to the path list # Don't prepend empty strings to the path list
if extrapaths != "": if extrapaths != "":
path = extrapaths.split(":") + path path = extrapaths.split(":") + path
# The ":" ensures we have an 'empty' override # The ":" ensures we have an 'empty' override
overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":" overrides = (d.getVar("OVERRIDES", 1) or "") + ":"
for p in path: for p in path:
if p != "": if p != "":
for o in overrides.split(":"): for o in overrides.split(":"):

View File

@ -99,7 +99,7 @@ ABIEXTENSION ??= ""
TARGET_ARCH = "${TUNE_ARCH}" TARGET_ARCH = "${TUNE_ARCH}"
TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}"
TARGET_VENDOR = "-oe" TARGET_VENDOR = "-oe"
TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + bb.data.getVar('TARGET_OS', d, 1), ''][bb.data.getVar('TARGET_OS', d, 1) == ('' or 'custom')]}" TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', 1), ''][d.getVar('TARGET_OS', 1) == ('' or 'custom')]}"
TARGET_PREFIX = "${TARGET_SYS}-" TARGET_PREFIX = "${TARGET_SYS}-"
TARGET_CC_ARCH = "${TUNE_CCARGS}" TARGET_CC_ARCH = "${TUNE_CCARGS}"
TARGET_LD_ARCH = "${TUNE_LDARGS}" TARGET_LD_ARCH = "${TUNE_LDARGS}"
@ -108,7 +108,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}"
SDK_ARCH = "${BUILD_ARCH}" SDK_ARCH = "${BUILD_ARCH}"
SDK_OS = "${BUILD_OS}" SDK_OS = "${BUILD_OS}"
SDK_VENDOR = "-oesdk" SDK_VENDOR = "-oesdk"
SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + bb.data.getVar('SDK_OS', d, 1), ''][bb.data.getVar('SDK_OS', d, 1) == ('' or 'custom')]}" SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', 1), ''][d.getVar('SDK_OS', 1) == ('' or 'custom')]}"
SDK_PREFIX = "${SDK_SYS}-" SDK_PREFIX = "${SDK_SYS}-"
SDK_CC_ARCH = "${BUILD_CC_ARCH}" SDK_CC_ARCH = "${BUILD_CC_ARCH}"
SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk" SDK_PACKAGE_ARCHS = "all any noarch ${SDK_ARCH}-nativesdk"
@ -116,7 +116,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}"
SDK_AS_ARCH = "${BUILD_AS_ARCH}" SDK_AS_ARCH = "${BUILD_AS_ARCH}"
PACKAGE_ARCH = "${TUNE_PKGARCH}" PACKAGE_ARCH = "${TUNE_PKGARCH}"
MACHINE_ARCH = "${@[bb.data.getVar('TUNE_PKGARCH', d, 1), bb.data.getVar('MACHINE', d, 1)][bool(bb.data.getVar('MACHINE', d, 1))].replace('-', '_')}" MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', 1), d.getVar('MACHINE', 1)][bool(d.getVar('MACHINE', 1))].replace('-', '_')}"
PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}"
PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}"
# MACHINE_ARCH shouldn't be included here as a variable dependency # MACHINE_ARCH shouldn't be included here as a variable dependency
@ -167,33 +167,33 @@ ASSUME_PROVIDED = "\
# Package default variables. # Package default variables.
################################################################## ##################################################################
PN = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[0] or 'defaultpkgname'}" PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[1] or '1.0'}" PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
PR = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[2] or 'r0'}" PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[2] or 'r0'}"
PF = "${PN}-${EXTENDPE}${PV}-${PR}" PF = "${PN}-${EXTENDPE}${PV}-${PR}"
EXTENDPE = "${@['','${PE\x7d_'][bb.data.getVar('PE',d,1) > 0]}" EXTENDPE = "${@['','${PE\x7d_'][d.getVar('PE',1) > 0]}"
P = "${PN}-${PV}" P = "${PN}-${PV}"
EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][bb.data.getVar('PRAUTO',d,1) is None]}" EXTENDPRAUTO = "${@['.${PRAUTO\x7d',''][d.getVar('PRAUTO',1) is None]}"
PRAUTOINX = "${PF}" PRAUTOINX = "${PF}"
PKGV ?= "${PV}" PKGV ?= "${PV}"
PKGR ?= "${PR}${EXTENDPRAUTO}" PKGR ?= "${PR}${EXTENDPRAUTO}"
PKGE ?= "${@['','${PE\x7d'][bb.data.getVar('PE',d,1) > 0]}" PKGE ?= "${@['','${PE\x7d'][d.getVar('PE',1) > 0]}"
EXTENDPKGEVER = "${@['','${PKGE\x7d:'][bb.data.getVar('PKGE',d,1).strip() != '']}" EXTENDPKGEVER = "${@['','${PKGE\x7d:'][d.getVar('PKGE',1).strip() != '']}"
EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}" EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}"
# Base package name # Base package name
# Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial" # Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial"
# otherwise it is the same as PN and P # otherwise it is the same as PN and P
SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -nativesdk -crosssdk -cross-canadian" SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -nativesdk -crosssdk -cross-canadian"
BPN = "${@base_prune_suffix(bb.data.getVar('PN', d, True), bb.data.getVar('SPECIAL_PKGSUFFIX', d, True).split(), d)}" BPN = "${@base_prune_suffix(d.getVar('PN', True), d.getVar('SPECIAL_PKGSUFFIX', True).split(), d)}"
BP = "${BPN}-${PV}" BP = "${BPN}-${PV}"
# #
# network based PR service # network based PR service
# #
USE_PR_SERV = "${@[1,0][(bb.data.getVar('PRSERV_HOST',d,1) is None) or (bb.data.getVar('PRSERV_PORT',d,1) is None)]}" USE_PR_SERV = "${@[1,0][(d.getVar('PRSERV_HOST',1) is None) or (d.getVar('PRSERV_PORT',1) is None)]}"
# Package info. # Package info.
@ -288,7 +288,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir
DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug"
FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][bb.data.getVar('PACKAGE_DEBUG_SPLIT_STYLE', d, 1) == 'debug-file-directory'], d, 1)}" FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], d, 1)}"
SECTION_${PN}-dbg = "devel" SECTION_${PN}-dbg = "devel"
ALLOW_EMPTY_${PN}-dbg = "1" ALLOW_EMPTY_${PN}-dbg = "1"
@ -298,17 +298,17 @@ FILES_${PN}-locale = "${datadir}/locale"
# File manifest # File manifest
FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}" FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE'))}"
# FILESPATH is set in base.bbclass # FILESPATH is set in base.bbclass
#FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}" #FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}" FILESDIR = "${@bb.which(d.getVar('FILESPATH', 1), '.')}"
################################################################## ##################################################################
# General work and output directories for the build system. # General work and output directories for the build system.
################################################################## ##################################################################
TMPDIR ?= "${TOPDIR}/tmp" TMPDIR ?= "${TOPDIR}/tmp"
CACHE = "${TMPDIR}/cache${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}" CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}"
# The persistent cache should be shared by all builds # The persistent cache should be shared by all builds
PERSISTENT_DIR = "${TMPDIR}/cache" PERSISTENT_DIR = "${TMPDIR}/cache"
LOG_DIR = "${TMPDIR}/log" LOG_DIR = "${TMPDIR}/log"
@ -403,7 +403,7 @@ export PATH
# Build utility info. # Build utility info.
################################################################## ##################################################################
CCACHE = "${@bb.which(bb.data.getVar('PATH', d, 1), 'ccache') and 'ccache '}" CCACHE = "${@bb.which(d.getVar('PATH', 1), 'ccache') and 'ccache '}"
TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}" TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TARGET}"
export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" export CCACHE_DIR = "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
@ -505,7 +505,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types"
# Disabled until the option works properly -feliminate-dwarf2-dups # Disabled until the option works properly -feliminate-dwarf2-dups
FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][bb.data.getVar('DEBUG_BUILD', d, 1) == '1'], d, 1)}" SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], d, 1)}"
BUILD_OPTIMIZATION = "-O2 -pipe" BUILD_OPTIMIZATION = "-O2 -pipe"
################################################################## ##################################################################
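
The same substitution applies inside inline ${@...} expressions, as in the optimization selector above. The outer lookup there stays in the indirect bb.data.getVar form, presumably because its first argument is a computed expression rather than a simple name; a hand-converted equivalent would be (sketch only, not part of this commit):

    SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], 1)}"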

View File

@ -13,7 +13,7 @@ require conf/distro/include/tclibc-${TCLIBC}.inc
TCLIBCAPPEND ?= "-${TCLIBC}" TCLIBCAPPEND ?= "-${TCLIBC}"
TMPDIR .= "${TCLIBCAPPEND}" TMPDIR .= "${TCLIBCAPPEND}"
CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(bb.data.getVar('MACHINE', d, 1))][bool(bb.data.getVar('MACHINE', d, 1))]}${@['', '/' + str(bb.data.getVar('SDKMACHINE', d, 1))][bool(bb.data.getVar('SDKMACHINE', d, 1))]}" CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', 1))][bool(d.getVar('MACHINE', 1))]}${@['', '/' + str(d.getVar('SDKMACHINE', 1))][bool(d.getVar('SDKMACHINE', 1))]}"
USER_CLASSES ?= "" USER_CLASSES ?= ""
PACKAGE_CLASSES ?= "package_ipk" PACKAGE_CLASSES ?= "package_ipk"

View File

@ -5,7 +5,7 @@
# but requires more instructions (140% for 70% smaller code) so may be # but requires more instructions (140% for 70% smaller code) so may be
# slower. # slower.
TUNEVALID[thumb] = "Use thumb instructions instead of ARM" TUNEVALID[thumb] = "Use thumb instructions instead of ARM"
ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}" TUNE_CCARGS += "${@bb.utils.contains("TUNE_FEATURES", "thumb", "${ARM_THUMB_M_OPT}", "", d)}"
OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}" OVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "thumb", ":thumb", "", d)}"

View File

@ -16,15 +16,15 @@ THUMB_INTERWORK ?= "yes"
# arm system and vice versa. It is strongly recommended that DISTROs not # arm system and vice versa. It is strongly recommended that DISTROs not
# turn this off - the actual cost is very small. # turn this off - the actual cost is very small.
OVERRIDE_THUMB = "${@['', ':thumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" OVERRIDE_THUMB = "${@['', ':thumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}" OVERRIDE_INTERWORK = "${@['', ':thumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}"
OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}" OVERRIDES .= "${OVERRIDE_THUMB}${OVERRIDE_INTERWORK}"
# Compiler and linker options for application code and kernel code. These # Compiler and linker options for application code and kernel code. These
# options ensure that the compiler has the correct settings for the selected # options ensure that the compiler has the correct settings for the selected
# instruction set and interworking. # instruction set and interworking.
ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][bb.data.getVar('THUMB_INTERWORK', d, 1) == 'yes']}" ARM_INTERWORK_M_OPT = "${@['-mno-thumb-interwork', '-mthumb-interwork'][d.getVar('THUMB_INTERWORK', 1) == 'yes']}"
ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][bb.data.getVar('ARM_INSTRUCTION_SET', d, 1) == 'thumb']}" ARM_THUMB_M_OPT = "${@['-mno-thumb', '-mthumb'][d.getVar('ARM_INSTRUCTION_SET', 1) == 'thumb']}"
# #
TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}" TUNE_CCARGS += "${ARM_INTERWORK_M_OPT} ${ARM_THUMB_M_OPT}"

View File

@ -276,32 +276,32 @@ def compare_in_distro_packages_list(distro_check_dir, d):
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
pkglst_dir = os.path.join(distro_check_dir, "package_lists") pkglst_dir = os.path.join(distro_check_dir, "package_lists")
matching_distros = [] matching_distros = []
pn = bb.data.getVar('PN', d, True) pn = d.getVar('PN', True)
recipe_name = bb.data.getVar('PN', d, True) recipe_name = d.getVar('PN', True)
bb.note("Checking: %s" % pn) bb.note("Checking: %s" % pn)
trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"}) trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"})
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + bb.data.getVar('OVERRIDES', d, True), localdata) bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]
bb.note("Recipe: %s" % recipe_name) bb.note("Recipe: %s" % recipe_name)
tmp = bb.data.getVar('DISTRO_PN_ALIAS', localdata, True) tmp = localdata.getVar('DISTRO_PN_ALIAS', True)
distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})
@ -343,23 +343,23 @@ def compare_in_distro_packages_list(distro_check_dir, d):
return matching_distros return matching_distros
def create_log_file(d, logname): def create_log_file(d, logname):
logpath = bb.data.getVar('LOG_DIR', d, True) logpath = d.getVar('LOG_DIR', True)
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
logfn, logsuffix = os.path.splitext(logname) logfn, logsuffix = os.path.splitext(logname)
logfile = os.path.join(logpath, "%s.%s%s" % (logfn, bb.data.getVar('DATETIME', d, True), logsuffix)) logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
if not os.path.exists(logfile): if not os.path.exists(logfile):
slogfile = os.path.join(logpath, logname) slogfile = os.path.join(logpath, logname)
if os.path.exists(slogfile): if os.path.exists(slogfile):
os.remove(slogfile) os.remove(slogfile)
os.system("touch %s" % logfile) os.system("touch %s" % logfile)
os.symlink(logfile, slogfile) os.symlink(logfile, slogfile)
bb.data.setVar('LOG_FILE', logfile, d) d.setVar('LOG_FILE', logfile)
return logfile return logfile
def save_distro_check_result(result, datetime, result_file, d): def save_distro_check_result(result, datetime, result_file, d):
pn = bb.data.getVar('PN', d, True) pn = d.getVar('PN', True)
logdir = bb.data.getVar('LOG_DIR', d, True) logdir = d.getVar('LOG_DIR', True)
if not logdir: if not logdir:
bb.error("LOG_DIR variable is not defined, can't write the distro_check results") bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
return return

View File

@ -179,7 +179,7 @@ class GitApplyTree(PatchTree):
class QuiltTree(PatchSet): class QuiltTree(PatchSet):
def _runcmd(self, args, run = True): def _runcmd(self, args, run = True):
quiltrc = bb.data.getVar('QUILTRCFILE', self.d, 1) quiltrc = self.d.getVar('QUILTRCFILE', 1)
if not run: if not run:
return ["quilt"] + ["--quiltrc"] + [quiltrc] + args return ["quilt"] + ["--quiltrc"] + [quiltrc] + args
runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir)
@ -357,7 +357,7 @@ class UserResolver(Resolver):
# Patch application failed # Patch application failed
patchcmd = self.patchset.Push(True, False, False) patchcmd = self.patchset.Push(True, False, False)
t = bb.data.getVar('T', self.patchset.d, 1) t = self.patchset.d.getVar('T', 1)
if not t: if not t:
bb.msg.fatal("Build", "T not set") bb.msg.fatal("Build", "T not set")
bb.utils.mkdirhier(t) bb.utils.mkdirhier(t)

View File

@ -16,19 +16,19 @@ def ifelse(condition, iftrue = True, iffalse = False):
return iffalse return iffalse
def conditional(variable, checkvalue, truevalue, falsevalue, d): def conditional(variable, checkvalue, truevalue, falsevalue, d):
if bb.data.getVar(variable,d,1) == checkvalue: if d.getVar(variable,1) == checkvalue:
return truevalue return truevalue
else: else:
return falsevalue return falsevalue
def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
if float(bb.data.getVar(variable,d,1)) <= float(checkvalue): if float(d.getVar(variable,1)) <= float(checkvalue):
return truevalue return truevalue
else: else:
return falsevalue return falsevalue
def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue) result = bb.vercmp(d.getVar(variable,True), checkvalue)
if result <= 0: if result <= 0:
return truevalue return truevalue
else: else:
@ -48,7 +48,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
return falsevalue return falsevalue
def both_contain(variable1, variable2, checkvalue, d): def both_contain(variable1, variable2, checkvalue, d):
if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1: if d.getVar(variable1,1).find(checkvalue) != -1 and d.getVar(variable2,1).find(checkvalue) != -1:
return checkvalue return checkvalue
else: else:
return "" return ""
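
These helpers are normally called from inline expressions in recipes and configuration files. A usage sketch, with illustrative variable names:

    # Pick compiler flags depending on DEBUG_BUILD via the converted conditional():
    FOO_CFLAGS = "${@conditional('DEBUG_BUILD', '1', '-O0 -g', '-O2', d)}"
    # both_contain() returns the checkvalue when both variables contain it:
    THUMB_TAG = "${@both_contain('TUNE_FEATURES', 'OVERRIDES', 'thumb', d)}"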

View File

@ -23,7 +23,7 @@ inherit autotools
python __anonymous () { python __anonymous () {
import re import re
host = bb.data.getVar('HOST_SYS', d, 1) host = d.getVar('HOST_SYS', 1)
if not re.match('i.86.*-linux', host): if not re.match('i.86.*-linux', host):
raise bb.parse.SkipPackage("incompatible with host %s" % host) raise bb.parse.SkipPackage("incompatible with host %s" % host)
} }

View File

@ -33,7 +33,7 @@ do_configure() {
python __anonymous () { python __anonymous () {
import re import re
host = bb.data.getVar('HOST_SYS', d, 1) host = d.getVar('HOST_SYS', 1)
if not re.match('x86.64.*-linux', host) and not re.match('i.86.*-linux', host): if not re.match('x86.64.*-linux', host) and not re.match('i.86.*-linux', host):
raise bb.parse.SkipPackage("incompatible with host %s" % host) raise bb.parse.SkipPackage("incompatible with host %s" % host)
} }

View File

@ -10,7 +10,7 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \
file://README;beginline=1;endline=22;md5=3a00ef51d3fc96e9d6c1bc4708ccd3b5" file://README;beginline=1;endline=22;md5=3a00ef51d3fc96e9d6c1bc4708ccd3b5"
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}"
# This revision corresponds to the tag "v2011.03" # This revision corresponds to the tag "v2011.03"
# We use the revision in order to avoid having to fetch it from the repo during parse # We use the revision in order to avoid having to fetch it from the repo during parse

View File

@ -10,7 +10,7 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \ LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \
file://README;beginline=1;endline=22;md5=5ba4218ac89af7846802d0348df3fb90" file://README;beginline=1;endline=22;md5=5ba4218ac89af7846802d0348df3fb90"
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/u-boot-git/${MACHINE}" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/u-boot-git/${MACHINE}"
# This revision corresponds to the tag "v2011.06" # This revision corresponds to the tag "v2011.06"
# We use the revision in order to avoid having to fetch it from the repo during parse # We use the revision in order to avoid having to fetch it from the repo during parse

View File

@ -1,6 +1,6 @@
require x-load.inc require x-load.inc
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/x-load-git/${MACHINE}" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/x-load-git/${MACHINE}"
LICENSE = "GPLv2+" LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://README;beginline=1;endline=25;md5=ef08d08cb99057bbb5b9d6d0c5a4396f" LIC_FILES_CHKSUM = "file://README;beginline=1;endline=25;md5=ef08d08cb99057bbb5b9d6d0c5a4396f"

View File

@ -64,5 +64,5 @@ python populate_packages_prepend() {
plugintype = package.split( '-' )[-1] plugintype = package.split( '-' )[-1]
if plugintype in depmap: if plugintype in depmap:
bb.note( "Adding rdependency on %s to package %s" % ( depmap[plugintype], package ) ) bb.note( "Adding rdependency on %s to package %s" % ( depmap[plugintype], package ) )
bb.data.setVar("RDEPENDS_%s" % package, depmap[plugintype], d) d.setVar("RDEPENDS_%s" % package, depmap[plugintype])
} }

View File

@ -49,8 +49,8 @@ def busybox_cfg(feature, features, tokens, cnf, rem):
# Map distro and machine features to config settings # Map distro and machine features to config settings
def features_to_busybox_settings(d): def features_to_busybox_settings(d):
cnf, rem = ([], []) cnf, rem = ([], [])
distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split() distro_features = d.getVar('DISTRO_FEATURES', True).split()
machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split() machine_features = d.getVar('MACHINE_FEATURES', True).split()
busybox_cfg('ipv6', distro_features, 'CONFIG_FEATURE_IPV6', cnf, rem) busybox_cfg('ipv6', distro_features, 'CONFIG_FEATURE_IPV6', cnf, rem)
busybox_cfg('largefile', distro_features, 'CONFIG_LFS', cnf, rem) busybox_cfg('largefile', distro_features, 'CONFIG_LFS', cnf, rem)
busybox_cfg('largefile', distro_features, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem) busybox_cfg('largefile', distro_features, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem)
@ -79,7 +79,7 @@ DO_IPv6 := ${@base_contains('DISTRO_FEATURES', 'ipv6', 1, 0, d)}
python () { python () {
if "${OE_DEL}": if "${OE_DEL}":
bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d) d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}": if "${OE_FEATURES}":
bb.data.setVar('configmangle_append', bb.data.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" % "/^### DISTRO FEATURES$/a\\\n%s\n\n" %

View File

@ -83,7 +83,7 @@ def distro_features_check_deps(distro_features):
# Map distro features to eglibc options settings # Map distro features to eglibc options settings
def features_to_eglibc_settings(d): def features_to_eglibc_settings(d):
cnf = ([]) cnf = ([])
distro_features = (bb.data.getVar('DISTRO_FEATURES', d, True) or '').split() distro_features = (d.getVar('DISTRO_FEATURES', True) or '').split()
distro_features_check_deps(distro_features) distro_features_check_deps(distro_features)
@ -128,8 +128,8 @@ def features_to_eglibc_settings(d):
# try to fix the compile failure when charsets/locales/locale-code are disabled # try to fix the compile failure when charsets/locales/locale-code are disabled
if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features: if 'libc-charsets' in distro_features and 'libc-locales' in distro_features and 'libc-locale-code' in distro_features:
bb.data.setVar('PACKAGE_NO_GCONV', '0', d) d.setVar('PACKAGE_NO_GCONV', '0')
else: else:
bb.data.setVar('PACKAGE_NO_GCONV', '1', d) d.setVar('PACKAGE_NO_GCONV', '1')
return "\n".join(cnf) return "\n".join(cnf)

View File

@ -8,10 +8,10 @@
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc*', bb.data.getVar('TARGET_OS', d, 1)) != None) uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', 1)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
bb.data.getVar('TARGET_OS', d, 1)) d.getVar('TARGET_OS', 1))
} }
# Set this to zero if you don't want ldconfig in the output package # Set this to zero if you don't want ldconfig in the output package

View File

@ -53,10 +53,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None) uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
bb.data.getVar('TARGET_OS', d, 1)) d.getVar('TARGET_OS', 1))
} }
export libc_cv_slibdir = "${base_libdir}" export libc_cv_slibdir = "${base_libdir}"

View File

@ -54,10 +54,10 @@ FILESPATH = "${@base_set_filespath([ '${FILE_DIRNAME}/eglibc-${PV}', '${FILE_DIR
python __anonymous () { python __anonymous () {
import bb, re import bb, re
uc_os = (re.match('.*uclibc$', bb.data.getVar('TARGET_OS', d, 1)) != None) uc_os = (re.match('.*uclibc$', d.getVar('TARGET_OS', 1)) != None)
if uc_os: if uc_os:
raise bb.parse.SkipPackage("incompatible with target %s" % raise bb.parse.SkipPackage("incompatible with target %s" %
bb.data.getVar('TARGET_OS', d, 1)) d.getVar('TARGET_OS', 1))
} }
export libc_cv_slibdir = "${base_libdir}" export libc_cv_slibdir = "${base_libdir}"

View File

@ -7,7 +7,7 @@ DEPENDS += "libffi python-argparse-native"
DEPENDS_virtclass-native += "libffi-native python-argparse-native" DEPENDS_virtclass-native += "libffi-native python-argparse-native"
DEPENDS_virtclass-nativesdk += "libffi-nativesdk python-argparse-native zlib-nativesdk" DEPENDS_virtclass-nativesdk += "libffi-nativesdk python-argparse-native zlib-nativesdk"
SHRT_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
QSORT_PATCH = "file://remove.test.for.qsort_r.patch" QSORT_PATCH = "file://remove.test.for.qsort_r.patch"
QSORT_PATCH_virtclass-native = "" QSORT_PATCH_virtclass-native = ""

View File

@ -33,8 +33,8 @@ export LDFLAGS += "-ldl"
python populate_packages_prepend () { python populate_packages_prepend () {
# autonamer would call this libxml2-2, but we don't want that # autonamer would call this libxml2-2, but we don't want that
if bb.data.getVar('DEBIAN_NAMES', d, 1): if d.getVar('DEBIAN_NAMES', 1):
bb.data.setVar('PKG_libxml2', '${MLPREFIX}libxml2', d) d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
} }
PACKAGES += "${PN}-utils" PACKAGES += "${PN}-utils"

View File

@ -126,17 +126,17 @@ python __anonymous () {
import bb import bb
distro_features = set(bb.data.getVar("DISTRO_FEATURES", d, 1).split()) distro_features = set(d.getVar("DISTRO_FEATURES", 1).split())
machine_features= set(bb.data.getVar("MACHINE_FEATURES", d, 1).split()) machine_features= set(d.getVar("MACHINE_FEATURES", 1).split())
if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
bb.data.setVar("ADD_BT", "task-base-bluetooth", d) d.setVar("ADD_BT", "task-base-bluetooth")
if "wifi" in distro_features and not "wifi" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): if "wifi" in distro_features and not "wifi" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
bb.data.setVar("ADD_WIFI", "task-base-wifi", d) d.setVar("ADD_WIFI", "task-base-wifi")
if "3g" in distro_features and not "3g" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): if "3g" in distro_features and not "3g" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features):
bb.data.setVar("ADD_3G", "task-base-3g", d) d.setVar("ADD_3G", "task-base-3g")
} }
# #

View File

@ -50,7 +50,7 @@ RDEPENDS_task-core-sdk = "\
#python generate_sdk_pkgs () { #python generate_sdk_pkgs () {
# poky_pkgs = read_pkgdata('task-core', d)['PACKAGES'] # poky_pkgs = read_pkgdata('task-core', d)['PACKAGES']
# pkgs = bb.data.getVar('PACKAGES', d, 1).split() # pkgs = d.getVar('PACKAGES', 1).split()
# for pkg in poky_pkgs.split(): # for pkg in poky_pkgs.split():
# newpkg = pkg.replace('task-core', 'task-core-sdk') # newpkg = pkg.replace('task-core', 'task-core-sdk')
# #
@ -79,9 +79,9 @@ RDEPENDS_task-core-sdk = "\
# if packaged('%s-dev' % name, d): # if packaged('%s-dev' % name, d):
# rreclist.append('%s-dev' % name) # rreclist.append('%s-dev' % name)
# #
# oldrrec = bb.data.getVar('RRECOMMENDS_%s' % newpkg, d) or '' # oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or ''
# bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d) # bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d)
# # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, bb.data.getVar('RRECOMMENDS_%s' % newpkg, d))) # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg)))
# #
# # bb.note('pkgs is %s' % pkgs) # # bb.note('pkgs is %s' % pkgs)
# bb.data.setVar('PACKAGES', ' '.join(pkgs), d) # bb.data.setVar('PACKAGES', ' '.join(pkgs), d)

View File

@ -35,7 +35,7 @@ def map_uclibc_arch(a, d):
"""Return the uClibc architecture for the given TARGET_ARCH.""" """Return the uClibc architecture for the given TARGET_ARCH."""
import re import re
valid_archs = bb.data.getVar('valid_archs', d, 1).split() valid_archs = d.getVar('valid_archs', 1).split()
if re.match('^(arm|sa110).*', a): return 'arm' if re.match('^(arm|sa110).*', a): return 'arm'
elif re.match('^(i.86|athlon)$', a): return 'i386' elif re.match('^(i.86|athlon)$', a): return 'i386'
@ -50,14 +50,14 @@ def map_uclibc_arch(a, d):
else: else:
bb.error("cannot map '%s' to a uClibc architecture" % a) bb.error("cannot map '%s' to a uClibc architecture" % a)
export UCLIBC_ARCH = "${@map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}" export UCLIBC_ARCH = "${@map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d)}"
def map_uclibc_abi(o, d): def map_uclibc_abi(o, d):
"""Return the uClibc ABI for the given TARGET_OS.""" """Return the uClibc ABI for the given TARGET_OS."""
import re import re
arch = bb.data.getVar('TARGET_ARCH', d, 1) arch = d.getVar('TARGET_ARCH', 1)
if map_uclibc_arch(bb.data.getVar('TARGET_ARCH', d, 1), d) == "arm": if map_uclibc_arch(d.getVar('TARGET_ARCH', 1), d) == "arm":
if re.match('.*eabi$', o): return 'ARM_EABI' if re.match('.*eabi$', o): return 'ARM_EABI'
else: return 'ARM_OABI' else: return 'ARM_OABI'
# FIXME: This is inaccurate! Handle o32, n32, n64 # FIXME: This is inaccurate! Handle o32, n32, n64
@ -65,7 +65,7 @@ def map_uclibc_abi(o, d):
elif re.match('^mips.*', arch): return 'MIPS_O32_ABI' elif re.match('^mips.*', arch): return 'MIPS_O32_ABI'
return "" return ""
export UCLIBC_ABI = "${@map_uclibc_abi(bb.data.getVar('TARGET_OS', d, 1), d)}" export UCLIBC_ABI = "${@map_uclibc_abi(d.getVar('TARGET_OS', 1), d)}"
def map_uclibc_endian(a, d): def map_uclibc_endian(a, d):
"""Return the uClibc endianess for the given TARGET_ARCH.""" """Return the uClibc endianess for the given TARGET_ARCH."""
@ -79,7 +79,7 @@ def map_uclibc_endian(a, d):
return 'BIG' return 'BIG'
return 'LITTLE' return 'LITTLE'
export UCLIBC_ENDIAN = "${@map_uclibc_endian(bb.data.getVar('TARGET_ARCH', d, 1), d)}" export UCLIBC_ENDIAN = "${@map_uclibc_endian(d.getVar('TARGET_ARCH', 1), d)}"
# internal helper # internal helper
def uclibc_cfg(feature, features, tokens, cnf, rem): def uclibc_cfg(feature, features, tokens, cnf, rem):
@ -94,8 +94,8 @@ def uclibc_cfg(feature, features, tokens, cnf, rem):
# Map distro and machine features to config settings # Map distro and machine features to config settings
def features_to_uclibc_settings(d): def features_to_uclibc_settings(d):
cnf, rem = ([], []) cnf, rem = ([], [])
distro_features = bb.data.getVar('DISTRO_FEATURES', d, True).split() distro_features = d.getVar('DISTRO_FEATURES', True).split()
machine_features = bb.data.getVar('MACHINE_FEATURES', d, True).split() machine_features = d.getVar('MACHINE_FEATURES', True).split()
uclibc_cfg('ipv4', distro_features, 'UCLIBC_HAS_IPV4', cnf, rem) uclibc_cfg('ipv4', distro_features, 'UCLIBC_HAS_IPV4', cnf, rem)
uclibc_cfg('ipv6', distro_features, 'UCLIBC_HAS_IPV6', cnf, rem) uclibc_cfg('ipv6', distro_features, 'UCLIBC_HAS_IPV6', cnf, rem)
uclibc_cfg('largefile', distro_features, 'UCLIBC_HAS_LFS', cnf, rem) uclibc_cfg('largefile', distro_features, 'UCLIBC_HAS_LFS', cnf, rem)

View File

@ -125,9 +125,9 @@ configmangle = '/^KERNEL_HEADERS/d; \
/^SHARED_LIB_LOADER_PREFIX/d; \ /^SHARED_LIB_LOADER_PREFIX/d; \
/^UCLIBC_EXTRA_CFLAGS/d; \ /^UCLIBC_EXTRA_CFLAGS/d; \
s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \ s,.*UCLIBC_HAS_WCHAR.*,UCLIBC_HAS_WCHAR=y,g; \
${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][bb.data.getVar("ARM_INSTRUCTION_SET", d, 1) != "arm"]} \ ${@["","s,.*COMPILE_IN_THUMB_MODE.*,COMPILE_IN_THUMB_MODE=y,;"][d.getVar("ARM_INSTRUCTION_SET", 1) != "arm"]} \
${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][bb.data.getVar("USE_NLS", d, 1) == "yes"]} \ ${@["","s,.*UCLIBC_HAS_LOCALE.*,UCLIBC_HAS_LOCALE=y,;"][d.getVar("USE_NLS", 1) == "yes"]} \
${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][bb.data.getVar("TARGET_ARCH", d, 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \ ${@["","s,.*LDSO_GNU_HASH_SUPPORT.*,# LDSO_GNU_HASH_SUPPORT is not set,;"][d.getVar("TARGET_ARCH", 1) in ['mips', 'mipsel', 'mips64', 'mips64el', 'avr32']]} \
/^CROSS/d; \ /^CROSS/d; \
/^TARGET_ARCH=/d; \ /^TARGET_ARCH=/d; \
/^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \ /^TARGET_/s,^\([^=]*\).*,# \1 is not set,g; \
@ -139,7 +139,7 @@ OE_FEATURES := "${@features_to_uclibc_conf(d)}"
OE_DEL := "${@features_to_uclibc_del(d)}" OE_DEL := "${@features_to_uclibc_del(d)}"
python () { python () {
if "${OE_DEL}": if "${OE_DEL}":
bb.data.setVar('configmangle_append', "${OE_DEL}" + "\n", d) d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}": if "${OE_FEATURES}":
bb.data.setVar('configmangle_append', bb.data.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" % "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
@ -161,7 +161,7 @@ python () {
("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"), ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"),
d) d)
bb.data.setVar('configmangle_append', bb.data.setVar('configmangle_append',
"/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]]), d) "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d)
if "${UCLIBC_ENDIAN}": if "${UCLIBC_ENDIAN}":
bb.data.setVar('configmangle_append', bb.data.setVar('configmangle_append',
"/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"), "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"),

View File

@ -13,14 +13,14 @@ python do_install () {
} }
python do_install_config () { python do_install_config () {
indir = os.path.dirname(bb.data.getVar('FILE',d,1)) indir = os.path.dirname(d.getVar('FILE',1))
infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r') infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
data = infile.read() data = infile.read()
infile.close() infile.close()
data = bb.data.expand(data, d) data = bb.data.expand(data, d)
outdir = os.path.join(bb.data.getVar('D', d, 1), bb.data.getVar('sysconfdir', d, 1), 'apt') outdir = os.path.join(d.getVar('D', 1), d.getVar('sysconfdir', 1), 'apt')
if not os.path.exists(outdir): if not os.path.exists(outdir):
os.makedirs(outdir) os.makedirs(outdir)
outpath = os.path.join(outdir, 'apt.conf.sample') outpath = os.path.join(outdir, 'apt.conf.sample')

View File

@ -59,15 +59,15 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \
${localstatedir} ${sysconfdir} \ ${localstatedir} ${sysconfdir} \
${libdir}/dpkg" ${libdir}/dpkg"
FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates"
FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))} \ FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', 1))} \
${docdir}/apt" ${docdir}/apt"
FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))}" FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}"
FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}"
do_install () { do_install () {
set -x set -x
${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-manpages', d, 1))} ${@get_commands_apt_doc(d, bb, d.getVar('apt-manpages', 1))}
${@get_commands_apt_doc(d, bb, bb.data.getVar('apt-utils-manpages', d, 1))} ${@get_commands_apt_doc(d, bb, d.getVar('apt-utils-manpages', 1))}
install -d ${D}${bindir} install -d ${D}${bindir}
install -m 0755 bin/apt-cdrom ${D}${bindir}/ install -m 0755 bin/apt-cdrom ${D}${bindir}/
install -m 0755 bin/apt-get ${D}${bindir}/ install -m 0755 bin/apt-get ${D}${bindir}/

View File

@ -10,6 +10,6 @@ SRC_URI = "${GNU_MIRROR}/automake/automake-${PV}.tar.bz2 "
inherit autotools inherit autotools
export AUTOMAKE = "${@bb.which('automake', bb.data.getVar('PATH', d, 1))}" export AUTOMAKE = "${@bb.which('automake', d.getVar('PATH', 1))}"
FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*"

View File

@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://Copyright.txt;md5=f372516292ff7c33337bf16a74a5f9a8 \
INC_PR = "r1" INC_PR = "r1"
CMAKE_MAJOR_VERSION = "${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}" CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV',1).split('.')[0:2])}"
SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \
file://support-oe-qt4-tools-names.patch" file://support-oe-qt4-tools-names.patch"

View File

@ -13,8 +13,8 @@ SRC_URI[sha256sum] = "5e18bff75f01656c64f553412a8905527e1b85efaf3163c6fb81ea5aac
# Strip ${prefix} from ${docdir}, set result into docdir_stripped # Strip ${prefix} from ${docdir}, set result into docdir_stripped
python () { python () {
prefix=bb.data.getVar("prefix", d, 1) prefix=d.getVar("prefix", 1)
docdir=bb.data.getVar("docdir", d, 1) docdir=d.getVar("docdir", 1)
if not docdir.startswith(prefix): if not docdir.startswith(prefix):
raise bb.build.FuncFailed('docdir must contain prefix as its prefix') raise bb.build.FuncFailed('docdir must contain prefix as its prefix')
@ -23,7 +23,7 @@ python () {
if len(docdir_stripped) > 0 and docdir_stripped[0] == '/': if len(docdir_stripped) > 0 and docdir_stripped[0] == '/':
docdir_stripped = docdir_stripped[1:] docdir_stripped = docdir_stripped[1:]
bb.data.setVar("docdir_stripped", docdir_stripped, d) d.setVar("docdir_stripped", docdir_stripped)
} }
EXTRA_OECMAKE=" \ EXTRA_OECMAKE=" \

View File

@ -7,17 +7,17 @@ NATIVEDEPS = ""
inherit autotools gettext inherit autotools gettext
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/gcc-${PV}" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/gcc-${PV}"
def get_gcc_fpu_setting(bb, d): def get_gcc_fpu_setting(bb, d):
if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]: if d.getVar('TARGET_FPU', 1) in [ 'soft' ]:
return "--with-float=soft" return "--with-float=soft"
if bb.data.getVar('TARGET_FPU', d, 1) in [ 'ppc-efd' ]: if d.getVar('TARGET_FPU', 1) in [ 'ppc-efd' ]:
return "--enable-e500_double" return "--enable-e500_double"
return "" return ""
def get_gcc_mips_plt_setting(bb, d): def get_gcc_mips_plt_setting(bb, d):
if bb.data.getVar('TARGET_ARCH', d, 1) in [ 'mips', 'mipsel' ] and 'mplt' in bb.data.getVar('DISTRO_FEATURES',d,1).split() : if d.getVar('TARGET_ARCH', 1) in [ 'mips', 'mipsel' ] and 'mplt' in d.getVar('DISTRO_FEATURES',1).split() :
return "--with-mips-plt" return "--with-mips-plt"
return "" return ""
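
Helpers like these feed configure flags back into the recipe through inline expansion. A typical call site would look like the following; this is an assumption for illustration, the call site is not shown in this hunk:

    EXTRA_OECONF += "${@get_gcc_fpu_setting(bb, d)} ${@get_gcc_mips_plt_setting(bb, d)}"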

View File

@ -27,7 +27,7 @@ EXTRA_OECONF_INTERMEDIATE ?= ""
GCCMULTILIB = "--disable-multilib" GCCMULTILIB = "--disable-multilib"
EXTRA_OECONF = "${@['--enable-clocale=generic', ''][bb.data.getVar('USE_NLS', d, 1) != 'no']} \ EXTRA_OECONF = "${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', 1) != 'no']} \
--with-gnu-ld \ --with-gnu-ld \
--enable-shared \ --enable-shared \
--enable-languages=${LANGUAGES} \ --enable-languages=${LANGUAGES} \

View File

@ -8,7 +8,7 @@ DEPENDS_virtclass-native = "perl-native-runtime"
INHIBIT_DEFAULT_DEPS = "1" INHIBIT_DEFAULT_DEPS = "1"
FIXEDSRCDATE = "${@bb.data.getVar('FILE', d, 1).split('_')[-1].split('.')[0]}" FIXEDSRCDATE = "${@d.getVar('FILE', 1).split('_')[-1].split('.')[0]}"
PV = "0.1+cvs${FIXEDSRCDATE}" PV = "0.1+cvs${FIXEDSRCDATE}"
PR = "r4" PR = "r4"

View File

@ -2,7 +2,7 @@ DESCRIPTION = "Utility scripts for internationalizing XML"
SECTION = "devel" SECTION = "devel"
LICENSE = "GPLv2" LICENSE = "GPLv2"
URLV="${@'.'.join(bb.data.getVar('PV',d,1).split('.')[0:2])}" URLV="${@'.'.join(d.getVar('PV',1).split('.')[0:2])}"
SRC_URI = "${GNOME_MIRROR}/intltool/${URLV}/intltool-${PV}.tar.bz2" SRC_URI = "${GNOME_MIRROR}/intltool/${URLV}/intltool-${PV}.tar.bz2"
S = "${WORKDIR}/intltool-${PV}" S = "${WORKDIR}/intltool-${PV}"

View File

@ -12,7 +12,7 @@ DEPENDS_virtclass-nativesdk = "curl-nativesdk"
PE = "1" PE = "1"
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/opkg" FILESDIR = "${@os.path.dirname(d.getVar('FILE',1))}/opkg"
# Werror gives all kinds of bounds issues with gcc 4.3.3 # Werror gives all kinds of bounds issues with gcc 4.3.3
do_configure_prepend() { do_configure_prepend() {

View File

@ -285,7 +285,7 @@ FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore"
# packages (actually the non modules packages and not created too) # packages (actually the non modules packages and not created too)
ALLOW_EMPTY_perl-modules = "1" ALLOW_EMPTY_perl-modules = "1"
PACKAGES_append = " perl-modules " PACKAGES_append = " perl-modules "
RRECOMMENDS_perl-modules = "${@bb.data.getVar('PACKAGES', d, 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}" RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', 1).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
python populate_packages_prepend () { python populate_packages_prepend () {
libdir = bb.data.expand('${libdir}/perl/${PV}', d) libdir = bb.data.expand('${libdir}/perl/${PV}', d)

View File

@ -7,7 +7,7 @@ DEPENDS_virtclass-native = "glib-2.0-native"
RDEPENDS_virtclass-native = "" RDEPENDS_virtclass-native = ""
PR = "r3" PR = "r3"
MAJ_VER = "${@bb.data.getVar('PV',d,1).split('.')[0]}.${@bb.data.getVar('PV',d,1).split('.')[1]}" MAJ_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
SRC_URI = "${GNOME_MIRROR}/pygobject/${MAJ_VER}/pygobject-${PV}.tar.bz2" SRC_URI = "${GNOME_MIRROR}/pygobject/${MAJ_VER}/pygobject-${PV}.tar.bz2"

Some files were not shown because too many files have changed in this diff