Convert tab indentation in python functions into four-space

(From OE-Core rev: 604d46c686d06d62d5a07b9c7f4fa170f99307d8)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
commit bfd279de32 (parent 99203edda6)
Richard Purdie, 2012-07-11 17:33:43 +00:00
71 changed files with 3585 additions and 3587 deletions
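The commit message does not record how the re-indentation was performed, so purely as an illustration of the idea: converting a tab-indented Python block to four-space indentation amounts to rewriting each line's leading tabs while leaving the rest of the line untouched. A minimal sketch (the helper name and file name below are hypothetical, not part of OE-Core or of this commit):

# Sketch only: rewrite each leading tab as four spaces; everything after
# the indentation is left alone so string contents are not corrupted.
def expand_leading_tabs(line, spaces_per_tab=4):
    body = line.lstrip('\t')          # drop leading tabs
    tabs = len(line) - len(body)      # how many tabs were dropped
    return ' ' * (spaces_per_tab * tabs) + body

# Hypothetical usage over one of the converted class files:
with open('base.bbclass') as src:
    converted = [expand_leading_tabs(l) for l in src]

One visible side effect in the hunks below: literal tab characters embedded inside Python strings become explicit \t escapes (see the get_imagecmds hunk and the locale Makefile writer), so the strings still produce byte-identical output.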


@@ -1,23 +1,23 @@
def autotools_dep_prepend(d):
    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
        return ''

    pn = d.getVar('PN', True)
    deps = ''

    if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
        return deps
    deps += 'autoconf-native automake-native '

    if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"):
        deps += 'libtool-native '
        if not bb.data.inherits_class('native', d) \
                        and not bb.data.inherits_class('nativesdk', d) \
                        and not bb.data.inherits_class('cross', d) \
                        and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
            deps += 'libtool-cross '

    return deps + 'gnu-config-native '

EXTRA_OEMAKE = ""
@@ -35,15 +35,15 @@ EXTRA_AUTORECONF = "--exclude=autopoint"
export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}"

def autotools_set_crosscompiling(d):
    if not bb.data.inherits_class('native', d):
        return " cross_compiling=yes"
    return ""

def append_libtool_sysroot(d):
    # Only supply libtool sysroot option for non-native packages
    if not bb.data.inherits_class('native', d):
        return '--with-libtool-sysroot=${STAGING_DIR_HOST}'
    return ""

# EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"


@@ -33,7 +33,7 @@ def oe_import(d):
python oe_import_eh () {
    if isinstance(e, bb.event.ConfigParsed):
        oe_import(e.data)
}
addhandler oe_import_eh
@@ -50,21 +50,20 @@ oe_runmake() {
def base_dep_prepend(d):
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"
@@ -80,61 +79,61 @@ do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"

python base_do_fetch() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
do_unpack[cleandirs] = "${S}/patches"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    rootdir = localdata.getVar('WORKDIR', True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc"
GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig"

def generate_git_config(e):
    from bb import data

    if data.getVar('GIT_CORE_CONFIG', e.data, True):
        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
        proxy_command = " gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)

        bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
        if (os.path.exists(gitconfig_path)):
            os.remove(gitconfig_path)

        f = open(gitconfig_path, 'w')
        f.write("[core]\n")
        ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
        for ignore_host in ignore_hosts:
            f.write(" gitProxy = none for %s\n" % ignore_host)
        f.write(proxy_command)
        f.close

def pkgarch_mapping(d):
# Compatibility mappings of TUNE_PKGARCH (opt in)
@@ -205,69 +204,69 @@ def preferred_ml_updates(d):
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2= layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1= layers_branch_rev[i][p1:]
    return layers_branch_rev

BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
python base_eventhandler() {
    if isinstance(e, bb.event.ConfigParsed):
        e.data.setVar('BB_VERSION', bb.__version__)
        generate_git_config(e)
        pkgarch_mapping(e.data)
        preferred_ml_updates(e.data)

    if isinstance(e, bb.event.BuildStarted):
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](e.data)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
}
addtask configure after do_patch
@@ -546,18 +545,18 @@ python do_cleansstate() {
addtask cleanall after do_cleansstate
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.clean()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"


@@ -92,8 +92,8 @@ build_boot_dd() {
}

python do_bootdirectdisk() {
    bb.build.exec_func('build_syslinux_cfg', d)
    bb.build.exec_func('build_boot_dd', d)
}
addtask bootdirectdisk before do_build


@@ -42,15 +42,15 @@ EFI_CLASS = "${@base_contains("MACHINE_FEATURES", "efi", "grub-efi", "dummy", d)
# contain "efi". This way legacy is supported by default if neither is
# specified, maintaining the original behavior.
def pcbios(d):
    pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d)
    if pcbios == "0":
        pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d)
    return pcbios

def pcbios_class(d):
    if d.getVar("PCBIOS", True) == "1":
        return "syslinux"
    return "dummy"

PCBIOS = "${@pcbios(d)}"
PCBIOS_CLASS = "${@pcbios_class(d)}"
@@ -181,12 +181,12 @@ build_hddimg() {
}

python do_bootimg() {
    if d.getVar("PCBIOS", True) == "1":
        bb.build.exec_func('build_syslinux_cfg', d)
    if d.getVar("EFI", True) == "1":
        bb.build.exec_func('build_grub_cfg', d)
    bb.build.exec_func('build_hddimg', d)
    bb.build.exec_func('build_iso', d)
}
addtask bootimg before do_build


@@ -11,28 +11,28 @@ PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}"
# Determine the staged version of perl from the perl configuration file
def get_perl_version(d):
    import re
    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
    try:
        f = open(cfg, 'r')
    except IOError:
        return None
    l = f.readlines();
    f.close();
    r = re.compile("^version='(\d*\.\d*\.\d*)'")
    for s in l:
        m = r.match(s)
        if m:
            return m.group(1)
    return None

# Determine where the library directories are
def perl_get_libdirs(d):
    libdir = d.getVar('libdir', True)
    if is_target(d) == "no":
        libdir += '/perl-native'
    libdir += '/perl'
    return libdir

def is_target(d):
    if not bb.data.inherits_class('native', d):


@@ -20,105 +20,105 @@ python () {
}

python debian_package_name_hook () {
    import glob, copy, stat, errno, re

    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)
    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
    so_re = re.compile("lib.*\.so")

    def socrunch(s):
        s = s.lower().replace('_', '-')
        m = re.match("^(.*)(.)\.so\.(.*)$", s)
        if m is None:
            return None
        if m.group(2) in '0123456789':
            bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
        else:
            bin = m.group(1) + m.group(2) + m.group(3)
        dev = m.group(1) + m.group(2)
        return (bin, dev)

    def isexec(path):
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    def auto_libname(packages, orig_pkg):
        sonames = []
        has_bins = 0
        has_libs = 0
        pkg_dir = os.path.join(pkgdest, orig_pkg)
        for root, dirs, files in os.walk(pkg_dir):
            if bin_re.match(root) and files:
                has_bins = 1
            if lib_re.match(root) and files:
                has_libs = 1
                for f in files:
                    if so_re.match(f):
                        fp = os.path.join(root, f)
                        cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
                        fd = os.popen(cmd)
                        lines = fd.readlines()
                        fd.close()
                        for l in lines:
                            m = re.match("\s+SONAME\s+([^\s]*)", l)
                            if m and not m.group(1) in sonames:
                                sonames.append(m.group(1))

        bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
        soname = None
        if len(sonames) == 1:
            soname = sonames[0]
        elif len(sonames) > 1:
            lead = d.getVar('LEAD_SONAME', True)
            if lead:
                r = re.compile(lead)
                filtered = []
                for s in sonames:
                    if r.match(s):
                        filtered.append(s)
                if len(filtered) == 1:
                    soname = filtered[0]
                elif len(filtered) > 1:
                    bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
                else:
                    bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
            else:
                bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))

        if has_libs and not has_bins and soname:
            soname_result = socrunch(soname)
            if soname_result:
                (pkgname, devname) = soname_result
                for pkg in packages.split():
                    if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
                        continue
                    debian_pn = d.getVar('DEBIANNAME_' + pkg)
                    if debian_pn:
                        newpkg = debian_pn
                    elif pkg == orig_pkg:
                        newpkg = pkgname
                    else:
                        newpkg = pkg.replace(orig_pkg, devname, 1)
                    mlpre=d.getVar('MLPREFIX', True)
                    if mlpre:
                        if not newpkg.find(mlpre) == 0:
                            newpkg = mlpre + newpkg
                    if newpkg != pkg:
                        d.setVar('PKG_' + pkg, newpkg)

    # reversed sort is needed when some package is substring of another
    # ie in ncurses we get without reverse sort:
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
    # and later
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
    # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
        auto_libname(packages, pkg)
}
EXPORT_FUNCTIONS package_name_hook


@@ -39,33 +39,33 @@ done
}

python populate_packages_append () {
    import re
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)

    for pkg in packages:
        schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
        schemas = []
        schema_re = re.compile(".*\.schemas$")
        if os.path.exists(schema_dir):
            for f in os.listdir(schema_dir):
                if schema_re.match(f):
                    schemas.append(f)
        if schemas != []:
            bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
            d.setVar('SCHEMA_FILES', " ".join(schemas))
            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('gconf_postinst', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)
            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
            if not prerm:
                prerm = '#!/bin/sh\n'
            prerm += d.getVar('gconf_prerm', True)
            d.setVar('pkg_prerm_%s' % pkg, prerm)
            rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
            rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf'
            d.setVar("RDEPENDS_%s" % pkg, rdepends)
}


@@ -1,7 +1,7 @@
def gnome_verdir(v):
    import re
    m = re.match("^([0-9]+)\.([0-9]+)", v)
    return "%s.%s" % (m.group(1), m.group(2))

GNOME_COMPRESS_TYPE ?= "bz2"
SECTION ?= "x11/gnome"


@@ -28,31 +28,31 @@ done
}

python populate_packages_append () {
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)

    for pkg in packages:
        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
        if not os.path.exists(icon_dir):
            continue

        bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
        rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
        rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
        d.setVar('RDEPENDS_%s' % pkg, rdepends)

        bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += d.getVar('gtk_icon_cache_postinst', True)
        d.setVar('pkg_postinst_%s' % pkg, postinst)

        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += d.getVar('gtk_icon_cache_postrm', True)
        d.setVar('pkg_postrm_%s' % pkg, postrm)
}


@@ -164,28 +164,28 @@ do_rootfs[umask] = "022"
fakeroot do_rootfs () {
    #set -x
    # When using the rpm incremental image generation, don't remove the rootfs
    if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then
        rm -rf ${IMAGE_ROOTFS}
    elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then
        # Move the rpmlib back
        if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then
            mkdir -p ${IMAGE_ROOTFS}/var/lib/
            mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/
        fi
    fi

    rm -rf ${MULTILIB_TEMP_ROOTFS}
    mkdir -p ${IMAGE_ROOTFS}
    mkdir -p ${DEPLOY_DIR_IMAGE}
    cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOY_DIR_IMAGE}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt || true

    # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by
    # the previous build
    if [ "${USE_DEVFS}" != "1" -a ! -r "${IMAGE_ROOTFS}/dev" ]; then
        for devtable in ${@get_devtable_list(d)}; do
            # Always return true since there may already be one when using the
            # incremental image generation
            makedevs -r ${IMAGE_ROOTFS} -D $devtable
        done
    fi
@@ -398,7 +398,7 @@ rootfs_trim_schemas () {
        # Need this in case no files exist
        if [ -e $schema ]; then
            oe-trim-schemas $schema > $schema.new
            mv $schema.new $schema
        fi
    done
}


@@ -48,7 +48,7 @@ def get_imagecmds(d):
        types.remove("live")

    if d.getVar('IMAGE_LINK_NAME', True):
        cmds += "\trm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*"

    for type in types:
        ccmd = []

@@ -7,38 +7,38 @@
valid_archs = "alpha cris ia64 \
               i386 x86 \
               m68knommu m68k ppc powerpc powerpc64 ppc64 \
               sparc sparc64 \
               arm \
               m32r mips \
               sh sh64 um h8300 \
               parisc s390 v850 \
               avr32 blackfin \
               microblaze"

def map_kernel_arch(a, d):
    import re

    valid_archs = d.getVar('valid_archs', True).split()

    if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
    elif re.match('armeb$', a): return 'arm'
    elif re.match('mips(el|64|64el)$', a): return 'mips'
    elif re.match('p(pc|owerpc)(|64)', a): return 'powerpc'
    elif re.match('sh(3|4)$', a): return 'sh'
    elif re.match('bfin', a): return 'blackfin'
    elif re.match('microblazeel', a): return 'microblaze'
    elif a in valid_archs: return a
    else:
        bb.error("cannot map '%s' to a linux kernel architecture" % a)

export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"

def map_uboot_arch(a, d):
    import re

    if re.match('p(pc|owerpc)(|64)', a): return 'ppc'
    elif re.match('i.86$', a): return 'x86'
    return a

export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"


@@ -6,41 +6,41 @@ SRCTREECOVEREDTASKS += "do_kernel_link_vmlinux do_kernel_configme do_validate_br
# returns local (absolute) path names for all valid patches in the
# src_uri
def find_patches(d):
    patches = src_patches(d)
    patch_list=[]
    for p in patches:
        _, _, local, _, _, _ = bb.decodeurl(p)
        patch_list.append(local)

    return patch_list

# returns all the elements from the src uri that are .scc files
def find_sccs(d):
    sources=src_patches(d, True)
    sources_list=[]
    for s in sources:
        base, ext = os.path.splitext(os.path.basename(s))
        if ext and ext in ('.scc' '.cfg'):
            sources_list.append(s)
        elif base and base in 'defconfig':
            sources_list.append(s)

    return sources_list

# this is different from find_patches, in that it returns a colon separated
# list of <patches>:<subdir> instead of just a list of patches
def find_urls(d):
    patches=src_patches(d)
    fetch = bb.fetch2.Fetch([], d)
    patch_list=[]
    for p in patches:
        _, _, local, _, _, _ = bb.decodeurl(p)
        for url in fetch.urls:
            urldata = fetch.ud[url]
            if urldata.localpath == local:
                patch_list.append(local+':'+urldata.path)

    return patch_list

do_patch() {


@@ -310,177 +310,177 @@ module_conf_sco = "alias bt-proto-2 sco"
module_conf_rfcomm = "alias bt-proto-3 rfcomm"

python populate_packages_prepend () {
    def extract_modinfo(file):
        import tempfile, re, subprocess
        tempfile.tempdir = d.getVar("WORKDIR", True)
        tf = tempfile.mkstemp()
        tmpfile = tf[1]
        cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
        subprocess.call(cmd, shell=True)
        f = open(tmpfile)
        l = f.read().split("\000")
        f.close()
        os.close(tf[0])
        os.unlink(tmpfile)
        exp = re.compile("([^=]+)=(.*)")
        vals = {}
        for i in l:
            m = exp.match(i)
            if not m:
                continue
            vals[m.group(1)] = m.group(2)
        return vals

    def parse_depmod():
        import re

        dvar = d.getVar('PKGD', True)
        if not dvar:
            bb.error("PKGD not defined")
            return

        kernelver = d.getVar('KERNEL_VERSION', True)
        kernelver_stripped = kernelver
        m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
        if m:
            kernelver_stripped = m.group(1)
        path = d.getVar("PATH", True)

        cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped)
        f = os.popen(cmd, 'r')

        deps = {}
        pattern0 = "^(.*\.k?o):..*$"
        pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
        pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
        pattern3 = "^\t(.*\.k?o)\s*\\\$"
        pattern4 = "^\t(.*\.k?o)\s*$"

        line = f.readline()
        while line:
            if not re.match(pattern0, line):
                line = f.readline()
                continue
            m1 = re.match(pattern1, line)
            if m1:
                deps[m1.group(1)] = m1.group(2).split()
            else:
                m2 = re.match(pattern2, line)
                if m2:
                    deps[m2.group(1)] = m2.group(2).split()
                    line = f.readline()
                    m3 = re.match(pattern3, line)
                    while m3:
                        deps[m2.group(1)].extend(m3.group(1).split())
                        line = f.readline()
                        m3 = re.match(pattern3, line)
                    m4 = re.match(pattern4, line)
                    deps[m2.group(1)].extend(m4.group(1).split())
            line = f.readline()
        f.close()
        return deps

    def get_dependencies(file, pattern, format):
        # file no longer includes PKGD
        file = file.replace(d.getVar('PKGD', True) or '', '', 1)
        # instead is prefixed with /lib/modules/${KERNEL_VERSION}
        file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)

        if module_deps.has_key(file):
            import re
            dependencies = []
            for i in module_deps[file]:
                m = re.match(pattern, os.path.basename(i))
                if not m:
                    continue
                on = legitimize_package_name(m.group(1))
                dependency_pkg = format % on
                dependencies.append(dependency_pkg)
            return dependencies
        return []

    def frob_metadata(file, pkg, pattern, format, basename):
        import re
        vals = extract_modinfo(file)

        dvar = d.getVar('PKGD', True)

        # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
        # appropriate modprobe commands to the postinst
        autoload = d.getVar('module_autoload_%s' % basename, True)
        if autoload:
            name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename)
            f = open(name, 'w')
            for m in autoload.split():
                f.write('%s\n' % m)
            f.close()
            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
            if not postinst:
                bb.fatal("pkg_postinst_%s not defined" % pkg)
            postinst += d.getVar('autoload_postinst_fragment', True) % autoload
            d.setVar('pkg_postinst_%s' % pkg, postinst)

        # Write out any modconf fragment
        modconf = d.getVar('module_conf_%s' % basename, True)
        if modconf:
            name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
            f = open(name, 'w')
            f.write("%s\n" % modconf)
            f.close()

        files = d.getVar('FILES_%s' % pkg, True)
        files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
        d.setVar('FILES_%s' % pkg, files)

        if vals.has_key("description"):
            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
            d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])

        rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
        if rdepends_str:
            rdepends = rdepends_str.split()
        else:
            rdepends = []
        rdepends.extend(get_dependencies(file, pattern, format))
        d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))

    module_deps = parse_depmod()
    module_regex = '^(.*)\.k?o$'
    module_pattern = 'kernel-module-%s'

    postinst = d.getVar('pkg_postinst_modules', True)
    postrm = d.getVar('pkg_postrm_modules', True)
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))

    # If modules-load.d and modprobe.d are empty at this point, remove them to
    # avoid warnings. removedirs only raises an OSError if an empty
    # directory cannot be removed.
    dvar = d.getVar('PKGD', True)
    for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
        if len(os.listdir(dir)) == 0:
            os.rmdir(dir)

    import re
    metapkg = "kernel-modules"
    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
    d.setVar('FILES_' + metapkg, "")
    blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ]
    for l in module_deps.values():
        for i in l:
            pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
            blacklist.append(pkg)
    metapkg_rdepends = []
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages[1:]:
        if not pkg in blacklist and not pkg in metapkg_rdepends:
            metapkg_rdepends.append(pkg)
    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
    d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
    packages.append(metapkg)
    d.setVar('PACKAGES', ' '.join(packages))
}
# Support checking the kernel size since some kernels need to reside in partitions


@@ -23,13 +23,13 @@ def get_libc_fpu_setting(bb, d):
    return ""

python populate_packages_prepend () {
    if d.getVar('DEBIAN_NAMES', True):
        bpn = d.getVar('BPN', True)
        d.setVar('PKG_'+bpn, 'libc6')
        d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
        d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg')
        # For backward compatibility with old -dbg package
        d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg')
        d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg')
        d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg')
}


@@ -28,10 +28,10 @@ python __anonymous () {
        if r.match(target_arch):
            depends = d.getVar("DEPENDS", True)
            if use_cross_localedef == "1" :
                depends = "%s cross-localedef-native" % depends
            else:
                depends = "%s qemu-native" % depends
            d.setVar("DEPENDS", depends)
            d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
            break
@ -118,270 +118,270 @@ do_collect_bins_from_locale_tree() {
inherit qemu
python package_do_split_gconvs () {
import os, re
if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
bb.note("package requested not splitting gconvs")
return
import os, re
if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
bb.note("package requested not splitting gconvs")
return
if not d.getVar('PACKAGES', True):
return
if not d.getVar('PACKAGES', True):
return
mlprefix = d.getVar("MLPREFIX", True) or ""
mlprefix = d.getVar("MLPREFIX", True) or ""
bpn = d.getVar('BPN', True)
libdir = d.getVar('libdir', True)
if not libdir:
bb.error("libdir not defined")
return
datadir = d.getVar('datadir', True)
if not datadir:
bb.error("datadir not defined")
return
bpn = d.getVar('BPN', True)
libdir = d.getVar('libdir', True)
if not libdir:
bb.error("libdir not defined")
return
datadir = d.getVar('datadir', True)
if not datadir:
bb.error("datadir not defined")
return
gconv_libdir = base_path_join(libdir, "gconv")
charmap_dir = base_path_join(datadir, "i18n", "charmaps")
locales_dir = base_path_join(datadir, "i18n", "locales")
binary_locales_dir = base_path_join(libdir, "locale")
gconv_libdir = base_path_join(libdir, "gconv")
charmap_dir = base_path_join(datadir, "i18n", "charmaps")
locales_dir = base_path_join(datadir, "i18n", "locales")
binary_locales_dir = base_path_join(libdir, "locale")
def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
description='gconv module for character set %s', hook=calc_gconv_deps, \
extra_depends=bpn+'-gconv')
do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
description='gconv module for character set %s', hook=calc_gconv_deps, \
extra_depends=bpn+'-gconv')
def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
deps = []
f = open(fn, "r")
c_re = re.compile('^copy "(.*)"')
i_re = re.compile('^include "(\w+)".*')
for l in f.readlines():
m = c_re.match(l) or i_re.match(l)
if m:
dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
if not dp in deps:
deps.append(dp)
f.close()
if deps != []:
d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc':
d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
dot_re = re.compile("(.*)\.(.*)")
dot_re = re.compile("(.*)\.(.*)")
# Read in supported locales and associated encodings
supported = {}
with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
for line in f.readlines():
try:
locale, charset = line.rstrip().split()
except ValueError:
continue
supported[locale] = charset
# Read in supported locales and associated encodings
supported = {}
with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
for line in f.readlines():
try:
locale, charset = line.rstrip().split()
except ValueError:
continue
supported[locale] = charset
# GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
if not to_generate or to_generate == 'all':
to_generate = supported.keys()
else:
to_generate = to_generate.split()
for locale in to_generate:
if locale not in supported:
if '.' in locale:
charset = locale.split('.')[1]
else:
charset = 'UTF-8'
bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
supported[locale] = charset
# GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
if not to_generate or to_generate == 'all':
to_generate = supported.keys()
else:
to_generate = to_generate.split()
for locale in to_generate:
if locale not in supported:
if '.' in locale:
charset = locale.split('.')[1]
else:
charset = 'UTF-8'
bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
supported[locale] = charset
def output_locale_source(name, pkgname, locale, encoding):
d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
(mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
% (locale, encoding, locale))
d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
(locale, encoding, locale))
def output_locale_source(name, pkgname, locale, encoding):
d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
(mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
% (locale, encoding, locale))
d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
(locale, encoding, locale))
def output_locale_binary_rdepends(name, pkgname, locale, encoding):
m = re.match("(.*)\.(.*)", name)
if m:
libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
else:
libc_name = name
d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
% (mlprefix+bpn, libc_name)))
def output_locale_binary_rdepends(name, pkgname, locale, encoding):
m = re.match("(.*)\.(.*)", name)
if m:
libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
else:
libc_name = name
d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
% (mlprefix+bpn, libc_name)))
commands = {}
commands = {}
def output_locale_binary(name, pkgname, locale, encoding):
treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
path = d.getVar("PATH", True)
i18npath = base_path_join(treedir, datadir, "i18n")
gconvpath = base_path_join(treedir, "iconvdata")
outputpath = base_path_join(treedir, libdir, "locale")
def output_locale_binary(name, pkgname, locale, encoding):
treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
path = d.getVar("PATH", True)
i18npath = base_path_join(treedir, datadir, "i18n")
gconvpath = base_path_join(treedir, "iconvdata")
outputpath = base_path_join(treedir, libdir, "locale")
use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
if use_cross_localedef == "1":
target_arch = d.getVar('TARGET_ARCH', True)
locale_arch_options = { \
"arm": " --uint32-align=4 --little-endian ", \
"sh4": " --uint32-align=4 --big-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \
"powerpc64": " --uint32-align=4 --big-endian ", \
"mips": " --uint32-align=4 --big-endian ", \
"mips64": " --uint32-align=4 --big-endian ", \
"mipsel": " --uint32-align=4 --little-endian ", \
"mips64el":" --uint32-align=4 --little-endian ", \
"i586": " --uint32-align=4 --little-endian ", \
"i686": " --uint32-align=4 --little-endian ", \
"x86_64": " --uint32-align=4 --little-endian " }
use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
if use_cross_localedef == "1":
target_arch = d.getVar('TARGET_ARCH', True)
locale_arch_options = { \
"arm": " --uint32-align=4 --little-endian ", \
"sh4": " --uint32-align=4 --big-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \
"powerpc64": " --uint32-align=4 --big-endian ", \
"mips": " --uint32-align=4 --big-endian ", \
"mips64": " --uint32-align=4 --big-endian ", \
"mipsel": " --uint32-align=4 --little-endian ", \
"mips64el":" --uint32-align=4 --little-endian ", \
"i586": " --uint32-align=4 --little-endian ", \
"i686": " --uint32-align=4 --little-endian ", \
"x86_64": " --uint32-align=4 --little-endian " }
if target_arch in locale_arch_options:
localedef_opts = locale_arch_options[target_arch]
else:
bb.error("locale_arch_options not found for target_arch=" + target_arch)
raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")
if target_arch in locale_arch_options:
localedef_opts = locale_arch_options[target_arch]
else:
bb.error("locale_arch_options not found for target_arch=" + target_arch)
raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")
localedef_opts += " --force --old-style --no-archive --prefix=%s \
--inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
% (treedir, treedir, datadir, locale, encoding, outputpath, name)
localedef_opts += " --force --old-style --no-archive --prefix=%s \
--inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
% (treedir, treedir, datadir, locale, encoding, outputpath, name)
cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
(path, i18npath, gconvpath, localedef_opts)
else: # earlier slower qemu way
qemu = qemu_target_binary(d)
localedef_opts = "--force --old-style --no-archive --prefix=%s \
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name)
cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
(path, i18npath, gconvpath, localedef_opts)
else: # earlier slower qemu way
qemu = qemu_target_binary(d)
localedef_opts = "--force --old-style --no-archive --prefix=%s \
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name)
qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
if not qemu_options:
qemu_options = d.getVar('QEMU_OPTIONS', True)
qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
if not qemu_options:
qemu_options = d.getVar('QEMU_OPTIONS', True)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
(path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
(path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
commands["%s/%s" % (outputpath, name)] = cmd
commands["%s/%s" % (outputpath, name)] = cmd
bb.note("generating locale %s (%s)" % (locale, encoding))
bb.note("generating locale %s (%s)" % (locale, encoding))
def output_locale(name, locale, encoding):
pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
m = re.match("(.*)_(.*)", name)
if m:
rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
d.setVar('RPROVIDES_%s' % pkgname, rprovides)
if use_bin == "compile":
output_locale_binary_rdepends(name, pkgname, locale, encoding)
output_locale_binary(name, pkgname, locale, encoding)
elif use_bin == "precompiled":
output_locale_binary_rdepends(name, pkgname, locale, encoding)
else:
output_locale_source(name, pkgname, locale, encoding)
if use_bin == "compile":
output_locale_binary_rdepends(name, pkgname, locale, encoding)
output_locale_binary(name, pkgname, locale, encoding)
elif use_bin == "precompiled":
output_locale_binary_rdepends(name, pkgname, locale, encoding)
else:
output_locale_source(name, pkgname, locale, encoding)
if use_bin == "compile":
bb.note("preparing tree for binary locale generation")
bb.build.exec_func("do_prep_locale_tree", d)
if use_bin == "compile":
bb.note("preparing tree for binary locale generation")
bb.build.exec_func("do_prep_locale_tree", d)
utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
encodings = {}
for locale in to_generate:
charset = supported[locale]
if utf8_only and charset != 'UTF-8':
continue
m = dot_re.match(locale)
if m:
base = m.group(1)
else:
base = locale
# Precompiled locales are kept as is, obeying SUPPORTED, while
# others are adjusted, ensuring that the non-suffixed locales
# are utf-8, while the suffixed are not.
if use_bin == "precompiled":
output_locale(locale, base, charset)
else:
if charset == 'UTF-8':
output_locale(base, base, charset)
else:
output_locale('%s.%s' % (base, charset), base, charset)
if use_bin == "compile":
makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands:
m.write(cmd + ":\n")
m.write(" " + commands[cmd] + "\n\n")
m.close()
d.setVar("B", os.path.dirname(makefile))
d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
bb.note("Executing binary locale generation makefile")
bb.build.exec_func("oe_runmake", d)
bb.note("collecting binary locales from locale tree")
bb.build.exec_func("do_collect_bins_from_locale_tree", d)
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True)
elif use_bin == "precompiled":
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True)
else:
bb.note("generation of binary locales disabled. this may break i18n!")
if use_bin == "compile":
makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands:
m.write(cmd + ":\n")
m.write("\t" + commands[cmd] + "\n\n")
m.close()
d.setVar("B", os.path.dirname(makefile))
d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
bb.note("Executing binary locale generation makefile")
bb.build.exec_func("oe_runmake", d)
bb.note("collecting binary locales from locale tree")
bb.build.exec_func("do_collect_bins_from_locale_tree", d)
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True)
elif use_bin == "precompiled":
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True)
else:
bb.note("generation of binary locales disabled. this may break i18n!")
}
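For reference, the Makefile written by the compile branch above is a flat set of independent rules, one per generated locale, so oe_runmake with ${PARALLEL_MAKE} can fan the cross-localedef invocations out across cores. A sketch of its shape for two hypothetical locales (paths and options abbreviated, purely illustrative):

all: /output/en_US.UTF-8 /output/de_DE.UTF-8

/output/en_US.UTF-8:
	PATH="..." I18NPATH="..." GCONV_PATH="..." cross-localedef --force --old-style --no-archive ...

/output/de_DE.UTF-8:
	PATH="..." I18NPATH="..." GCONV_PATH="..." cross-localedef --force --old-style --no-archive ...

(command bodies start with a tab, matching the "\t" written into each rule).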
# We want to do this indirection so that we can safely 'return'
# from the called function even though we're prepending
python populate_packages_prepend () {
bb.build.exec_func('package_do_split_gconvs', d)
}

@@ -385,6 +385,6 @@ do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; "
python do_populate_lic_setscene () {
sstate_setscene(d)
}
addtask do_populate_lic_setscene

@@ -2,76 +2,76 @@ METADATA_BRANCH ?= "${@base_detect_branch(d)}"
METADATA_REVISION ?= "${@base_detect_revision(d)}"
def base_detect_revision(d):
path = base_get_scmbasepath(d)
scms = [base_get_metadata_git_revision, \
base_get_metadata_svn_revision]
for scm in scms:
    rev = scm(path, d)
    if rev != "<unknown>":
        return rev
return "<unknown>"
return "<unknown>"
def base_detect_branch(d):
path = base_get_scmbasepath(d)
scms = [base_get_metadata_git_branch]
for scm in scms:
    rev = scm(path, d)
    if rev != "<unknown>":
        return rev.strip()
return "<unknown>"
return "<unknown>"
def base_get_scmbasepath(d):
return d.getVar( 'COREBASE', True)
def base_get_metadata_monotone_branch(path, d):
monotone_branch = "<unknown>"
try:
monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
if monotone_branch.startswith( "database" ):
monotone_branch_words = monotone_branch.split()
monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
except:
pass
return monotone_branch
monotone_branch = "<unknown>"
try:
monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
if monotone_branch.startswith( "database" ):
monotone_branch_words = monotone_branch.split()
monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
except:
pass
return monotone_branch
def base_get_metadata_monotone_revision(path, d):
monotone_revision = "<unknown>"
try:
monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
if monotone_revision.startswith( "format_version" ):
monotone_revision_words = monotone_revision.split()
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
except IOError:
pass
return monotone_revision
monotone_revision = "<unknown>"
try:
monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
if monotone_revision.startswith( "format_version" ):
monotone_revision_words = monotone_revision.split()
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
except IOError:
pass
return monotone_revision
def base_get_metadata_svn_revision(path, d):
revision = "<unknown>"
try:
revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
except IOError:
pass
return revision
revision = "<unknown>"
try:
revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
except IOError:
pass
return revision
def base_get_metadata_git_branch(path, d):
branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()
if len(branch) != 0:
return branch
return "<unknown>"
def base_get_metadata_git_revision(path, d):
f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
data = f.read()
if f.close() is None:
rev = data.split(" ")[0]
if len(rev) != 0:
return rev
return "<unknown>"
f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
data = f.read()
if f.close() is None:
rev = data.split(" ")[0]
if len(rev) != 0:
return rev
return "<unknown>"

@@ -29,32 +29,32 @@ fi
}
python populate_packages_append () {
import re
packages = d.getVar('PACKAGES', True).split()
pkgdest = d.getVar('PKGDEST', True)
for pkg in packages:
mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
mimes = []
mime_re = re.compile(".*\.xml$")
if os.path.exists(mime_dir):
for f in os.listdir(mime_dir):
if mime_re.match(f):
mimes.append(f)
if mimes:
bb.note("adding mime postinst and postrm scripts to %s" % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst:
postinst = '#!/bin/sh\n'
postinst += d.getVar('mime_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
if not postrm:
postrm = '#!/bin/sh\n'
postrm += d.getVar('mime_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm)
bb.note("adding shared-mime-info-data dependency to %s" % pkg)
rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" )
rdepends.append("shared-mime-info-data")
d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends))
}

File diff suppressed because it is too large

@@ -418,8 +418,8 @@ python () {
}
python do_package_write_deb () {
bb.build.exec_func("read_subpackage_metadata", d)
bb.build.exec_func("do_package_deb", d)
bb.build.exec_func("read_subpackage_metadata", d)
bb.build.exec_func("do_package_deb", d)
}
do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
do_package_write_deb[umask] = "022"

File diff suppressed because it is too large

@@ -1,13 +1,13 @@
python read_subpackage_metadata () {
import oe.packagedata
data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)
for key in data.keys():
d.setVar(key, data[key])
for pkg in d.getVar('PACKAGES', True).split():
sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys():
d.setVar(key, sdata[key])
}

@@ -8,164 +8,164 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot"
inherit terminal
def src_patches(d, all = False ):
workdir = d.getVar('WORKDIR', True)
fetch = bb.fetch2.Fetch([], d)
patches = []
sources = []
for url in fetch.urls:
local = patch_path(url, fetch, workdir)
if not local:
if all:
local = fetch.localpath(url)
sources.append(local)
continue
urldata = fetch.ud[url]
parm = urldata.parm
patchname = parm.get('pname') or os.path.basename(local)
apply, reason = should_apply(parm, d)
if not apply:
if reason:
bb.note("Patch %s %s" % (patchname, reason))
continue
patchparm = {'patchname': patchname}
if "striplevel" in parm:
striplevel = parm["striplevel"]
elif "pnum" in parm:
#bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url)
striplevel = parm["pnum"]
else:
striplevel = '1'
patchparm['striplevel'] = striplevel
patchdir = parm.get('patchdir')
if patchdir:
patchparm['patchdir'] = patchdir
localurl = bb.encodeurl(('file', '', local, '', '', patchparm))
patches.append(localurl)
if all:
return sources
return patches
def patch_path(url, fetch, workdir):
"""Return the local path of a patch, or None if this isn't a patch"""
"""Return the local path of a patch, or None if this isn't a patch"""
local = fetch.localpath(url)
base, ext = os.path.splitext(os.path.basename(local))
if ext in ('.gz', '.bz2', '.Z'):
local = os.path.join(workdir, base)
ext = os.path.splitext(base)[1]
urldata = fetch.ud[url]
if "apply" in urldata.parm:
apply = oe.types.boolean(urldata.parm["apply"])
if not apply:
return
elif ext not in (".diff", ".patch"):
return
return local
def should_apply(parm, d):
"""Determine if we should apply the given patch"""
"""Determine if we should apply the given patch"""
if "mindate" in parm or "maxdate" in parm:
pn = d.getVar('PN', True)
srcdate = d.getVar('SRCDATE_%s' % pn, True)
if not srcdate:
srcdate = d.getVar('SRCDATE', True)
if "mindate" in parm or "maxdate" in parm:
pn = d.getVar('PN', True)
srcdate = d.getVar('SRCDATE_%s' % pn, True)
if not srcdate:
srcdate = d.getVar('SRCDATE', True)
if srcdate == "now":
srcdate = d.getVar('DATE', True)
if srcdate == "now":
srcdate = d.getVar('DATE', True)
if "maxdate" in parm and parm["maxdate"] < srcdate:
return False, 'is outdated'
if "maxdate" in parm and parm["maxdate"] < srcdate:
return False, 'is outdated'
if "mindate" in parm and parm["mindate"] > srcdate:
return False, 'is predated'
if "mindate" in parm and parm["mindate"] > srcdate:
return False, 'is predated'
if "minrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev < parm["minrev"]:
return False, 'applies to later revisions'
if "minrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev < parm["minrev"]:
return False, 'applies to later revisions'
if "maxrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev > parm["maxrev"]:
return False, 'applies to earlier revisions'
if "maxrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev > parm["maxrev"]:
return False, 'applies to earlier revisions'
if "rev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and parm["rev"] not in srcrev:
return False, "doesn't apply to revision"
if "rev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and parm["rev"] not in srcrev:
return False, "doesn't apply to revision"
if "notrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and parm["notrev"] in srcrev:
return False, "doesn't apply to revision"
if "notrev" in parm:
srcrev = d.getVar('SRCREV', True)
if srcrev and parm["notrev"] in srcrev:
return False, "doesn't apply to revision"
return True, None
should_apply[vardepsexclude] = "DATE SRCDATE"
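The checks in should_apply() are driven entirely by parameters carried on the patch's SRC_URI entry; a hypothetical recipe line exercising a few of them:

SRC_URI += "file://build-fix.patch;striplevel=2;mindate=20120101;patchdir=src"

The file name and values are illustrative; apply=no can likewise ship a file without applying it, as handled in patch_path() above.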
python patch_do_patch() {
import oe.patch
patchsetmap = {
"patch": oe.patch.PatchTree,
"quilt": oe.patch.QuiltTree,
"git": oe.patch.GitApplyTree,
}
cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
resolvermap = {
"noop": oe.patch.NOOPResolver,
"user": oe.patch.UserResolver,
}
rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
classes = {}
s = d.getVar('S', True)
path = os.getenv('PATH')
os.putenv('PATH', d.getVar('PATH', True))
for patch in src_patches(d):
_, _, local, _, _, parm = bb.decodeurl(patch)
if "patchdir" in parm:
patchdir = parm["patchdir"]
if not os.path.isabs(patchdir):
patchdir = os.path.join(s, patchdir)
else:
patchdir = s
if "patchdir" in parm:
patchdir = parm["patchdir"]
if not os.path.isabs(patchdir):
patchdir = os.path.join(s, patchdir)
else:
patchdir = s
if not patchdir in classes:
patchset = cls(patchdir, d)
resolver = rcls(patchset, oe_terminal)
classes[patchdir] = (patchset, resolver)
patchset.Clean()
else:
patchset, resolver = classes[patchdir]
bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d)))
try:
patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
except Exception as exc:
bb.fatal(str(exc))
try:
resolver.Resolve()
except bb.BBHandledException as e:
bb.fatal(str(e))
bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d)))
try:
patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
except Exception as exc:
bb.fatal(str(exc))
try:
resolver.Resolve()
except bb.BBHandledException as e:
bb.fatal(str(e))
}
patch_do_patch[vardepsexclude] = "PATCHRESOLVE"

@@ -1,22 +1,22 @@
python do_pkg_write_metainfo () {
deploydir = d.getVar('DEPLOY_DIR', True)
if not deploydir:
bb.error("DEPLOY_DIR not defined, unable to write package info")
return
try:
infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
except OSError:
raise bb.build.FuncFailed("unable to open package-info file for writing.")
name = d.getVar('PN', True)
version = d.getVar('PV', True)
desc = d.getVar('DESCRIPTION', True)
page = d.getVar('HOMEPAGE', True)
lic = d.getVar('LICENSE', True)
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
infofile.close()
}
addtask pkg_write_metainfo after do_package before do_build

@@ -32,29 +32,29 @@ python () {
}
fakeroot python do_populate_sdk() {
bb.build.exec_func("populate_sdk_image", d)
bb.build.exec_func("populate_sdk_image", d)
# Handle multilibs in the SDK environment, siteconfig, etc files...
localdata = bb.data.createCopy(d)
# make sure we only use the WORKDIR value from 'd', or it can change
localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
# make sure we only use the SDKTARGETSYSROOT value from 'd'
localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
# Process DEFAULTTUNE
bb.build.exec_func("create_sdk_files", localdata)
variants = d.getVar("MULTILIB_VARIANTS", True) or ""
for item in variants.split():
# Load overrides from 'd' to avoid having to reset the value...
overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
localdata.setVar("OVERRIDES", overrides)
bb.data.update_data(localdata)
bb.build.exec_func("create_sdk_files", localdata)
variants = d.getVar("MULTILIB_VARIANTS", True) or ""
for item in variants.split():
# Load overrides from 'd' to avoid having to reset the value...
overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
localdata.setVar("OVERRIDES", overrides)
bb.data.update_data(localdata)
bb.build.exec_func("create_sdk_files", localdata)
bb.build.exec_func("tar_sdk", d)
bb.build.exec_func("tar_sdk", d)
}
fakeroot populate_sdk_image() {

@@ -4,12 +4,12 @@
#
def qemu_target_binary(data):
import bb
target_arch = data.getVar("TARGET_ARCH", True)
if target_arch in ("i486", "i586", "i686"):
target_arch = "i386"
elif target_arch == "powerpc":
target_arch = "ppc"
target_arch = data.getVar("TARGET_ARCH", True)
if target_arch in ("i486", "i586", "i686"):
target_arch = "i386"
elif target_arch == "powerpc":
target_arch = "ppc"
return "qemu-" + target_arch
return "qemu-" + target_arch

@@ -185,18 +185,18 @@ def sstate_installpkg(ss, d):
staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = d.getVar('STAGING_DIR_HOST', True)
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
elif bb.data.inherits_class('cross', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging)
else:
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
# Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd)
print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True)
print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True)
# Need to remove this or we'd copy it into the target directory and may
# conflict with another writer
@@ -310,50 +310,50 @@ python sstate_cleanall() {
}
def sstate_hardcode_path(d):
import subprocess
# Need to remove hardcoded paths and fix these when we install the
# staging packages.
#
# Note: the logic in this function needs to match the reverse logic
# in sstate_installpkg(ss, d)
staging = d.getVar('STAGING_DIR', True)
staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = d.getVar('STAGING_DIR_HOST', True)
sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_grep_cmd = "grep -l -e '%s'" % (staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
elif bb.data.inherits_class('cross', d):
sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
else:
sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
fixmefn = sstate_builddir + "fixmepath"
sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
sstate_filelist_cmd = "tee %s" % (fixmefn)
# fixmepath file needs relative paths, drop sstate_builddir prefix
sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn)
# Limit the fixpaths and sed operations based on the initial grep search
# This has the side effect of making sure the vfs cache is hot
sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd)
print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True)
print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True)
# If the fixmefn is empty, remove it..
if os.stat(fixmefn).st_size == 0:
os.remove(fixmefn)
else:
print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)
subprocess.call(sstate_filelist_relative_cmd, shell=True)
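Substituting hypothetical values makes the composed pipeline easier to read; assuming SSTATE_SCAN_CMD is a find over the staging output and a target recipe (so the host sed expression is chosen), sstate_hardcode_cmd might expand to something like:

find . -type f | xargs grep -l -e '/build/tmp/sysroots/qemux86' | tee fixmepath | xargs --no-run-if-empty sed -i -e 's:/build/tmp/sysroots/qemux86:FIXMESTAGINGDIRHOST:g'

so only files that actually contain the staging path are recorded in fixmepath and rewritten.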
def sstate_package(ss, d):
import oe.path

@@ -109,7 +109,7 @@ do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/"
do_populate_sysroot[stamp-extra-info] = "${MACHINE}"
python do_populate_sysroot_setscene () {
sstate_setscene(d)
}
addtask do_populate_sysroot_setscene

@@ -54,146 +54,146 @@ syslinux_hddimg_install() {
}
python build_syslinux_menu () {
import copy
import sys
workdir = d.getVar('WORKDIR', True)
if not workdir:
bb.error("WORKDIR is not defined")
return
labels = d.getVar('LABELS', True)
if not labels:
bb.debug(1, "LABELS not defined, nothing to do")
return
if labels == []:
bb.debug(1, "No labels, nothing to do")
return
cfile = d.getVar('SYSLINUXMENU', True)
if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
try:
    cfgfile = file(cfile, 'w')
except OSError:
    raise bb.build.FuncFailed('Unable to open %s' % (cfile))
# Beep the speaker and Clear the screen
cfgfile.write('\x07\x0C')
# The title should be configurable
cfgfile.write('Linux Boot Menu\n')
cfgfile.write('The following targets are available on this image:\n')
cfgfile.write('\n')
for label in labels.split():
from copy import deepcopy
localdata = deepcopy(d)
overrides = localdata.getVar('OVERRIDES')
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = localdata.expand(overrides)
localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata)
usage = localdata.getVar('USAGE', True)
cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
cfgfile.write('%s\n' % (usage))
del localdata
cfgfile.write('\n')
cfgfile.close()
}
python build_syslinux_cfg () {
import copy
import sys
workdir = d.getVar('WORKDIR', True)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
labels = d.getVar('LABELS', True)
if not labels:
bb.debug(1, "LABELS not defined, nothing to do")
return
if labels == []:
bb.debug(1, "No labels, nothing to do")
return
cfile = d.getVar('SYSLINUXCFG', True)
if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
try:
    cfgfile = file(cfile, 'w')
except OSError:
    raise bb.build.FuncFailed('Unable to open %s' % (cfile))
cfgfile.write('# Automatically created by OE\n')
opts = d.getVar('SYSLINUX_OPTS', True)
if opts:
for opt in opts.split(';'):
cfgfile.write('%s\n' % opt)
cfgfile.write('ALLOWOPTIONS 1\n');
cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
timeout = d.getVar('SYSLINUX_TIMEOUT', True)
if timeout:
cfgfile.write('TIMEOUT %s\n' % timeout)
else:
cfgfile.write('TIMEOUT 50\n')
prompt = d.getVar('SYSLINUX_PROMPT', True)
if prompt:
cfgfile.write('PROMPT %s\n' % prompt)
else:
cfgfile.write('PROMPT 1\n')
menu = d.getVar('AUTO_SYSLINUXMENU', True)
# This is ugly. My bad.
if menu:
bb.build.exec_func('build_syslinux_menu', d)
mfile = d.getVar('SYSLINUXMENU', True)
cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
for label in labels.split():
localdata = bb.data.createCopy(d)
overrides = localdata.getVar('OVERRIDES', True)
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata)
cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
append = localdata.getVar('APPEND', True)
initrd = localdata.getVar('INITRD', True)
if append:
cfgfile.write('APPEND ')
if initrd:
cfgfile.write('initrd=/initrd ')
cfgfile.write('LABEL=%s '% (label))
cfgfile.write('%s\n' % (append))
cfgfile.close()
}
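Put together, the generated syslinux.cfg for a single hypothetical label (APPEND values illustrative) would read:

# Automatically created by OE
ALLOWOPTIONS 1
DEFAULT boot
TIMEOUT 50
PROMPT 1
LABEL boot
KERNEL /vmlinuz
APPEND initrd=/initrd LABEL=boot root=/dev/sda2 console=ttyS0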

@@ -113,13 +113,13 @@ def update_alternatives_after_parse(d):
# Convert old format to new format...
alt_links = d.getVar('ALTERNATIVE_LINKS', True) or ""
for alt_link in alt_links.split():
alt_name = os.path.basename(alt_link)
alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
alternative += " " + alt_name
d.setVar('ALTERNATIVE_%s' % pn, alternative)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link)
alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
alternative += " " + alt_name
d.setVar('ALTERNATIVE_%s' % pn, alternative)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link)
return
if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None:
@@ -128,15 +128,15 @@ def update_alternatives_after_parse(d):
alt_path = d.getVar('ALTERNATIVE_PATH', True)
alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name))
if alt_name == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
if alt_path == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
alternative += " " + alt_name
# Fix the alt_path if it's relative
alt_path = os.path.join(os.path.dirname(alt_link), alt_path)
d.setVar('ALTERNATIVE_%s' % pn, alternative)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
@@ -199,144 +199,144 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
# the split and strip steps.. packagecopy seems to be the earliest reasonable
# place.
python perform_packagecopy_append () {
# Check for deprecated usage...
pn = d.getVar('BPN', True)
if d.getVar('ALTERNATIVE_LINKS', True) != None:
bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn)
if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None:
bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn)
# Do actual update alternatives processing
pkgdest = d.getVar('PKGD', True)
for pkg in (d.getVar('PACKAGES', True) or "").split():
# If the src == dest, we know we need to rename the dest by appending ${BPN}
link_rename = {}
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
if not alt_link:
alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
# Sometimes alt_target is specified as relative to the link name.
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
# If the link and target are the same name, we need to rename the target.
if alt_link == alt_target:
src = '%s/%s' % (pkgdest, alt_target)
alt_target_rename = '%s.%s' % (alt_target, pn)
dest = '%s/%s' % (pkgdest, alt_target_rename)
if os.path.lexists(dest):
bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
elif os.path.lexists(src):
if os.path.islink(src):
# Delay rename of links
link_rename[alt_target] = alt_target_rename
else:
bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
os.rename(src, dest)
else:
bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
continue
d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)
# Process delayed link names
# Do these after other renames so we can correct broken links
for alt_target in link_rename:
src = '%s/%s' % (pkgdest, alt_target)
dest = '%s/%s' % (pkgdest, link_rename[alt_target])
link = os.readlink(src)
if os.path.isabs(link):
link_target = pkgdest + os.readlink(src)
else:
link_target = os.path.join(os.path.dirname(src), link)
if os.path.lexists(link_target):
# Ok, the link_target exists, we can rename
bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
os.rename(src, dest)
else:
# Try to resolve the broken link to link.${BPN}
link_maybe = '%s.%s' % (os.readlink(src), pn)
if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
# Ok, the renamed link target exists.. create a new link, and remove the original
bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
os.symlink(link_maybe, dest)
os.unlink(src)
else:
bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
}
python populate_packages_prepend () {
pn = d.getVar('BPN', True)
# Do actual update alternatives processing
pkgdest = d.getVar('PKGD', True)
for pkg in (d.getVar('PACKAGES', True) or "").split():
# Create post install/removal scripts
alt_setup_links = ""
alt_remove_links = ""
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
# Sometimes alt_target is specified as relative to the link name.
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
# This shouldn't trigger, as it should have been resolved earlier!
if alt_link == alt_target:
bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
alt_target = '%s.%s' % (alt_target, pn)
if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
continue
# Default to generate shell script.. eventually we may want to change this...
alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link))
alt_setup_links += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
alt_remove_links += '\tupdate-alternatives --remove %s %s\n' % (alt_name, alt_target)
if alt_setup_links:
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
bb.note('%s' % alt_setup_links)
postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n'
postinst += alt_setup_links
d.setVar('pkg_postinst_%s' % pkg, postinst)
bb.note('%s' % alt_remove_links)
postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n'
postrm += alt_remove_links
d.setVar('pkg_postrm_%s' % pkg, postrm)
}
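With hypothetical values, the postinst fragment this generates for a package providing one alternative would be:

#!/bin/sh
	update-alternatives --install /usr/bin/vi vi vi.busybox 100

The leading tab comes from the "\t" in alt_setup_links; the link, name, relative target and priority shown are illustrative. The matching postrm carries the corresponding update-alternatives --remove call.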
python package_do_filedeps_append () {
pn = d.getVar('BPN', True)
pkgdest = d.getVar('PKGDEST', True)
for pkg in packages.split():
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
if alt_link == alt_target:
bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target))
alt_target = '%s.%s' % (alt_target, pn)
if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
continue
# Add file provide
trans_target = file_translate(alt_target)
d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
}

@@ -44,42 +44,42 @@ python __anonymous() {
}
python populate_packages_prepend () {
def update_rcd_package(pkg):
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d)
overrides = localdata.getVar("OVERRIDES", True)
localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
bb.data.update_data(localdata)
"""
update_rc.d postinst is appended here because pkg_postinst may require to
execute on the target. Not doing so may cause update_rc.d postinst invoked
twice to cause unwanted warnings.
"""
postinst = localdata.getVar('pkg_postinst', True)
if not postinst:
postinst = '#!/bin/sh\n'
postinst += localdata.getVar('updatercd_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst)
"""
update_rc.d postinst is appended here because pkg_postinst may require to
execute on the target. Not doing so may cause update_rc.d postinst invoked
twice to cause unwanted warnings.
"""
postinst = localdata.getVar('pkg_postinst', True)
if not postinst:
postinst = '#!/bin/sh\n'
postinst += localdata.getVar('updatercd_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst)
prerm = localdata.getVar('pkg_prerm', True)
if not prerm:
prerm = '#!/bin/sh\n'
prerm += localdata.getVar('updatercd_prerm', True)
d.setVar('pkg_prerm_%s' % pkg, prerm)
postrm = localdata.getVar('pkg_postrm', True)
if not postrm:
postrm = '#!/bin/sh\n'
postrm += localdata.getVar('updatercd_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm)
pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
if pkgs == None:
pkgs = d.getVar('UPDATERCPN', True)
packages = (d.getVar('PACKAGES', True) or "").split()
if not pkgs in packages and packages != []:
pkgs = packages[0]
for pkg in pkgs.split():
update_rcd_package(pkg)
}
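A recipe using this class typically only names its init script; a hypothetical example:

INITSCRIPT_PACKAGES = "${PN}"
INITSCRIPT_NAME = "myservice"
INITSCRIPT_PARAMS = "defaults 90"

These values (illustrative here) feed the updatercd_postinst/prerm/postrm snippets appended above.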

@@ -154,61 +154,61 @@ do_package_setscene[depends] = "${USERADDSETSCENEDEPS}"
# Recipe parse-time sanity checks
def update_useradd_after_parse(d):
useradd_packages = d.getVar('USERADD_PACKAGES', True)
if not useradd_packages:
raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')
for pkg in useradd_packages.split():
if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)
python __anonymous() {
update_useradd_after_parse(d)
}
# Return a single [GROUP|USER]ADD_PARAM formatted string which includes the
# [group|user]add parameters for all USERADD_PACKAGES in this recipe
def get_all_cmd_params(d, cmd_type):
import string
param_type = cmd_type.upper() + "ADD_PARAM_%s"
params = []
useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
for pkg in useradd_packages.split():
param = d.getVar(param_type % pkg, True)
if param:
params.append(param)
useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
for pkg in useradd_packages.split():
param = d.getVar(param_type % pkg, True)
if param:
params.append(param)
return string.join(params, "; ")
return string.join(params, "; ")
# Adds the preinst script into generated packages
fakeroot python populate_packages_prepend () {
def update_useradd_package(pkg):
bb.debug(1, 'adding user/group calls to preinst for %s' % pkg)
"""
useradd preinst is appended here because pkg_preinst may be
required to execute on the target. Not doing so may cause
useradd preinst to be invoked twice, causing unwanted warnings.
"""
preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
if not preinst:
preinst = '#!/bin/sh\n'
preinst += d.getVar('useradd_preinst', True)
d.setVar('pkg_preinst_%s' % pkg, preinst)
"""
useradd preinst is appended here because pkg_preinst may be
required to execute on the target. Not doing so may cause
useradd preinst to be invoked twice, causing unwanted warnings.
"""
preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
if not preinst:
preinst = '#!/bin/sh\n'
preinst += d.getVar('useradd_preinst', True)
d.setVar('pkg_preinst_%s' % pkg, preinst)
# RDEPENDS setup
rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
rdepends += ' ' + d.getVar('MLPREFIX') + 'base-passwd'
rdepends += ' ' + d.getVar('MLPREFIX') + 'shadow'
d.setVar("RDEPENDS_%s" % pkg, rdepends)
# Add the user/group preinstall scripts and RDEPENDS requirements
# to packages specified by USERADD_PACKAGES
if not bb.data.inherits_class('nativesdk', d):
useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
for pkg in useradd_packages.split():
update_useradd_package(pkg)
}

@@ -1,13 +1,13 @@
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
import sys
# emit variables and shell functions
#bb.data.emit_env(sys.__stdout__, d)
# emit the metadata which isn't valid shell
for e in d.keys():
    if d.getVarFlag(e, 'task'):
        bb.plain("%s" % e)
}
CLEANFUNCS ?= ""
@@ -15,34 +15,34 @@ CLEANFUNCS ?= ""
addtask clean
do_clean[nostamp] = "1"
python do_clean() {
    """clear the build and temp directories"""
    dir = d.expand("${WORKDIR}")
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    for f in (d.getVar('CLEANFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.checkstatus()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}
addtask checkuriall after do_checkuri


@ -292,77 +292,77 @@ END
}
def check_app_exists(app, d):
    from bb import which, data

    app = data.expand(app, d)
    path = data.getVar('PATH', d, 1)
    return bool(which(path, app))
def explode_deps(s):
    return bb.utils.explode_deps(s)
def base_set_filespath(path, d):
    filespath = []
    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
    # Don't prepend empty strings to the path list
    if extrapaths != "":
        path = extrapaths.split(":") + path
    # The ":" ensures we have an 'empty' override
    overrides = (d.getVar("OVERRIDES", True) or "") + ":"
    for p in path:
        if p != "":
            for o in overrides.split(":"):
                filespath.append(os.path.join(p, o))
    return ":".join(filespath)
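# As a minimal sketch of the expansion above, assuming two search paths and
# a hypothetical overrides string (the trailing ":" yields the bare path
# itself as an 'empty' override):
#
#     import os
#     path = ["/meta/recipe/files", "/meta/recipe/recipe-1.0"]
#     overrides = "arm:qemuarm" + ":"
#     filespath = []
#     for p in path:
#         for o in overrides.split(":"):
#             filespath.append(os.path.join(p, o))
#     print(":".join(filespath))
#     # -> /meta/recipe/files/arm:/meta/recipe/files/qemuarm:/meta/recipe/files/:...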
def extend_variants(d, var, extend, delim=':'):
    """Return a string of all bb class extend variants for the given extend"""
    variants = []
    whole = d.getVar(var, True) or ""
    for ext in whole.split():
        eext = ext.split(delim)
        if len(eext) > 1 and eext[0] == extend:
            variants.append(eext[1])
    return " ".join(variants)
def multilib_pkg_extend(d, pkg):
    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
    if not variants:
        return pkg
    pkgs = pkg
    for v in variants:
        pkgs = pkgs + " " + v + "-" + pkg
    return pkgs
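# For example (a standalone sketch with hypothetical multilib variants):
#
#     variants = "lib32 lib64".split()
#     pkg = "bash"
#     pkgs = pkg
#     for v in variants:
#         pkgs = pkgs + " " + v + "-" + pkg
#     print(pkgs)
#     # -> bash lib32-bash lib64-bash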
def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
    """Return a string of all ${var} in all multilib tune configuration"""
    values = []
    value = d.getVar(var, True) or ""
    if value != "":
        if need_split:
            for item in value.split(delim):
                values.append(item)
        else:
            values.append(value)
    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)
        value = localdata.getVar(var, True) or ""
        if value != "":
            if need_split:
                for item in value.split(delim):
                    values.append(item)
            else:
                values.append(value)
    if unique:
        # we do this to keep order as much as possible
        ret = []
        for value in values:
            if not value in ret:
                ret.append(value)
    else:
        ret = values
    return " ".join(ret)
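# The unique=True branch above is the usual order-preserving dedup idiom;
# standalone, with hypothetical tune values, it reduces to:
#
#     values = ["arm", "armv5te", "arm", "mips32"]
#     ret = []
#     for value in values:
#         if not value in ret:
#             ret.append(value)
#     print(" ".join(ret))
#     # -> arm armv5te mips32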


@ -96,20 +96,19 @@ RDEPENDS_${PN} = "\
PACKAGES_DYNAMIC = "${PN}-plugin-*"
python populate_packages_prepend() {
    depmap = dict(pppd="ppp")
    packages = []
    multilib_prefix = (d.getVar("MLPREFIX", True) or "")
    hook = lambda file, pkg, b, c, d: packages.append((file, pkg))
    plugin_dir = d.expand('${libdir}/connman/plugins/')
    plugin_name = d.expand('${PN}-plugin-%s')
    do_split_packages(d, plugin_dir, '^(.*).so$', plugin_name, '${PN} plugin for %s', extra_depends='', hook=hook, prepend=True)
    for (file, package) in packages:
        plugintype = package.split('-')[-1]
        if plugintype in depmap:
            rdepends = map(lambda x: multilib_prefix + x, depmap[plugintype].split())
            bb.note("Adding rdependency on %s to %s" % (rdepends, package))
            d.setVar("RDEPENDS_%s" % package, " ".join(rdepends))
}
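# The core of the do_split_packages() pattern above is a regex over file
# names; a standalone sketch with hypothetical plugin files (the helper
# itself comes from OE-Core's packaging class, and the recipe's pattern
# leaves the dot unescaped, which also matches):
#
#     import re
#     for f in ['loopback.so', 'ethernet.so', 'session_policy_local.so']:
#         m = re.match(r'^(.*)\.so$', f)
#         if m:
#             print('connman-plugin-%s' % m.group(1))
#     # -> connman-plugin-loopback, connman-plugin-ethernet, ...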
PACKAGES =+ "${PN}-tools ${PN}-tests"


@ -52,30 +52,30 @@ base_passwd_sstate_postinst() {
}
python populate_packages_prepend() {
    # Add in the preinst function for ${PN}
    # We have to do this here as prior to this, passwd/group.master
    # would be unavailable. We need to create these files at preinst
    # time before the files from the package may be available, hence
    # storing the data from the files in the preinst directly.

    f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
    passwd = "".join(f.readlines())
    f.close()
    f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
    group = "".join(f.readlines())
    f.close()

    preinst = """#!/bin/sh
if [ ! -e $D${sysconfdir}/passwd ]; then
\tcat << EOF > $D${sysconfdir}/passwd
""" + passwd + """EOF
fi
if [ ! -e $D${sysconfdir}/group ]; then
\tcat << EOF > $D${sysconfdir}/group
""" + group + """EOF
fi
"""
    d.setVar('pkg_preinst_${PN}', preinst)
}
addtask do_package after do_populate_sysroot


@ -218,23 +218,23 @@ ALTERNATIVE_TARGET[syslog-startup-conf] = "${sysconfdir}/syslog-startup.conf.${B
ALTERNATIVE_TARGET = "/bin/busybox"
python do_package_prepend () {
    # We need to load the full set of busybox provides from the /etc/busybox.links
    # Use this to see the update-alternatives with the right information

    dvar = d.getVar('D', True)
    pn = d.getVar('PN', True)
    f = open('%s/etc/busybox.links' % (dvar), 'r')

    for alt_link_name in f:
        alt_link_name = alt_link_name.strip()
        alt_name = os.path.basename(alt_link_name)

        # Match coreutils
        if alt_name == '[':
            alt_name = 'lbracket'

        d.appendVar('ALTERNATIVE_%s' % (pn), ' ' + alt_name)
        d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link_name)
}
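# A quick standalone check of the link-name mapping above, with a
# hypothetical slice of /etc/busybox.links:
#
#     import os.path
#     for alt_link_name in ['/bin/sh', '/usr/bin/[', '/bin/ls']:
#         alt_name = os.path.basename(alt_link_name.strip())
#         if alt_name == '[':
#             alt_name = 'lbracket'
#         print(alt_name)
#     # -> sh, lbracket, ls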
pkg_postinst_${PN} () {


@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"
do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}
do_move_ports() {


@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"
do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}
do_move_ports() {


@ -1,54 +1,54 @@
def ld_append_if_tune_exists(d, infos, dict):
    tune = d.getVar("DEFAULTTUNE", True) or ""
    libdir = d.getVar("base_libdir", True) or ""
    if dict.has_key(tune):
        infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
        infos['lddrewrite'].add(libdir + '/' + dict[tune][0])
def eglibc_dl_info(d):
    ld_info_all = {
        "mips": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mipsel": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc64": ["ld64.so.1", "FLAG_ELF_LIBC6"],
        "powerpc64-nf": ["ld64.so.1", "FLAG_ELF_LIBC6"],
        "core2": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
        "core2-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
        "x86": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
        "x86-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
        "i586": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
    }

    infos = {'ldconfig': set(), 'lddrewrite': set()}
    ld_append_if_tune_exists(d, infos, ld_info_all)

    # DEFAULTTUNE_MULTILIB_ORIGINAL
    original_tune = d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL", True)
    if original_tune:
        localdata = bb.data.createCopy(d)
        localdata.setVar("DEFAULTTUNE", original_tune)
        ld_append_if_tune_exists(localdata, infos, ld_info_all)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)
        ld_append_if_tune_exists(localdata, infos, ld_info_all)
    infos['ldconfig'] = ','.join(infos['ldconfig'])
    infos['lddrewrite'] = ' '.join(infos['lddrewrite'])
    return infos
EGLIBC_KNOWN_INTERPRETER_NAMES = "${@eglibc_dl_info(d)['ldconfig']}"
RTLDLIST = "${@eglibc_dl_info(d)['lddrewrite']}"
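# For one tune entry, the fragment built by ld_append_if_tune_exists()
# looks like this (a standalone sketch, assuming the "x86" row above and
# a /lib base_libdir):
#
#     libdir, entry = "/lib", ["ld-linux.so.2", "FLAG_ELF_LIBC6"]
#     print('{"' + libdir + '/' + entry[0] + '",' + entry[1] + ' }')
#     # -> {"/lib/ld-linux.so.2",FLAG_ELF_LIBC6 }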


@ -1,14 +1,14 @@
def eglibc_cfg(feature, features, tokens, cnf):
    if type(tokens) == type(""):
        tokens = [tokens]
    if type(features) == type([]) and feature in features:
        cnf.extend([token + ' = y' for token in tokens])
    else:
        for token in tokens:
            cnf.extend([token + ' = n'])
            if token == 'OPTION_EGLIBC_NSSWITCH':
                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG = ${S}/nss/nsswitch.conf"])
                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS = ${S}/nss/fixed-nsswitch.functions"])
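# In effect each token becomes a "<token> = y" or "<token> = n" line in the
# generated config; a standalone sketch with a hypothetical option group:
#
#     cnf, features = [], ['ipv6']
#     token = 'OPTION_EGLIBC_ADVANCED_INET6'
#     cnf.extend([token + (' = y' if 'ipv6' in features else ' = n')])
#     print(cnf)
#     # -> ['OPTION_EGLIBC_ADVANCED_INET6 = y']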
# arrange the dependencies among eglibc configurable options according to the option-groups.def file from the eglibc source code
def distro_features_check_deps(distro_features):


@ -78,7 +78,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}
do_move_ports() {
@ -89,8 +89,8 @@ do_move_ports() {
}
do_patch_append() {
    bb.build.exec_func('do_fix_ia_headers', d)
    bb.build.exec_func('do_fix_readlib_c', d)
}
# for mips, eglibc now builds syscall tables for all ABIs


@ -76,7 +76,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}
do_move_ports() {
@ -87,7 +87,7 @@ do_move_ports() {
}
do_patch_append() {
    bb.build.exec_func('do_fix_readlib_c', d)
}
# for mips, eglibc now builds syscall tables for all ABIs


@ -38,9 +38,9 @@ export STAGING_INCDIR
export LDFLAGS += "-ldl"
python populate_packages_prepend () {
    # autonamer would call this libxml2-2, but we don't want that
    if d.getVar('DEBIAN_NAMES', True):
        d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
}
PACKAGES += "${PN}-utils"


@ -119,8 +119,8 @@ _install_cfgs = "\
"
python do_install () {
    bb.build.exec_func("shell_do_install", d)
    oe.path.make_relative_symlink(d.expand("${D}${libdir}/libtinfo.so"))
}
shell_do_install() {
@ -205,12 +205,12 @@ shell_do_install() {
}
python populate_packages_prepend () {
    libdir = d.expand("${libdir}")
    base_libdir = d.expand("${base_libdir}")
    pnbase = d.expand("${PN}-lib%s")
    do_split_packages(d, libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends='', allow_links=True)
    if libdir is not base_libdir:
        do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends='', allow_links=True)
}


@ -8,26 +8,26 @@ USE_NLS = "yes"
SRC_URI += "file://db_linking_hack.patch"
python do_install () {
    bb.build.exec_func('do_install_base', d)
    bb.build.exec_func('do_install_config', d)
}
python do_install_config () {
    indir = os.path.dirname(d.getVar('FILE', 1))
    infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
    data = infile.read()
    infile.close()

    data = d.expand(data)

    outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    outpath = os.path.join(outdir, 'apt.conf.sample')

    outfile = file(outpath, 'w')
    outfile.write(data)
    outfile.close()
}
do_install_base () {


@ -34,23 +34,23 @@ apt-utils-manpages="doc/apt-extracttemplates.1 \
# doc/apt-ftparchive.1
def get_files_apt_doc(d, bb, manpages):
    import re
    manpages = re.sub(r'\bdoc/(\S+)/(\S+)\.\1\.(.)\b', r'${mandir}/\1/man\3/\2.\3', manpages)
    manpages = re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', manpages)
    return manpages
def get_commands_apt_doc(d, bb, manpages):
    import os
    s = list()
    __dir_cache__ = list()
    for m in manpages.split():
        dest = get_files_apt_doc(d, bb, m)
        dir = os.path.dirname(dest)
        if not dir in __dir_cache__:
            s.append("install -d ${D}/%s" % dir)
            __dir_cache__.append(dir)
        s.append("install -m 0644 %s ${D}/%s" % (m, dest))
    return "\n".join(s)
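# The second substitution in get_files_apt_doc() maps flat man-page paths;
# for instance (a standalone check of the regex above):
#
#     import re
#     print(re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', 'doc/apt-get.8'))
#     # -> ${mandir}/man8/apt-get.8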
PACKAGES += "${PN}-utils ${PN}-utils-doc"
FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \


@ -294,11 +294,11 @@ PACKAGES_append = " perl-modules "
RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
python populate_packages_prepend () {
    libdir = d.expand('${libdir}/perl/${PV}')
    do_split_packages(d, libdir, 'auto/(Encode/.[^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'auto/([^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'Module/([^\/]*).*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
}
PACKAGES_DYNAMIC = "perl-module-*"


@ -9,10 +9,10 @@ def get_qemu_target_list(d):
    for arch in ['mips64', 'mips64el', 'ppcemb']:
        if arch in archs:
            targets += arch + "-softmmu,"
            archs.remove(arch)
    for arch in ['armeb', 'alpha', 'ppc64abi32', 'sparc32plus']:
        if arch in archs:
            targets += arch + "-linux-user,"
            archs.remove(arch)
    return targets + ''.join([arch + "-linux-user" + "," + arch + "-softmmu" + "," for arch in archs]).rstrip(',')
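# The trailing join/rstrip above emits "<arch>-linux-user,<arch>-softmmu"
# pairs; standalone, with a hypothetical leftover arch list:
#
#     archs = ['arm', 'i386']
#     print(''.join([a + "-linux-user" + "," + a + "-softmmu" + "," for a in archs]).rstrip(','))
#     # -> arm-linux-user,arm-softmmu,i386-linux-user,i386-softmmu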


@ -58,10 +58,10 @@ fakeroot do_install () {
}
python do_package_append() {
    import subprocess
    # Change permissions back the way they were, they probably had a reason...
    workdir = d.getVar('WORKDIR', True)
    subprocess.call('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir, shell=True)
}
PACKAGES =+ "${PN}-lib ${PN}-libimage"


@ -59,6 +59,6 @@ CONFFILES_${PN} = "${sysconfdir}/lighttpd.conf"
PACKAGES_DYNAMIC = "lighttpd-module-*"
python populate_packages_prepend () {
    lighttpd_libdir = d.expand('${libdir}')
    do_split_packages(d, lighttpd_libdir, '^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
}


@ -49,7 +49,7 @@ FILES_${PN} += "/opt/ltp/* /opt/ltp/runtest/* /opt/ltp/scenario_groups/* /opt/lt
TARGET_CC_ARCH += "${LDFLAGS}"
do_unpack_append() {
    bb.build.exec_func('do_extract_tarball', d)
}
do_extract_tarball() {


@ -46,8 +46,8 @@ do_unpack[cleandirs] += "${S}"
# We invoke base do_patch at end, to incorporate any local patch
python do_patch() {
    bb.build.exec_func('nettools_do_patch', d)
    bb.build.exec_func('patch_do_patch', d)
}
do_configure() {


@ -53,28 +53,28 @@ RDEPENDS_${PN}-xtests = "libpam pam-plugin-access pam-plugin-debug pam-plugin-cr
RRECOMMENDS_${PN} = "libpam-runtime"
python populate_packages_prepend () {
    import os.path

    def pam_plugin_append_file(pn, dir, file):
        nf = os.path.join(dir, file)
        of = d.getVar('FILES_' + pn, True)
        if of:
            nf = of + " " + nf
        d.setVar('FILES_' + pn, nf)

    dvar = bb.data.expand('${WORKDIR}/package', d, True)
    pam_libdir = d.expand('${base_libdir}/security')
    pam_sbindir = d.expand('${sbindir}')
    pam_filterdir = d.expand('${base_libdir}/security/pam_filter')

    do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', extra_depends='')
    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_chkpwd')
    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_update')
    pam_plugin_append_file('pam-plugin-tally', pam_sbindir, 'pam_tally')
    pam_plugin_append_file('pam-plugin-tally2', pam_sbindir, 'pam_tally2')
    pam_plugin_append_file('pam-plugin-timestamp', pam_sbindir, 'pam_timestamp_check')
    pam_plugin_append_file('pam-plugin-mkhomedir', pam_sbindir, 'mkhomedir_helper')
    do_split_packages(d, pam_filterdir, '^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
}
do_install() {


@ -68,11 +68,11 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*"
PACKAGES_DYNAMIC_virtclass-native = ""
python populate_packages_prepend () {
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
}
do_install_append_virtclass-native() {


@ -32,20 +32,20 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if d.getVar('DEBIAN_NAMES', True):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@ -32,20 +32,20 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if d.getVar('DEBIAN_NAMES', True):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@ -39,17 +39,17 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*"
python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if d.getVar('DEBIAN_NAMES', True):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@ -29,14 +29,14 @@ CFLAGS_prepend = "-DHAVE_ANIMATION "
inherit gnome
python populate_packages_prepend() {
    import os.path

    engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
    themes_root = os.path.join(d.getVar('datadir', True), "themes")

    do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
    do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')
    # TODO: mark theme packages as arch all
}
SRC_URI += "file://glib-2.32.patch"


@ -1,6 +1,6 @@
def get_cairo_fpu_setting(bb, d):
    if d.getVar('TARGET_FPU', True) in ['soft']:
        return "--disable-some-floating-point"
    return ""


@ -1,6 +1,6 @@
def get_clutter_fpu_setting(bb, d):
    if d.getVar('TARGET_FPU', True) in ['soft']:
        return "--without-fpu"
    return ""


@ -11,11 +11,11 @@ DRIDRIVERS_append_x86-64 = ",i915,i965"
EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gallium-drivers --with-dri-drivers=${DRIDRIVERS}"
python populate_packages_prepend() {
    import os.path

    dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri")

    do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
}
PACKAGES_DYNAMIC = "mesa-dri-driver-*"


@ -61,11 +61,11 @@ do_install_append () {
python populate_packages_prepend () {
    prologue = d.getVar("postinst_prologue", True)

    modules_root = d.expand('${libdir}/pango/${LIBV}/modules')

    do_split_packages(d, modules_root, '^pango-(.*)\.so$', 'pango-module-%s', 'Pango module %s', prologue + '${bindir}/${MLPREFIX}pango-querymodules > /etc/pango/${MLPREFIX}pango.modules')
}
FILES_${PN} = "${sysconfdir}/pango/* ${bindir}/* ${libdir}/libpango*${SOLIBS}"


@ -18,5 +18,5 @@ def perf_feature_enabled(feature, trueval, falseval, d):
    """
    enabled_features = d.getVar("PERF_FEATURES_ENABLE", True) or ""
    if feature in enabled_features:
        return trueval
    return falseval


@ -1,27 +1,27 @@
LIBV = "0.10"
python populate_packages_prepend () {
    gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
    postinst = d.getVar('plugin_postinst', True)
    glibdir = d.getVar('libdir', True)

    do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
    do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends=d.expand('${PN}'))
    do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', d.expand('${PN}-%s-dev'), 'GStreamer plugin for %s (development files)', extra_depends=d.expand('${PN}-dev'))
    do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', d.expand('${PN}-%s-staticdev'), 'GStreamer plugin for %s (static development files)', extra_depends=d.expand('${PN}-staticdev'))

    pn = d.getVar('PN', True)
    metapkg = pn + '-meta'
    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
    d.setVar('FILES_' + metapkg, "")
    blacklist = [pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc']
    metapkg_rdepends = []
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages[1:]:
        if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
            metapkg_rdepends.append(pkg)
    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
    d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
}
ALLOW_EMPTY = "1"
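# The meta-package filter above reduces to plain list filtering; a
# standalone sketch with hypothetical package names:
#
#     pn = 'gst-plugins-base'
#     packages = [pn, pn + '-dev', pn + '-alsa', pn + '-ogg', pn + '-locale-en']
#     meta = [p for p in packages[1:]
#             if not p.endswith('-dev') and not p.count('locale')]
#     print(' '.join(meta))
#     # -> gst-plugins-base-alsa gst-plugins-base-ogg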


@ -74,10 +74,10 @@ FILES_libpulse = "${libdir}/libpulse.so.*"
FILES_libpulse-simple = "${libdir}/libpulse-simple.so.*"
FILES_libpulse-browse = "${libdir}/libpulse-browse.so.*"
FILES_libpulse-mainloop-glib = "${libdir}/libpulse-mainloop-glib.so.*"
FILES_${PN}-dbg += "${libexecdir}/pulse/.debug \
                    ${libdir}/pulse-${PV}/modules/.debug"
FILES_${PN}-dev += "${libdir}/pulse-${PV}/modules/*.la ${datadir}/vala ${libdir}/cmake"
FILES_${PN}-conf = "${sysconfdir}"
FILES_${PN}-bin += "${sysconfdir}/default/volatiles/volatiles.04_pulse"
FILES_${PN}-server = "${bindir}/pulseaudio ${bindir}/start-* ${sysconfdir} ${bindir}/pactl ${base_libdir}/udev/rules.d/*.rules"
@ -105,11 +105,11 @@ pkg_postinst_${PN}-server() {
}
python populate_packages_prepend() {
    #d.setVar('PKG_pulseaudio', 'pulseaudio')

    plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
    do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='')
    do_split_packages(d, plugindir, '^lib(.*)\.so$', 'pulseaudio-lib-%s', 'PulseAudio library for %s', extra_depends='')
}
RDEPENDS_pulseaudio-module-console-kit =+ "consolekit"


@ -30,23 +30,23 @@ PACKAGES_DYNAMIC = "qmmp-plugin-* "
python populate_packages_prepend () {
    import os

    qmmp_libdir = d.expand('${libdir}/qmmp')
    gd = d.expand('${D}/${libdir}/qmmp')
    plug_dirs = os.listdir(gd)

    for plug_dir in plug_dirs:
        g_plug_dir = os.path.join(qmmp_libdir, plug_dir)
        do_split_packages(d, g_plug_dir, '^lib(.*)\.so$', 'qmmp-plugin-' + plug_dir.lower() + '-%s', 'Qmmp ' + plug_dir + ' plugin for %s')
}
FILES_${PN} = "\
    ${bindir}/qmmp \
    ${libdir}/lib*${SOLIBS} \
    ${datadir}/icons/* \
    ${datadir}/qmmp/images/* \
    ${datadir}/applications/* \
    "
FILES_${PN}-dbg += "\
    ${libdir}/qmmp/*/.debug/* \


@ -274,59 +274,59 @@ do_compile() {
}
python populate_packages_prepend() {
    translation_dir = d.expand('${datadir}/${QT_DIR_NAME}/translations/')
    translation_name = d.expand('${QT_BASE_NAME}-translation-%s')
    do_split_packages(d, translation_dir, '^(assistant|designer|linguist|qt|qtconfig|qvfb)_(.*)\.qm$', translation_name, '${PN} translation for %s', extra_depends='')

    phrasebook_dir = d.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/')
    phrasebook_name = d.expand('${QT_BASE_NAME}-phrasebook-%s')
    import os
    if os.path.exists("%s%s" % (d.expand('${D}'), phrasebook_dir)):
        do_split_packages(d, phrasebook_dir, '^(.*)\.qph$', phrasebook_name, '${PN} phrasebook for %s', extra_depends='')
    else:
        bb.note("The path does not exist:", d.expand('${D}'), phrasebook_dir)

    # Package all the plugins and their -dbg version and create a meta package
    def qtopia_split(path, name, glob):
        """
        Split the package into a normal and -dbg package and then add the
        new packages to the meta package.
        """
        plugin_dir = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path)
        if not os.path.exists("%s%s" % (d.expand('${D}'), plugin_dir)):
            bb.note("The path does not exist:", d.expand('${D}'), plugin_dir)
            return

        plugin_name = d.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name)
        dev_packages = []
        dev_hook = lambda file, pkg, b, c, d: dev_packages.append((file, pkg))
        do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
        # Create a -dbg package as well
        plugin_dir_dbg = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path)
        packages = d.getVar('PACKAGES')
        for (file, package) in dev_packages:
            packages = "%s %s-dbg" % (packages, package)
            file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
            d.setVar("FILES_%s-dbg" % package, file_name)
            d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))

        d.setVar('PACKAGES', packages)

    qtopia_split('accessible', 'accessible', '^libq(.*)\.so$')
    qtopia_split('codecs', 'codec', '^libq(.*)\.so$')
    qtopia_split('decorations', 'decoration', '^libqdecoration(.*)\.so$')
    qtopia_split('designer', 'designer', '^lib(.*)\.so$')
    qtopia_split('gfxdrivers', 'gfxdriver', '^libq(.*)\.so$')
    qtopia_split('graphicssystems', 'graphicssystems', '^libq(.*)\.so$')
    qtopia_split('mousedrivers', 'mousedriver', '^libq(.*)mousedriver\.so$')
    qtopia_split('iconengines', 'iconengine', '^libq(.*)\.so$')
    qtopia_split('imageformats', 'imageformat', '^libq(.*)\.so$')
    qtopia_split('inputmethods', 'inputmethod', '^libq(.*)\.so$')
    qtopia_split('sqldrivers', 'sqldriver', '^libq(.*)\.so$')
    qtopia_split('script', 'script', '^libqtscript(.*)\.so$')
    qtopia_split('styles', 'style', '^libq(.*)\.so$')
    qtopia_split('phonon_backend', 'phonon-backend', '^libphonon_(.*)\.so$')
    qtopia_split('bearer', 'bearer', '^libq(.*)bearer\.so$')
}
do_install() {


@ -17,7 +17,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
S = "${WORKDIR}/git"
do_unpack_append () {
    bb.build.exec_func('do_remove_patches', d)
}
do_remove_patches () {


@ -46,8 +46,8 @@ do_compile () {
}
python populate_packages_prepend () {
    pcre_libdir = d.expand('${libdir}')
    do_split_packages(d, pcre_libdir, '^lib(.*)\.so\.+', 'lib%s', 'libpcre %s library', extra_depends='', allow_links=True, prepend=True)
}
BBCLASSEXTEND = "native"