base.bbclass: Split up as per the patch in OE.dev by Chris Larson making code more readable and modularised
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
This commit is contained in:
parent
185cb38f13
commit
9c5386c1fd
|
@ -1,87 +1,6 @@
|
|||
BB_DEFAULT_TASK ?= "build"
|
||||
|
||||
# like os.path.join but doesn't treat absolute RHS specially
|
||||
def base_path_join(a, *p):
|
||||
path = a
|
||||
for b in p:
|
||||
if path == '' or path.endswith('/'):
|
||||
path += b
|
||||
else:
|
||||
path += '/' + b
|
||||
return path
|
||||
|
||||
# for MD5/SHA handling
|
||||
def base_chk_load_parser(config_path):
|
||||
import ConfigParser
|
||||
parser = ConfigParser.ConfigParser()
|
||||
if not len(parser.read(config_path)) == 1:
|
||||
bb.note("Can not open the '%s' ini file" % config_path)
|
||||
raise Exception("Can not open the '%s'" % config_path)
|
||||
|
||||
return parser
|
||||
|
||||
def base_chk_file(parser, pn, pv, src_uri, localpath, data):
|
||||
no_checksum = False
|
||||
# Try PN-PV-SRC_URI first and then try PN-SRC_URI
|
||||
# we rely on the get method to create errors
|
||||
pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
|
||||
pn_src = "%s-%s" % (pn,src_uri)
|
||||
if parser.has_section(pn_pv_src):
|
||||
md5 = parser.get(pn_pv_src, "md5")
|
||||
sha256 = parser.get(pn_pv_src, "sha256")
|
||||
elif parser.has_section(pn_src):
|
||||
md5 = parser.get(pn_src, "md5")
|
||||
sha256 = parser.get(pn_src, "sha256")
|
||||
elif parser.has_section(src_uri):
|
||||
md5 = parser.get(src_uri, "md5")
|
||||
sha256 = parser.get(src_uri, "sha256")
|
||||
else:
|
||||
no_checksum = True
|
||||
|
||||
# md5 and sha256 should be valid now
|
||||
if not os.path.exists(localpath):
|
||||
bb.note("The localpath does not exist '%s'" % localpath)
|
||||
raise Exception("The path does not exist '%s'" % localpath)
|
||||
|
||||
|
||||
# Calculate the MD5 and 256-bit SHA checksums
|
||||
md5data = bb.utils.md5_file(localpath)
|
||||
shadata = bb.utils.sha256_file(localpath)
|
||||
|
||||
# sha256_file() can return None if we are running on Python 2.4 (hashlib is
|
||||
# 2.5 onwards, sha in 2.4 is 160-bit only), so check for this and call the
|
||||
# standalone shasum binary if required.
|
||||
if shadata is None:
|
||||
try:
|
||||
shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
|
||||
shadata = (shapipe.readline().split() or [ "" ])[0]
|
||||
shapipe.close()
|
||||
except OSError:
|
||||
raise Exception("Executing shasum failed, please build shasum-native")
|
||||
|
||||
if no_checksum == True: # we do not have conf/checksums.ini entry
|
||||
try:
|
||||
file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
|
||||
except:
|
||||
return False
|
||||
|
||||
if not file:
|
||||
raise Exception("Creating checksums.ini failed")
|
||||
|
||||
file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
|
||||
file.close()
|
||||
return False
|
||||
|
||||
if not md5 == md5data:
|
||||
bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
|
||||
raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
|
||||
|
||||
if not sha256 == shadata:
|
||||
bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
|
||||
raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
|
||||
|
||||
return True
|
||||
|
||||
inherit utils
|
||||
|
||||
def base_dep_prepend(d):
|
||||
#
|
||||
|
@ -112,62 +31,11 @@ def base_dep_prepend(d):
|
|||
deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
|
||||
return deps
|
||||
|
||||
def base_read_file(filename):
|
||||
try:
|
||||
f = file( filename, "r" )
|
||||
except IOError, reason:
|
||||
return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
|
||||
else:
|
||||
return f.read().strip()
|
||||
return None
|
||||
|
||||
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
|
||||
if bb.data.getVar(variable,d,1) == checkvalue:
|
||||
return truevalue
|
||||
else:
|
||||
return falsevalue
|
||||
|
||||
def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
|
||||
if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
|
||||
return truevalue
|
||||
else:
|
||||
return falsevalue
|
||||
|
||||
def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
|
||||
result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
|
||||
if result <= 0:
|
||||
return truevalue
|
||||
else:
|
||||
return falsevalue
|
||||
|
||||
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
|
||||
matches = 0
|
||||
if type(checkvalues).__name__ == "str":
|
||||
checkvalues = [checkvalues]
|
||||
for value in checkvalues:
|
||||
if bb.data.getVar(variable,d,1).find(value) != -1:
|
||||
matches = matches + 1
|
||||
if matches == len(checkvalues):
|
||||
return truevalue
|
||||
return falsevalue
|
||||
|
||||
def base_both_contain(variable1, variable2, checkvalue, d):
|
||||
if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
|
||||
return checkvalue
|
||||
else:
|
||||
return ""
|
||||
|
||||
DEPENDS_prepend="${@base_dep_prepend(d)} "
|
||||
DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
|
||||
DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
|
||||
|
||||
def base_prune_suffix(var, suffixes, d):
|
||||
# See if var ends with any of the suffixes listed and
|
||||
# remove it if found
|
||||
for suffix in suffixes:
|
||||
if var.endswith(suffix):
|
||||
return var.replace(suffix, "")
|
||||
return var
|
||||
|
||||
def base_set_filespath(path, d):
|
||||
filespath = []
|
||||
|
@ -180,13 +48,6 @@ def base_set_filespath(path, d):
|
|||
|
||||
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
|
||||
|
||||
def oe_filter(f, str, d):
|
||||
from re import match
|
||||
return " ".join(filter(lambda x: match(f, x, 0), str.split()))
|
||||
|
||||
def oe_filter_out(f, str, d):
|
||||
from re import match
|
||||
return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
|
||||
|
||||
die() {
|
||||
oefatal "$*"
|
||||
|
@ -223,173 +84,6 @@ oe_runmake() {
|
|||
${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
|
||||
}
|
||||
|
||||
oe_soinstall() {
|
||||
# Purpose: Install shared library file and
|
||||
# create the necessary links
|
||||
# Example:
|
||||
#
|
||||
# oe_
|
||||
#
|
||||
#oenote installing shared library $1 to $2
|
||||
#
|
||||
libname=`basename $1`
|
||||
install -m 755 $1 $2/$libname
|
||||
sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
|
||||
solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
|
||||
ln -sf $libname $2/$sonamelink
|
||||
ln -sf $libname $2/$solink
|
||||
}
|
||||
|
||||
oe_libinstall() {
|
||||
# Purpose: Install a library, in all its forms
|
||||
# Example
|
||||
#
|
||||
# oe_libinstall libltdl ${STAGING_LIBDIR}/
|
||||
# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
|
||||
dir=""
|
||||
libtool=""
|
||||
silent=""
|
||||
require_static=""
|
||||
require_shared=""
|
||||
staging_install=""
|
||||
while [ "$#" -gt 0 ]; do
|
||||
case "$1" in
|
||||
-C)
|
||||
shift
|
||||
dir="$1"
|
||||
;;
|
||||
-s)
|
||||
silent=1
|
||||
;;
|
||||
-a)
|
||||
require_static=1
|
||||
;;
|
||||
-so)
|
||||
require_shared=1
|
||||
;;
|
||||
-*)
|
||||
oefatal "oe_libinstall: unknown option: $1"
|
||||
;;
|
||||
*)
|
||||
break;
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
libname="$1"
|
||||
shift
|
||||
destpath="$1"
|
||||
if [ -z "$destpath" ]; then
|
||||
oefatal "oe_libinstall: no destination path specified"
|
||||
fi
|
||||
if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
|
||||
then
|
||||
staging_install=1
|
||||
fi
|
||||
|
||||
__runcmd () {
|
||||
if [ -z "$silent" ]; then
|
||||
echo >&2 "oe_libinstall: $*"
|
||||
fi
|
||||
$*
|
||||
}
|
||||
|
||||
if [ -z "$dir" ]; then
|
||||
dir=`pwd`
|
||||
fi
|
||||
|
||||
dotlai=$libname.lai
|
||||
|
||||
# Sanity check that the libname.lai is unique
|
||||
number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
|
||||
if [ $number_of_files -gt 1 ]; then
|
||||
oefatal "oe_libinstall: $dotlai is not unique in $dir"
|
||||
fi
|
||||
|
||||
|
||||
dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
|
||||
olddir=`pwd`
|
||||
__runcmd cd $dir
|
||||
|
||||
lafile=$libname.la
|
||||
|
||||
# If such file doesn't exist, try to cut version suffix
|
||||
if [ ! -f "$lafile" ]; then
|
||||
libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
|
||||
lafile1=$libname.la
|
||||
if [ -f "$lafile1" ]; then
|
||||
libname=$libname1
|
||||
lafile=$lafile1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -f "$lafile" ]; then
|
||||
# libtool archive
|
||||
eval `cat $lafile|grep "^library_names="`
|
||||
libtool=1
|
||||
else
|
||||
library_names="$libname.so* $libname.dll.a"
|
||||
fi
|
||||
|
||||
__runcmd install -d $destpath/
|
||||
dota=$libname.a
|
||||
if [ -f "$dota" -o -n "$require_static" ]; then
|
||||
rm -f $destpath/$dota
|
||||
__runcmd install -m 0644 $dota $destpath/
|
||||
fi
|
||||
if [ -f "$dotlai" -a -n "$libtool" ]; then
|
||||
if test -n "$staging_install"
|
||||
then
|
||||
# stop libtool using the final directory name for libraries
|
||||
# in staging:
|
||||
__runcmd rm -f $destpath/$libname.la
|
||||
__runcmd sed -e 's/^installed=yes$/installed=no/' \
|
||||
-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
|
||||
-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
|
||||
$dotlai >$destpath/$libname.la
|
||||
else
|
||||
rm -f $destpath/$libname.la
|
||||
__runcmd install -m 0644 $dotlai $destpath/$libname.la
|
||||
fi
|
||||
fi
|
||||
|
||||
for name in $library_names; do
|
||||
files=`eval echo $name`
|
||||
for f in $files; do
|
||||
if [ ! -e "$f" ]; then
|
||||
if [ -n "$libtool" ]; then
|
||||
oefatal "oe_libinstall: $dir/$f not found."
|
||||
fi
|
||||
elif [ -L "$f" ]; then
|
||||
__runcmd cp -P "$f" $destpath/
|
||||
elif [ ! -L "$f" ]; then
|
||||
libfile="$f"
|
||||
rm -f $destpath/$libfile
|
||||
__runcmd install -m 0755 $libfile $destpath/
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
if [ -z "$libfile" ]; then
|
||||
if [ -n "$require_shared" ]; then
|
||||
oefatal "oe_libinstall: unable to locate shared library"
|
||||
fi
|
||||
elif [ -z "$libtool" ]; then
|
||||
# special case hack for non-libtool .so.#.#.# links
|
||||
baselibfile=`basename "$libfile"`
|
||||
if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
|
||||
sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
|
||||
solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
|
||||
if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
|
||||
__runcmd ln -sf $baselibfile $destpath/$sonamelink
|
||||
fi
|
||||
__runcmd ln -sf $baselibfile $destpath/$solink
|
||||
fi
|
||||
fi
|
||||
|
||||
__runcmd cd "$olddir"
|
||||
}
|
||||
|
||||
def package_stagefile(file, d):
|
||||
|
||||
|
@ -409,81 +103,7 @@ package_stagefile_shell() {
|
|||
fi
|
||||
}
|
||||
|
||||
oe_machinstall() {
|
||||
# Purpose: Install machine dependent files, if available
|
||||
# If not available, check if there is a default
|
||||
# If no default, just touch the destination
|
||||
# Example:
|
||||
# $1 $2 $3 $4
|
||||
# oe_machinstall -m 0644 fstab ${D}/etc/fstab
|
||||
#
|
||||
# TODO: Check argument number?
|
||||
#
|
||||
filename=`basename $3`
|
||||
dirname=`dirname $3`
|
||||
|
||||
for o in `echo ${OVERRIDES} | tr ':' ' '`; do
|
||||
if [ -e $dirname/$o/$filename ]; then
|
||||
oenote $dirname/$o/$filename present, installing to $4
|
||||
install $1 $2 $dirname/$o/$filename $4
|
||||
return
|
||||
fi
|
||||
done
|
||||
# oenote overrides specific file NOT present, trying default=$3...
|
||||
if [ -e $3 ]; then
|
||||
oenote $3 present, installing to $4
|
||||
install $1 $2 $3 $4
|
||||
else
|
||||
oenote $3 NOT present, touching empty $4
|
||||
touch $4
|
||||
fi
|
||||
}
|
||||
|
||||
addtask listtasks
|
||||
do_listtasks[nostamp] = "1"
|
||||
python do_listtasks() {
|
||||
import sys
|
||||
# emit variables and shell functions
|
||||
#bb.data.emit_env(sys.__stdout__, d)
|
||||
# emit the metadata which isnt valid shell
|
||||
for e in d.keys():
|
||||
if bb.data.getVarFlag(e, 'task', d):
|
||||
sys.__stdout__.write("%s\n" % e)
|
||||
}
|
||||
|
||||
addtask clean
|
||||
do_clean[dirs] = "${TOPDIR}"
|
||||
do_clean[nostamp] = "1"
|
||||
python base_do_clean() {
|
||||
"""clear the build and temp directories"""
|
||||
dir = bb.data.expand("${WORKDIR}", d)
|
||||
if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
|
||||
bb.note("removing " + dir)
|
||||
os.system('rm -rf ' + dir)
|
||||
|
||||
dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
|
||||
bb.note("removing " + dir)
|
||||
os.system('rm -f '+ dir)
|
||||
}
|
||||
|
||||
addtask rebuild after do_${BB_DEFAULT_TASK}
|
||||
do_rebuild[dirs] = "${TOPDIR}"
|
||||
do_rebuild[nostamp] = "1"
|
||||
python base_do_rebuild() {
|
||||
"""rebuild a package"""
|
||||
}
|
||||
|
||||
#addtask mrproper
|
||||
#do_mrproper[dirs] = "${TOPDIR}"
|
||||
#do_mrproper[nostamp] = "1"
|
||||
#python base_do_mrproper() {
|
||||
# """clear downloaded sources, build and temp directories"""
|
||||
# dir = bb.data.expand("${DL_DIR}", d)
|
||||
# if dir == '/': bb.build.FuncFailed("wrong DATADIR")
|
||||
# bb.debug(2, "removing " + dir)
|
||||
# os.system('rm -rf ' + dir)
|
||||
# bb.build.exec_func('do_clean', d)
|
||||
#}
|
||||
inherit utility-tasks
|
||||
|
||||
SCENEFUNCS += "base_scenefunction"
|
||||
|
||||
|
@ -566,57 +186,6 @@ python base_do_fetch() {
|
|||
raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
|
||||
}
|
||||
|
||||
addtask fetchall after do_fetch
|
||||
do_fetchall[recrdeptask] = "do_fetch"
|
||||
base_do_fetchall() {
|
||||
:
|
||||
}
|
||||
|
||||
addtask checkuri
|
||||
do_checkuri[nostamp] = "1"
|
||||
python do_checkuri() {
|
||||
import sys
|
||||
|
||||
localdata = bb.data.createCopy(d)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
src_uri = bb.data.getVar('SRC_URI', localdata, 1)
|
||||
|
||||
try:
|
||||
bb.fetch.init(src_uri.split(),d)
|
||||
except bb.fetch.NoMethodError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("No method: %s" % value)
|
||||
|
||||
try:
|
||||
bb.fetch.checkstatus(localdata)
|
||||
except bb.fetch.MissingParameterError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("Missing parameters: %s" % value)
|
||||
except bb.fetch.FetchError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("Fetch failed: %s" % value)
|
||||
except bb.fetch.MD5SumError:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("MD5 failed: %s" % value)
|
||||
except:
|
||||
(type, value, traceback) = sys.exc_info()
|
||||
raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
|
||||
}
|
||||
|
||||
addtask checkuriall after do_checkuri
|
||||
do_checkuriall[recrdeptask] = "do_checkuri"
|
||||
do_checkuriall[nostamp] = "1"
|
||||
base_do_checkuriall() {
|
||||
:
|
||||
}
|
||||
|
||||
addtask buildall after do_build
|
||||
do_buildall[recrdeptask] = "do_build"
|
||||
base_do_buildall() {
|
||||
:
|
||||
}
|
||||
|
||||
def subprocess_setup():
|
||||
import signal
|
||||
# Python installs a SIGPIPE handler by default. This is usually not what
|
||||
|
@ -720,82 +289,7 @@ python base_do_unpack() {
|
|||
raise bb.build.FuncFailed()
|
||||
}
|
||||
|
||||
METADATA_BRANCH ?= "${@base_detect_branch(d)}"
|
||||
METADATA_REVISION ?= "${@base_detect_revision(d)}"
|
||||
|
||||
def base_detect_revision(d):
|
||||
path = base_get_scmbasepath(d)
|
||||
|
||||
scms = [base_get_metadata_git_revision, \
|
||||
base_get_metadata_svn_revision]
|
||||
|
||||
for scm in scms:
|
||||
rev = scm(path, d)
|
||||
if rev <> "<unknown>":
|
||||
return rev
|
||||
|
||||
return "<unknown>"
|
||||
|
||||
def base_detect_branch(d):
|
||||
path = base_get_scmbasepath(d)
|
||||
|
||||
scms = [base_get_metadata_git_branch]
|
||||
|
||||
for scm in scms:
|
||||
rev = scm(path, d)
|
||||
if rev <> "<unknown>":
|
||||
return rev.strip()
|
||||
|
||||
return "<unknown>"
|
||||
|
||||
|
||||
|
||||
def base_get_scmbasepath(d):
|
||||
path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
|
||||
return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
|
||||
|
||||
def base_get_metadata_monotone_branch(path, d):
|
||||
monotone_branch = "<unknown>"
|
||||
try:
|
||||
monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
|
||||
if monotone_branch.startswith( "database" ):
|
||||
monotone_branch_words = monotone_branch.split()
|
||||
monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
|
||||
except:
|
||||
pass
|
||||
return monotone_branch
|
||||
|
||||
def base_get_metadata_monotone_revision(path, d):
|
||||
monotone_revision = "<unknown>"
|
||||
try:
|
||||
monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
|
||||
if monotone_revision.startswith( "format_version" ):
|
||||
monotone_revision_words = monotone_revision.split()
|
||||
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
|
||||
except IOError:
|
||||
pass
|
||||
return monotone_revision
|
||||
|
||||
def base_get_metadata_svn_revision(path, d):
|
||||
revision = "<unknown>"
|
||||
try:
|
||||
revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
|
||||
except IOError:
|
||||
pass
|
||||
return revision
|
||||
|
||||
def base_get_metadata_git_branch(path, d):
|
||||
branch = os.popen('cd %s; git branch | grep "^* " | tr -d "* "' % path).read()
|
||||
|
||||
if len(branch) != 0:
|
||||
return branch
|
||||
return "<unknown>"
|
||||
|
||||
def base_get_metadata_git_revision(path, d):
|
||||
rev = os.popen("cd %s; git log -n 1 --pretty=oneline --" % path).read().split(" ")[0]
|
||||
if len(rev) != 0:
|
||||
return rev
|
||||
return "<unknown>"
|
||||
inherit metadata_scm
|
||||
|
||||
GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig"
|
||||
|
||||
|
@ -909,145 +403,7 @@ base_do_compile() {
|
|||
fi
|
||||
}
|
||||
|
||||
|
||||
sysroot_stage_dir() {
|
||||
src="$1"
|
||||
dest="$2"
|
||||
# This will remove empty directories so we can ignore them
|
||||
rmdir "$src" 2> /dev/null || true
|
||||
if [ -d "$src" ]; then
|
||||
mkdir -p "$dest"
|
||||
cp -fpPR "$src"/* "$dest"
|
||||
fi
|
||||
}
|
||||
|
||||
sysroot_stage_libdir() {
|
||||
src="$1"
|
||||
dest="$2"
|
||||
|
||||
olddir=`pwd`
|
||||
cd $src
|
||||
las=$(find . -name \*.la -type f)
|
||||
cd $olddir
|
||||
echo "Found la files: $las"
|
||||
for i in $las
|
||||
do
|
||||
sed -e 's/^installed=yes$/installed=no/' \
|
||||
-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
|
||||
-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
|
||||
-i $src/$i
|
||||
done
|
||||
sysroot_stage_dir $src $dest
|
||||
}
|
||||
|
||||
sysroot_stage_dirs() {
|
||||
from="$1"
|
||||
to="$2"
|
||||
|
||||
sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
|
||||
if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
|
||||
sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
|
||||
sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
|
||||
sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
|
||||
sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
|
||||
sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
|
||||
sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
|
||||
fi
|
||||
if [ -d $from${libdir} ]
|
||||
then
|
||||
sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
|
||||
fi
|
||||
if [ -d $from${base_libdir} ]
|
||||
then
|
||||
sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
|
||||
fi
|
||||
sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
|
||||
}
|
||||
|
||||
sysroot_stage_all() {
|
||||
sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
|
||||
}
|
||||
|
||||
def is_legacy_staging(d):
|
||||
stagefunc = bb.data.getVar('do_stage', d, True)
|
||||
legacy = True
|
||||
if stagefunc is None:
|
||||
legacy = False
|
||||
elif stagefunc.strip() == "use_do_install_for_stage":
|
||||
legacy = False
|
||||
elif stagefunc.strip() == "autotools_stage_all":
|
||||
legacy = False
|
||||
elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
|
||||
legacy = False
|
||||
elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
|
||||
legacy = False
|
||||
return legacy
|
||||
|
||||
do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
|
||||
${STAGING_DIR_TARGET}/${includedir} \
|
||||
${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
|
||||
${STAGING_INCDIR_NATIVE} \
|
||||
${STAGING_DATADIR} \
|
||||
${S} ${B}"
|
||||
|
||||
# Could be compile but populate_sysroot and do_install shouldn't run at the same time
|
||||
addtask populate_sysroot after do_install
|
||||
|
||||
PSTAGING_ACTIVE = "0"
|
||||
SYSROOT_PREPROCESS_FUNCS ?= ""
|
||||
SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
|
||||
SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
|
||||
|
||||
python populate_sysroot_prehook () {
|
||||
return
|
||||
}
|
||||
|
||||
python populate_sysroot_posthook () {
|
||||
return
|
||||
}
|
||||
|
||||
packagedstaging_fastpath () {
|
||||
:
|
||||
}
|
||||
|
||||
python do_populate_sysroot () {
|
||||
#
|
||||
# if do_stage exists, we're legacy. In that case run the do_stage,
|
||||
# modify the SYSROOT_DESTDIR variable and then run the staging preprocess
|
||||
# functions against staging directly.
|
||||
#
|
||||
# Otherwise setup a destdir, copy the results from do_install
|
||||
# and run the staging preprocess against that
|
||||
#
|
||||
pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
|
||||
lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
|
||||
stagefunc = bb.data.getVar('do_stage', d, True)
|
||||
legacy = is_legacy_staging(d)
|
||||
if legacy:
|
||||
bb.data.setVar("SYSROOT_DESTDIR", "", d)
|
||||
bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
|
||||
lock = bb.utils.lockfile(lockfile)
|
||||
bb.build.exec_func('populate_sysroot_prehook', d)
|
||||
bb.build.exec_func('do_stage', d)
|
||||
for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
|
||||
bb.build.exec_func(f, d)
|
||||
bb.build.exec_func('populate_sysroot_posthook', d)
|
||||
bb.utils.unlockfile(lock)
|
||||
else:
|
||||
dest = bb.data.getVar('D', d, True)
|
||||
sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
|
||||
bb.mkdirhier(sysrootdest)
|
||||
|
||||
bb.build.exec_func("sysroot_stage_all", d)
|
||||
#os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
|
||||
for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
|
||||
bb.build.exec_func(f, d)
|
||||
bb.build.exec_func("packagedstaging_fastpath", d)
|
||||
|
||||
lock = bb.utils.lockfile(lockfile)
|
||||
os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
|
||||
bb.utils.unlockfile(lock)
|
||||
}
|
||||
inherit staging
|
||||
|
||||
addtask install after do_compile
|
||||
do_install[dirs] = "${D} ${S} ${B}"
|
||||
|
@ -1066,19 +422,6 @@ addtask build after do_populate_sysroot
|
|||
do_build = ""
|
||||
do_build[func] = "1"
|
||||
|
||||
# Make sure MACHINE isn't exported
|
||||
# (breaks binutils at least)
|
||||
MACHINE[unexport] = "1"
|
||||
|
||||
# Make sure TARGET_ARCH isn't exported
|
||||
# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
|
||||
# in them, undocumented)
|
||||
TARGET_ARCH[unexport] = "1"
|
||||
|
||||
# Make sure DISTRO isn't exported
|
||||
# (breaks sysvinit at least)
|
||||
DISTRO[unexport] = "1"
|
||||
|
||||
|
||||
def base_after_parse(d):
|
||||
import exceptions
|
||||
|
@ -1176,7 +519,7 @@ def base_after_parse(d):
|
|||
for pkg in packages:
|
||||
pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
|
||||
|
||||
# We could look for != PACKAGE_ARCH here but how to choose
|
||||
# We could look for != PACKAGE_ARCH here but how to choose
|
||||
# if multiple differences are present?
|
||||
# Look through PACKAGE_ARCHS for the priority order?
|
||||
if pkgarch and pkgarch == mach_arch:
|
||||
|
@ -1187,8 +530,6 @@ def base_after_parse(d):
|
|||
|
||||
python () {
|
||||
base_after_parse(d)
|
||||
if is_legacy_staging(d):
|
||||
bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
|
||||
}
|
||||
|
||||
def check_app_exists(app, d):
|
||||
|
@ -1211,63 +552,7 @@ def check_gcc3(data):
|
|||
# Patch handling
|
||||
inherit patch
|
||||
|
||||
EXPORT_FUNCTIONS do_setscene do_clean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
|
||||
EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package
|
||||
|
||||
MIRRORS[func] = "0"
|
||||
MIRRORS () {
|
||||
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
|
||||
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
|
||||
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
|
||||
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
|
||||
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
|
||||
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
|
||||
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
|
||||
${KERNELORG_MIRROR} http://www.kernel.org/pub
|
||||
${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
|
||||
${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
|
||||
${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
|
||||
${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
|
||||
${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
|
||||
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
|
||||
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
|
||||
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
|
||||
ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
|
||||
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
|
||||
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
|
||||
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
|
||||
http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
|
||||
http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
|
||||
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
|
||||
http://www.apache.org/dist http://archive.apache.org/dist
|
||||
|
||||
}
|
||||
inherit mirrors
|
||||
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
METADATA_BRANCH ?= "${@base_detect_branch(d)}"
|
||||
METADATA_REVISION ?= "${@base_detect_revision(d)}"
|
||||
|
||||
def base_detect_revision(d):
|
||||
path = base_get_scmbasepath(d)
|
||||
|
||||
scms = [base_get_metadata_git_revision, \
|
||||
base_get_metadata_svn_revision]
|
||||
|
||||
for scm in scms:
|
||||
rev = scm(path, d)
|
||||
if rev <> "<unknown>":
|
||||
return rev
|
||||
|
||||
return "<unknown>"
|
||||
|
||||
def base_detect_branch(d):
|
||||
path = base_get_scmbasepath(d)
|
||||
|
||||
scms = [base_get_metadata_git_branch]
|
||||
|
||||
for scm in scms:
|
||||
rev = scm(path, d)
|
||||
if rev <> "<unknown>":
|
||||
return rev.strip()
|
||||
|
||||
return "<unknown>"
|
||||
|
||||
|
||||
|
||||
def base_get_scmbasepath(d):
|
||||
path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
|
||||
return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
|
||||
|
||||
def base_get_metadata_monotone_branch(path, d):
|
||||
monotone_branch = "<unknown>"
|
||||
try:
|
||||
monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
|
||||
if monotone_branch.startswith( "database" ):
|
||||
monotone_branch_words = monotone_branch.split()
|
||||
monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
|
||||
except:
|
||||
pass
|
||||
return monotone_branch
|
||||
|
||||
def base_get_metadata_monotone_revision(path, d):
|
||||
monotone_revision = "<unknown>"
|
||||
try:
|
||||
monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
|
||||
if monotone_revision.startswith( "format_version" ):
|
||||
monotone_revision_words = monotone_revision.split()
|
||||
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
|
||||
except IOError:
|
||||
pass
|
||||
return monotone_revision
|
||||
|
||||
def base_get_metadata_svn_revision(path, d):
|
||||
revision = "<unknown>"
|
||||
try:
|
||||
revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
|
||||
except IOError:
|
||||
pass
|
||||
return revision
|
||||
|
||||
def base_get_metadata_git_branch(path, d):
|
||||
branch = os.popen('cd %s; git branch | grep "^* " | tr -d "* "' % path).read()
|
||||
|
||||
if len(branch) != 0:
|
||||
return branch
|
||||
return "<unknown>"
|
||||
|
||||
def base_get_metadata_git_revision(path, d):
|
||||
rev = os.popen("cd %s; git log -n 1 --pretty=oneline --" % path).read().split(" ")[0]
|
||||
if len(rev) != 0:
|
||||
return rev
|
||||
return "<unknown>"
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
# Fallback source mirrors.  Each non-empty line inside the MIRRORS block
# maps a primary URI prefix (left column) to an alternate download
# location (right column) that the fetcher tries when the primary fails.
# The [func] flag marks MIRRORS as a function-style multi-line variable.
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
${KERNELORG_MIRROR} http://www.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
http://www.apache.org/dist http://archive.apache.org/dist

}
|
||||
|
|
@ -0,0 +1,146 @@
|
|||
# Hook point run (under the staging lock) before legacy do_stage inside
# do_populate_sysroot; classes such as packaged-staging can override it.
# Default: no-op.
python populate_sysroot_prehook () {
	return
}
|
||||
|
||||
# Hook point run (under the staging lock) after legacy do_stage and the
# SYSROOT_PREPROCESS_FUNCS inside do_populate_sysroot.  Default: no-op.
python populate_sysroot_posthook () {
	return
}
|
||||
|
||||
# Overridden by the packaged-staging class to take a fast path when a
# prebuilt staging package is available.  Default: shell no-op.
packagedstaging_fastpath () {
	:
}
|
||||
|
||||
sysroot_stage_dir() {
	# Copy the contents of directory $1 into $2, creating $2 as needed.
	# Empty source directories are pruned and then silently ignored.
	stagesrc="$1"
	stagedest="$2"
	# rmdir only succeeds on an empty directory; failures are ignored
	rmdir "$stagesrc" 2> /dev/null || true
	if [ -d "$stagesrc" ]; then
		mkdir -p "$stagedest"
		cp -fpPR "$stagesrc"/* "$stagedest"
	fi
}
|
||||
|
||||
sysroot_stage_libdir() {
	# Stage a library directory: rewrite libtool .la files so they point
	# into the staging area instead of the build tree, then copy the
	# whole directory with sysroot_stage_dir.
	src="$1"
	dest="$2"

	olddir=`pwd`
	cd $src
	las=$(find . -name \*.la -type f)
	cd $olddir
	echo "Found la files: $las"
	for i in $las
	do
		# installed=no stops libtool treating the library as being in its
		# final location; the dependency_libs edits map ${WORKDIR} build
		# paths and plain ${libdir} references onto ${STAGING_LIBDIR}.
		sed -e 's/^installed=yes$/installed=no/' \
			-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
			-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
			-i $src/$i
	done
	sysroot_stage_dir $src $dest
}
|
||||
|
||||
sysroot_stage_dirs() {
	# Stage the interesting subdirectories of $from (usually ${D}) into
	# $to (usually ${SYSROOT_DESTDIR}).
	from="$1"
	to="$2"

	sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
	# Executables and config files are only useful in the sysroot when the
	# package was built to run on the build machine itself.
	if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
		sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
		sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
		sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
		sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
		sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
		sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
	fi
	if [ -d $from${libdir} ]
	then
		# was "$from/${libdir}": the extra "/" was redundant (libdir is an
		# absolute path) and inconsistent with every other call here.
		sysroot_stage_libdir $from${libdir} $to${STAGING_LIBDIR}
	fi
	if [ -d $from${base_libdir} ]
	then
		sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
	fi
	sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
}
|
||||
|
||||
sysroot_stage_all() {
	# Default staging entry point: copy everything relevant from the
	# do_install output (${D}) into the sysroot destination area.
	sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
}
|
||||
|
||||
def is_legacy_staging(d):
    """Return True when the recipe still supplies its own do_stage and must
    be staged the old way; False when the install-based path can be used."""
    stagefunc = bb.data.getVar('do_stage', d, True)
    if stagefunc is None:
        return False
    func = stagefunc.strip()
    # These well-known do_stage bodies are really install-based staging.
    if func in ("use_do_install_for_stage", "autotools_stage_all"):
        return False
    if func == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
        return False
    if bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
        return False
    return True
|
||||
|
||||
# Directories guaranteed to exist before do_populate_sysroot runs.
do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
			     ${STAGING_DIR_TARGET}/${includedir} \
			     ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
			     ${STAGING_INCDIR_NATIVE} \
			     ${STAGING_DATADIR} \
			     ${S} ${B}"

# Could be compile but populate_sysroot and do_install shouldn't run at the same time
addtask populate_sysroot after do_install

# Set to "1" by the packaged-staging class when prebuilt staging is in use.
PSTAGING_ACTIVE = "0"
# Functions run against the sysroot destination before it is merged.
SYSROOT_PREPROCESS_FUNCS ?= ""
# Private area where do_populate_sysroot assembles its output.
SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
# Lock file serialising writes into the shared staging area.
SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
|
||||
|
||||
|
||||
python do_populate_sysroot () {
    #
    # If do_stage exists, we're legacy.  In that case run do_stage under the
    # staging lock with SYSROOT_DESTDIR cleared, so the staging preprocess
    # functions operate against staging directly.
    #
    # Otherwise set up a destdir, copy the results from do_install into it,
    # run the staging preprocess against that, and finally merge it into
    # ${TMPDIR} under the lock.
    #
    # NOTE(review): the original also computed PSTAGING_ACTIVE, do_stage and
    # ${D} into locals that were never used (and a commented-out cp); those
    # dead assignments have been removed.
    lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
    legacy = is_legacy_staging(d)
    if legacy:
        bb.data.setVar("SYSROOT_DESTDIR", "", d)
        bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
        lock = bb.utils.lockfile(lockfile)
        bb.build.exec_func('populate_sysroot_prehook', d)
        bb.build.exec_func('do_stage', d)
        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
            bb.build.exec_func(f, d)
        bb.build.exec_func('populate_sysroot_posthook', d)
        bb.utils.unlockfile(lock)
    else:
        sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
        bb.mkdirhier(sysrootdest)

        bb.build.exec_func("sysroot_stage_all", d)
        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
            bb.build.exec_func(f, d)
        bb.build.exec_func("packagedstaging_fastpath", d)

        # Merge into the shared staging area under the lock so concurrent
        # tasks do not trample each other.
        lock = bb.utils.lockfile(lockfile)
        os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
        bb.utils.unlockfile(lock)
}
|
||||
|
||||
# Anonymous function: runs at parse time for every recipe using this class;
# merely logs when the recipe will fall back to legacy do_stage staging.
python () {
    if is_legacy_staging(d):
        bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
}
|
||||
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
	# Print every task defined for this recipe, one per line, by scanning
	# the datastore for variables carrying the 'task' flag.
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			# write to the real stdout, bypassing any logging redirection
			sys.__stdout__.write("%s\n" % e)
}
|
||||
|
||||
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
python base_do_clean() {
	"""clear the build and temp directories"""
	workdir = bb.data.expand("${WORKDIR}", d)
	# guard against an unset WORKDIR expanding to a dangerous path
	if workdir == '//':
		raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + workdir)
	os.system('rm -rf ' + workdir)

	stamps = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + stamps)
	os.system('rm -f '+ stamps)
}
|
||||
|
||||
addtask rebuild after do_${BB_DEFAULT_TASK}
do_rebuild[dirs] = "${TOPDIR}"
do_rebuild[nostamp] = "1"
python base_do_rebuild() {
	"""rebuild a package"""
	# Intentionally empty: the task exists only so its "after" dependency
	# re-runs the default task; nostamp forces re-execution every time.
}
|
||||
|
||||
#addtask mrproper
|
||||
#do_mrproper[dirs] = "${TOPDIR}"
|
||||
#do_mrproper[nostamp] = "1"
|
||||
#python base_do_mrproper() {
|
||||
# """clear downloaded sources, build and temp directories"""
|
||||
# dir = bb.data.expand("${DL_DIR}", d)
|
||||
# if dir == '/': bb.build.FuncFailed("wrong DATADIR")
|
||||
# bb.debug(2, "removing " + dir)
|
||||
# os.system('rm -rf ' + dir)
|
||||
# bb.build.exec_func('do_clean', d)
|
||||
#}
|
||||
|
||||
addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {
    # Verify that every SRC_URI entry is reachable, without downloading.
    import sys

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    src_uri = bb.data.getVar('SRC_URI', localdata, 1)

    try:
        bb.fetch.init(src_uri.split(),d)
    except bb.fetch.NoMethodError:
        # locals renamed from type/traceback so builtins are not shadowed;
        # only the exception value is actually reported
        (exc_type, value, tb) = sys.exc_info()
        raise bb.build.FuncFailed("No method: %s" % value)

    try:
        bb.fetch.checkstatus(localdata)
    except bb.fetch.MissingParameterError:
        (exc_type, value, tb) = sys.exc_info()
        raise bb.build.FuncFailed("Missing parameters: %s" % value)
    except bb.fetch.FetchError:
        (exc_type, value, tb) = sys.exc_info()
        raise bb.build.FuncFailed("Fetch failed: %s" % value)
    except bb.fetch.MD5SumError:
        (exc_type, value, tb) = sys.exc_info()
        raise bb.build.FuncFailed("MD5 failed: %s" % value)
    except:
        # deliberate catch-all boundary: any other fetcher failure becomes
        # a task failure instead of a raw traceback
        (exc_type, value, tb) = sys.exc_info()
        raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
}
|
||||
|
||||
addtask checkuriall after do_checkuri
do_checkuriall[recrdeptask] = "do_checkuri"
do_checkuriall[nostamp] = "1"
base_do_checkuriall() {
	# No-op body: the recursive recrdeptask flag makes this task schedule
	# do_checkuri for this recipe and all of its dependencies.
	:
}
|
||||
|
||||
addtask fetchall after do_fetch
do_fetchall[recrdeptask] = "do_fetch"
base_do_fetchall() {
	# No-op body: recrdeptask recursively schedules do_fetch for this
	# recipe and all of its dependencies.
	:
}
|
||||
|
||||
addtask buildall after do_build
do_buildall[recrdeptask] = "do_build"
base_do_buildall() {
	# No-op body: recrdeptask recursively schedules do_build for this
	# recipe and all of its dependencies.
	:
}
|
||||
|
|
@ -0,0 +1,340 @@
|
|||
# like os.path.join but doesn't treat absolute RHS specially
def base_path_join(a, *p):
    """Join path components with '/', inserting a separator only when the
    text so far is non-empty and does not already end in '/'."""
    segments = [a]
    for part in p:
        if segments[-1] == '' or segments[-1].endswith('/'):
            segments.append(part)
        else:
            segments.append('/' + part)
    return ''.join(segments)
|
||||
|
||||
# for MD5/SHA handling
def base_chk_load_parser(config_path):
    """Load the checksums ini file at *config_path*; raise when it cannot
    be read as exactly one file."""
    import ConfigParser
    parser = ConfigParser.ConfigParser()
    # parser.read returns the list of files successfully parsed
    if len(parser.read(config_path)) != 1:
        bb.note("Can not open the '%s' ini file" % config_path)
        raise Exception("Can not open the '%s'" % config_path)
    return parser
|
||||
|
||||
def base_chk_file(parser, pn, pv, src_uri, localpath, data):
    """Verify the md5/sha256 sums of *localpath* against conf/checksums.ini.

    Sections are tried as PN-PV-SRC_URI, then PN-SRC_URI, then SRC_URI.
    Returns True when both sums match; returns False when no entry exists
    (after appending the freshly computed sums to TMPDIR/checksums.ini);
    raises on a missing file or a checksum mismatch.
    """
    no_checksum = False
    # Try PN-PV-SRC_URI first and then try PN-SRC_URI
    # we rely on the get method to create errors
    pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
    pn_src = "%s-%s" % (pn,src_uri)
    if parser.has_section(pn_pv_src):
        md5 = parser.get(pn_pv_src, "md5")
        sha256 = parser.get(pn_pv_src, "sha256")
    elif parser.has_section(pn_src):
        md5 = parser.get(pn_src, "md5")
        sha256 = parser.get(pn_src, "sha256")
    elif parser.has_section(src_uri):
        md5 = parser.get(src_uri, "md5")
        sha256 = parser.get(src_uri, "sha256")
    else:
        no_checksum = True

    # md5 and sha256 are bound in every branch except no_checksum, which
    # returns before they are used.
    if not os.path.exists(localpath):
        bb.note("The localpath does not exist '%s'" % localpath)
        raise Exception("The path does not exist '%s'" % localpath)

    # Calculate the MD5 and 256-bit SHA checksums
    md5data = bb.utils.md5_file(localpath)
    shadata = bb.utils.sha256_file(localpath)

    # sha256_file() can return None if we are running on Python 2.4 (hashlib is
    # 2.5 onwards, sha in 2.4 is 160-bit only), so check for this and call the
    # standalone shasum binary if required.
    if shadata is None:
        try:
            shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
            shadata = (shapipe.readline().split() or [ "" ])[0]
            shapipe.close()
        except OSError:
            raise Exception("Executing shasum failed, please build shasum-native")

    if no_checksum:        # we do not have a conf/checksums.ini entry
        try:
            # renamed from "file" to stop shadowing the builtin; the old
            # "if not file:" check was dead code (open raises, never None)
            checksum_file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
        except:
            return False

        checksum_file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
        checksum_file.close()
        return False

    if md5 != md5data:
        bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
        raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))

    if sha256 != shadata:
        bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
        raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))

    return True
|
||||
|
||||
def base_read_file(filename):
    """Return the stripped contents of *filename*, or "" if unreadable."""
    try:
        # open() instead of the deprecated file() builtin; the previously
        # bound-but-unused "reason" variable is gone
        f = open(filename, "r")
    except IOError:
        return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
    contents = f.read().strip()
    # close explicitly instead of leaking the handle until GC; the old
    # trailing "return None" after try/else was unreachable and is removed
    f.close()
    return contents
|
||||
|
||||
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue when VARIABLE expands to checkvalue, else falsevalue."""
    current = bb.data.getVar(variable, d, 1)
    if current == checkvalue:
        return truevalue
    return falsevalue
|
||||
|
||||
def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Numeric compare: truevalue when float(VARIABLE) <= float(checkvalue)."""
    current = float(bb.data.getVar(variable, d, 1))
    if current > float(checkvalue):
        return falsevalue
    return truevalue
|
||||
|
||||
def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Version compare via bb.vercmp: truevalue when VARIABLE <= checkvalue."""
    outcome = bb.vercmp(bb.data.getVar(variable, d, True), checkvalue)
    if outcome > 0:
        return falsevalue
    return truevalue
|
||||
|
||||
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue when VARIABLE's value contains every entry of
    checkvalues (a bare string counts as a single value), else falsevalue."""
    # hoisted out of the loop: the variable's value does not change per check
    val = bb.data.getVar(variable, d, 1)
    # isinstance instead of the fragile type(x).__name__ == "str" comparison
    if isinstance(checkvalues, str):
        checkvalues = [checkvalues]
    for value in checkvalues:
        if val.find(value) == -1:
            return falsevalue
    return truevalue
|
||||
|
||||
def base_both_contain(variable1, variable2, checkvalue, d):
    """Return checkvalue when both variables' values contain it, else ""."""
    first = bb.data.getVar(variable1, d, 1)
    second = bb.data.getVar(variable2, d, 1)
    if checkvalue in first and checkvalue in second:
        return checkvalue
    return ""
|
||||
|
||||
def base_prune_suffix(var, suffixes, d):
    """Return var with the first matching entry of suffixes removed from
    its end; var is returned unchanged when no suffix matches."""
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            # Slice only the trailing suffix off.  The previous
            # var.replace(suffix, "") removed *every* occurrence of the
            # suffix, corrupting names like "a-native-b-native".
            # The "if suffix" guard keeps an empty suffix a no-op
            # (var[:-0] would wrongly yield "").
            return var[:-len(suffix)]
    return var
|
||||
|
||||
def oe_filter(f, str, d):
    """Return the space-joined words of *str* whose start matches regex *f*."""
    from re import match
    kept = [word for word in str.split() if match(f, word, 0)]
    return " ".join(kept)
|
||||
|
||||
def oe_filter_out(f, str, d):
    """Return the space-joined words of *str* whose start does NOT match regex *f*."""
    from re import match
    kept = [word for word in str.split() if not match(f, word, 0)]
    return " ".join(kept)
|
||||
|
||||
oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	# oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Extract the DT_SONAME entry (e.g. "libfoo.so.1") from the ELF header
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	# Development link: strip everything after ".so"
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}
|
||||
|
||||
oe_libinstall() {
	# Purpose: Install a library, in all its forms (static .a, libtool
	#          .la/.lai, shared objects and their symlinks)
	# Example
	#
	# oe_libinstall libltdl ${STAGING_LIBDIR}/
	# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	#
	# Options: -C dir  look for the library under dir
	#          -s      silent (do not echo commands)
	#          -a      require/install the static archive
	#          -so     require a shared library to be found
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	staging_install=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi
	# Installing into staging needs the .la files rewritten (see below)
	if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
	then
		staging_install=1
	fi

	# Helper: echo each command to stderr (unless -s) before running it
	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi

	dotlai=$libname.lai

	# Sanity check that the libname.lai is unique
	number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
	if [ $number_of_files -gt 1 ]; then
		oefatal "oe_libinstall: $dotlai is not unique in $dir"
	fi

	# Narrow $dir down to the subdirectory actually holding the .lai
	dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la

	# If such file doesn't exist, try to cut version suffix
	if [ ! -f "$lafile" ]; then
		libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
		# BUGFIX: was "lafile1=$libname.la" (the unstripped name), so the
		# version-suffix fallback could never find anything.
		lafile1=$libname1.la
		if [ -f "$lafile1" ]; then
			libname=$libname1
			lafile=$lafile1
		fi
	fi

	if [ -f "$lafile" ]; then
		# libtool archive: let it tell us the real shared-object names
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		rm -f $destpath/$dota
		__runcmd install -m 0644 $dota $destpath/
	fi
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		if test -n "$staging_install"
		then
			# stop libtool using the final directory name for libraries
			# in staging:
			__runcmd rm -f $destpath/$libname.la
			__runcmd sed -e 's/^installed=yes$/installed=no/' \
				-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
				-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
				$dotlai >$destpath/$libname.la
		else
			rm -f $destpath/$libname.la
			__runcmd install -m 0644 $dotlai $destpath/$libname.la
		fi
	fi

	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				# preserve symlinks as symlinks
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				rm -f $destpath/$libfile
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}
|
||||
|
||||
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Try each OVERRIDES element as a subdirectory that may hold a
	# machine/distro-specific version of the file.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
	# oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		# guarantee the destination exists even with no source at all
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}
|
|
@ -676,3 +676,18 @@ COMBINED_FEATURES = "\
|
|||
${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbgadget", d)} \
|
||||
${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "usbhost", d)} \
|
||||
${@base_both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "wifi", d)}"
|
||||
|
||||
|
||||
# These variables would otherwise leak into the environment of every
# process the build spawns; unexport each one because specific tools
# misbehave when they see it:

# Make sure MACHINE isn't exported
# (breaks binutils at least)
MACHINE[unexport] = "1"

# Make sure TARGET_ARCH isn't exported
# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
# in them, undocumented)
TARGET_ARCH[unexport] = "1"

# Make sure DISTRO isn't exported
# (breaks sysvinit at least)
DISTRO[unexport] = "1"
|
||||
|
||||
|
|
Loading…
Reference in New Issue