2006-08-23 09:42:43 +00:00
BB_DEFAULT_TASK = "build"
2005-08-31 10:45:47 +00:00
2007-08-08 21:04:28 +00:00
# like os.path.join but doesn't treat absolute RHS specially
def base_path_join(a, *p):
    """Join path components without ever discarding earlier parts.

    Unlike os.path.join, a component that begins with '/' does not
    reset the result; it is simply appended.
    """
    joined = a
    for component in p:
        if joined == '' or joined.endswith('/'):
            joined += component
        else:
            joined += '/' + component
    return joined
# for MD5/SHA handling
def base_chk_load_parser(config_path):
    """Load the checksum ini file at config_path; raise if it is unreadable."""
    import ConfigParser, os, bb

    parser = ConfigParser.ConfigParser()
    parsed_files = parser.read(config_path)
    if len(parsed_files) != 1:
        bb.note("Can not open the '%s' ini file" % config_path)
        raise Exception("Can not open the '%s'" % config_path)
    return parser
def base_chk_file(parser, pn, pv, src_uri, localpath, data):
    """Verify localpath's md5/sha256 sums against conf/checksums.ini.

    Sections are searched in order PN-PV-SRC_URI, PN-SRC_URI, SRC_URI.
    Returns True when both sums match, False when no section exists
    (in which case a stub entry is appended to TMPDIR/checksums.ini),
    and raises Exception on a missing file or a checksum mismatch.
    """
    import os, bb

    no_checksum = False
    # Try PN-PV-SRC_URI first and then try PN-SRC_URI
    # we rely on the get method to create errors
    pn_pv_src = "%s-%s-%s" % (pn, pv, src_uri)
    pn_src = "%s-%s" % (pn, src_uri)
    if parser.has_section(pn_pv_src):
        md5 = parser.get(pn_pv_src, "md5")
        sha256 = parser.get(pn_pv_src, "sha256")
    elif parser.has_section(pn_src):
        md5 = parser.get(pn_src, "md5")
        sha256 = parser.get(pn_src, "sha256")
    elif parser.has_section(src_uri):
        md5 = parser.get(src_uri, "md5")
        sha256 = parser.get(src_uri, "sha256")
    else:
        no_checksum = True

    # md5 and sha256 should be valid now
    if not os.path.exists(localpath):
        bb.note("The localpath does not exist '%s'" % localpath)
        raise Exception("The path does not exist '%s'" % localpath)

    # call md5(sum) and shasum
    try:
        md5pipe = os.popen('md5sum ' + localpath)
        md5data = (md5pipe.readline().split() or [ "" ])[0]
        md5pipe.close()
    except OSError:
        raise Exception("Executing md5sum failed")

    try:
        shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
        shadata = (shapipe.readline().split() or [ "" ])[0]
        shapipe.close()
    except OSError:
        raise Exception("Executing shasum failed")

    if no_checksum == True:
        # we do not have a conf/checksums.ini entry; record what we saw
        # so the maintainer can copy it into checksums.ini.
        try:
            # renamed from 'file' to avoid shadowing the builtin
            chksum_file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
        except:
            return False
        if not chksum_file:
            raise Exception("Creating checksums.ini failed")
        chksum_file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
        chksum_file.close()
        return False

    if not md5 == md5data:
        bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5, md5data))
        raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
    if not sha256 == shadata:
        bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
        raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
    return True
2005-08-31 10:45:47 +00:00
def base_dep_prepend(d):
    """Return the default dependencies prepended to every recipe's DEPENDS."""
    import bb
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #

    # Everything except shasum-native itself needs shasum-native to
    # verify fetched sources (see do_fetch[depends]).
    deps = "shasum-native "
    if bb.data.getVar('PN', d, True) == "shasum-native":
        deps = ""

    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
        if (bb.data.getVar('HOST_SYS', d, 1) !=
            bb.data.getVar('BUILD_SYS', d, 1)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
    return deps
def base_read_file(filename):
    """Return the stripped contents of filename, or "" if it cannot be read."""
    try:
        f = open(filename, "r")
    except IOError:
        # WARNING: can't raise an error now because of the new RDEPENDS
        # handling.  This is a bit ugly. :M:
        return ""
    try:
        return f.read().strip()
    finally:
        # close the handle even if read() raises (original leaked it)
        f.close()
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue when the variable expands to checkvalue, else falsevalue."""
    import bb
    if bb.data.getVar(variable, d, 1) != checkvalue:
        return falsevalue
    return truevalue
2007-08-08 21:04:28 +00:00
def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue when float(variable) <= float(checkvalue)."""
    import bb
    current = float(bb.data.getVar(variable, d, 1))
    if current > float(checkvalue):
        return falsevalue
    return truevalue
2006-12-20 00:00:03 +00:00
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue when every string in checkvalues occurs as a
    substring of the expanded variable, falsevalue otherwise.

    checkvalues may be a single string or a list of strings.
    """
    import bb
    # isinstance instead of comparing type(...).__name__ to "str"
    if isinstance(checkvalues, str):
        checkvalues = [checkvalues]
    # the getVar lookup is loop-invariant; fetch it once
    val = bb.data.getVar(variable, d, 1)
    for value in checkvalues:
        if value not in val:
            return falsevalue
    return truevalue
2006-07-24 09:22:17 +00:00
2006-07-26 22:17:08 +00:00
def base_both_contain(variable1, variable2, checkvalue, d):
    """Return checkvalue when it occurs in both expanded variables, else ""."""
    import bb
    # keep the original short-circuit: variable2 is only expanded when
    # variable1 already contains the value
    if bb.data.getVar(variable1, d, 1).find(checkvalue) == -1:
        return ""
    if bb.data.getVar(variable2, d, 1).find(checkvalue) == -1:
        return ""
    return checkvalue
2005-08-31 10:45:47 +00:00
DEPENDS_prepend="${@base_dep_prepend(d)} "
def base_set_filespath(path, d):
    """Build the FILESPATH search path.

    Combines every directory in path with every OVERRIDES component
    (plus the bare directory, from the trailing ':') and returns the
    colon-joined result.
    """
    import os, bb
    filespath = []
    # The OVERRIDES lookup is loop-invariant; fetch it once instead of
    # once per path entry.
    overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
    for p in path:
        for o in overrides.split(":"):
            filespath.append(os.path.join(p, o))
    return ":".join(filespath)
2005-08-31 10:45:47 +00:00
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
def oe_filter(f, str, d):
    """Return the whitespace-separated words of str that match regex f."""
    from re import match
    kept = []
    for word in str.split():
        if match(f, word, 0):
            kept.append(word)
    return " ".join(kept)
def oe_filter_out(f, str, d):
    """Return the whitespace-separated words of str that do NOT match regex f."""
    from re import match
    kept = []
    for word in str.split():
        if not match(f, word, 0):
            kept.append(word)
    return " ".join(kept)
# Abort the task with a fatal error message.
die() {
	oefatal "$*"
}
# Print an informational message.
oenote() {
	echo "NOTE:" "$*"
}
# Print a warning message.
oewarn() {
	echo "WARNING:" "$*"
}
# Print a fatal error message and terminate the shell task.
oefatal() {
	echo "FATAL:" "$*"
	exit 1
}
# Print a debug message when ${OEDEBUG} is at least the given level.
# Usage: oedebug <level> "message"
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}
# Run ${MAKE} (defaulting to "make") with EXTRA_OEMAKE plus the given
# arguments, dying on failure.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	# oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Extract the soname from the ELF dynamic section, then create the
	# soname symlink and the unversioned .so link beside the library.
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}
oe_libinstall() {
	# Purpose: Install a library, in all its forms
	# Example
	#
	# oe_libinstall libltdl ${STAGING_LIBDIR}/
	# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	#
	# Options:
	#   -C dir  cd into dir before looking for the library
	#   -s      silent (don't echo the commands being run)
	#   -a      require/install the static archive
	#   -so     require a shared library to be found
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	staging_install=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi

	if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
	then
		staging_install=1
	fi

	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi

	# The .lai file may live in a subdirectory of $dir; descend to
	# wherever it actually is.
	dotlai=$libname.lai
	dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`

	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la

	# If such file doesn't exist, try to cut version suffix
	if [ ! -f "$lafile" ]; then
		libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
		# bugfix: was $libname.la, which can never differ from $lafile
		lafile1=$libname1.la
		if [ -f "$lafile1" ]; then
			libname=$libname1
			lafile=$lafile1
		fi
	fi

	if [ -f "$lafile" ]; then
		# libtool archive
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		__runcmd install -m 0644 $dota $destpath/
	fi
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		if test -n "$staging_install"
		then
			# stop libtool using the final directory name for libraries
			# in staging:
			__runcmd rm -f $destpath/$libname.la
			__runcmd sed -e 's/^installed=yes$/installed=no/' \
				     -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
				     -e "/^dependency_libs=/s,\([[:space:]']+\)${libdir},\1${STAGING_LIBDIR},g" \
				     $dotlai >$destpath/$libname.la
		else
			__runcmd install -m 0644 $dotlai $destpath/$libname.la
		fi
	fi

	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Try each OVERRIDES component as a source subdirectory, in order.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	# Print the name of every datastore entry flagged as a task.
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			sys.__stdout__.write("%s\n" % e)
}
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
do_clean[bbdepcmd] = ""
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# guard against an unset WORKDIR expanding to just separators
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	os.system('rm -rf ' + dir)

	# also remove every stamp file for this recipe
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
2006-08-31 09:20:47 +00:00
addtask rebuild
do_rebuild[dirs] = "${TOPDIR}"
do_rebuild[nostamp] = "1"
do_rebuild[bbdepcmd] = ""
python base_do_rebuild() {
	"""rebuild a package"""
	# clean first, then re-run the configured default task
	bb.build.exec_task('do_clean', d)
	bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
}
2005-08-31 10:45:47 +00:00
addtask mrproper
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
do_mrproper[bbdepcmd] = ""
python base_do_mrproper() {
"""clear downloaded sources, build and temp directories"""
dir = bb.data.expand("${DL_DIR}", d)
if dir == '/': bb.build.FuncFailed("wrong DATADIR")
bb.debug(2, "removing " + dir)
os.system('rm -rf ' + dir)
bb.build.exec_task('do_clean', d)
}
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
2007-08-08 21:04:28 +00:00
do_fetch[depends] = "shasum-native:do_populate_staging"
2005-08-31 10:45:47 +00:00
python base_do_fetch() {
import sys
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
src_uri = bb.data.getVar('SRC_URI', localdata, 1)
if not src_uri:
return 1
try:
bb.fetch.init(src_uri.split(),d)
except bb.fetch.NoMethodError:
(type, value, traceback) = sys.exc_info()
raise bb.build.FuncFailed("No method: %s" % value)
try:
bb.fetch.go(localdata)
except bb.fetch.MissingParameterError:
(type, value, traceback) = sys.exc_info()
raise bb.build.FuncFailed("Missing parameters: %s" % value)
except bb.fetch.FetchError:
(type, value, traceback) = sys.exc_info()
raise bb.build.FuncFailed("Fetch failed: %s" % value)
2007-08-08 21:04:28 +00:00
except bb.fetch.MD5SumError:
(type, value, traceback) = sys.exc_info()
raise bb.build.FuncFailed("MD5 failed: %s" % value)
except:
(type, value, traceback) = sys.exc_info()
raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
# Verify the SHA and MD5 sums we have in OE and check what do
# in
check_sum = bb.which(bb.data.getVar('BBPATH', d, True), "conf/checksums.ini")
if not check_sum:
bb.note("No conf/checksums.ini found, not checking checksums")
return
try:
parser = base_chk_load_parser(check_sum)
except:
bb.note("Creating the CheckSum parser failed")
return
pv = bb.data.getVar('PV', d, True)
pn = bb.data.getVar('PN', d, True)
# Check each URI
for url in src_uri.split():
localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
(type,host,path,_,_,_) = bb.decodeurl(url)
uri = "%s://%s%s" % (type,host,path)
try:
2007-10-17 09:54:00 +00:00
if type == "http" or type == "https" or type == "ftp" or type == "ftps":
if not base_chk_file(parser, pn, pv,uri, localpath, d):
2007-10-16 14:57:43 +00:00
bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
2007-08-08 21:04:28 +00:00
except Exception:
raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
2005-08-31 10:45:47 +00:00
}
2006-11-20 12:51:14 +00:00
addtask fetchall after do_fetch
2006-10-16 23:19:19 +00:00
do_fetchall[recrdeptask] = "do_fetch"
2006-11-20 12:51:14 +00:00
# No-op body: the recrdeptask flag on do_fetchall pulls in do_fetch for
# every dependency, so there is nothing to do here.
base_do_fetchall() {
	:
}
2007-11-25 14:07:17 +00:00
addtask buildall after do_build
do_buildall[recrdeptask] = "do_build"
# No-op body: the recrdeptask flag on do_buildall pulls in do_build for
# every dependency, so there is nothing to do here.
base_do_buildall() {
	:
}
2005-08-31 10:45:47 +00:00
def oe_unpack_file(file, data, url = None):
	"""Unpack one fetched file into the current directory.

	The extraction command is chosen from the file extension; plain
	files and directories are copied, preserving any FILESDIR
	subdirectory layout for file:// entries.  Returns True on success
	or when there is nothing to do, False when the command fails.
	"""
	import bb, os
	if not url:
		url = "file://%s" % file

	dots = file.split(".")
	if dots[-1] in ['gz', 'bz2', 'Z']:
		# target name for single-file decompression
		efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
	else:
		efile = file
	cmd = None
	if file.endswith('.tar'):
		cmd = 'tar x --no-same-owner -f %s' % file
	elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
		cmd = 'tar xz --no-same-owner -f %s' % file
	elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
		cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
	elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
		cmd = 'gzip -dc %s > %s' % (file, efile)
	elif file.endswith('.bz2'):
		cmd = 'bzip2 -dc %s > %s' % (file, efile)
	elif file.endswith('.zip'):
		cmd = 'unzip -q'
		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if 'dos' in parm:
			# convert text files to the host's line endings
			cmd = '%s -a' % cmd
		cmd = '%s %s' % (cmd, file)
	elif os.path.isdir(file):
		# copy a directory tree, recreating its position under FILESDIR
		filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
		destdir = "."
		if file[0:len(filesdir)] == filesdir:
			destdir = file[len(filesdir):file.rfind('/')]
			destdir = destdir.strip('/')
			if len(destdir) < 1:
				destdir = "."
			elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
				os.makedirs("%s/%s" % (os.getcwd(), destdir))
		cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
	else:
		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if not 'patch' in parm:
			# The "destdir" handling was specifically done for FILESPATH
			# items.  So, only do so for file:// entries.
			if type == "file":
				destdir = bb.decodeurl(url)[1] or "."
			else:
				destdir = "."
			bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
			cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)

	if not cmd:
		return True

	# avoid copying a file onto itself
	dest = os.path.join(os.getcwd(), os.path.basename(file))
	if os.path.exists(dest):
		if os.path.samefile(file, dest):
			return True

	cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
	bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
	ret = os.system(cmd)
	return ret == 0
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
import re, os
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
src_uri = bb.data.getVar('SRC_URI', localdata)
if not src_uri:
return
src_uri = bb.data.expand(src_uri, localdata)
for url in src_uri.split():
try:
local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
except bb.MalformedUrl, e:
raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
local = os.path.realpath(local)
ret = oe_unpack_file(local, localdata, url)
if not ret:
raise bb.build.FuncFailed()
}
addhandler base_eventhandler
python base_eventhandler() {
from bb import note, error, data
from bb.event import Handled, NotHandled, getName
import os
messages = {}
messages["Completed"] = "completed"
messages["Succeeded"] = "completed"
messages["Started"] = "started"
messages["Failed"] = "failed"
name = getName(e)
msg = ""
if name.startswith("Pkg"):
msg += "package %s: " % data.getVar("P", e.data, 1)
msg += messages.get(name[3:]) or name[3:]
elif name.startswith("Task"):
msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
msg += messages.get(name[4:]) or name[4:]
elif name.startswith("Build"):
msg += "build %s: " % e.name
msg += messages.get(name[5:]) or name[5:]
elif name == "UnsatisfiedDep":
msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
2006-08-27 16:01:33 +00:00
if msg:
note(msg)
2005-08-31 10:45:47 +00:00
if name.startswith("BuildStarted"):
2005-09-20 07:04:35 +00:00
bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
2006-05-09 17:41:09 +00:00
path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
2005-09-20 07:04:35 +00:00
monotone_revision = "<unknown>"
try:
2006-08-27 16:01:33 +00:00
monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
2006-11-20 12:51:14 +00:00
if monotone_revision.startswith( "format_version" ):
monotone_revision_words = monotone_revision.split()
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
2005-09-20 07:04:35 +00:00
except IOError:
pass
bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
2006-05-09 17:41:09 +00:00
statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
2005-08-31 10:45:47 +00:00
statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
print statusmsg
needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
pesteruser = []
for v in needed_vars:
val = bb.data.getVar(v, e.data, 1)
if not val or val == 'INVALID':
pesteruser.append(v)
if pesteruser:
bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
if not data in e.__dict__:
return NotHandled
log = data.getVar("EVENTLOG", e.data, 1)
if log:
logfile = file(log, "a")
logfile.write("%s\n" % msg)
logfile.close()
return NotHandled
}
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
2006-08-27 16:01:33 +00:00
do_configure[deptask] = "do_populate_staging"
2005-08-31 10:45:47 +00:00
# Default configure step: nothing to do; classes/recipes override this.
base_do_configure() {
	:
}
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
do_compile[bbdepcmd] = "do_populate_staging"
base_do_compile() {
	# Run make only when a makefile is present; some recipes (e.g. pure
	# data packages) have nothing to compile.
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}
# Default staging step: recipes override this to populate staging.
base_do_stage () {
	:
}
2007-10-29 21:38:28 +00:00
do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DIR_TARGET}/${layout_libdir} \
${STAGING_DIR_TARGET}/${layout_includedir} \
2007-10-29 15:16:19 +00:00
${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
${STAGING_INCDIR_NATIVE} \
2005-08-31 10:45:47 +00:00
${STAGING_DATADIR} \
${S} ${B}"
2007-08-21 15:22:06 +00:00
# Could be compile but populate_staging and do_install shouldn't run at the same time
addtask populate_staging after do_install
2005-08-31 10:45:47 +00:00
python do_populate_staging () {
2007-11-30 08:18:09 +00:00
bb.build.exec_func('do_stage', d)
2005-08-31 10:45:47 +00:00
}
addtask install after do_compile
2006-09-18 20:31:39 +00:00
do_install[dirs] = "${D} ${S} ${B}"
2007-10-29 15:20:25 +00:00
# Remove and re-create ${D} so that is it guaranteed to be empty
2007-09-05 08:41:10 +00:00
do_install[cleandirs] = "${D}"
2005-08-31 10:45:47 +00:00
# Default install step: nothing to do; classes/recipes override this.
base_do_install() {
	:
}
# Default package step: nothing to do; packaging classes override this.
base_do_package() {
	:
}
addtask build after do_populate_staging
do_build = ""
do_build[func] = "1"
# Functions that update metadata based on files outputted
# during the build process.
def explode_deps(s):
    """Split a dependency string into a list, keeping a parenthesised
    version constraint attached to the package name before it,
    e.g. "a b (>= 1.0) c" -> ["a", "b (>= 1.0)", "c"].
    """
    out = []
    in_version = False
    for token in s.split():
        if token[0] == '(':
            in_version = True
            pending = []
        if in_version:
            pending.append(token)
            if token.endswith(')'):
                # constraint complete: glue it onto the preceding name
                in_version = False
                out[-1] += ' ' + ' '.join(pending)
        else:
            out.append(token)
    return out
2006-10-27 19:26:50 +00:00
def packaged(pkg, d):
    """Return True when pkg's '.packaged' stamp file is readable."""
    import os, bb
    return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
2005-08-31 10:45:47 +00:00
2006-09-21 16:29:02 +00:00
def read_pkgdatafile(fn):
    """Parse a pkgdata file of "KEY: value" lines into a dict.

    Values are unescaped with the string_escape codec; a missing or
    unreadable file yields an empty dict.
    """
    pkgdata = {}

    def decode(str):
        import codecs
        c = codecs.getdecoder("string_escape")
        return c(str)[0]

    import os
    if os.access(fn, os.R_OK):
        import re
        f = file(fn, 'r')
        lines = f.readlines()
        f.close()
        r = re.compile("([^:]+):\s*(.*)")
        for l in lines:
            m = r.match(l)
            if m:
                pkgdata[m.group(1)] = decode(m.group(2))

    return pkgdata
2007-11-25 23:47:36 +00:00
def get_subpkgedata_fn(pkg, d):
    """Locate the runtime pkgdata file for pkg, preferring the most
    specific architecture; fall back to ${PKGDATA_DIR}/runtime/pkg."""
    import bb, os
    archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
    for arch in reversed(archs):
        candidate = bb.data.expand('${STAGING_DIR}/pkgdata/' + arch + '${TARGET_VENDOR}-${TARGET_OS}/runtime/%s' % pkg, d)
        if os.path.exists(candidate):
            return candidate
    return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
2006-09-21 16:29:02 +00:00
def has_subpkgdata(pkg, d):
    """Return True when runtime pkgdata exists for pkg."""
    import bb, os
    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
2006-09-21 16:29:02 +00:00
def read_subpkgdata(pkg, d):
    """Return the parsed runtime pkgdata dict for pkg."""
    import bb, os
    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
2006-09-21 16:29:02 +00:00
def has_pkgdata(pn, d):
    """Return True when the top-level pkgdata file for recipe pn exists."""
    import bb, os
    fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
    return os.access(fn, os.R_OK)
def read_pkgdata(pn, d):
    """Return the parsed top-level pkgdata dict for recipe pn."""
    import bb, os
    fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
    return read_pkgdatafile(fn)
python read_subpackage_metadata () {
import bb
data = read_pkgdata(bb.data.getVar('PN', d, 1), d)
for key in data.keys():
bb.data.setVar(key, data[key], d)
for pkg in bb.data.getVar('PACKAGES', d, 1).split():
sdata = read_subpkgdata(pkg, d)
for key in sdata.keys():
bb.data.setVar(key, sdata[key], d)
2005-08-31 10:45:47 +00:00
}
2007-08-12 11:36:37 +00:00
def base_after_parse(d):
    """Post-parse fixups for every recipe: compatibility checks,
    obsolete-variable migration, un-exporting MACHINE/DISTRO, git
    fetch dependencies, and machine-specific PACKAGE_ARCH detection."""
    import bb, os, exceptions

    # Skip-package checks do not apply when only mirroring sources.
    source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
    if not source_mirror_fetch:
        need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
        if need_host:
            import re
            this_host = bb.data.getVar('HOST_SYS', d, 1)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

        need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
        if need_machine:
            import re
            this_machine = bb.data.getVar('MACHINE', d, 1)
            if this_machine and not re.match(need_machine, this_machine):
                raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)

    pn = bb.data.getVar('PN', d, 1)

    # OBSOLETE in bitbake 1.7.4
    srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
    if srcdate != None:
        bb.data.setVar('SRCDATE', srcdate, d)

    use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
    if use_nls != None:
        bb.data.setVar('USE_NLS', use_nls, d)

    # Make sure MACHINE isn't exported
    # (breaks binutils at least)
    bb.data.delVarFlag('MACHINE', 'export', d)
    bb.data.setVarFlag('MACHINE', 'unexport', 1, d)

    # Make sure DISTRO isn't exported
    # (breaks sysvinit at least)
    bb.data.delVarFlag('DISTRO', 'export', d)
    bb.data.setVarFlag('DISTRO', 'unexport', 1, d)

    # Git packages should DEPEND on git-native
    srcuri = bb.data.getVar('SRC_URI', d, 1)
    if "git://" in srcuri:
        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
        depends = depends + " git-native:do_populate_staging"
        bb.data.setVarFlag('do_fetch', 'depends', depends, d)

    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
    old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
    if (old_arch == mach_arch):
        # Nothing to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
    if override == '0':
        return

    # Collect the machine-specific FILESPATH directories that exist.
    paths = []
    for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
        path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
        if os.path.isdir(path):
            paths.append(path)
    if len(paths) == 0:
        return

    # If any local file comes from a machine-specific directory, the
    # package output is machine-specific too.
    for s in bb.data.getVar('SRC_URI', d, 1).split():
        if not s.startswith("file://"):
            continue
        local = bb.data.expand(bb.fetch.localpath(s, d), d)
        for mp in paths:
            if local.startswith(mp):
                #bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
                bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
                return
2005-08-31 10:45:47 +00:00
python () {
2006-11-20 09:16:34 +00:00
base_after_parse(d)
2005-08-31 10:45:47 +00:00
}
2008-01-03 14:07:44 +00:00
def check_app_exists(app, d):
    """Return True when the (expanded) application name is found on PATH."""
    from bb import which, data
    expanded = data.expand(app, d)
    search_path = data.getVar('PATH', d)
    return len(which(search_path, expanded)) != 0
def check_gcc3(data):
    """Return the name of the first gcc 3.x binary found on PATH, or False."""
    gcc3_versions = 'gcc-3.4 gcc34 gcc-3.4.4 gcc-3.4.6 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
    for candidate in gcc3_versions.split():
        if check_app_exists(candidate, data):
            return candidate
    return False
2006-08-21 00:50:19 +00:00
# Patch handling
inherit patch
2006-11-21 14:34:40 +00:00
# Configuration data from site files
# Move to autotools.bbclass?
inherit siteinfo
2006-10-16 23:19:19 +00:00
EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild do_fetchall
2005-08-31 10:45:47 +00:00
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
2007-08-08 21:04:28 +00:00
${KERNELORG_MIRROR} http://www.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
2006-05-27 21:49:50 +00:00
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
2006-08-27 16:01:33 +00:00
ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
2006-06-12 20:29:38 +00:00
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
2006-11-20 12:51:14 +00:00
http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
2007-08-08 21:04:28 +00:00
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
http://www.apache.org/dist http://archive.apache.org/dist
2005-08-31 10:45:47 +00:00
}
2006-08-27 16:01:33 +00:00