# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This file is used for archiving sources, patches, and logs to a
# tarball. It also dumps the build environment to ${P}-${PR}.showdata.dump
# and creates ${P}-${PR}.diff.gz to record all content in ${S} as a diff
# file.
#
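
# A minimal local.conf sketch for enabling this class (illustrative only:
# the ARCHIVER_MODE flag names are the ones read by the defaults below,
# and the flag values shown are examples, not the only choices):
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[type] = "tar"          # or "srpm", see ARCHIVE_TYPE
#   ARCHIVER_MODE[log_type] = "logs"     # or "logs_with_scripts"
#   ARCHIVER_MODE[filter] = "no"         # "yes" archives copyleft recipes only
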
ARCHIVE_EXCLUDE_FROM ?= ".pc autom4te.cache"
ARCHIVE_TYPE ?= "TAR SRPM"
DISTRO ?= "poky"
PATCHES_ARCHIVE_WITH_SERIES = 'TRUE'
SOURCE_ARCHIVE_LOG_WITH_SCRIPTS ?= '${@d.getVarFlag('ARCHIVER_MODE', 'log_type') \
    if d.getVarFlag('ARCHIVER_MODE', 'log_type') != 'none' else 'logs_with_scripts'}'
SOURCE_ARCHIVE_PACKAGE_TYPE ?= '${@d.getVarFlag('ARCHIVER_MODE', 'type') \
    if d.getVarFlag('ARCHIVER_MODE', 'type') != 'none' else 'tar'}'
FILTER ?= '${@d.getVarFlag('ARCHIVER_MODE', 'filter') \
    if d.getVarFlag('ARCHIVER_MODE', 'filter') != 'none' else 'no'}'

COPYLEFT_LICENSE_INCLUDE ?= 'GPL* LGPL*'
COPYLEFT_LICENSE_INCLUDE[type] = 'list'
COPYLEFT_LICENSE_INCLUDE[doc] = 'Space separated list of globs which include licenses'

COPYLEFT_LICENSE_EXCLUDE ?= 'CLOSED Proprietary'
COPYLEFT_LICENSE_EXCLUDE[type] = 'list'
COPYLEFT_LICENSE_EXCLUDE[doc] = 'Space separated list of globs which exclude licenses'

COPYLEFT_RECIPE_TYPE ?= '${@copyleft_recipe_type(d)}'
COPYLEFT_RECIPE_TYPE[doc] = 'The "type" of the current recipe (e.g. target, native, cross)'

COPYLEFT_RECIPE_TYPES ?= 'target'
COPYLEFT_RECIPE_TYPES[type] = 'list'
COPYLEFT_RECIPE_TYPES[doc] = 'Space separated list of recipe types to include'

COPYLEFT_AVAILABLE_RECIPE_TYPES = 'target native nativesdk cross crosssdk cross-canadian'
COPYLEFT_AVAILABLE_RECIPE_TYPES[type] = 'list'
COPYLEFT_AVAILABLE_RECIPE_TYPES[doc] = 'Space separated list of available recipe types'

def copyleft_recipe_type(d):
    for recipe_type in oe.data.typed_value('COPYLEFT_AVAILABLE_RECIPE_TYPES', d):
        if oe.utils.inherits(d, recipe_type):
            return recipe_type
    return 'target'

def copyleft_should_include(d):
    """
    Determine if this recipe's sources should be deployed for compliance
    """
    import ast
    import oe.license
    from fnmatch import fnmatchcase as fnmatch

    recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True)
    if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
        return False, 'recipe type "%s" is excluded' % recipe_type

    include = oe.data.typed_value('COPYLEFT_LICENSE_INCLUDE', d)
    exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)

    try:
        is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude)
    except oe.license.LicenseError as exc:
        bb.fatal('%s: %s' % (d.getVar('PF', True), exc))
    else:
        if is_included:
            return True, 'recipe has included licenses: %s' % ', '.join(reason)
        else:
            return False, 'recipe has excluded licenses: %s' % ', '.join(reason)
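
# Illustrative sketch (an assumption about oe.license.is_included
# semantics, for reference only): with the defaults above, a recipe with
# LICENSE = "GPLv2" yields (True, ['GPLv2']) because GPLv2 matches the
# 'GPL*' include glob, while LICENSE = "CLOSED" yields (False, ['CLOSED']).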

def tar_filter(d):
    """
    Archive only the recipes whose licenses match COPYLEFT_LICENSE_INCLUDE
    and skip those matching COPYLEFT_LICENSE_EXCLUDE. No recipe is
    filtered out when "FILTER" is "no".
    """
    if d.getVar('FILTER', True).upper() == "YES":
        included, reason = copyleft_should_include(d)
        # return True (i.e. filter out, skip archiving) the recipes whose
        # licenses are not included
        if not included:
            return True
        else:
            return False
    else:
        return False

def get_bb_inc(d):
    """
    create a directory "script-logs" in ${WORKDIR} holding the recipe's
    .bb and .inc files
    """
    import re
    import shutil

    bbinc = []
    pat = re.compile(r'require\s*([^\s]*\.*)(.*)')
    work_dir = d.getVar('WORKDIR', True)
    bbfile = d.getVar('FILE', True)
    bbdir = os.path.dirname(bbfile)
    script_logs = os.path.join(work_dir, 'script-logs')
    bb_inc = os.path.join(script_logs, 'bb_inc')
    bb.mkdirhier(script_logs)
    bb.mkdirhier(bb_inc)

    def find_file(dir, file):
        for root, dirs, files in os.walk(dir):
            if file in files:
                return os.path.join(root, file)

    def get_inc(file):
        f = open(file, 'r')
        for line in f.readlines():
            if 'require' not in line:
                bbinc.append(file)
            else:
                try:
                    incfile = pat.match(line).group(1)
                    incfile = bb.data.expand(os.path.basename(incfile), d)
                    abs_incfile = find_file(bbdir, incfile)
                    if abs_incfile:
                        bbinc.append(abs_incfile)
                        get_inc(abs_incfile)
                except AttributeError:
                    pass
        f.close()
    get_inc(bbfile)
    bbinc = list(set(bbinc))
    for bbincfile in bbinc:
        shutil.copy(bbincfile, bb_inc)

    try:
        bb.mkdirhier(os.path.join(script_logs, 'temp'))
        oe.path.copytree(os.path.join(work_dir, 'temp'), os.path.join(script_logs, 'temp'))
    except (IOError, AttributeError):
        pass
    return script_logs
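
# For reference, the "require" pattern above matches recipe lines such as
#
#   require zlib.inc
#
# where pat.match(line).group(1) is "zlib.inc" (an illustrative name);
# the basename is then searched for under the running .bb file's directory.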

def get_series(d):
    """
    copy patches and the series file to a destination directory in
    ${WORKDIR}, which will be archived into a tarball
    """
    import shutil

    src_patches = []
    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    s = d.getVar('S', True)
    dest = os.path.join(work_dir, pf + '-series')
    shutil.rmtree(dest, ignore_errors=True)
    bb.mkdirhier(dest)

    src_uri = d.getVar('SRC_URI', True).split()
    fetch = bb.fetch2.Fetch(src_uri, d)
    local_files = (fetch.localpath(url) for url in fetch.urls)
    for local in local_files:
        src_patches.append(local)
    if work_dir == s:
        # ${S} is ${WORKDIR} itself, so there is no leading source entry
        # to skip
        tmp_list = src_patches
    else:
        tmp_list = src_patches[1:]

    for patch in tmp_list:
        try:
            shutil.copy(patch, dest)
        except IOError:
            if os.path.isdir(patch):
                bb.mkdirhier(os.path.join(dest, patch))
                oe.path.copytree(patch, os.path.join(dest, patch))
    return dest

def get_applying_patches(d):
    """
    copy only the applied patches to a destination directory, which will
    be archived into a tarball
    """
    import shutil

    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    dest = os.path.join(work_dir, pf + '-patches')
    shutil.rmtree(dest, ignore_errors=True)
    bb.mkdirhier(dest)

    patches = src_patches(d)
    for patch in patches:
        _, _, local, _, _, parm = bb.decodeurl(patch)
        if local:
            shutil.copy(local, dest)
    return dest

def not_tarball(d):
    """
    recipes whose paths contain the keywords 'work-shared', 'native', or
    'task-' are skipped (no tarball is created for them)
    """
    workdir = d.getVar('WORKDIR', True)
    s = d.getVar('S', True)
    if 'work-shared' in s or 'task-' in workdir or 'native' in workdir:
        return True
    else:
        return False

def get_source_from_downloads(d, stage_name):
    """
    return the path of the tarball of ${P} in ${DL_DIR} when such a
    tarball exists; return '' otherwise
    """
    if stage_name in ('patched', 'configured'):
        return
    pf = d.getVar('PF', True)
    dl_dir = d.getVar('DL_DIR', True)
    try:
        source = os.path.join(dl_dir, os.path.basename(d.getVar('SRC_URI', True).split()[0]))
        if os.path.exists(source) and not os.path.isdir(source):
            return source
    except (IndexError, OSError):
        pass
    return ''

def do_tarball(workdir, srcdir, tarname):
    """
    tar up "srcdir" under "workdir" into "tarname"
    """
    import tarfile

    sav_dir = os.getcwd()
    os.chdir(workdir)
    if len(os.listdir(srcdir)) != 0:
        tar = tarfile.open(tarname, "w:gz")
        tar.add(srcdir)
        tar.close()
    else:
        tarname = ''
    os.chdir(sav_dir)
    return tarname

def archive_sources_from_directory(d, stage_name):
    """
    archive the source tree to a tarball when no tarball of ${P} exists
    in ${DL_DIR}
    """
    import shutil

    s = d.getVar('S', True)
    work_dir = d.getVar('WORKDIR', True)
    PF = d.getVar('PF', True)
    tarname = PF + '-' + stage_name + ".tar.gz"

    if os.path.exists(s) and work_dir in s:
        try:
            source_dir = os.path.join(work_dir, [i for i in s.replace(work_dir, '').split('/') if i][0])
        except IndexError:
            # ${S} has no subdirectory below ${WORKDIR} (i.e. it is
            # ${WORKDIR} itself), so there is nothing to archive here
            return ''
    else:
        return ''
    source = os.path.basename(source_dir)
    return do_tarball(work_dir, source, tarname)

def archive_sources(d, stage_name):
    """
    copy the tarball from ${DL_DIR} to ${WORKDIR} if one exists; archive
    the source tree in ${WORKDIR} when ${P} is a directory instead of a
    tarball
    """
    import shutil

    work_dir = d.getVar('WORKDIR', True)
    file = get_source_from_downloads(d, stage_name)
    if file:
        shutil.copy(file, work_dir)
        file = os.path.basename(file)
    else:
        file = archive_sources_from_directory(d, stage_name)
    return file

def archive_patches(d, patchdir, series):
    """
    archive patches to a tarball, including the series file when 'series'
    is 'all'
    """
    import shutil

    s = d.getVar('S', True)
    work_dir = d.getVar('WORKDIR', True)
    patch_dir = os.path.basename(patchdir)
    tarname = patch_dir + ".tar.gz"
    if series == 'all' and os.path.exists(os.path.join(s, 'patches/series')):
        shutil.copy(os.path.join(s, 'patches/series'), patchdir)
    tarname = do_tarball(work_dir, patch_dir, tarname)
    shutil.rmtree(patchdir, ignore_errors=True)
    return tarname

def select_archive_patches(d, option):
    """
    select whether to archive all patches (including non-applied ones and
    the series file) or only the applied patches
    """
    if option == "all":
        patchdir = get_series(d)
    elif option == "applying":
        patchdir = get_applying_patches(d)
    try:
        os.rmdir(patchdir)
    except OSError:
        tarpatch = archive_patches(d, patchdir, option)
        return tarpatch
    return

def archive_logs(d, logdir, bbinc=False):
    """
    archive the logs in "temp" (plus the .bb and .inc files when bbinc is
    True) to a tarball
    """
    import shutil

    pf = d.getVar('PF', True)
    work_dir = d.getVar('WORKDIR', True)
    log_dir = os.path.basename(logdir)
    tarname = pf + '-' + log_dir + ".tar.gz"
    tarname = do_tarball(work_dir, log_dir, tarname)
    if bbinc:
        shutil.rmtree(logdir, ignore_errors=True)
    return tarname

def get_licenses(d):
    """get the licenses of the running .bb file, mapped through SPDXLICENSEMAP"""
    import oe.license

    licenses_type = d.getVar('LICENSE', True) or ""
    lics = oe.license.is_included(licenses_type)[1]
    lice = ''
    for lic in lics:
        licens = d.getVarFlag('SPDXLICENSEMAP', lic)
        if licens is not None:
            lice += licens
        else:
            lice += lic
    return lice
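
# For reference (an assumption about typical license metadata, e.g. from
# license.bbclass): with a mapping such as SPDXLICENSEMAP[GPLv2] = "GPL-2.0",
# get_licenses(d) returns "GPL-2.0" for a recipe whose LICENSE is "GPLv2".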

def move_tarball_deploy(d, tarball_list):
    """move the tarballs in the list to ${DEPLOY_DIR}/sources"""
    import shutil

    if not tarball_list:
        return
    target_sys = d.getVar('TARGET_SYS', True)
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    work_dir = d.getVar('WORKDIR', True)
    tar_sources = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    if not os.path.exists(tar_sources):
        bb.mkdirhier(tar_sources)
    for source in tarball_list:
        if source:
            if os.path.exists(os.path.join(tar_sources, source)):
                os.remove(os.path.join(tar_sources, source))
            shutil.move(os.path.join(work_dir, source), tar_sources)
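
# The resulting layout under the deploy directory is
# ${DEPLOY_DIR}/sources/<target_sys>/<licenses>/<pf>, for example
# (illustrative values only):
#
#   tmp/deploy/sources/i586-poky-linux/GPL-2.0/zlib-1.2.7-r0/zlib-1.2.7-r0-prepatch.tar.gz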

def check_archiving_type(d):
    """check the archiving package type ('tar' or 'srpm')"""
    try:
        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() not in d.getVar('ARCHIVE_TYPE', True).split():
            raise AttributeError
    except AttributeError:
        bb.fatal("SOURCE_ARCHIVE_PACKAGE_TYPE must be 'tar' or 'srpm', no other types are supported")

def store_package(d, package_name):
    """
    store tarball names in the file "tar-package"
    """
    try:
        f = open(os.path.join(d.getVar('WORKDIR', True), 'tar-package'), 'a')
        f.write(package_name + ' ')
        f.close()
    except IOError:
        pass

def get_package(d):
    """
    get the tarball names from "tar-package"
    """
    work_dir = d.getVar('WORKDIR', True)
    tarpackage = os.path.join(work_dir, 'tar-package')
    try:
        f = open(tarpackage, 'r')
        line = list(set(f.readline().replace('\n', '').split()))
        f.close()
    except (UnboundLocalError, IOError):
        # no tar-package file was written, so there is nothing to return
        return []
    return line
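
# "tar-package" holds a single line of space-separated tarball names,
# e.g. (illustrative):
#
#   zlib-1.2.7-r0-prepatch.tar.gz zlib-1.2.7-r0-series.tar.gz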

def archive_sources_patches(d, stage_name):
    """
    archive sources and patches to a tarball. stage_name is inserted
    into the tarball name, for example zlib-1.4.6-prepatch.tar.gz
    """
    import shutil

    check_archiving_type(d)
    if not_tarball(d) or tar_filter(d):
        return

    source_tar_name = archive_sources(d, stage_name)
    if stage_name == "prepatch":
        if d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True).upper() == 'TRUE':
            patch_tar_name = select_archive_patches(d, "all")
        elif d.getVar('PATCHES_ARCHIVE_WITH_SERIES', True).upper() == 'FALSE':
            patch_tar_name = select_archive_patches(d, "applying")
        else:
            bb.fatal("Please set 'PATCHES_ARCHIVE_WITH_SERIES' to 'TRUE' or 'FALSE'")
    else:
        patch_tar_name = ''

    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() != 'SRPM':
        move_tarball_deploy(d, [source_tar_name, patch_tar_name])
    else:
        tarpackage = os.path.join(d.getVar('WORKDIR', True), 'tar-package')
        if os.path.exists(tarpackage):
            os.remove(tarpackage)
        for package in (os.path.basename(source_tar_name), patch_tar_name):
            if package:
                store_package(d, str(package) + ' ')

def archive_scripts_logs(d):
    """
    archive scripts and logs. scripts include the .bb and .inc files,
    and logs include everything in "temp".
    """
    if tar_filter(d):
        return
    work_dir = d.getVar('WORKDIR', True)
    temp_dir = os.path.join(work_dir, 'temp')
    source_archive_log_with_scripts = d.getVar('SOURCE_ARCHIVE_LOG_WITH_SCRIPTS', True)
    if source_archive_log_with_scripts == 'logs_with_scripts':
        logdir = get_bb_inc(d)
        tarlog = archive_logs(d, logdir, True)
    elif source_archive_log_with_scripts == 'logs':
        if not os.path.exists(temp_dir):
            return
        tarlog = archive_logs(d, temp_dir, False)
    else:
        return

    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() != 'SRPM':
        move_tarball_deploy(d, [tarlog])
    else:
        store_package(d, tarlog)

def dumpdata(d):
    """
    dump the environment, including all kinds of variables and functions,
    to "${P}-${PR}.showdata.dump" when running a task
    """
    if tar_filter(d):
        return
    pf = d.getVar('PF', True)
    target_sys = d.getVar('TARGET_SYS', True)
    licenses = get_licenses(d)
    dumpdir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    if not os.path.exists(dumpdir):
        bb.mkdirhier(dumpdir)

    dumpfile = os.path.join(dumpdir, bb.data.expand("${P}-${PR}.showdata.dump", d))

    bb.note("Dumping metadata into '%s'" % dumpfile)
    f = open(dumpfile, "w")
    # emit variables and shell functions
    bb.data.emit_env(f, d, True)
    # emit the metadata which isn't valid shell
    for e in d.keys():
        if bb.data.getVarFlag(e, 'python', d):
            f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
    f.close()

def create_diff_gz(d):
    """
    create ${P}-${PR}.diff.gz in ${DEPLOY_DIR_SRC}, recording all content
    in ${S}, including the patches, as a diff file
    """
    import shutil
    import subprocess

    if tar_filter(d):
        return
    work_dir = d.getVar('WORKDIR', True)
    exclude_from = d.getVar('ARCHIVE_EXCLUDE_FROM', True).split()
    pf = d.getVar('PF', True)
    licenses = get_licenses(d)
    target_sys = d.getVar('TARGET_SYS', True)
    diff_dir = d.getVar('DEPLOY_DIR', True) + '/sources/' + target_sys + '/' + licenses + '/' + pf
    diff_file = os.path.join(diff_dir, bb.data.expand("${P}-${PR}.diff.gz", d))

    f = open(os.path.join(work_dir, 'temp/exclude-from-file'), 'a')
    for i in exclude_from:
        f.write(i)
        f.write("\n")
    f.close()

    s = d.getVar('S', True)
    distro = d.getVar('DISTRO', True)
    dest = s + '/' + distro + '/files'
    if not os.path.exists(dest):
        bb.mkdirhier(dest)
    for i in os.listdir(os.getcwd()):
        if os.path.isfile(i):
            try:
                shutil.copy(i, dest)
            except IOError:
                subprocess.call('fakeroot cp -rf ' + i + " " + dest, shell=True)

    bb.note("Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz")
    cmd = "LC_ALL=C TZ=UTC0 diff --exclude-from=" + work_dir + "/temp/exclude-from-file -Naur " + s + '.org' + ' ' + s + " | gzip -c > " + diff_file
    d.setVar('DIFF', cmd + "\n")
    d.setVarFlag('DIFF', 'func', '1')
    bb.build.exec_func('DIFF', d)
    shutil.rmtree(s + '.org', ignore_errors=True)
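
# For reference, the DIFF function generated above runs a command of this
# shape (paths illustrative):
#
#   LC_ALL=C TZ=UTC0 diff --exclude-from=${WORKDIR}/temp/exclude-from-file \
#       -Naur ${S}.org ${S} | gzip -c > ${P}-${PR}.diff.gz
#
# i.e. it diffs ${S}.org (presumably a pristine copy of the tree saved
# before patching) against the current ${S} and compresses the result.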

# This function runs when the user wants a tarball of the sources and
# patches after do_unpack
python do_archive_original_sources_patches(){
    archive_sources_patches(d, 'prepatch')
}
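
# Hookup note (an assumption, not shown in this excerpt): these archiving
# functions are typically attached as postfuncs of the corresponding
# tasks, in the same style as the do_kernel_checkout and
# do_package_write_rpm hookups below, e.g.:
#
#   do_unpack[postfuncs] += "do_archive_original_sources_patches "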

# This function runs when the user wants a tarball of the patched
# sources after do_patch
python do_archive_patched_sources(){
    archive_sources_patches(d, 'patched')
}

# This function runs when the user wants a tarball of the configured
# sources after do_configure
python do_archive_configured_sources(){
    archive_sources_patches(d, 'configured')
}

# This function runs when the user wants a tarball of the logs, or of
# both the logs and the scripts (.bb and .inc files)
python do_archive_scripts_logs(){
    archive_scripts_logs(d)
}

# This function runs when the user wants to know which variables and
# functions a running task uses; it also produces a diff file covering
# all the content a package should include.
python do_dumpdata_create_diff_gz(){
    dumpdata(d)
    create_diff_gz(d)
}

# This function prepares for archiving "linux-yocto", because that
# recipe creates the directory ${S} before do_patch instead of after
# do_unpack. This special handling applies to archiving linux-yocto only.
python do_archive_linux_yocto(){
    s = d.getVar('S', True)
    if 'linux-yocto' in s:
        source_tar_name = archive_sources(d, '')
        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() != 'SRPM':
            move_tarball_deploy(d, [source_tar_name, ''])
}
do_kernel_checkout[postfuncs] += "do_archive_linux_yocto "

# remove the tarballs of sources, patches, and logs after creating the srpm.
python do_remove_tarball(){
    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
        work_dir = d.getVar('WORKDIR', True)
        try:
            for file in os.listdir(os.getcwd()):
                if file in get_package(d):
                    os.remove(file)
            os.remove(os.path.join(work_dir, 'tar-package'))
        except (TypeError, OSError):
            pass
}

# Note from the commit "archiver.bbclass: fix the fakeroot and other issues"
# (From OE-Core rev: ac152f277fdff256def01af4268215a05685a0f7, [YOCTO #2619]):
#
# * Fix the fakeroot issue. The class uses "rpmbuild -bs" from
#   package_rpm.bbclass to generate the .src.rpm, but this did not work:
#   - The "fakeroot" command was used directly. There is no such command
#     in the native tools, so the host's fakeroot was picked up, and the
#     build failed when the host had no fakeroot installed.
#   - "rpmbuild -bs" does not need to run as root, but it is invoked from
#     do_package_write_rpm, which runs under fakeroot, and rpmbuild needs
#     to resolve the source file's user/group name. The source file is the
#     tarball created by the postfuncs of do_unpack or do_patch, which do
#     not run under fakeroot, so the file is owned by the real user
#     (e.g. robert). No such user exists under the native fakeroot
#     (pseudo), so rpmbuild failed. Using the host's fakeroot had worked
#     before only because it knows the users on the host.
#   Fix: remove the incorrect "fakeroot" and change the source file's
#   owner to root.root under fakeroot.
# * Other fixes:
#   - The typo "do_remove_taball" -> "do_remove_tarball", which caused
#     the tarball not to be removed.
#   - Add the _sourcedir definition to the rpmbuild command, since the
#     SOURCES are added to the specfile when archiver.bbclass is
#     inherited; previously it was only added to "rpmbuild -bs", not to
#     "rpmbuild -bb", causing errors from "rpmbuild -bb" even though the
#     build succeeded.
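
# A minimal sketch of the ownership fix described above (a hypothetical
# helper, not part of this class): running chown inside a fakeroot/pseudo
# task makes the tarball appear root-owned to rpmbuild.
#
#   import subprocess
#   def chown_tarball_for_rpmbuild(tarball):
#       # must itself be called from a task running under fakeroot
#       subprocess.call('chown -R root.root %s' % tarball, shell=True)
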
do_remove_tarball[deptask] = "do_archive_scripts_logs"
do_package_write_rpm[postfuncs] += "do_remove_tarball "
export get_licenses
export get_package