bitbake: Update along 1.8 branch

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2345 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
Richard Purdie 2007-08-03 13:40:52 +00:00
parent 034bbb805b
commit bfc70eb24e
16 changed files with 535 additions and 181 deletions

View File

@ -1,8 +1,17 @@
Changes in Bitbake 1.8.x: Changes in Bitbake 1.8.x:
- Rewrite svn fetcher to make adding extra operations easier
as part of future SRCDATE="now" fixes
(requires new FETCHCMD_svn definition in bitbake.conf)
- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
- Import persistent data store from trunk
- Sync fetcher code with that in trunk, adding SRCREV support for svn
- Add ConfigParsed Event after configuration parsing is complete
- data.emit_var() - only call getVar if we need the variable
Changes in Bitbake 1.8.6:
- Correctly redirect stdin when forking - Correctly redirect stdin when forking
- If parsing errors are found, exit, too many users miss the errors - If parsing errors are found, exit, too many users miss the errors
- Remove spurious PREFERRED_PROVIDER warnings - Remove spurious PREFERRED_PROVIDER warnings
- Start to fix path quoting
Changes in Bitbake 1.8.4: Changes in Bitbake 1.8.4:
- Make sure __inherit_cache is updated before calling include() (from Michael Krelin) - Make sure __inherit_cache is updated before calling include() (from Michael Krelin)

View File

@ -30,6 +30,7 @@ lib/bb/parse/__init__.py
lib/bb/parse/parse_py/__init__.py lib/bb/parse/parse_py/__init__.py
lib/bb/parse/parse_py/BBHandler.py lib/bb/parse/parse_py/BBHandler.py
lib/bb/parse/parse_py/ConfHandler.py lib/bb/parse/parse_py/ConfHandler.py
lib/bb/persist_data.py
lib/bb/providers.py lib/bb/providers.py
lib/bb/runqueue.py lib/bb/runqueue.py
lib/bb/shell.py lib/bb/shell.py

View File

@ -27,7 +27,7 @@ sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'l
import bb import bb
from bb import cooker from bb import cooker
__version__ = "1.8.5" __version__ = "1.8.7"
#============================================================================# #============================================================================#
# BBOptions # BBOptions

View File

@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc., # with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
__version__ = "1.8.5" __version__ = "1.8.7"
__all__ = [ __all__ = [

View File

@ -150,7 +150,7 @@ def exec_func_shell(func, d):
if bb.msg.debug_level['default'] > 0: f.write("set -x\n") if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
data.emit_env(f, d) data.emit_env(f, d)
f.write("cd '%s'\n" % os.getcwd()) f.write("cd %s\n" % os.getcwd())
if func: f.write("%s\n" % func) if func: f.write("%s\n" % func)
f.close() f.close()
os.chmod(runfile, 0775) os.chmod(runfile, 0775)
@ -189,7 +189,7 @@ def exec_func_shell(func, d):
else: else:
maybe_fakeroot = '' maybe_fakeroot = ''
lang_environment = "LC_ALL=C " lang_environment = "LC_ALL=C "
ret = os.system('%s%ssh -e "%s"' % (lang_environment, maybe_fakeroot, runfile)) ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile))
try: try:
os.chdir(prevdir) os.chdir(prevdir)
except: except:

View File

@ -336,6 +336,10 @@ class BBCooker:
if bb.data.getVarFlag(var, 'handler', data): if bb.data.getVarFlag(var, 'handler', data):
bb.event.register(var,bb.data.getVar(var, data)) bb.event.register(var,bb.data.getVar(var, data))
bb.fetch.fetcher_init(self.configuration.data)
bb.event.fire(bb.event.ConfigParsed(self.configuration.data))
except IOError: except IOError:
bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile ) bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile )
except bb.parse.ParseError, details: except bb.parse.ParseError, details:

View File

@ -337,6 +337,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if getVarFlag(var, "python", d): if getVarFlag(var, "python", d):
return 0 return 0
export = getVarFlag(var, "export", d)
unexport = getVarFlag(var, "unexport", d)
func = getVarFlag(var, "func", d)
if not all and not export and not unexport and not func:
return 0
try: try:
if all: if all:
oval = getVar(var, d, 0) oval = getVar(var, d, 0)
@ -362,28 +368,28 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
return 0 return 0
varExpanded = expand(var, d)
if unexport:
o.write('unset %s\n' % varExpanded)
return 1
val.rstrip() val.rstrip()
if not val: if not val:
return 0 return 0
varExpanded = expand(var, d)
if getVarFlag(var, "func", d): if func:
# NOTE: should probably check for unbalanced {} within the var # NOTE: should probably check for unbalanced {} within the var
o.write("%s() {\n%s\n}\n" % (varExpanded, val)) o.write("%s() {\n%s\n}\n" % (varExpanded, val))
else: return 1
if getVarFlag(var, "unexport", d):
o.write('unset %s\n' % varExpanded) if export:
return 1 o.write('export ')
if getVarFlag(var, "export", d):
o.write('export ') # if we're going to output this within doublequotes,
else: # to a shell, we need to escape the quotes in the var
if not all: alter = re.sub('"', '\\"', val.strip())
return 0 o.write('%s="%s"\n' % (varExpanded, alter))
# if we're going to output this within doublequotes,
# to a shell, we need to escape the quotes in the var
alter = re.sub('"', '\\"', val.strip())
o.write('%s="%s"\n' % (varExpanded, alter))
return 1 return 1

View File

@ -124,6 +124,8 @@ def getName(e):
else: else:
return e.__name__ return e.__name__
class ConfigParsed(Event):
"""Configuration Parsing Complete"""
class PkgBase(Event): class PkgBase(Event):
"""Base class for package events""" """Base class for package events"""

View File

@ -27,6 +27,12 @@ BitBake build tools.
import os, re import os, re
import bb import bb
from bb import data from bb import data
from bb import persist_data
try:
import cPickle as pickle
except ImportError:
import pickle
class FetchError(Exception): class FetchError(Exception):
"""Exception raised when a download fails""" """Exception raised when a download fails"""
@ -74,78 +80,193 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded) return bb.encodeurl(result_decoded)
methods = [] methods = []
urldata = {}
def init(urls = [], d = None): def fetcher_init(d):
if d == None: """
bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!") Called to initialize the fetchers once the configuration data is known
return Calls before this must not hit the cache.
"""
pd = persist_data.PersistData(d)
# Clear any cached url data
pd.delDomain("BB_URLDATA")
# When to drop SCM head revisions should be controlled by user policy
pd.delDomain("BB_URI_HEADREVS")
# Make sure our domains exist
pd.addDomain("BB_URLDATA")
pd.addDomain("BB_URI_HEADREVS")
pd.addDomain("BB_URI_LOCALCOUNT")
for m in methods: # Function call order is usually:
m.urls = [] # 1. init
# 2. go
# 3. localpaths
# localpath can be called at any time
for u in urls: def init(urls, d, cache = True):
ud = initdata(u, d) urldata = {}
if ud.method:
ud.method.urls.append(u)
def initdata(url, d): if cache:
urldata, pd, fn = getdata(d)
for url in urls:
if url not in urldata:
ud = FetchData(url, d)
for m in methods:
if m.supports(url, ud, d):
ud.init(m, d)
ud.setup_localpath(d)
break
urldata[url] = ud
if cache:
pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0))
return urldata
def getdata(d):
urldata = {}
fn = bb.data.getVar('FILE', d, 1) fn = bb.data.getVar('FILE', d, 1)
if fn not in urldata: pd = persist_data.PersistData(d)
urldata[fn] = {} encdata = pd.getValue("BB_URLDATA", fn)
if url not in urldata[fn]: if encdata:
ud = FetchData() urldata = pickle.loads(str(encdata))
(ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
ud.date = Fetch.getSRCDate(ud, d)
for m in methods:
if m.supports(url, ud, d):
ud.localpath = m.localpath(url, ud, d)
ud.md5 = ud.localpath + '.md5'
# if user sets localpath for file, use it instead.
if "localpath" in ud.parm:
ud.localpath = ud.parm["localpath"]
ud.method = m
break
urldata[fn][url] = ud
return urldata[fn][url]
def go(d): return urldata, pd, fn
"""Fetch all urls"""
fn = bb.data.getVar('FILE', d, 1)
for m in methods:
for u in m.urls:
ud = urldata[fn][u]
if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[fn][u].md5):
# File already present along with md5 stamp file
# Touch md5 file to show activity
os.utime(ud.md5, None)
continue
# RP - is olddir needed?
# olddir = os.path.abspath(os.getcwd())
m.go(u, ud , d)
# os.chdir(olddir)
if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d)
def localpaths(d): def go(d, urldata = None):
"""Return a list of the local filenames, assuming successful fetch""" """
Fetch all urls
"""
if not urldata:
urldata, pd, fn = getdata(d)
for u in urldata:
ud = urldata[u]
m = ud.method
if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
# File already present along with md5 stamp file
# Touch md5 file to show activity
os.utime(ud.md5, None)
continue
m.go(u, ud, d)
if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d)
def localpaths(d, urldata = None):
"""
Return a list of the local filenames, assuming successful fetch
"""
local = [] local = []
fn = bb.data.getVar('FILE', d, 1) if not urldata:
for m in methods: urldata, pd, fn = getdata(d)
for u in m.urls:
local.append(urldata[fn][u].localpath) for u in urldata:
ud = urldata[u]
local.append(ud.localpath)
return local return local
def localpath(url, d): def get_srcrev(d):
ud = initdata(url, d) """
if ud.method: Return the version string for the current package
return ud.localpath (usually to be used as PV)
Most packages usually only have one SCM so we just pass on the call.
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
have been set.
"""
scms = []
urldata, pd, fn = getdata(d)
if len(urldata) == 0:
src_uri = bb.data.getVar('SRC_URI', d, 1).split()
for url in src_uri:
if url not in urldata:
ud = FetchData(url, d)
for m in methods:
if m.supports(url, ud, d):
ud.init(m, d)
break
urldata[url] = ud
if ud.method.suppports_srcrev():
scms.append(url)
ud.setup_localpath(d)
else:
for u in urldata:
ud = urldata[u]
if ud.method.suppports_srcrev():
scms.append(u)
if len(scms) == 0:
bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
raise ParameterError
if len(scms) == 1:
return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
bb.msg.error(bb.msg.domain.Fetcher, "Sorry, support for SRCREV_FORMAT still needs to be written")
raise ParameterError
def localpath(url, d, cache = True):
"""
Called from the parser with cache=False since the cache isn't ready
at this point. Also called from classes in OE e.g. patch.bbclass
"""
ud = init([url], d, cache)
if ud[url].method:
return ud[url].localpath
return url return url
def runfetchcmd(cmd, d, quiet = False):
"""
Run cmd returning the command output
Raise an error if interrupted or cmd fails
Optionally echo command output to stdout
"""
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
# Need to export PATH as binary could be in metadata paths
# rather than host provided
pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
stdout_handle = os.popen(pathcmd, "r")
output = ""
while 1:
line = stdout_handle.readline()
if not line:
break
if not quiet:
print line
output += line
status = stdout_handle.close() or 0
signal = status >> 8
exitstatus = status & 0xff
if signal:
raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (pathcmd, signal, output))
elif status != 0:
raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (pathcmd, status, output))
return output
class FetchData(object): class FetchData(object):
"""Class for fetcher variable store""" """Class for fetcher variable store"""
def __init__(self): def __init__(self, url, d):
self.localfile = "" self.localfile = ""
(self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
self.date = Fetch.getSRCDate(self, d)
self.url = url
def init(self, method, d):
self.method = method
def setup_localpath(self, d):
if "localpath" in self.parm:
self.localpath = self.parm["localpath"]
else:
self.localpath = self.method.localpath(self.url, self, d)
self.md5 = self.localpath + '.md5'
# if user sets localpath for file, use it instead.
class Fetch(object): class Fetch(object):
@ -182,6 +303,12 @@ class Fetch(object):
""" """
return False return False
def suppports_srcrev(self):
"""
The fetcher supports auto source revisions (SRCREV)
"""
return False
def go(self, url, urldata, d): def go(self, url, urldata, d):
""" """
Fetch urls Fetch urls
@ -269,6 +396,50 @@ class Fetch(object):
md5out.close() md5out.close()
write_md5sum = staticmethod(write_md5sum) write_md5sum = staticmethod(write_md5sum)
def latest_revision(self, url, ud, d):
"""
Look in the cache for the latest revision, if not present ask the SCM.
"""
if not hasattr(self, "_latest_revision"):
raise ParameterError
pd = persist_data.PersistData(d)
key = self._revision_key(url, ud, d)
rev = pd.getValue("BB_URI_HEADREVS", key)
if rev != None:
return str(rev)
rev = self._latest_revision(url, ud, d)
pd.setValue("BB_URI_HEADREVS", key, rev)
return rev
def sortable_revision(self, url, ud, d):
"""
"""
if hasattr(self, "_sortable_revision"):
return self._sortable_revision(url, ud, d)
pd = persist_data.PersistData(d)
key = self._revision_key(url, ud, d)
latest_rev = self.latest_revision(url, ud, d)
last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")
if last_rev == latest_rev:
return str(count + "+" + latest_rev)
if count is None:
count = "0"
else:
count = str(int(count) + 1)
pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)
return str(count + "+" + latest_rev)
import cvs import cvs
import git import git
import local import local
@ -278,11 +449,11 @@ import svk
import ssh import ssh
import perforce import perforce
methods.append(cvs.Cvs())
methods.append(git.Git())
methods.append(local.Local()) methods.append(local.Local())
methods.append(svn.Svn())
methods.append(wget.Wget()) methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk()) methods.append(svk.Svk())
methods.append(ssh.SSH()) methods.append(ssh.SSH())
methods.append(perforce.Perforce()) methods.append(perforce.Perforce())

View File

@ -25,6 +25,7 @@ import bb
from bb import data from bb import data
from bb.fetch import Fetch from bb.fetch import Fetch
from bb.fetch import FetchError from bb.fetch import FetchError
from bb.fetch import runfetchcmd
def prunedir(topdir): def prunedir(topdir):
# Delete everything reachable from the directory named in 'topdir'. # Delete everything reachable from the directory named in 'topdir'.
@ -35,19 +36,6 @@ def prunedir(topdir):
for name in dirs: for name in dirs:
os.rmdir(os.path.join(root, name)) os.rmdir(os.path.join(root, name))
def rungitcmd(cmd,d):
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
# Need to export PATH as git is likely to be in metadata paths
# rather than host provided
pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
myret = os.system(pathcmd)
if myret != 0:
raise FetchError("Git: %s failed" % pathcmd)
class Git(Fetch): class Git(Fetch):
"""Class to fetch a module or modules from git repositories""" """Class to fetch a module or modules from git repositories"""
def supports(self, url, ud, d): def supports(self, url, ud, d):
@ -62,24 +50,22 @@ class Git(Fetch):
if 'protocol' in ud.parm: if 'protocol' in ud.parm:
ud.proto = ud.parm['protocol'] ud.proto = ud.parm['protocol']
ud.tag = "master" tag = data.getVar("SRCREV", d, 0)
if 'tag' in ud.parm: if 'tag' in ud.parm:
ud.tag = ud.parm['tag'] ud.tag = ud.parm['tag']
elif tag and "get_srcrev" not in tag and len(tag) == 40:
ud.tag = tag
else:
ud.tag = self.latest_revision(url, ud, d)
ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d) ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def forcefetch(self, url, ud, d):
# tag=="master" must always update
if (ud.tag == "master"):
return True
return False
def go(self, loc, ud, d): def go(self, loc, ud, d):
"""Fetch url""" """Fetch url"""
if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): if Fetch.try_mirror(d, ud.localfile):
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath) bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
return return
@ -96,32 +82,50 @@ class Git(Fetch):
if Fetch.try_mirror(d, repofilename): if Fetch.try_mirror(d, repofilename):
bb.mkdirhier(repodir) bb.mkdirhier(repodir)
os.chdir(repodir) os.chdir(repodir)
rungitcmd("tar -xzf %s" % (repofile),d) runfetchcmd("tar -xzf %s" % (repofile), d)
else: else:
rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d) runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d)
os.chdir(repodir) os.chdir(repodir)
rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
rungitcmd("git prune-packed", d)
rungitcmd("git pack-redundant --all | xargs -r rm", d)
# Remove all but the .git directory # Remove all but the .git directory
rungitcmd("rm * -Rf", d) runfetchcmd("rm * -Rf", d)
runfetchcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path), d)
runfetchcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d)
runfetchcmd("git prune-packed", d)
runfetchcmd("git pack-redundant --all | xargs -r rm", d)
# old method of downloading tags # old method of downloading tags
#rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d) #runfetchcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")), d)
os.chdir(repodir) os.chdir(repodir)
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
if os.path.exists(codir): if os.path.exists(codir):
prunedir(codir) prunedir(codir)
bb.mkdirhier(codir) bb.mkdirhier(codir)
os.chdir(repodir) os.chdir(repodir)
rungitcmd("git read-tree %s" % (ud.tag),d) runfetchcmd("git read-tree %s" % (ud.tag), d)
rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
os.chdir(codir) os.chdir(codir)
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d) runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
os.chdir(repodir)
prunedir(codir)
def suppports_srcrev(self):
return True
def _revision_key(self, url, ud, d):
"""
Return a unique key for the url
"""
return "git:" + ud.host + ud.path.replace('/', '.')
def _latest_revision(self, url, ud, d):
output = runfetchcmd("git ls-remote %s://%s%s" % (ud.proto, ud.host, ud.path), d, True)
return output.split()[0]

View File

@ -125,7 +125,7 @@ class Perforce(Fetch):
""" """
# try to use the tarball stash # try to use the tarball stash
if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): if Fetch.try_mirror(d, ud.localfile):
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath) bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
return return

View File

@ -1,17 +1,12 @@
# ex:ts=4:sw=4:sts=4:et # ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
""" """
BitBake 'Fetch' implementations BitBake 'Fetch' implementation for svn.
This implementation is for svn. It is based on the cvs implementation.
""" """
# Copyright (C) 2004 Marcin Juszkiewicz # Copyright (C) 2003, 2004 Chris Larson
# # Copyright (C) 2004 Marcin Juszkiewicz
# Classes for obtaining upstream sources for the
# BitBake build tools.
# Copyright (C) 2003, 2004 Chris Larson
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as # it under the terms of the GNU General Public License version 2 as
@ -35,6 +30,7 @@ from bb import data
from bb.fetch import Fetch from bb.fetch import Fetch
from bb.fetch import FetchError from bb.fetch import FetchError
from bb.fetch import MissingParameterError from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
class Svn(Fetch): class Svn(Fetch):
"""Class to fetch a module or modules from svn repositories""" """Class to fetch a module or modules from svn repositories"""
@ -47,32 +43,54 @@ class Svn(Fetch):
def localpath(self, url, ud, d): def localpath(self, url, ud, d):
if not "module" in ud.parm: if not "module" in ud.parm:
raise MissingParameterError("svn method needs a 'module' parameter") raise MissingParameterError("svn method needs a 'module' parameter")
else:
ud.module = ud.parm["module"]
ud.revision = "" ud.module = ud.parm["module"]
# Create paths to svn checkouts
relpath = ud.path
if relpath.startswith('/'):
# Remove leading slash as os.path.join can't cope
relpath = relpath[1:]
ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
ud.moddir = os.path.join(ud.pkgdir, ud.module)
if 'rev' in ud.parm: if 'rev' in ud.parm:
ud.revision = ud.parm['rev']
if ud.revision:
ud.date = "" ud.date = ""
ud.revision = ud.parm['rev']
elif 'date' in ud.date:
ud.date = ud.parm['date']
ud.revision = ""
else:
#
# ***Nasty hacks***
# If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
# Will warn people to switch to SRCREV here
#
# How can we tell when a user has overridden SRCDATE?
# check for "get_srcdate" in unexpanded SRCREV - ugly
#
pv = data.getVar("PV", d, 0)
if "DATE" in pv:
ud.revision = ""
else:
rev = data.getVar("SRCREV", d, 0)
if "get_srcrev" in rev:
ud.revision = self.latest_revision(url, ud, d)
else:
ud.revision = rev
ud.date = ""
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def forcefetch(self, url, ud, d): def _buildsvncommand(self, ud, d, command):
if (ud.date == "now"): """
return True Build up an svn commandline based on ud
return False command is "fetch", "update", "info"
"""
def go(self, loc, ud, d): basecmd = data.expand('${FETCHCMD_svn}', d)
"""Fetch url"""
# try to use the tarball stash
if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
return
proto = "svn" proto = "svn"
if "proto" in ud.parm: if "proto" in ud.parm:
@ -84,12 +102,8 @@ class Svn(Fetch):
svnroot = ud.host + ud.path svnroot = ud.host + ud.path
# either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now" # either use the revision, or SRCDATE in braces,
options = [] options = []
if ud.revision:
options.append("-r %s" % ud.revision)
elif ud.date != "now":
options.append("-r {%s}" % ud.date)
if ud.user: if ud.user:
options.append("--username %s" % ud.user) options.append("--username %s" % ud.user)
@ -97,48 +111,93 @@ class Svn(Fetch):
if ud.pswd: if ud.pswd:
options.append("--password %s" % ud.pswd) options.append("--password %s" % ud.pswd)
localdata = data.createCopy(d) if command is "info":
data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
data.update_data(localdata) else:
if ud.revision:
options.append("-r %s" % ud.revision)
elif ud.date:
options.append("-r {%s}" % ud.date)
data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata) if command is "fetch":
data.setVar('SVNCOOPTS', " ".join(options), localdata) svncmd = "%s co %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, ud.module)
data.setVar('SVNMODULE', ud.module, localdata) elif command is "update":
svncmd = data.getVar('FETCHCOMMAND', localdata, 1) svncmd = "%s update %s" % (basecmd, " ".join(options))
svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1) else:
raise FetchError("Invalid svn command %s" % command)
if svn_rsh: if svn_rsh:
svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)
pkg = data.expand('${PN}', d) return svncmd
pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
moddir = os.path.join(pkgdir, ud.module)
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")
if os.access(os.path.join(moddir, '.svn'), os.R_OK): def go(self, loc, ud, d):
"""Fetch url"""
# try to use the tarball stash
if Fetch.try_mirror(d, ud.localfile):
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
return
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update")
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
# update sources there # update sources there
os.chdir(moddir) os.chdir(ud.moddir)
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
myret = os.system(svnupcmd) runfetchcmd(svnupdatecmd, d)
else: else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
# check out sources there # check out sources there
bb.mkdirhier(pkgdir) bb.mkdirhier(ud.pkgdir)
os.chdir(pkgdir) os.chdir(ud.pkgdir)
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
myret = os.system(svncmd) runfetchcmd(svnfetchcmd, d)
if myret != 0: os.chdir(ud.pkgdir)
raise FetchError(ud.module)
os.chdir(pkgdir)
# tar them up to a defined filename # tar them up to a defined filename
myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) try:
if myret != 0: runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d)
except:
t, v, tb = sys.exc_info()
try: try:
os.unlink(ud.localpath) os.unlink(ud.localpath)
except OSError: except OSError:
pass pass
raise FetchError(ud.module) raise t, v, tb
def suppports_srcrev(self):
return True
def _revision_key(self, url, ud, d):
"""
Return a unique key for the url
"""
return "svn:" + ud.moddir
def _latest_revision(self, url, ud, d):
"""
Return the latest upstream revision number
"""
bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
revision = None
for line in output.splitlines():
if "Last Changed Rev" in line:
revision = line.split(":")[1].strip()
return revision
def _sortable_revision(self, url, ud, d):
"""
Return a sortable revision number which in our case is the revision number
(use the cached version to avoid network access)
"""
return self.latest_revision(url, ud, d)

View File

@ -37,6 +37,7 @@ domain = bb.utils.Enum(
'Depends', 'Depends',
'Fetcher', 'Fetcher',
'Parsing', 'Parsing',
'PersistData',
'Provider', 'Provider',
'RunQueue', 'RunQueue',
'TaskData', 'TaskData',

View File

@ -400,14 +400,14 @@ def set_additional_vars(file, d, include):
from bb import fetch from bb import fetch
try: try:
fetch.init(src_uri.split(), d) ud = fetch.init(src_uri.split(), d)
a += fetch.localpaths(d, ud)
except fetch.NoMethodError: except fetch.NoMethodError:
pass pass
except bb.MalformedUrl,e: except bb.MalformedUrl,e:
raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e) raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
a += fetch.localpaths(d)
del fetch del fetch
data.setVar('A', " ".join(a), d) data.setVar('A', " ".join(a), d)

View File

@ -45,14 +45,17 @@ def localpath(fn, d):
if os.path.exists(fn): if os.path.exists(fn):
return fn return fn
if "://" not in fn:
return fn
localfn = None localfn = None
try: try:
localfn = bb.fetch.localpath(fn, d) localfn = bb.fetch.localpath(fn, d, False)
except bb.MalformedUrl: except bb.MalformedUrl:
pass pass
if not localfn: if not localfn:
localfn = fn return fn
return localfn return localfn
def obtain(fn, data): def obtain(fn, data):
@ -67,14 +70,14 @@ def obtain(fn, data):
return localfn return localfn
bb.mkdirhier(dldir) bb.mkdirhier(dldir)
try: try:
bb.fetch.init([fn]) ud = bb.fetch.init([fn], data, False)
except bb.fetch.NoMethodError: except bb.fetch.NoMethodError:
(type, value, traceback) = sys.exc_info() (type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value) bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
return localfn return localfn
try: try:
bb.fetch.go(data) bb.fetch.go(data, ud)
except bb.fetch.MissingParameterError: except bb.fetch.MissingParameterError:
(type, value, traceback) = sys.exc_info() (type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value) bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)

View File

@ -0,0 +1,94 @@
# BitBake Persistent Data Store
#
# Copyright (C) 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import bb, os
try:
import sqlite3
except ImportError:
try:
from pysqlite2 import dbapi2 as sqlite3
except ImportError:
bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. A 'python-pysqlite2' like package is likely to be what you need.")
class PersistData:
"""
BitBake Persistent Data Store
Used to store data in a central location such that other threads/tasks can
access them at some future date.
The "domain" is used as a key to isolate each data pool and in this
implementation corresponds to an SQL table. The SQL table consists of a
simple key and value pair.
Why sqlite? It handles all the locking issues for us.
"""
def __init__(self, d):
self.cachedir = bb.data.getVar("CACHE", d, True)
if self.cachedir in [None, '']:
bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'CACHE' variable.")
try:
os.stat(self.cachedir)
except OSError:
bb.mkdirhier(self.cachedir)
self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3")
bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
def addDomain(self, domain):
"""
Should be called before any domain is used
Creates it if it doesn't exist.
"""
self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
def delDomain(self, domain):
"""
Removes a domain and all the data it contains
"""
self.connection.execute("DROP TABLE IF EXISTS %s;" % domain)
def getValue(self, domain, key):
"""
Return the value of a key for a domain
"""
data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
for row in data:
return row[1]
def setValue(self, domain, key, value):
"""
Sets the value of a key for a domain
"""
data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
rows = 0
for row in data:
rows = rows + 1
if rows:
self.connection.execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
else:
self.connection.execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])
def delValue(self, domain, key):
"""
Deletes a key/value pair
"""
self.connection.execute("DELETE from %s where key=?;" % domain, [key])