diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
index 93cc45aaf4..135aba9fee 100644
--- a/bitbake/ChangeLog
+++ b/bitbake/ChangeLog
@@ -1,3 +1,21 @@
+Changes in BitBake 1.7.3:
+
+Changes in BitBake 1.7.1:
+ - Major updates of the dependency handling and execution
+ of tasks
+ - Change of the SVN Fetcher to keep the checkout around
+ courtesy of Paul Sokolovsky (#1367)
+
+Changes in BitBake 1.6.0:
+ - Better msg handling
+ - COW dict implementation from Tim Ansell (mithro) leading
+ to better performance
+ - Speed up of -s
+
+Changes in BitBake 1.4.4:
+ - SRCDATE handling now courtesy of Justin Patrin
+ - #1017 fix to work with rm_work
+
Changes in BitBake 1.4.2:
- Send logs to oe.pastebin.com instead of pastebin.com
fixes #856
diff --git a/bitbake/MANIFEST b/bitbake/MANIFEST
index 144c74c0fe..f088792aa4 100644
--- a/bitbake/MANIFEST
+++ b/bitbake/MANIFEST
@@ -5,29 +5,34 @@ setup.py
bin/bitdoc
bin/bbimage
bin/bitbake
+lib/bb/COW.py
lib/bb/__init__.py
lib/bb/build.py
lib/bb/cache.py
lib/bb/data.py
lib/bb/data_smart.py
lib/bb/event.py
-lib/bb/fetch/bk.py
+lib/bb/manifest.py
+lib/bb/methodpool.py
+lib/bb/msg.py
+lib/bb/providers.py
+lib/bb/runqueue.py
+lib/bb/shell.py
+lib/bb/taskdata.py
+lib/bb/utils.py
lib/bb/fetch/cvs.py
lib/bb/fetch/git.py
lib/bb/fetch/__init__.py
lib/bb/fetch/local.py
+lib/bb/fetch/perforce.py
+lib/bb/fetch/ssh.py
lib/bb/fetch/svk.py
lib/bb/fetch/svn.py
lib/bb/fetch/wget.py
-lib/bb/fetch/ssh.py
-lib/bb/manifest.py
-lib/bb/methodpool.py
lib/bb/parse/__init__.py
lib/bb/parse/parse_py/BBHandler.py
lib/bb/parse/parse_py/ConfHandler.py
lib/bb/parse/parse_py/__init__.py
-lib/bb/shell.py
-lib/bb/utils.py
doc/COPYING.GPL
doc/COPYING.MIT
doc/manual/html.css
diff --git a/bitbake/TODO b/bitbake/TODO
deleted file mode 100644
index 511fae4a25..0000000000
--- a/bitbake/TODO
+++ /dev/null
@@ -1,18 +0,0 @@
-On popular request by popular people a list of tasks to-do:
-
- -Kill insecure usage of os.system either by properly escaping
- the strings or a faster replacement not involving /bin/sh
- -Introduce a -p option to automatically hotshot/profile the
- run
- -Cache dependencies separately and invalidate them when any file
- changed.
- -...
-
-
-DONE:
-· -On generating the inter package deps do not parse each file multiply
-· times.
- -We build the lists while parsing the data now
-· (WAS: Do not generate the world dependency tree, only when someone
-· requests it.
-
diff --git a/bitbake/bin/bbimage b/bitbake/bin/bbimage
index df6caa28ed..9adedbfc63 100755
--- a/bitbake/bin/bbimage
+++ b/bitbake/bin/bbimage
@@ -18,15 +18,16 @@
# Place, Suite 330, Boston, MA 02111-1307 USA.
import sys, os
-sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
from bb import *
-__version__ = 1.0
+__version__ = 1.1
type = "jffs2"
cfg_bb = data.init()
cfg_oespawn = data.init()
+bb.msg.set_debug_level(0)
def usage():
print "Usage: bbimage [options ...]"
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 7fbe7ed5eb..85a0cbc398 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -7,6 +7,7 @@
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
+# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@@ -24,136 +25,13 @@
import sys, os, getopt, glob, copy, os.path, re, time
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
-from bb import utils, data, parse, debug, event, fatal, cache
+from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
from sets import Set
import itertools, optparse
parsespin = itertools.cycle( r'|/-\\' )
-bbdebug = 0
-
-__version__ = "1.4.3"
-
-#============================================================================#
-# BBParsingStatus
-#============================================================================#
-class BBParsingStatus:
- """
- The initial idea for this status class is to use the data when it is
- already loaded instead of loading it from various place over and over
- again.
- """
-
- def __init__(self):
- self.providers = {}
- self.rproviders = {}
- self.packages = {}
- self.packages_dynamic = {}
- self.bbfile_priority = {}
- self.bbfile_config_priorities = []
- self.ignored_dependencies = None
- self.possible_world = []
- self.world_target = Set()
- self.pkg_pn = {}
- self.pkg_fn = {}
- self.pkg_pvpr = {}
- self.pkg_dp = {}
- self.pn_provides = {}
- self.all_depends = Set()
- self.build_all = {}
- self.rundeps = {}
- self.runrecs = {}
- self.stamp = {}
-
- def handle_bb_data(self, file_name, bb_cache, cached):
- """
- We will fill the dictionaries with the stuff we
- need for building the tree more fast
- """
-
- pn = bb_cache.getVar('PN', file_name, True)
- pv = bb_cache.getVar('PV', file_name, True)
- pr = bb_cache.getVar('PR', file_name, True)
- dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
- provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split())
- depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split()
- packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split()
- packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
- rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split()
-
- # build PackageName to FileName lookup table
- if pn not in self.pkg_pn:
- self.pkg_pn[pn] = []
- self.pkg_pn[pn].append(file_name)
-
- self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0")
- self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True)
-
- # build FileName to PackageName lookup table
- self.pkg_fn[file_name] = pn
- self.pkg_pvpr[file_name] = (pv,pr)
- self.pkg_dp[file_name] = dp
-
- # Build forward and reverse provider hashes
- # Forward: virtual -> [filenames]
- # Reverse: PN -> [virtuals]
- if pn not in self.pn_provides:
- self.pn_provides[pn] = Set()
- self.pn_provides[pn] |= provides
-
- for provide in provides:
- if provide not in self.providers:
- self.providers[provide] = []
- self.providers[provide].append(file_name)
-
- for dep in depends:
- self.all_depends.add(dep)
-
- # Build reverse hash for PACKAGES, so runtime dependencies
- # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
- for package in packages:
- if not package in self.packages:
- self.packages[package] = []
- self.packages[package].append(file_name)
- rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
-
- for package in packages_dynamic:
- if not package in self.packages_dynamic:
- self.packages_dynamic[package] = []
- self.packages_dynamic[package].append(file_name)
-
- for rprovide in rprovides:
- if not rprovide in self.rproviders:
- self.rproviders[rprovide] = []
- self.rproviders[rprovide].append(file_name)
-
- # Build hash of runtime depeneds and rececommends
-
- def add_dep(deplist, deps):
- for dep in deps:
- if not dep in deplist:
- deplist[dep] = ""
-
- if not file_name in self.rundeps:
- self.rundeps[file_name] = {}
- if not file_name in self.runrecs:
- self.runrecs[file_name] = {}
-
- for package in packages + [pn]:
- if not package in self.rundeps[file_name]:
- self.rundeps[file_name][package] = {}
- if not package in self.runrecs[file_name]:
- self.runrecs[file_name][package] = {}
-
- add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or ""))
- add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or ""))
- add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
- add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
-
- # Collect files we may need for possible world-dep
- # calculations
- if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True):
- self.possible_world.append(file_name)
+__version__ = "1.7.4"
#============================================================================#
# BBStatistics
@@ -198,207 +76,63 @@ class BBCooker:
Manages one bitbake build run
"""
- ParsingStatus = BBParsingStatus # make it visible from the shell
Statistics = BBStatistics # make it visible from the shell
def __init__( self ):
self.build_cache_fail = []
self.build_cache = []
- self.rbuild_cache = []
- self.building_list = []
- self.build_path = []
- self.consider_msgs_cache = []
- self.preferred = {}
self.stats = BBStatistics()
self.status = None
self.cache = None
self.bb_cache = None
- def tryBuildPackage( self, fn, item, the_data ):
- """Build one package"""
+ def tryBuildPackage(self, fn, item, task, the_data, build_depends):
+ """
+ Build one task of a package, optionally build following task depends
+ """
bb.event.fire(bb.event.PkgStarted(item, the_data))
try:
self.stats.attempt += 1
if self.configuration.force:
- bb.data.setVarFlag('do_%s' % self.configuration.cmd, 'force', 1, the_data)
+ bb.data.setVarFlag('do_%s' % task, 'force', 1, the_data)
+ if not build_depends:
+ bb.data.setVarFlag('do_%s' % task, 'dontrundeps', 1, the_data)
if not self.configuration.dry_run:
- bb.build.exec_task('do_%s' % self.configuration.cmd, the_data)
+ bb.build.exec_task('do_%s' % task, the_data)
bb.event.fire(bb.event.PkgSucceeded(item, the_data))
self.build_cache.append(fn)
return True
except bb.build.FuncFailed:
self.stats.fail += 1
- bb.error("task stack execution failed")
+ bb.msg.error(bb.msg.domain.Build, "task stack execution failed")
bb.event.fire(bb.event.PkgFailed(item, the_data))
self.build_cache_fail.append(fn)
raise
except bb.build.EventException, e:
self.stats.fail += 1
event = e.args[1]
- bb.error("%s event exception, aborting" % bb.event.getName(event))
+ bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event))
bb.event.fire(bb.event.PkgFailed(item, the_data))
self.build_cache_fail.append(fn)
raise
- def tryBuild( self, fn, virtual , buildAllDeps , build_depends = []):
+ def tryBuild( self, fn, build_depends):
"""
Build a provider and its dependencies.
build_depends is a list of previous build dependencies (not runtime)
If build_depends is empty, we're dealing with a runtime depends
"""
- the_data = self.bb_cache.loadDataFull(fn, self)
-
- # Only follow all (runtime) dependencies if doing a build
- if not buildAllDeps and self.configuration.cmd is "build":
- buildAllDeps = self.status.build_all[fn]
-
- # Error on build time dependency loops
- if build_depends and build_depends.count(fn) > 1:
- bb.error("%s depends on itself (eventually)" % fn)
- bb.error("upwards chain is: %s" % (" -> ".join(self.build_path)))
- return False
-
- # See if this is a runtime dependency we've already built
- # Or a build dependency being handled in a different build chain
- if fn in self.building_list:
- return self.addRunDeps(fn, virtual , buildAllDeps)
+ the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
item = self.status.pkg_fn[fn]
- self.building_list.append(fn)
+ if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data) and not self.configuration.force:
+ self.build_cache.append(fn)
+ return True
- pathstr = "%s (%s)" % (item, virtual)
- self.build_path.append(pathstr)
-
- depends_list = (bb.data.getVar('DEPENDS', the_data, True) or "").split()
-
- if self.configuration.verbose:
- bb.note("current path: %s" % (" -> ".join(self.build_path)))
- bb.note("dependencies for %s are: %s" % (item, " ".join(depends_list)))
-
- try:
- failed = False
-
- depcmd = self.configuration.cmd
- bbdepcmd = bb.data.getVarFlag('do_%s' % self.configuration.cmd, 'bbdepcmd', the_data)
- if bbdepcmd is not None:
- if bbdepcmd == "":
- depcmd = None
- else:
- depcmd = bbdepcmd
-
- if depcmd:
- oldcmd = self.configuration.cmd
- self.configuration.cmd = depcmd
-
- for dependency in depends_list:
- if dependency in self.status.ignored_dependencies:
- continue
- if not depcmd:
- continue
- if self.buildProvider( dependency , buildAllDeps , build_depends ) == 0:
- bb.error("dependency %s (for %s) not satisfied" % (dependency,item))
- failed = True
- if self.configuration.abort:
- break
-
- if depcmd:
- self.configuration.cmd = oldcmd
-
- if failed:
- self.stats.deps += 1
- return False
-
- if not self.addRunDeps(fn, virtual , buildAllDeps):
- return False
-
- if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data):
- self.build_cache.append(fn)
- return True
-
- return self.tryBuildPackage( fn, item, the_data )
-
- finally:
- self.building_list.remove(fn)
- self.build_path.remove(pathstr)
-
- def findBestProvider( self, pn, pkg_pn = None):
- """
- If there is a PREFERRED_VERSION, find the highest-priority bbfile
- providing that version. If not, find the latest version provided by
- an bbfile in the highest-priority set.
- """
- if not pkg_pn:
- pkg_pn = self.status.pkg_pn
-
- files = pkg_pn[pn]
- priorities = {}
- for f in files:
- priority = self.status.bbfile_priority[f]
- if priority not in priorities:
- priorities[priority] = []
- priorities[priority].append(f)
- p_list = priorities.keys()
- p_list.sort(lambda a, b: a - b)
- tmp_pn = []
- for p in p_list:
- tmp_pn = [priorities[p]] + tmp_pn
-
- preferred_file = None
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.setVar('OVERRIDES', "%s:%s" % (pn, data.getVar('OVERRIDES', localdata)), localdata)
- bb.data.update_data(localdata)
-
- preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True)
- if preferred_v:
- m = re.match('(.*)_(.*)', preferred_v)
- if m:
- preferred_v = m.group(1)
- preferred_r = m.group(2)
- else:
- preferred_r = None
-
- for file_set in tmp_pn:
- for f in file_set:
- pv,pr = self.status.pkg_pvpr[f]
- if preferred_v == pv and (preferred_r == pr or preferred_r == None):
- preferred_file = f
- preferred_ver = (pv, pr)
- break
- if preferred_file:
- break;
- if preferred_r:
- pv_str = '%s-%s' % (preferred_v, preferred_r)
- else:
- pv_str = preferred_v
- if preferred_file is None:
- bb.note("preferred version %s of %s not available" % (pv_str, pn))
- else:
- bb.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s" % (preferred_file, pv_str, pn))
-
- del localdata
-
- # get highest priority file set
- files = tmp_pn[0]
- latest = None
- latest_p = 0
- latest_f = None
- for file_name in files:
- pv,pr = self.status.pkg_pvpr[file_name]
- dp = self.status.pkg_dp[file_name]
-
- if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pv, pr)) < 0)) or (dp > latest_p):
- latest = (pv, pr)
- latest_f = file_name
- latest_p = dp
- if preferred_file is None:
- preferred_file = latest_f
- preferred_ver = latest
-
- return (latest,latest_f,preferred_ver, preferred_file)
+ return self.tryBuildPackage(fn, item, self.configuration.cmd, the_data, build_depends)
def showVersions( self ):
pkg_pn = self.status.pkg_pn
@@ -407,7 +141,7 @@ class BBCooker:
# Sort by priority
for pn in pkg_pn.keys():
- (last_ver,last_file,pref_ver,pref_file) = self.findBestProvider(pn)
+ (last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
preferred_versions[pn] = (pref_ver, pref_file)
latest_versions[pn] = (last_ver, last_file)
@@ -425,7 +159,7 @@ class BBCooker:
print "%-30s %20s %20s" % (p, latest[0][0] + "-" + latest[0][1],
prefstr)
-
+
def showEnvironment( self ):
"""Show the outer or per-package environment"""
@@ -433,268 +167,190 @@ class BBCooker:
self.cb = None
self.bb_cache = bb.cache.init(self)
try:
- self.configuration.data = self.bb_cache.loadDataFull(self.configuration.buildfile, self)
+ self.configuration.data = self.bb_cache.loadDataFull(self.configuration.buildfile, self.configuration.data)
except IOError, e:
- fatal("Unable to read %s: %s" % ( self.configuration.buildfile, e ))
+ bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % ( self.configuration.buildfile, e ))
except Exception, e:
- fatal("%s" % e)
+ bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
# emit variables and shell functions
try:
data.update_data( self.configuration.data )
data.emit_env(sys.__stdout__, self.configuration.data, True)
except Exception, e:
- fatal("%s" % e)
+ bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
# emit the metadata which isnt valid shell
+ data.expandKeys( self.configuration.data )
for e in self.configuration.data.keys():
if data.getVarFlag( e, 'python', self.configuration.data ):
sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, self.configuration.data, 1)))
- def filterProviders(self, providers, item):
+ def generateDotGraph( self, pkgs_to_build, ignore_deps ):
"""
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- """
- eligible = []
- preferred_versions = {}
+ Generate two graphs, one for DEPENDS and one for RDEPENDS. The current
+ implementation creates crappy graphs ;)
- # Collate providers by PN
- pkg_pn = {}
- for p in providers:
- pn = self.status.pkg_fn[p]
- if pn not in pkg_pn:
- pkg_pn[pn] = []
- pkg_pn[pn].append(p)
-
- bb.debug(1, "providers for %s are: %s" % (item, pkg_pn.keys()))
-
- for pn in pkg_pn.keys():
- preferred_versions[pn] = self.findBestProvider(pn, pkg_pn)[2:4]
- eligible.append(preferred_versions[pn][1])
-
- for p in eligible:
- if p in self.build_cache_fail:
- bb.debug(1, "rejecting already-failed %s" % p)
- eligible.remove(p)
-
- if len(eligible) == 0:
- bb.error("no eligible providers for %s" % item)
- return 0
-
- # look to see if one of them is already staged, or marked as preferred.
- # if so, bump it to the head of the queue
- for p in providers:
- pn = self.status.pkg_fn[p]
- pv, pr = self.status.pkg_pvpr[p]
-
- stamp = '%s.do_populate_staging' % self.status.stamp[p]
- if os.path.exists(stamp):
- (newvers, fn) = preferred_versions[pn]
- if not fn in eligible:
- # package was made ineligible by already-failed check
- continue
- oldver = "%s-%s" % (pv, pr)
- newver = '-'.join(newvers)
- if (newver != oldver):
- extra_chat = "%s (%s) already staged but upgrading to %s to satisfy %s" % (pn, oldver, newver, item)
- else:
- extra_chat = "Selecting already-staged %s (%s) to satisfy %s" % (pn, oldver, item)
- if self.configuration.verbose:
- bb.note("%s" % extra_chat)
- eligible.remove(fn)
- eligible = [fn] + eligible
- discriminated = True
- break
-
- return eligible
-
- def buildProvider( self, item , buildAllDeps , build_depends = [] ):
- """
- Build something to provide a named build requirement
- (takes item names from DEPENDS namespace)
+ pkgs_to_build A list of packages that need to be built
+ ignore_deps A list of names where processing of dependencies
+ should be stopped. e.g. dependencies that get
"""
- fn = None
- discriminated = False
+ def myFilterProvider( providers, item):
+ """
+ Take a list of providers and filter according to environment
+ variables. In contrast to filterProviders we do not discriminate
+ and take PREFERRED_PROVIDER into account.
+ """
+ eligible = []
+ preferred_versions = {}
- if not item in self.status.providers:
- bb.error("Nothing provides dependency %s" % item)
- bb.event.fire(bb.event.NoProvider(item,self.configuration.data))
- return 0
-
- all_p = self.status.providers[item]
-
- for p in all_p:
- if p in self.build_cache:
- bb.debug(1, "already built %s in this run\n" % p)
- return 1
-
- eligible = self.filterProviders(all_p, item)
-
- if not eligible:
- return 0
-
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, self.configuration.data, 1)
- if prefervar:
- self.preferred[item] = prefervar
-
- if item in self.preferred:
- for p in eligible:
+ # Collate providers by PN
+ pkg_pn = {}
+ for p in providers:
pn = self.status.pkg_fn[p]
- if self.preferred[item] == pn:
- if self.configuration.verbose:
- bb.note("selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (pn, item))
+ if pn not in pkg_pn:
+ pkg_pn[pn] = []
+ pkg_pn[pn].append(p)
+
+ bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))
+
+ for pn in pkg_pn.keys():
+ preferred_versions[pn] = bb.providers.findBestProvider(pn, self.configuration.data, self.status, pkg_pn)[2:4]
+ eligible.append(preferred_versions[pn][1])
+
+ for p in eligible:
+ if p in self.build_cache_fail:
+ bb.msg.debug(1, bb.msg.domain.Provider, "rejecting already-failed %s" % p)
eligible.remove(p)
- eligible = [p] + eligible
- discriminated = True
- break
- if len(eligible) > 1 and discriminated == False:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(self.status.pkg_fn[fn])
- bb.note("multiple providers are available (%s);" % ", ".join(providers_list))
- bb.note("consider defining PREFERRED_PROVIDER_%s" % item)
- bb.event.fire(bb.event.MultipleProviders(item,providers_list,self.configuration.data))
- self.consider_msgs_cache.append(item)
+ if len(eligible) == 0:
+ bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item)
+ return 0
+
+ prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, self.configuration.data, 1)
+
+ # try the preferred provider first
+ if prefervar:
+ for p in eligible:
+ if prefervar == self.status.pkg_fn[p]:
+ bb.msg.note(1, bb.msg.domain.Provider, "Selecting PREFERRED_PROVIDER %s" % prefervar)
+ eligible.remove(p)
+ eligible = [p] + eligible
+
+ return eligible
- # run through the list until we find one that we can build
- for fn in eligible:
- bb.debug(2, "selecting %s to satisfy %s" % (fn, item))
- if self.tryBuild(fn, item, buildAllDeps, build_depends + [fn]):
- return 1
+ # try to avoid adding the same rdepends over an over again
+ seen_depends = []
+ seen_rdepends = []
- bb.note("no buildable providers for %s" % item)
- bb.event.fire(bb.event.NoProvider(item,self.configuration.data))
- return 0
- def buildRProvider( self, item , buildAllDeps ):
- """
- Build something to provide a named runtime requirement
- (takes item names from RDEPENDS/PACKAGES namespace)
- """
+ def add_depends(package_list):
+ """
+ Add all depends of all packages from this list
+ """
+ for package in package_list:
+ if package in seen_depends or package in ignore_deps:
+ continue
- fn = None
- all_p = []
- discriminated = False
+ seen_depends.append( package )
+ if not package in self.status.providers:
+ """
+ We have not seen this name -> error in
+ dependency handling
+ """
+ bb.msg.note(1, bb.msg.domain.Depends, "ERROR with provider: %(package)s" % vars() )
+ print >> depends_file, '"%(package)s" -> ERROR' % vars()
+ continue
- if not buildAllDeps:
- return True
+ # get all providers for this package
+ providers = self.status.providers[package]
- all_p = self.getProvidersRun(item)
+ # now let us find the bestProvider for it
+ fn = myFilterProvider(providers, package)[0]
- if not all_p:
- bb.error("Nothing provides runtime dependency %s" % (item))
- bb.event.fire(bb.event.NoProvider(item,self.configuration.data,runtime=True))
- return False
+ depends = bb.utils.explode_deps(self.bb_cache.getVar('DEPENDS', fn, True) or "")
+ version = self.bb_cache.getVar('PV', fn, True ) + '-' + self.bb_cache.getVar('PR', fn, True)
+ add_depends ( depends )
- for p in all_p:
- if p in self.rbuild_cache:
- bb.debug(2, "Already built %s providing runtime %s\n" % (p,item))
- return True
- if p in self.build_cache:
- bb.debug(2, "Already built %s but adding any further RDEPENDS for %s\n" % (p, item))
- return self.addRunDeps(p, item , buildAllDeps)
+ # now create the node
+ print >> depends_file, '"%(package)s" [label="%(package)s\\n%(version)s"]' % vars()
- eligible = self.filterProviders(all_p, item)
- if not eligible:
- return 0
+ depends = filter( (lambda x: x not in ignore_deps), depends )
+ for depend in depends:
+ print >> depends_file, '"%(package)s" -> "%(depend)s"' % vars()
- preferred = []
- for p in eligible:
- pn = self.status.pkg_fn[p]
- provides = self.status.pn_provides[pn]
- for provide in provides:
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, self.configuration.data, 1)
- if prefervar == pn:
- if self.configuration.verbose:
- bb.note("selecting %s to satisfy runtime %s due to PREFERRED_PROVIDERS" % (pn, item))
- eligible.remove(p)
- eligible = [p] + eligible
- preferred.append(p)
- if len(eligible) > 1 and len(preferred) == 0:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(self.status.pkg_fn[fn])
- bb.note("multiple providers are available (%s);" % ", ".join(providers_list))
- bb.note("consider defining a PREFERRED_PROVIDER to match runtime %s" % item)
- bb.event.fire(bb.event.MultipleProviders(item,providers_list,self.configuration.data,runtime=True))
- self.consider_msgs_cache.append(item)
+ def add_all_depends( the_depends, the_rdepends ):
+ """
+ Add both DEPENDS and RDEPENDS. RDEPENDS will get dashed
+ lines
+ """
+ package_list = the_depends + the_rdepends
+ for package in package_list:
+ if package in seen_rdepends or package in ignore_deps:
+ continue
- if len(preferred) > 1:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in preferred:
- providers_list.append(self.status.pkg_fn[fn])
- bb.note("multiple preferred providers are available (%s);" % ", ".join(providers_list))
- bb.note("consider defining only one PREFERRED_PROVIDER to match runtime %s" % item)
- bb.event.fire(bb.event.MultipleProviders(item,providers_list,self.configuration.data,runtime=True))
- self.consider_msgs_cache.append(item)
+ seen_rdepends.append( package )
- # run through the list until we find one that we can build
- for fn in eligible:
- bb.debug(2, "selecting %s to satisfy runtime %s" % (fn, item))
- if self.tryBuild(fn, item, buildAllDeps):
- return True
+ # Let us find out if the package is a DEPENDS or RDEPENDS
+ # and we will set 'providers' with the available providers
+ # for the package.
+ if package in the_depends:
+ if not package in self.status.providers:
+ bb.msg.note(1, bb.msg.domain.Depends, "ERROR with provider: %(package)s" % vars() )
+ print >> alldepends_file, '"%(package)s" -> ERROR' % vars()
+ continue
- bb.error("No buildable providers for runtime %s" % item)
- bb.event.fire(bb.event.NoProvider(item,self.configuration.data))
- return False
+ providers = self.status.providers[package]
+ elif package in the_rdepends:
+ if len(bb.providers.getRuntimeProviders(self.status, package)) == 0:
+ bb.msg.note(1, bb.msg.domain.Depends, "ERROR with rprovider: %(package)s" % vars() )
+ print >> alldepends_file, '"%(package)s" -> ERROR [style="dashed"]' % vars()
+ continue
- def getProvidersRun(self, rdepend):
- """
- Return any potential providers of runtime rdepend
- """
- rproviders = []
+ providers = bb.providers.getRuntimeProviders(self.status, package)
+ else:
+ # something went wrong...
+ print "Complete ERROR! %s" % package
+ continue
- if rdepend in self.status.rproviders:
- rproviders += self.status.rproviders[rdepend]
+ # now let us find the bestProvider for it
+ fn = myFilterProvider(providers, package)[0]
- if rdepend in self.status.packages:
- rproviders += self.status.packages[rdepend]
+ # Now we have a filename let us get the depends and RDEPENDS of it
+ depends = bb.utils.explode_deps(self.bb_cache.getVar('DEPENDS', fn, True) or "")
+ if fn in self.status.rundeps and package in self.status.rundeps[fn]:
+ rdepends= self.status.rundeps[fn][package].keys()
+ else:
+ rdepends = []
+ version = self.bb_cache.getVar('PV', fn, True ) + '-' + self.bb_cache.getVar('PR', fn, True)
- if rproviders:
- return rproviders
+ # handle all the depends and rdepends of package
+ add_all_depends ( depends, rdepends )
- # Only search dynamic packages if we can't find anything in other variables
- for pattern in self.status.packages_dynamic:
- regexp = re.compile(pattern)
- if regexp.match(rdepend):
- rproviders += self.status.packages_dynamic[pattern]
+ # now create the node using package name
+ print >> alldepends_file, '"%(package)s" [label="%(package)s\\n%(version)s"]' % vars()
- return rproviders
+ # remove the stuff we want to ignore and add the edges
+ depends = filter( (lambda x: x not in ignore_deps), depends )
+ rdepends = filter( (lambda x: x not in ignore_deps), rdepends )
+ for depend in depends:
+ print >> alldepends_file, '"%(package)s" -> "%(depend)s"' % vars()
+ for depend in rdepends:
+ print >> alldepends_file, '"%(package)s" -> "%(depend)s" [style=dashed]' % vars()
- def addRunDeps(self , fn, item , buildAllDeps):
- """
- Add any runtime dependencies of runtime item provided by fn
- as long as item has't previously been processed by this function.
- """
- if item in self.rbuild_cache:
- return True
+ # Add depends now
+ depends_file = file('depends.dot', 'w' )
+ print >> depends_file, "digraph depends {"
+ add_depends( pkgs_to_build )
+ print >> depends_file, "}"
- if not buildAllDeps:
- return True
-
- rdepends = []
- self.rbuild_cache.append(item)
-
- if fn in self.status.rundeps and item in self.status.rundeps[fn]:
- rdepends += self.status.rundeps[fn][item].keys()
- if fn in self.status.runrecs and item in self.status.runrecs[fn]:
- rdepends += self.status.runrecs[fn][item].keys()
-
- bb.debug(2, "Additional runtime dependencies for %s are: %s" % (item, " ".join(rdepends)))
-
- for rdepend in rdepends:
- if rdepend in self.status.ignored_dependencies:
- continue
- if not self.buildRProvider(rdepend, buildAllDeps):
- return False
- return True
+ # Add all depends now
+ alldepends_file = file('alldepends.dot', 'w' )
+ print >> alldepends_file, "digraph alldepends {"
+ add_all_depends( pkgs_to_build, [] )
+ print >> alldepends_file, "}"
def buildDepgraph( self ):
all_depends = self.status.all_depends
@@ -702,6 +358,7 @@ class BBCooker:
localdata = data.createCopy(self.configuration.data)
bb.data.update_data(localdata)
+ bb.data.expandKeys(localdata)
def calc_bbfile_priority(filename):
for (regex, pri) in self.status.bbfile_config_priorities:
@@ -712,9 +369,9 @@ class BBCooker:
# Handle PREFERRED_PROVIDERS
for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
(providee, provider) = p.split(':')
- if providee in self.preferred and self.preferred[providee] != provider:
- bb.error("conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.preferred[providee]))
- self.preferred[providee] = provider
+ if providee in self.status.preferred and self.status.preferred[providee] != provider:
+ bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
+ self.status.preferred[providee] = provider
# Calculate priorities for each file
for p in self.status.pkg_fn.keys():
@@ -726,19 +383,19 @@ class BBCooker:
"""
all_depends = self.status.all_depends
pn_provides = self.status.pn_provides
- bb.debug(1, "collating packages for \"world\"")
+ bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"")
for f in self.status.possible_world:
terminal = True
pn = self.status.pkg_fn[f]
for p in pn_provides[pn]:
if p.startswith('virtual/'):
- bb.debug(2, "skipping %s due to %s provider starting with virtual/" % (f, p))
+ bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p))
terminal = False
break
for pf in self.status.providers[p]:
if self.status.pkg_fn[pf] != pn:
- bb.debug(2, "skipping %s due to both us and %s providing %s" % (f, pf, p))
+ bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p))
terminal = False
break
if terminal:
@@ -748,13 +405,8 @@ class BBCooker:
self.status.possible_world = None
self.status.all_depends = None
- def myProgressCallback( self, x, y, f, bb_cache, from_cache ):
- # feed the status with new input
-
- self.status.handle_bb_data(f, bb_cache, from_cache)
-
- if bbdebug > 0:
- return
+ def myProgressCallback( self, x, y, f, from_cache ):
+ """Update any tty with the progress change"""
if os.isatty(sys.stdout.fileno()):
sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) )
sys.stdout.flush()
@@ -771,9 +423,10 @@ class BBCooker:
try:
from bb import shell
except ImportError, details:
- bb.fatal("Sorry, shell not available (%s)" % details )
+ bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
else:
bb.data.update_data( self.configuration.data )
+ bb.data.expandKeys(localdata)
shell.start( self )
sys.exit( 0 )
@@ -796,9 +449,9 @@ class BBCooker:
bb.event.register(var,bb.data.getVar(var, data))
except IOError:
- bb.fatal( "Unable to open %s" % afile )
+ bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile )
except bb.parse.ParseError, details:
- bb.fatal( "Unable to parse %s (%s)" % (afile, details) )
+ bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) )
def handleCollections( self, collections ):
"""Handle collections"""
@@ -807,22 +460,22 @@ class BBCooker:
for c in collection_list:
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
if regex == None:
- bb.error("BBFILE_PATTERN_%s not defined" % c)
+ bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c)
continue
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
if priority == None:
- bb.error("BBFILE_PRIORITY_%s not defined" % c)
+ bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c)
continue
try:
cre = re.compile(regex)
except re.error:
- bb.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
+ bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
continue
try:
pri = int(priority)
self.status.bbfile_config_priorities.append((cre, pri))
except ValueError:
- bb.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
+ bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
def cook( self, configuration, args ):
@@ -834,11 +487,16 @@ class BBCooker:
self.configuration = configuration
- if not self.configuration.cmd:
- self.configuration.cmd = "build"
+ if self.configuration.verbose:
+ bb.msg.set_verbose(True)
if self.configuration.debug:
- bb.debug_level = self.configuration.debug
+ bb.msg.set_debug_level(self.configuration.debug)
+ else:
+ bb.msg.set_debug_level(0)
+
+ if self.configuration.debug_domains:
+ bb.msg.set_debug_domains(self.configuration.debug_domains)
self.configuration.data = bb.data.init()
@@ -847,6 +505,12 @@ class BBCooker:
self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )
+ if not self.configuration.cmd:
+ self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data)
+
+ # For backwards compatibility - REMOVE ME
+ if not self.configuration.cmd:
+ self.configuration.cmd = "build"
#
# Special updated configuration we use for firing events
@@ -871,20 +535,34 @@ class BBCooker:
if self.configuration.buildfile is not None:
bf = os.path.abspath( self.configuration.buildfile )
try:
- bbfile_data = bb.parse.handle(bf, self.configuration.data)
- except IOError:
- bb.fatal("Unable to open %s" % bf)
+ os.stat(bf)
+ except OSError:
+ (filelist, masked) = self.collect_bbfiles()
+ regexp = re.compile(self.configuration.buildfile)
+ matches = []
+ for f in filelist:
+ if regexp.search(f) and os.path.isfile(f):
+ bf = f
+ matches.append(f)
+ if len(matches) != 1:
+ bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (self.configuration.buildfile, len(matches)))
+ for f in matches:
+ bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
+ sys.exit(1)
+ bf = matches[0]
+
+ bbfile_data = bb.parse.handle(bf, self.configuration.data)
item = bb.data.getVar('PN', bbfile_data, 1)
try:
- self.tryBuildPackage( bf, item, bbfile_data )
+ self.tryBuildPackage(bf, item, self.configuration.cmd, bbfile_data, True)
except bb.build.EventException:
- bb.error( "Build of '%s' failed" % item )
+ bb.msg.error(bb.msg.domain.Build, "Build of '%s' failed" % item )
sys.exit( self.stats.show() )
# initialise the parsing status now we know we will need deps
- self.status = BBParsingStatus()
+ self.status = bb.cache.CacheData()
ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
self.status.ignored_dependencies = Set( ignore.split() )
@@ -912,23 +590,23 @@ class BBCooker:
try:
import psyco
except ImportError:
- if bbdebug == 0:
- bb.note("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
+ bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
else:
- psyco.bind( self.collect_bbfiles )
+ psyco.bind( self.parse_bbfiles )
else:
- bb.note("You have disabled Psyco. This decreases performance.")
+ bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
try:
- bb.debug(1, "collecting .bb files")
- self.collect_bbfiles( self.myProgressCallback )
- bb.debug(1, "parsing complete")
- if bbdebug == 0:
- print
+ bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
+ (filelist, masked) = self.collect_bbfiles()
+ self.parse_bbfiles(filelist, masked, self.myProgressCallback)
+ bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
+ print
if self.configuration.parse_only:
- print "Requested parsing .bb files only. Exiting."
+ bb.msg.note(1, bb.msg.domain.Collection, "Requested parsing .bb files only. Exiting.")
return
+
self.buildDepgraph()
if self.configuration.show_versions:
@@ -940,30 +618,41 @@ class BBCooker:
for t in self.status.world_target:
pkgs_to_build.append(t)
+ if self.configuration.dot_graph:
+ self.generateDotGraph( pkgs_to_build, self.configuration.ignored_dot_deps )
+ sys.exit( 0 )
+
bb.event.fire(bb.event.BuildStarted(buildname, pkgs_to_build, self.configuration.event_data))
- failures = 0
- for k in pkgs_to_build:
- failed = False
- try:
- if self.buildProvider( k , False ) == 0:
- # already diagnosed
- failed = True
- except bb.build.EventException:
- bb.error("Build of " + k + " failed")
- failed = True
+ localdata = data.createCopy(self.configuration.data)
+ bb.data.update_data(localdata)
+ bb.data.expandKeys(localdata)
- if failed:
- failures += failures
- if self.configuration.abort:
- sys.exit(1)
+ taskdata = bb.taskdata.TaskData(self.configuration.abort)
+
+ runlist = []
+ try:
+ for k in pkgs_to_build:
+ taskdata.add_provider(localdata, self.status, k)
+ runlist.append([k, "do_%s" % self.configuration.cmd])
+ taskdata.add_unresolved(localdata, self.status)
+ except bb.providers.NoProvider:
+ sys.exit(1)
+
+ rq = bb.runqueue.RunQueue()
+ rq.prepare_runqueue(self.configuration.data, self.status, taskdata, runlist)
+ try:
+ failures = rq.execute_runqueue(self, self.configuration.data, self.status, taskdata, runlist)
+ except runqueue.TaskFailure, (fnid, fn, taskname):
+ bb.msg.error(bb.msg.domain.Build, "'%s, %s' failed" % (fn, taskname))
+ sys.exit(1)
bb.event.fire(bb.event.BuildCompleted(buildname, pkgs_to_build, self.configuration.event_data, failures))
sys.exit( self.stats.show() )
except KeyboardInterrupt:
- print "\nNOTE: KeyboardInterrupt - Build not completed."
+ bb.msg.note(1, bb.msg.domain.Collection, "KeyboardInterrupt - Build not completed.")
sys.exit(1)
def get_bbfiles( self, path = os.getcwd() ):
@@ -985,9 +674,8 @@ class BBCooker:
return []
return finddata.readlines()
- def collect_bbfiles( self, progressCallback ):
+ def collect_bbfiles( self ):
"""Collect all available .bb build files"""
- self.cb = progressCallback
parsed, cached, skipped, masked = 0, 0, 0, 0
self.bb_cache = bb.cache.init(self)
@@ -998,7 +686,7 @@ class BBCooker:
files = self.get_bbfiles()
if not len(files):
- bb.error("no files to build.")
+ bb.msg.error(bb.msg.domain.Collection, "no files to build.")
newfiles = []
for f in files:
@@ -1009,62 +697,80 @@ class BBCooker:
continue
newfiles += glob.glob(f) or [ f ]
- bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) or ""
+ bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
+
+ if not bbmask:
+ return (newfiles, 0)
+
try:
bbmask_compiled = re.compile(bbmask)
except sre_constants.error:
- bb.fatal("BBMASK is not a valid regular expression.")
+ bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.")
+ finalfiles = []
for i in xrange( len( newfiles ) ):
f = newfiles[i]
if bbmask and bbmask_compiled.search(f):
- bb.debug(1, "bbmake: skipping %s" % f)
+ bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f)
masked += 1
continue
- debug(1, "bbmake: parsing %s" % f)
+ finalfiles.append(f)
+
+ return (finalfiles, masked)
+
+ def parse_bbfiles(self, filelist, masked, progressCallback = None):
+ parsed, cached, skipped = 0, 0, 0
+ for i in xrange( len( filelist ) ):
+ f = filelist[i]
+
+ bb.msg.debug(1, bb.msg.domain.Collection, "parsing %s" % f)
# read a file's metadata
try:
- fromCache, skip = self.bb_cache.loadData(f, self)
+ fromCache, skip = self.bb_cache.loadData(f, self.configuration.data)
if skip:
skipped += 1
- #bb.note("Skipping %s" % f)
+ bb.msg.debug(2, bb.msg.domain.Collection, "skipping %s" % f)
self.bb_cache.skip(f)
continue
elif fromCache: cached += 1
else: parsed += 1
deps = None
+ # Disabled by RP as was no longer functional
# allow metadata files to add items to BBFILES
#data.update_data(self.pkgdata[f])
- addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
- if addbbfiles:
- for aof in addbbfiles.split():
- if not files.count(aof):
- if not os.path.isabs(aof):
- aof = os.path.join(os.path.dirname(f),aof)
- files.append(aof)
+ #addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
+ #if addbbfiles:
+ # for aof in addbbfiles.split():
+ # if not files.count(aof):
+ # if not os.path.isabs(aof):
+ # aof = os.path.join(os.path.dirname(f),aof)
+ # files.append(aof)
+
+ self.bb_cache.handle_data(f, self.status)
# now inform the caller
- if self.cb is not None:
- self.cb( i + 1, len( newfiles ), f, self.bb_cache, fromCache )
+ if progressCallback is not None:
+ progressCallback( i + 1, len( filelist ), f, fromCache )
except IOError, e:
self.bb_cache.remove(f)
- bb.error("opening %s: %s" % (f, e))
+ bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
pass
except KeyboardInterrupt:
self.bb_cache.sync()
raise
except Exception, e:
self.bb_cache.remove(f)
- bb.error("%s while parsing %s" % (e, f))
+ bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
except:
self.bb_cache.remove(f)
raise
- if self.cb is not None:
- print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ),
+ if progressCallback is not None:
+ print "\r" # need newline after Handling Bitbake files message
+ bb.msg.note(1, bb.msg.domain.Collection, "Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ))
self.bb_cache.sync()
@@ -1090,11 +796,11 @@ Default BBFILES are the .bb files in the current directory.""" )
parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
action = "store_true", dest = "force", default = False )
- parser.add_option( "-i", "--interactive", help = "drop into the interactive mode.",
+ parser.add_option( "-i", "--interactive", help = "drop into the interactive mode also called the BitBake shell.",
action = "store_true", dest = "interactive", default = False )
- parser.add_option( "-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing)",
- action = "store", dest = "cmd", default = "build" )
+ parser.add_option( "-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtaks tasks is defined and will show available tasks",
+ action = "store", dest = "cmd" )
parser.add_option( "-r", "--read", help = "read the specified file before bitbake.conf",
action = "append", dest = "file", default = [] )
@@ -1102,7 +808,7 @@ Default BBFILES are the .bb files in the current directory.""" )
parser.add_option( "-v", "--verbose", help = "output more chit-chat to the terminal",
action = "store_true", dest = "verbose", default = False )
- parser.add_option( "-D", "--debug", help = "Increase the debug level",
+ parser.add_option( "-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
action = "count", dest="debug", default = 0)
parser.add_option( "-n", "--dry-run", help = "don't execute, just go through the motions",
@@ -1120,6 +826,16 @@ Default BBFILES are the .bb files in the current directory.""" )
parser.add_option( "-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
action = "store_true", dest = "show_environment", default = False )
+ parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
+ action = "store_true", dest = "dot_graph", default = False )
+
+ parser.add_option( "-I", "--ignore-deps", help = """Stop processing at the given list of dependencies when generating dependency graphs. This can help to make the graph more appealing""",
+ action = "append", dest = "ignored_dot_deps", default = [] )
+
+ parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
+ action = "append", dest = "debug_domains", default = [] )
+
+
options, args = parser.parse_args( sys.argv )
cooker = BBCooker()
@@ -1129,3 +845,9 @@ Default BBFILES are the .bb files in the current directory.""" )
if __name__ == "__main__":
main()
+ sys.exit(0)
+ import profile
+ profile.run('main()', "profile.log")
+ import pstats
+ p = pstats.Stats('profile.log')
+ p.print_stats()
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
index 84d2ee23ce..e865e1b998 100755
--- a/bitbake/bin/bitdoc
+++ b/bitbake/bin/bitdoc
@@ -442,7 +442,7 @@ Create a set of html pages (documentation) for a bitbake.conf....
options, args = parser.parse_args( sys.argv )
if options.debug:
- bb.debug_level = options.debug
+ bb.msg.set_debug_level(options.debug)
return options.config, options.output
diff --git a/bitbake/classes/base.bbclass b/bitbake/classes/base.bbclass
index 1d75964f57..cfb82a41cb 100644
--- a/bitbake/classes/base.bbclass
+++ b/bitbake/classes/base.bbclass
@@ -41,7 +41,7 @@ bbdebug() {
exit 1
}
- test ${@bb.debug_level} -ge $1 && {
+ test ${@bb.msg.debug_level} -ge $1 && {
shift
echo "DEBUG:" $*
}
diff --git a/bitbake/conf/bitbake.conf b/bitbake/conf/bitbake.conf
index d288fee78f..19a3fe8ef8 100644
--- a/bitbake/conf/bitbake.conf
+++ b/bitbake/conf/bitbake.conf
@@ -26,11 +26,12 @@ DEPLOY_DIR_IMAGE = "${DEPLOY_DIR}/images"
DL_DIR = "${TMPDIR}/downloads"
FETCHCOMMAND = ""
FETCHCOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} co ${CVSCOOPTS} ${CVSMODULE}"
-FETCHCOMMAND_svn = "/usr/bin/env svn co http://${SVNROOT} ${SVNCOOPTS} ${SVNMODULE}"
+FETCHCOMMAND_svn = "/usr/bin/env svn co ${SVNCOOPTS} ${SVNROOT} ${SVNMODULE}"
FETCHCOMMAND_wget = "/usr/bin/env wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}"
FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}"
+GITDIR = "${DL_DIR}/git"
IMAGE_CMD = "_NO_DEFINED_IMAGE_TYPES_"
IMAGE_ROOTFS = "${TMPDIR}/rootfs"
MKTEMPCMD = "mktemp -q ${TMPBASE}"
@@ -47,9 +48,11 @@ RESUMECOMMAND_wget = "/usr/bin/env wget -c -t 5 --passive-ftp -P ${DL_DIR} ${URI
S = "${WORKDIR}/${P}"
SRC_URI = "file://${FILE}"
STAMP = "${TMPDIR}/stamps/${PF}"
+SVNDIR = "${DL_DIR}/svn"
T = "${WORKDIR}/temp"
TARGET_ARCH = "${BUILD_ARCH}"
TMPDIR = "${TOPDIR}/tmp"
UPDATECOMMAND = ""
UPDATECOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} update ${CVSCOOPTS}"
+UPDATECOMMAND_svn = "/usr/bin/env svn update ${SVNCOOPTS}"
WORKDIR = "${TMPDIR}/work/${PF}"
diff --git a/bitbake/contrib/vim/ftdetect/bitbake.vim b/bitbake/contrib/vim/ftdetect/bitbake.vim
new file mode 100644
index 0000000000..3882a9a08d
--- /dev/null
+++ b/bitbake/contrib/vim/ftdetect/bitbake.vim
@@ -0,0 +1,4 @@
+au BufNewFile,BufRead *.bb setfiletype bitbake
+au BufNewFile,BufRead *.bbclass setfiletype bitbake
+au BufNewFile,BufRead *.inc setfiletype bitbake
+" au BufNewFile,BufRead *.conf setfiletype bitbake
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
index 5d2bc633e1..43a1990b0b 100644
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ b/bitbake/contrib/vim/syntax/bitbake.vim
@@ -42,11 +42,11 @@ syn region bbString matchgroup=bbQuote start=/'/ skip=/\\$/ excludenl end=/'/ c
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
syn match bbVarDeref "${[a-zA-Z0-9\-_\.]\+}" contained
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA-Z0-9\-_\.]\+\)\?\)\s*\(\(:=\)\|\(+=\)\|\(=+\)\|\(?=\)\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA-Z0-9\-_\.]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
syn match bbIdentifier "[a-zA-Z0-9\-_\.]\+" display contained
"syn keyword bbVarEq = display contained nextgroup=bbVarValue
-syn match bbVarEq "\(:=\)\|\(+=\)\|\(=+\)\|\(?=\)\|=" contained nextgroup=bbVarValue
+syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref
@@ -90,8 +90,8 @@ syn region bbDefRegion start='^def\s\+\w\+\s*([^)]*)\s*:\s*$' end='^\(\s\|$\)\@
" BitBake statements
-syn keyword bbStatement include inherit addtask addhandler EXPORT_FUNCTIONS display contained
-syn match bbStatementLine "^\(include\|inherit\|addtask\|addhandler\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+syn keyword bbStatement include inherit require addtask addhandler EXPORT_FUNCTIONS display contained
+syn match bbStatementLine "^\(include\|inherit\|require\|addtask\|addhandler\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
syn match bbStatementRest ".*$" contained contains=bbString,bbVarDeref
" Highlight
diff --git a/bitbake/doc/manual/usermanual.xml b/bitbake/doc/manual/usermanual.xml
index c314236c6f..7eb12035ad 100644
--- a/bitbake/doc/manual/usermanual.xml
+++ b/bitbake/doc/manual/usermanual.xml
@@ -17,7 +17,7 @@
Phil Blundell
- This work is licensed under the Creative Commons Attribution License. To view a copy of this license, visit http://creativecommons.org/licenses/by/2.0/ or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.
+ This work is licensed under the Creative Commons Attribution License. To view a copy of this license, visit http://creativecommons.org/licenses/by/2.5/ or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.
@@ -195,7 +195,7 @@ addtask printdate before do_build
Events
NOTE: This is only supported in .bb and .bbclass files.
- BitBake also implements a means of registering event handlers. Events are triggered at certain points during operation, such as, the beginning of operation against a given .bb, the start of a given task, task failure, task success, et cetera. The intent was to make it easy to do things like email notifications on build failure.
+ BitBake allows installing event handlers. Events are triggered at certain points during operation, such as the beginning of operation against a given .bb, the start of a given task, task failure, task success, et cetera. The intent was to make it easy to do things like email notifications on build failure.
addhandler myclass_eventhandler
python myclass_eventhandler() {
from bb.event import NotHandled, getName
@@ -205,6 +205,7 @@ python myclass_eventhandler() {
print "The file we run for is %s" % data.getVar('FILE', e.data, True)
return NotHandled
+}
This event handler gets called every time an event is triggered. A global variable e is defined. e.data contains an instance of bb.data. With the getName(e)
method one can get the name of the triggered event.The above event handler prints the name
@@ -344,15 +345,19 @@ options:
cannot be remade, the other dependencies of these
targets can be processed all the same.
-f, --force force run of specified cmd, regardless of stamp status
- -i, --interactive drop into the interactive mode.
+ -i, --interactive drop into the interactive mode also called the BitBake
+ shell.
-c CMD, --cmd=CMD Specify task to execute. Note that this only executes
the specified task for the providee and the packages
it depends on, i.e. 'compile' does not implicitly call
stage for the dependencies (IOW: use only if you know
- what you are doing)
+ what you are doing). Depending on the base.bbclass a
+ 'listtasks' task is defined and will show available
+ tasks
-r FILE, --read=FILE read the specified file before bitbake.conf
-v, --verbose output more chit-chat to the terminal
- -D, --debug Increase the debug level
+ -D, --debug Increase the debug level. You can specify this more
+ than once.
-n, --dry-run don't execute, just go through the motions
-p, --parse-only quit after parsing the BB files (developers only)
-d, --disable-psyco disable using the psyco just-in-time compiler (not
@@ -360,6 +365,12 @@ options:
-s, --show-versions show current and preferred versions of all packages
-e, --environment show the global or per-package environment (this is
what used to be bbread)
+ -g, --graphviz emit the dependency trees of the specified packages in
+ the dot syntax
+ -I IGNORED_DOT_DEPS, --ignore-deps=IGNORED_DOT_DEPS
+ Stop processing at the given list of dependencies when
+ generating dependency graphs. This can help to make
+ the graph more appealing
@@ -386,6 +397,14 @@ options:
$ bitbake virtual/whatever
$ bitbake -c clean virtual/whatever
+
+ Generating dependency graphs
+ BitBake is able to generate dependency graphs using the dot syntax. These graphs can be converted
+to images using the dot application from graphviz.
+Three files will be written into the current working directory: depends.dot containing the DEPENDS variables, plus rdepends.dot and alldepends.dot containing both DEPENDS and RDEPENDS. To stop processing at common dependencies one can use the -I option to omit them from the graph. This can lead to more readable graphs. E.g. this way DEPENDS from inherited classes, e.g. base.bbclass, can be removed from the graph.
+ $ bitbake -g blah
+ $ bitbake -g -I virtual/whatever -I bloom blah
+
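As a usage sketch for the graph support documented above (illustrative only, not part of the patch; it assumes the graphviz dot tool is installed), the files written by generateDotGraph can be rendered to images:

 $ bitbake -g -I virtual/whatever blah
 $ dot -Tpng depends.dot -o depends.png
 $ dot -Tpng alldepends.dot -o alldepends.png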
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000000..826d435f98
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,305 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
+
+Please Note:
+ Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
+ Assign a file to __warn__ to get warnings about slow operations.
+"""
+
+from inspect import getmro
+
+import copy
+import types, sets
+types.ImmutableTypes = tuple([ \
+ types.BooleanType, \
+ types.ComplexType, \
+ types.FloatType, \
+ types.IntType, \
+ types.LongType, \
+ types.NoneType, \
+ types.TupleType, \
+ sets.ImmutableSet] + \
+ list(types.StringTypes))
+
+MUTABLE = "__mutable__"
+
+class COWMeta(type):
+ pass
+
+class COWDictMeta(COWMeta):
+ __warn__ = False
+ __hasmutable__ = False
+ __marker__ = tuple()
+
+ def __str__(cls):
+ # FIXME: I have magic numbers!
+ return "" % (cls.__count__, len(cls.__dict__) - 3)
+ __repr__ = __str__
+
+ def cow(cls):
+ class C(cls):
+ __count__ = cls.__count__ + 1
+ return C
+ copy = cow
+ __call__ = cow
+
+ def __setitem__(cls, key, value):
+ if not isinstance(value, types.ImmutableTypes):
+ if not isinstance(value, COWMeta):
+ cls.__hasmutable__ = True
+ key += MUTABLE
+ setattr(cls, key, value)
+
+ def __getmutable__(cls, key, readonly=False):
+ nkey = key + MUTABLE
+ try:
+ return cls.__dict__[nkey]
+ except KeyError:
+ pass
+
+ value = getattr(cls, nkey)
+ if readonly:
+ return value
+
+ if not cls.__warn__ is False and not isinstance(value, COWMeta):
+ print >> cls.__warn__, "Warning: Doing a copy because %s is a mutable type." % key
+ try:
+ value = value.copy()
+ except AttributeError, e:
+ value = copy.copy(value)
+ setattr(cls, nkey, value)
+ return value
+
+ __getmarker__ = []
+ def __getreadonly__(cls, key, default=__getmarker__):
+ """\
+ Get a value (even if mutable) which you promise not to change.
+ """
+ return cls.__getitem__(key, default, True)
+
+ def __getitem__(cls, key, default=__getmarker__, readonly=False):
+ try:
+ try:
+ value = getattr(cls, key)
+ except AttributeError:
+ value = cls.__getmutable__(key, readonly)
+
+ # This is for values which have been deleted
+ if value is cls.__marker__:
+ raise AttributeError("key %s does not exist." % key)
+
+ return value
+ except AttributeError, e:
+ if not default is cls.__getmarker__:
+ return default
+
+ raise KeyError(str(e))
+
+ def __delitem__(cls, key):
+ cls.__setitem__(key, cls.__marker__)
+
+ def __revertitem__(cls, key):
+ if not cls.__dict__.has_key(key):
+ key += MUTABLE
+ delattr(cls, key)
+
+ def has_key(cls, key):
+ value = cls.__getreadonly__(key, cls.__marker__)
+ if value is cls.__marker__:
+ return False
+ return True
+
+ def iter(cls, type, readonly=False):
+ for key in dir(cls):
+ if key.startswith("__"):
+ continue
+
+ if key.endswith(MUTABLE):
+ key = key[:-len(MUTABLE)]
+
+ if type == "keys":
+ yield key
+
+ try:
+ if readonly:
+ value = cls.__getreadonly__(key)
+ else:
+ value = cls[key]
+ except KeyError:
+ continue
+
+ if type == "values":
+ yield value
+ if type == "items":
+ yield (key, value)
+ raise StopIteration()
+
+ def iterkeys(cls):
+ return cls.iter("keys")
+ def itervalues(cls, readonly=False):
+ if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
+            print >> cls.__warn__, "Warning: If you aren't going to change any of the values call with readonly=True."
+ return cls.iter("values", readonly)
+ def iteritems(cls, readonly=False):
+ if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
+            print >> cls.__warn__, "Warning: If you aren't going to change any of the values call with readonly=True."
+ return cls.iter("items", readonly)
+
+class COWSetMeta(COWDictMeta):
+ def __str__(cls):
+ # FIXME: I have magic numbers!
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
+ __repr__ = __str__
+
+ def cow(cls):
+ class C(cls):
+ __count__ = cls.__count__ + 1
+ return C
+
+ def add(cls, value):
+ COWDictMeta.__setitem__(cls, repr(hash(value)), value)
+
+ def remove(cls, value):
+ COWDictMeta.__delitem__(cls, repr(hash(value)))
+
+ def __in__(cls, value):
+        return COWDictMeta.has_key(cls, repr(hash(value)))
+
+ def iterkeys(cls):
+ raise TypeError("sets don't have keys")
+
+ def iteritems(cls):
+ raise TypeError("sets don't have 'items'")
+
+# These are the actual classes you use!
+class COWDictBase(object):
+ __metaclass__ = COWDictMeta
+ __count__ = 0
+
+class COWSetBase(object):
+ __metaclass__ = COWSetMeta
+ __count__ = 0
+
+if __name__ == "__main__":
+ import sys
+ COWDictBase.__warn__ = sys.stderr
+ a = COWDictBase()
+ print "a", a
+
+ a['a'] = 'a'
+ a['b'] = 'b'
+ a['dict'] = {}
+
+ b = a.copy()
+ print "b", b
+ b['c'] = 'b'
+
+ print
+
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems():
+ print x
+ print
+
+ b['dict']['a'] = 'b'
+ b['a'] = 'c'
+
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems():
+ print x
+ print
+
+ try:
+ b['dict2']
+ except KeyError, e:
+ print "Okay!"
+
+ a['set'] = COWSetBase()
+ a['set'].add("o1")
+ a['set'].add("o1")
+ a['set'].add("o2")
+
+ print "a", a
+ for x in a['set'].itervalues():
+ print x
+ print "--"
+ print "b", b
+ for x in b['set'].itervalues():
+ print x
+ print
+
+ b['set'].add('o3')
+
+ print "a", a
+ for x in a['set'].itervalues():
+ print x
+ print "--"
+ print "b", b
+ for x in b['set'].itervalues():
+ print x
+ print
+
+ a['set2'] = set()
+ a['set2'].add("o1")
+ a['set2'].add("o1")
+ a['set2'].add("o2")
+
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems(readonly=True):
+ print x
+ print
+
+ del b['b']
+ try:
+ print b['b']
+ except KeyError:
+ print "Yay! deleted key raises error"
+
+ if b.has_key('b'):
+ print "Boo!"
+ else:
+ print "Yay - has_key with delete works!"
+
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems(readonly=True):
+ print x
+ print
+
+ b.__revertitem__('b')
+
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems(readonly=True):
+ print x
+ print
+
+ b.__revertitem__('dict')
+ print "a", a
+ for x in a.iteritems():
+ print x
+ print "--"
+ print "b", b
+ for x in b.iteritems(readonly=True):
+ print x
+ print
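A minimal sketch of the copy-on-write behaviour the new COW.py module provides
(Python 2 syntax to match the file; the key names below are made up):

    from bb import COW

    base = COW.COWDictBase.copy()   # a fresh COW "dict" (really a class)
    base['greeting'] = 'hello'

    child = base.copy()             # child shares entries with base
    print child['greeting']         # 'hello' - the read falls through to base
    child['greeting'] = 'bye'       # the write only affects the child level
    print base['greeting']          # still 'hello'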
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index c3e7a16658..61eb5f3db8 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -23,7 +23,7 @@ this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA.
"""
-__version__ = "1.4.3"
+__version__ = "1.7.4"
__all__ = [
@@ -63,24 +63,24 @@ __all__ = [
"manifest",
"methodpool",
"cache",
+ "runqueue",
+ "taskdata",
+ "providers",
]
whitespace = '\t\n\x0b\x0c\r '
lowercase = 'abcdefghijklmnopqrstuvwxyz'
-import sys, os, types, re, string
+import sys, os, types, re, string, bb
+from bb import msg
#projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
projectdir = os.getcwd()
-debug_level = 0
-
if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
if level:
- debug_level = level
- else:
- debug_level = 0
+ bb.msg.set_debug_level(level)
class VarExpandError(Exception):
pass
@@ -99,22 +99,17 @@ class MalformedUrl(Exception):
#######################################################################
#######################################################################
-debug_prepend = ''
-
-
def debug(lvl, *args):
- if debug_level >= lvl:
- print debug_prepend + 'DEBUG:', ''.join(args)
+ bb.msg.std_debug(lvl, ''.join(args))
def note(*args):
- print debug_prepend + 'NOTE:', ''.join(args)
+ bb.msg.std_note(''.join(args))
def error(*args):
- print debug_prepend + 'ERROR:', ''.join(args)
+ bb.msg.std_error(''.join(args))
def fatal(*args):
- print debug_prepend + 'ERROR:', ''.join(args)
- sys.exit(1)
+ bb.msg.std_fatal(''.join(args))
#######################################################################
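The old debug/note/error/fatal helpers now simply delegate to the new bb.msg
module; the call shapes used throughout the rest of this patch look like the
following sketch (levels and domains shown are only examples):

    import bb.msg

    bb.msg.set_debug_level(2)                                # from -D -D or BBDEBUG
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running a fetch command")
    bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache")
    bb.msg.error(bb.msg.domain.Build, "function compile failed")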
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 8e169e002a..942bdc1a39 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -25,18 +25,9 @@ You should have received a copy of the GNU General Public License along with
Based on functions from the base bb module, Copyright 2003 Holger Schurig
"""
-from bb import debug, data, fetch, fatal, error, note, event, mkdirhier, utils
+from bb import data, fetch, event, mkdirhier, utils
import bb, os
-# data holds flags and function name for a given task
-_task_data = data.init()
-
-# graph represents task interdependencies
-_task_graph = bb.digraph()
-
-# stack represents execution order, excepting dependencies
-_task_stack = []
-
# events
class FuncFailed(Exception):
"""Executed function failed"""
@@ -76,13 +67,6 @@ class InvalidTask(TaskBase):
# functions
-def init(data):
- global _task_data, _task_graph, _task_stack
- _task_data = data.init()
- _task_graph = bb.digraph()
- _task_stack = []
-
-
def exec_func(func, d, dirs = None):
"""Execute an BB 'function'"""
@@ -163,7 +147,7 @@ def exec_func_shell(func, d):
f = open(runfile, "w")
f.write("#!/bin/sh -e\n")
- if bb.debug_level > 0: f.write("set -x\n")
+ if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
data.emit_env(f, d)
f.write("cd %s\n" % os.getcwd())
@@ -171,18 +155,18 @@ def exec_func_shell(func, d):
f.close()
os.chmod(runfile, 0775)
if not func:
- error("Function not specified")
+ bb.msg.error(bb.msg.domain.Build, "Function not specified")
raise FuncFailed()
# open logs
si = file('/dev/null', 'r')
try:
- if bb.debug_level > 0:
+ if bb.msg.debug_level['default'] > 0:
so = os.popen("tee \"%s\"" % logfile, "w")
else:
so = file(logfile, 'w')
except OSError, e:
- bb.error("opening log file: %s" % e)
+ bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
pass
se = so
@@ -205,7 +189,10 @@ def exec_func_shell(func, d):
else:
maybe_fakeroot = ''
ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile))
- os.chdir(prevdir)
+ try:
+ os.chdir(prevdir)
+ except:
+ pass
if not interact:
# restore the backups
@@ -224,14 +211,14 @@ def exec_func_shell(func, d):
os.close(ose[0])
if ret==0:
- if bb.debug_level > 0:
+ if bb.msg.debug_level['default'] > 0:
os.remove(runfile)
# os.remove(logfile)
return
else:
- error("function %s failed" % func)
+ bb.msg.error(bb.msg.domain.Build, "function %s failed" % func)
if data.getVar("BBINCLUDELOGS", d):
- error("log data follows (%s)" % logfile)
+ bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile)
f = open(logfile, "r")
while True:
l = f.readline()
@@ -241,7 +228,7 @@ def exec_func_shell(func, d):
print '| %s' % l
f.close()
else:
- error("see log in %s" % logfile)
+ bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
raise FuncFailed( logfile )
@@ -281,7 +268,7 @@ def exec_task(task, d):
return 1
try:
- debug(1, "Executing task %s" % item)
+ bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item)
old_overrides = data.getVar('OVERRIDES', d, 0)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
@@ -292,21 +279,63 @@ def exec_task(task, d):
task_cache.append(item)
data.setVar('_task_cache', task_cache, d)
except FuncFailed, reason:
- note( "Task failed: %s" % reason )
+ bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason )
failedevent = TaskFailed(item, d)
event.fire(failedevent)
raise EventException("Function failed in task: %s" % reason, failedevent)
- # execute
- task_graph.walkdown(task, execute)
+ if data.getVarFlag(task, 'dontrundeps', d):
+ execute(None, task)
+ else:
+ task_graph.walkdown(task, execute)
# make stamp, or cause event and raise exception
if not data.getVarFlag(task, 'nostamp', d):
mkstamp(task, d)
+def stamp_is_current_cache(dataCache, file_name, task, checkdeps = 1):
+ """
+ Check status of a given task's stamp.
+ Returns 0 if it is not current and needs updating.
+ Same as stamp_is_current but works against the dataCache instead of d
+ """
+ task_graph = dataCache.task_queues[file_name]
+
+ if not dataCache.stamp[file_name]:
+ return 0
+
+ stampfile = "%s.%s" % (dataCache.stamp[file_name], task)
+ if not os.access(stampfile, os.F_OK):
+ return 0
+
+ if checkdeps == 0:
+ return 1
+
+ import stat
+ tasktime = os.stat(stampfile)[stat.ST_MTIME]
+
+ _deps = []
+ def checkStamp(graph, task):
+        # check for existence
+ if 'nostamp' in dataCache.task_deps[file_name] and task in dataCache.task_deps[file_name]['nostamp']:
+ return 1
+
+ if not stamp_is_current_cache(dataCache, file_name, task, 0):
+ return 0
+
+ depfile = "%s.%s" % (dataCache.stamp[file_name], task)
+ deptime = os.stat(depfile)[stat.ST_MTIME]
+ if deptime > tasktime:
+ return 0
+ return 1
+
+ return task_graph.walkdown(task, checkStamp)
def stamp_is_current(task, d, checkdeps = 1):
- """Check status of a given task's stamp. returns 0 if it is not current and needs updating."""
+ """
+ Check status of a given task's stamp.
+ Returns 0 if it is not current and needs updating.
+ """
task_graph = data.getVar('_task_graph', d)
if not task_graph:
task_graph = bb.digraph()
@@ -360,7 +389,6 @@ def mkstamp(task, d):
f = open(stamp, "w")
f.close()
-
def add_task(task, deps, d):
task_graph = data.getVar('_task_graph', d)
if not task_graph:
@@ -374,6 +402,21 @@ def add_task(task, deps, d):
# don't assume holding a reference
data.setVar('_task_graph', task_graph, d)
+ task_deps = data.getVar('_task_deps', d)
+ if not task_deps:
+ task_deps = {}
+ def getTask(name):
+ deptask = data.getVarFlag(task, name, d)
+ if deptask:
+ if not name in task_deps:
+ task_deps[name] = {}
+ task_deps[name][task] = deptask
+ getTask('deptask')
+ getTask('rdeptask')
+ getTask('recrdeptask')
+ getTask('nostamp')
+
+ data.setVar('_task_deps', task_deps, d)
def remove_task(task, kill, d):
"""Remove an BB 'task'.
@@ -399,6 +442,3 @@ def task_exists(task, d):
task_graph = bb.digraph()
data.setVar('_task_graph', task_graph, d)
return task_graph.hasnode(task)
-
-def get_task_data():
- return _task_data
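For reference, the new '_task_deps' bookkeeping in add_task() collects the
deptask/rdeptask/recrdeptask/nostamp flags into one dictionary keyed by flag
name and then by task; roughly (task names and values below are hypothetical):

    task_deps = {
        'deptask': {'do_compile': 'do_populate_staging'},
        'nostamp': {'do_build': '1'},
    }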
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 921a9f7589..05c42518a7 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -33,15 +33,15 @@ Place, Suite 330, Boston, MA 02111-1307 USA.
import os, re
import bb.data
import bb.utils
+from sets import Set
try:
import cPickle as pickle
except ImportError:
import pickle
- print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
+ bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-# __cache_version__ = "123"
-__cache_version__ = "124" # changes the __depends structure
+__cache_version__ = "125"
class Cache:
"""
@@ -58,14 +58,12 @@ class Cache:
if self.cachedir in [None, '']:
self.has_cache = False
- if cooker.cb is not None:
-                print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
+            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
else:
self.has_cache = True
self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
- if cooker.cb is not None:
- print "NOTE: Using cache in '%s'" % self.cachedir
+ bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
try:
os.stat( self.cachedir )
except OSError:
@@ -80,7 +78,7 @@ class Cache:
if version_data['BITBAKE_VER'] != bb.__version__:
raise ValueError, 'Bitbake Version Mismatch'
except (ValueError, KeyError):
- bb.note("Invalid cache found, rebuilding...")
+ bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
self.depends_cache = {}
if self.depends_cache:
@@ -108,7 +106,7 @@ class Cache:
if fn != self.data_fn:
# We're trying to access data in the cache which doesn't exist
# yet setData hasn't been called to setup the right access. Very bad.
- bb.error("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
+ bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
result = bb.data.getVar(var, self.data, exp)
self.depends_cache[fn][var] = result
@@ -127,15 +125,15 @@ class Cache:
self.getVar("__depends", fn, True)
self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
- def loadDataFull(self, fn, cooker):
+ def loadDataFull(self, fn, cfgData):
"""
Return a complete set of data for fn.
To do this, we need to parse the file.
"""
- bb_data, skipped = self.load_bbfile(fn, cooker)
+ bb_data, skipped = self.load_bbfile(fn, cfgData)
return bb_data
- def loadData(self, fn, cooker):
+ def loadData(self, fn, cfgData):
"""
Load a subset of data for fn.
If the cached data is valid we do nothing,
@@ -148,7 +146,7 @@ class Cache:
return True, True
return True, False
- bb_data, skipped = self.load_bbfile(fn, cooker)
+ bb_data, skipped = self.load_bbfile(fn, cfgData)
self.setData(fn, bb_data)
return False, skipped
@@ -175,32 +173,36 @@ class Cache:
# Check file still exists
if self.mtime(fn) == 0:
- bb.debug(2, "Cache: %s not longer exists" % fn)
+            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
self.remove(fn)
return False
# File isn't in depends_cache
if not fn in self.depends_cache:
- bb.debug(2, "Cache: %s is not cached" % fn)
+ bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
self.remove(fn)
return False
# Check the file's timestamp
if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
- bb.debug(2, "Cache: %s changed" % fn)
+ bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
self.remove(fn)
return False
# Check dependencies are still valid
depends = self.getVar("__depends", fn, True)
for f,old_mtime in depends:
+ # Check if file still exists
+ if self.mtime(f) == 0:
+ return False
+
new_mtime = bb.parse.cached_mtime(f)
if (new_mtime > old_mtime):
- bb.debug(2, "Cache: %s's dependency %s changed" % (fn, f))
+ bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
self.remove(fn)
return False
- bb.debug(2, "Depends Cache: %s is clean" % fn)
+ bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
if not fn in self.clean:
self.clean[fn] = ""
@@ -220,7 +222,7 @@ class Cache:
Remove a fn from the cache
Called from the parser in error cases
"""
- bb.debug(1, "Removing %s from cache" % fn)
+ bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
if fn in self.depends_cache:
del self.depends_cache[fn]
if fn in self.clean:
@@ -229,7 +231,7 @@ class Cache:
def sync(self):
"""
Save the cache
- Called from the parser when complete (or exitting)
+ Called from the parser when complete (or exiting)
"""
if not self.has_cache:
@@ -243,12 +245,103 @@ class Cache:
p.dump([self.depends_cache, version_data])
def mtime(self, cachefile):
- try:
- return os.stat(cachefile)[8]
- except OSError:
- return 0
+ return bb.parse.cached_mtime_noerror(cachefile)
- def load_bbfile( self, bbfile , cooker):
+ def handle_data(self, file_name, cacheData):
+ """
+ Save data we need into the cache
+ """
+
+ pn = self.getVar('PN', file_name, True)
+ pv = self.getVar('PV', file_name, True)
+ pr = self.getVar('PR', file_name, True)
+ dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
+ provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
+ depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
+ packages = (self.getVar('PACKAGES', file_name, True) or "").split()
+ packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
+ rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
+
+ cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
+ cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
+
+ # build PackageName to FileName lookup table
+ if pn not in cacheData.pkg_pn:
+ cacheData.pkg_pn[pn] = []
+ cacheData.pkg_pn[pn].append(file_name)
+
+ cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
+
+ # build FileName to PackageName lookup table
+ cacheData.pkg_fn[file_name] = pn
+ cacheData.pkg_pvpr[file_name] = (pv,pr)
+ cacheData.pkg_dp[file_name] = dp
+
+ # Build forward and reverse provider hashes
+ # Forward: virtual -> [filenames]
+ # Reverse: PN -> [virtuals]
+ if pn not in cacheData.pn_provides:
+ cacheData.pn_provides[pn] = Set()
+ cacheData.pn_provides[pn] |= provides
+
+ for provide in provides:
+ if provide not in cacheData.providers:
+ cacheData.providers[provide] = []
+ cacheData.providers[provide].append(file_name)
+
+ cacheData.deps[file_name] = Set()
+ for dep in depends:
+ cacheData.all_depends.add(dep)
+ cacheData.deps[file_name].add(dep)
+
+ # Build reverse hash for PACKAGES, so runtime dependencies
+        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
+ for package in packages:
+ if not package in cacheData.packages:
+ cacheData.packages[package] = []
+ cacheData.packages[package].append(file_name)
+ rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
+
+ for package in packages_dynamic:
+ if not package in cacheData.packages_dynamic:
+ cacheData.packages_dynamic[package] = []
+ cacheData.packages_dynamic[package].append(file_name)
+
+ for rprovide in rprovides:
+ if not rprovide in cacheData.rproviders:
+ cacheData.rproviders[rprovide] = []
+ cacheData.rproviders[rprovide].append(file_name)
+
+        # Build hash of runtime depends and recommends
+
+ def add_dep(deplist, deps):
+ for dep in deps:
+ if not dep in deplist:
+ deplist[dep] = ""
+
+ if not file_name in cacheData.rundeps:
+ cacheData.rundeps[file_name] = {}
+ if not file_name in cacheData.runrecs:
+ cacheData.runrecs[file_name] = {}
+
+ for package in packages + [pn]:
+ if not package in cacheData.rundeps[file_name]:
+ cacheData.rundeps[file_name][package] = {}
+ if not package in cacheData.runrecs[file_name]:
+ cacheData.runrecs[file_name][package] = {}
+
+ add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
+ add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
+ add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
+ add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
+
+ # Collect files we may need for possible world-dep
+ # calculations
+ if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
+ cacheData.possible_world.append(file_name)
+
+
+ def load_bbfile( self, bbfile , config):
"""
Load and parse one .bb build file
Return the data and whether parsing resulted in the file being skipped
@@ -257,25 +350,15 @@ class Cache:
import bb
from bb import utils, data, parse, debug, event, fatal
- topdir = data.getVar('TOPDIR', cooker.configuration.data)
- if not topdir:
- topdir = os.path.abspath(os.getcwd())
- # set topdir to here
- data.setVar('TOPDIR', topdir, cooker.configuration)
- bbfile = os.path.abspath(bbfile)
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
# expand tmpdir to include this topdir
- data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data)
- # set topdir to location of .bb file
- topdir = bbfile_loc
- #data.setVar('TOPDIR', topdir, cfg)
- # go there
+ data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
+ bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
oldpath = os.path.abspath(os.getcwd())
- if self.mtime(topdir):
- os.chdir(topdir)
- bb_data = data.init_db(cooker.configuration.data)
+ if self.mtime(bbfile_loc):
+ os.chdir(bbfile_loc)
+ bb_data = data.init_db(config)
try:
- parse.handle(bbfile, bb_data) # read .bb data
+ bb_data = parse.handle(bbfile, bb_data) # read .bb data
os.chdir(oldpath)
return bb_data, False
except bb.parse.SkipPackage:
@@ -304,3 +387,45 @@ def init(cooker):
"""
return Cache(cooker)
+
+
+#============================================================================#
+# CacheData
+#============================================================================#
+class CacheData:
+ """
+ The data structures we compile from the cached data
+ """
+
+ def __init__(self):
+ """
+ Direct cache variables
+ (from Cache.handle_data)
+ """
+ self.providers = {}
+ self.rproviders = {}
+ self.packages = {}
+ self.packages_dynamic = {}
+ self.possible_world = []
+ self.pkg_pn = {}
+ self.pkg_fn = {}
+ self.pkg_pvpr = {}
+ self.pkg_dp = {}
+ self.pn_provides = {}
+ self.all_depends = Set()
+ self.deps = {}
+ self.rundeps = {}
+ self.runrecs = {}
+ self.task_queues = {}
+ self.task_deps = {}
+ self.stamp = {}
+ self.preferred = {}
+
+ """
+ Indirect Cache variables
+ (set elsewhere)
+ """
+ self.ignored_dependencies = []
+ self.world_target = Set()
+ self.bbfile_priority = {}
+ self.bbfile_config_priorities = []
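handle_data() above fills these CacheData tables from each parsed recipe; for a
single recipe the result looks roughly like this sketch (the file name, package
name and version below are made up):

    cd = CacheData()
    # after cache.handle_data('/recipes/foo_1.0.bb', cd):
    #   cd.pkg_fn['/recipes/foo_1.0.bb']   == 'foo'
    #   cd.pkg_pvpr['/recipes/foo_1.0.bb'] == ('1.0', 'r0')
    #   cd.providers['foo']                == ['/recipes/foo_1.0.bb']
    #   cd.rundeps['/recipes/foo_1.0.bb']['foo']  # exploded RDEPENDS entries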
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 55d1cc9053..819dff9679 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -45,7 +45,8 @@ else:
path = os.path.dirname(os.path.dirname(sys.argv[0]))
sys.path.insert(0,path)
-from bb import note, debug, data_smart
+from bb import data_smart
+import bb
_dict_type = data_smart.DataSmart
@@ -362,10 +363,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
val.rstrip()
if not val:
return 0
+
+ varExpanded = expand(var, d)
if getVarFlag(var, "func", d):
# NOTE: should probably check for unbalanced {} within the var
- o.write("%s() {\n%s\n}\n" % (var, val))
+ o.write("%s() {\n%s\n}\n" % (varExpanded, val))
else:
if getVarFlag(var, "export", d):
o.write('export ')
@@ -375,7 +378,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
# if we're going to output this within doublequotes,
# to a shell, we need to escape the quotes in the var
alter = re.sub('"', '\\"', val.strip())
- o.write('%s="%s"\n' % (var, alter))
+ o.write('%s="%s"\n' % (varExpanded, alter))
return 1
@@ -430,8 +433,38 @@ def update_data(d):
>>> update_data(d)
>>> print getVar('TEST', d)
local
+
+ CopyMonster:
+ >>> e = d.createCopy()
+ >>> setVar('TEST_foo', 'foo', e)
+ >>> update_data(e)
+ >>> print getVar('TEST', e)
+ local
+
+ >>> setVar('OVERRIDES', 'arm:ramses:local:foo', e)
+ >>> update_data(e)
+ >>> print getVar('TEST', e)
+ foo
+
+ >>> f = d.createCopy()
+ >>> setVar('TEST_moo', 'something', f)
+ >>> setVar('OVERRIDES', 'moo:arm:ramses:local:foo', e)
+ >>> update_data(e)
+ >>> print getVar('TEST', e)
+ foo
+
+
+ >>> h = init()
+ >>> setVar('SRC_URI', 'file://append.foo;patch=1 ', h)
+ >>> g = h.createCopy()
+ >>> setVar('SRC_URI_append_arm', 'file://other.foo;patch=1', g)
+ >>> setVar('OVERRIDES', 'arm:moo', g)
+ >>> update_data(g)
+ >>> print getVar('SRC_URI', g)
+ file://append.foo;patch=1 file://other.foo;patch=1
+
"""
- debug(2, "update_data()")
+ bb.msg.debug(2, bb.msg.domain.Data, "update_data()")
# now ask the cookie monster for help
#print "Cookie Monster"
@@ -460,7 +493,7 @@ def update_data(d):
l = len(o)+1
# see if one should even try
- if not o in d._seen_overrides:
+ if not d._seen_overrides.has_key(o):
continue
vars = d._seen_overrides[o]
@@ -469,10 +502,10 @@ def update_data(d):
try:
d[name] = d[var]
except:
- note ("Untracked delVar")
+ bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
# now on to the appends and prepends
- if '_append' in d._special_values:
+ if d._special_values.has_key('_append'):
appends = d._special_values['_append'] or []
for append in appends:
for (a, o) in getVarFlag(append, '_append', d) or []:
@@ -487,7 +520,7 @@ def update_data(d):
setVar(append, sval, d)
- if '_prepend' in d._special_values:
+ if d._special_values.has_key('_prepend'):
prepends = d._special_values['_prepend'] or []
for prepend in prepends:
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index fbd4167fe4..054b852200 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -29,14 +29,12 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig
"""
import copy, os, re, sys, time, types
-from bb import note, debug, error, fatal, utils, methodpool
+import bb
+from bb import utils, methodpool
+from COW import COWDictBase
from sets import Set
+from new import classobj
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
__setvar_keyword__ = ["_append","_prepend"]
 __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
@@ -45,12 +43,14 @@ __expand_python_regexp__ = re.compile(r"\${@.+?}")
class DataSmart:
- def __init__(self):
+ def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
self.dict = {}
# cookie monster tribute
- self._special_values = {}
- self._seen_overrides = {}
+ self._special_values = special
+ self._seen_overrides = seen
+
+ self.expand_cache = {}
def expand(self,s, varname):
def var_sub(match):
@@ -75,6 +75,9 @@ class DataSmart:
if type(s) is not types.StringType: # sanity check
return s
+ if varname and varname in self.expand_cache:
+ return self.expand_cache[varname]
+
while s.find('$') != -1:
olds = s
try:
@@ -82,15 +85,20 @@ class DataSmart:
s = __expand_python_regexp__.sub(python_sub, s)
if s == olds: break
if type(s) is not types.StringType: # sanity check
- error('expansion of %s returned non-string %s' % (olds, s))
+ bb.msg.error(bb.msg.domain.Data, 'expansion of %s returned non-string %s' % (olds, s))
except KeyboardInterrupt:
raise
except:
- note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
+ bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
raise
+
+ if varname:
+ self.expand_cache[varname] = s
+
return s
def initVar(self, var):
+ self.expand_cache = {}
if not var in self.dict:
self.dict[var] = {}
@@ -119,6 +127,7 @@ class DataSmart:
self.initVar(var)
def setVar(self,var,value):
+ self.expand_cache = {}
match = __setvar_regexp__.match(var)
if match and match.group("keyword") in __setvar_keyword__:
base = match.group('base')
@@ -128,6 +137,7 @@ class DataSmart:
l.append([value, override])
self.setVarFlag(base, keyword, l)
+ # todo make sure keyword is not __doc__ or __module__
# pay the cookie monster
try:
self._special_values[keyword].add( base )
@@ -135,10 +145,6 @@ class DataSmart:
self._special_values[keyword] = Set()
self._special_values[keyword].add( base )
- # SRC_URI_append_simpad is both a flag and a override
- #if not override in self._seen_overrides:
- # self._seen_overrides[override] = Set()
- #self._seen_overrides[override].add( base )
return
if not var in self.dict:
@@ -150,7 +156,7 @@ class DataSmart:
# more cookies for the cookie monster
if '_' in var:
override = var[var.rfind('_')+1:]
- if not override in self._seen_overrides:
+ if not self._seen_overrides.has_key(override):
self._seen_overrides[override] = Set()
self._seen_overrides[override].add( var )
@@ -165,6 +171,7 @@ class DataSmart:
return value
def delVar(self,var):
+ self.expand_cache = {}
self.dict[var] = {}
def setVarFlag(self,var,flag,flagvalue):
@@ -234,10 +241,8 @@ class DataSmart:
Create a copy of self by setting _data to self
"""
# we really want this to be a DataSmart...
- data = DataSmart()
+ data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
data.dict["_data"] = self.dict
- data._seen_overrides = copy.deepcopy(self._seen_overrides)
- data._special_values = copy.deepcopy(self._special_values)
return data
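The __setvar_regexp__ used by setVar() splits keys such as SRC_URI_append_arm
into base, keyword and override parts, which is what feeds the _special_values
bookkeeping above; a small sketch:

    import re
    r = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
    m = r.match('SRC_URI_append_arm')
    print m.group('base')      # SRC_URI
    print m.group('keyword')   # _append
    print m.group('add')       # arm - the override the append applies to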
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 7ab0590765..24aebc41ca 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -38,13 +38,16 @@ class NoMethodError(Exception):
class MissingParameterError(Exception):
"""Exception raised when a fetch method is missing a critical parameter in the url"""
+class ParameterError(Exception):
+    """Exception raised when a url cannot be processed due to invalid parameters."""
+
class MD5SumError(Exception):
"""Exception raised when a MD5SUM of a file does not match the expected one"""
def uri_replace(uri, uri_find, uri_replace, d):
-# bb.note("uri_replace: operating on %s" % uri)
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
if not uri or not uri_find or not uri_replace:
- bb.debug(1, "uri_replace: passed an undefined value, not replacing")
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
uri_decoded = list(bb.decodeurl(uri))
uri_find_decoded = list(bb.decodeurl(uri_find))
uri_replace_decoded = list(bb.decodeurl(uri_replace))
@@ -62,9 +65,9 @@ def uri_replace(uri, uri_find, uri_replace, d):
localfn = bb.fetch.localpath(uri, d)
if localfn:
result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
-# bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
else:
-# bb.note("uri_replace: no match")
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
return uri
# else:
# for j in i.keys():
@@ -72,62 +75,94 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded)
methods = []
+urldata = {}
def init(urls = [], d = None):
if d == None:
- bb.debug(2,"BUG init called with None as data object!!!")
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!")
return
for m in methods:
m.urls = []
for u in urls:
+ ud = initdata(u, d)
+ if ud.method:
+ ud.method.urls.append(u)
+
+def initdata(url, d):
+ if url not in urldata:
+ ud = FetchData()
+ (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
+ ud.date = Fetch.getSRCDate(d)
for m in methods:
- m.data = d
- if m.supports(u, d):
- m.urls.append(u)
+ if m.supports(url, ud, d):
+ ud.localpath = m.localpath(url, ud, d)
+ ud.md5 = ud.localpath + '.md5'
+ # if user sets localpath for file, use it instead.
+ if "localpath" in ud.parm:
+ ud.localpath = ud.parm["localpath"]
+ ud.method = m
+ break
+ urldata[url] = ud
+ return urldata[url]
def go(d):
"""Fetch all urls"""
for m in methods:
- if m.urls:
- m.go(d)
+ for u in m.urls:
+ ud = urldata[u]
+ if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[u].md5):
+ # File already present along with md5 stamp file
+ # Touch md5 file to show activity
+ os.utime(ud.md5, None)
+ continue
+ # RP - is olddir needed?
+ # olddir = os.path.abspath(os.getcwd())
+ m.go(u, ud , d)
+ # os.chdir(olddir)
+ if ud.localfile and not m.forcefetch(u, ud, d):
+ Fetch.write_md5sum(u, ud, d)
def localpaths(d):
"""Return a list of the local filenames, assuming successful fetch"""
local = []
for m in methods:
for u in m.urls:
- local.append(m.localpath(u, d))
+ local.append(urldata[u].localpath)
return local
def localpath(url, d):
- for m in methods:
- if m.supports(url, d):
- return m.localpath(url, d)
+ ud = initdata(url, d)
+ if ud.method:
+ return ud.localpath
return url
+class FetchData(object):
+ """Class for fetcher variable store"""
+ def __init__(self):
+ self.localfile = ""
+
+
class Fetch(object):
"""Base class for 'fetch'ing data"""
def __init__(self, urls = []):
self.urls = []
- for url in urls:
- if self.supports(bb.decodeurl(url), d) is 1:
- self.urls.append(url)
- def supports(url, d):
- """Check to see if this fetch class supports a given url.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, urldata, d):
+ """
+ Check to see if this fetch class supports a given url.
"""
return 0
- supports = staticmethod(supports)
- def localpath(url, d):
- """Return the local filename of a given url assuming a successful fetch.
+ def localpath(self, url, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ Can also setup variables in urldata for use in go (saving code duplication
+ and duplicate code execution)
"""
return url
- localpath = staticmethod(localpath)
def setUrls(self, urls):
self.__urls = urls
@@ -137,16 +172,17 @@ class Fetch(object):
urls = property(getUrls, setUrls, None, "Urls property")
- def setData(self, data):
- self.__data = data
+ def forcefetch(self, url, urldata, d):
+ """
+ Force a fetch, even if localpath exists?
+ """
+ return False
- def getData(self):
- return self.__data
-
- data = property(getData, setData, None, "Data property")
-
- def go(self, urls = []):
- """Fetch urls"""
+ def go(self, url, urldata, d):
+ """
+ Fetch urls
+ Assumes localpath was called first
+ """
raise NoMethodError("Missing implementation for url")
def getSRCDate(d):
@@ -155,7 +191,12 @@ class Fetch(object):
d the bb.data module
"""
- return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1 )
+ pn = data.getVar("PN", d, 1)
+
+ if pn:
+ return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)
+
+ return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
getSRCDate = staticmethod(getSRCDate)
def try_mirror(d, tarfn):
@@ -168,6 +209,11 @@ class Fetch(object):
d Is a bb.data instance
tarfn is the name of the tarball
"""
+ tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
+ if os.access(tarpath, os.R_OK):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
+ return True
+
pn = data.getVar('PN', d, True)
src_tarball_stash = None
if pn:
@@ -176,36 +222,45 @@ class Fetch(object):
for stash in src_tarball_stash:
fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
uri = stash + tarfn
- bb.note("fetch " + uri)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
fetchcmd = fetchcmd.replace("${URI}", uri)
ret = os.system(fetchcmd)
if ret == 0:
- bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
return True
return False
try_mirror = staticmethod(try_mirror)
- def check_for_tarball(d, tarfn, dldir, date):
+ def verify_md5sum(ud, got_sum):
"""
- Check for a local copy then check the tarball stash.
- Both checks are skipped if date == 'now'.
-
- d Is a bb.data instance
- tarfn is the name of the tarball
- date is the SRCDATE
+ Verify the md5sum we wanted with the one we got
"""
- if "now" != date:
- dl = os.path.join(dldir, tarfn)
- if os.access(dl, os.R_OK):
- bb.debug(1, "%s already exists, skipping checkout." % tarfn)
- return True
+ wanted_sum = None
+ if 'md5sum' in ud.parm:
+ wanted_sum = ud.parm['md5sum']
+ if not wanted_sum:
+ return True
- # try to use the tarball stash
- if Fetch.try_mirror(d, tarfn):
- return True
- return False
- check_for_tarball = staticmethod(check_for_tarball)
+ return wanted_sum == got_sum
+ verify_md5sum = staticmethod(verify_md5sum)
+ def write_md5sum(url, ud, d):
+ if bb.which(data.getVar('PATH', d), 'md5sum'):
+ try:
+ md5pipe = os.popen('md5sum ' + ud.localpath)
+ md5data = (md5pipe.readline().split() or [ "" ])[0]
+ md5pipe.close()
+ except OSError:
+ md5data = ""
+
+ # verify the md5sum
+ if not Fetch.verify_md5sum(ud, md5data):
+ raise MD5SumError(url)
+
+ md5out = file(ud.md5, 'w')
+ md5out.write(md5data)
+ md5out.close()
+ write_md5sum = staticmethod(write_md5sum)
import cvs
import git
@@ -214,6 +269,7 @@ import svn
import wget
import svk
import ssh
+import perforce
methods.append(cvs.Cvs())
methods.append(git.Git())
@@ -222,3 +278,4 @@ methods.append(svn.Svn())
methods.append(wget.Wget())
methods.append(svk.Svk())
methods.append(ssh.SSH())
+methods.append(perforce.Perforce())
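Under the reworked interface every fetcher receives the per-URL FetchData
object ('ud') prepared by initdata(); a minimal fetcher written against the new
API would look roughly like this sketch (the 'example' scheme and tarball
naming are hypothetical):

    import os
    from bb import data
    from bb.fetch import Fetch

    class Example(Fetch):
        def supports(self, url, ud, d):
            # claim URLs of the made-up 'example' scheme
            return ud.type in ['example']

        def localpath(self, url, ud, d):
            # record the local file name; go() and the md5 stamping rely on it
            ud.localfile = data.expand('example_%s.tar.gz' % ud.host, d)
            return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

        def go(self, url, ud, d):
            # download into ud.localpath here
            pass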
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 0b2477560a..3bdac177eb 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -33,164 +33,119 @@ from bb.fetch import FetchError
from bb.fetch import MissingParameterError
class Cvs(Fetch):
- """Class to fetch a module or modules from cvs repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with cvs.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ """
+ Class to fetch a module or modules from cvs repositories
+ """
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['cvs', 'pserver']
- supports = staticmethod(supports)
+ Check to see if a given url can be fetched with cvs.
+ """
+ return ud.type in ['cvs', 'pserver']
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
-
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("cvs method needs a 'module' parameter")
- else:
- module = parm["module"]
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
- if 'date' in parm:
- date = parm['date']
- else:
- if not tag:
- date = Fetch.getSRCDate(d)
- else:
- date = ""
+ ud.module = ud.parm["module"]
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d))
- localpath = staticmethod(localpath)
+ ud.tag = ""
+ if 'tag' in ud.parm:
+ ud.tag = ud.parm['tag']
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ # Override the default date in certain cases
+ if 'date' in ud.parm:
+ ud.date = ud.parm['date']
+ elif ud.tag:
+ ud.date = ""
+
+ ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
+
+ def go(self, loc, ud, d):
+
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
+ return
+
+ method = "pserver"
+ if "method" in ud.parm:
+ method = ud.parm["method"]
+
+ localdir = ud.module
+ if "localdir" in ud.parm:
+ localdir = ud.parm["localdir"]
+
+ cvs_rsh = None
+ if method == "ext":
+ if "rsh" in ud.parm:
+ cvs_rsh = ud.parm["rsh"]
+
+ if method == "dir":
+ cvsroot = ud.path
+ else:
+ cvsroot = ":" + method + ":" + ud.user
+ if ud.pswd:
+ cvsroot += ":" + ud.pswd
+ cvsroot += "@" + ud.host + ":" + ud.path
+
+ options = []
+ if ud.date:
+ options.append("-D %s" % ud.date)
+ if ud.tag:
+ options.append("-r %s" % ud.tag)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("cvs method needs a 'module' parameter")
- else:
- module = parm["module"]
+ data.setVar('CVSROOT', cvsroot, localdata)
+ data.setVar('CVSCOOPTS', " ".join(options), localdata)
+ data.setVar('CVSMODULE', ud.module, localdata)
+ cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
+ cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-# if local path contains the cvs
-# module, consider the dir above it to be the
-# download directory
-# pos = dlfile.find(module)
-# if pos:
-# dldir = dlfile[:pos]
-# else:
-# dldir = os.path.dirname(dlfile)
-
-# setup cvs options
- options = []
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
-
- if 'date' in parm:
- date = parm['date']
- else:
- if not tag:
- date = Fetch.getSRCDate(d)
- else:
- date = ""
-
- if "method" in parm:
- method = parm["method"]
- else:
- method = "pserver"
-
- if "localdir" in parm:
- localdir = parm["localdir"]
- else:
- localdir = module
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in parm:
- cvs_rsh = parm["rsh"]
-
- tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- if date:
- options.append("-D %s" % date)
- if tag:
- options.append("-r %s" % tag)
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
-# setup cvsroot
- if method == "dir":
- cvsroot = path
- else:
- cvsroot = ":" + method + ":" + user
- if pswd:
- cvsroot += ":" + pswd
- cvsroot += "@" + host + ":" + path
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
-# create module directory
- bb.debug(2, "Fetch: checking for module directory")
- pkg=data.expand('${PN}', d)
- pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir=os.path.join(pkgdir,localdir)
- if os.access(os.path.join(moddir,'CVS'), os.R_OK):
- bb.note("Update " + loc)
-# update sources there
- os.chdir(moddir)
- myret = os.system(cvsupdatecmd)
- else:
- bb.note("Fetch " + loc)
-# check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- bb.debug(1, "Running %s" % cvscmd)
- myret = os.system(cvscmd)
-
- if myret != 0 or not os.access(moddir, os.R_OK):
- try:
- os.rmdir(moddir)
- except OSError:
- pass
- raise FetchError(module)
+ if cvs_rsh:
+ cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+ cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+ # create module directory
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
+ pkg = data.expand('${PN}', d)
+ pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+ moddir = os.path.join(pkgdir,localdir)
+ if os.access(os.path.join(moddir,'CVS'), os.R_OK):
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ # update sources there
os.chdir(moddir)
- os.chdir('..')
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
- os.chdir(olddir)
- del localdata
+ myret = os.system(cvsupdatecmd)
+ else:
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(pkgdir)
+ os.chdir(pkgdir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
+ myret = os.system(cvscmd)
+
+ if myret != 0 or not os.access(moddir, os.R_OK):
+ try:
+ os.rmdir(moddir)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
+
+ os.chdir(moddir)
+ os.chdir('..')
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 49235c141e..75a7629223 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -37,7 +37,7 @@ def prunedir(topdir):
def rungitcmd(cmd,d):
- bb.debug(1, "Running %s" % cmd)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
# Need to export PATH as git is likely to be in metadata paths
# rather than host provided
@@ -48,108 +48,80 @@ def rungitcmd(cmd,d):
if myret != 0:
raise FetchError("Git: %s failed" % pathcmd)
-def gettag(parm):
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
- if not tag:
- tag = "master"
-
- return tag
-
-def getprotocol(parm):
- if 'protocol' in parm:
- proto = parm['protocol']
- else:
- proto = ""
- if not proto:
- proto = "rsync"
-
- return proto
-
-def localfile(url, d):
- """Return the filename to cache the checkout in"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-
- #if user sets localpath for file, use it instead.
- if "localpath" in parm:
- return parm["localpath"]
-
- tag = gettag(parm)
-
- return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag), d)
-
class Git(Fetch):
"""Class to fetch a module or modules from git repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with cvs.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['git']
- supports = staticmethod(supports)
+        Check to see if a given url can be fetched with git.
+ """
+ return ud.type in ['git']
- def localpath(url, d):
+ def localpath(self, url, ud, d):
- return os.path.join(data.getVar("DL_DIR", d, 1), localfile(url, d))
+ ud.proto = "rsync"
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
- localpath = staticmethod(localpath)
+ ud.tag = "master"
+ if 'tag' in ud.parm:
+ ud.tag = ud.parm['tag']
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
- tag = gettag(parm)
- proto = getprotocol(parm)
+ def forcefetch(self, url, ud, d):
+ # tag=="master" must always update
+ if (ud.tag == "master"):
+ return True
+ return False
- gitsrcname = '%s%s' % (host, path.replace('/', '.'))
+ def go(self, loc, ud, d):
+ """Fetch url"""
- repofilename = 'git_%s.tar.gz' % (gitsrcname)
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
- repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
+ return
- coname = '%s' % (tag)
- codir = os.path.join(repodir, coname)
+ gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- cofile = self.localpath(loc, d)
+ repofilename = 'git_%s.tar.gz' % (gitsrcname)
+ repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
+ repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
- # tag=="master" must always update
- if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
- bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile)
- continue
+ coname = '%s' % (ud.tag)
+ codir = os.path.join(repodir, coname)
- if not os.path.exists(repodir):
- if Fetch.try_mirror(d, repofilename):
- bb.mkdirhier(repodir)
- os.chdir(repodir)
- rungitcmd("tar -xzf %s" % (repofile),d)
- else:
- rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
+ if not os.path.exists(repodir):
+ if Fetch.try_mirror(d, repofilename):
+ bb.mkdirhier(repodir)
+ os.chdir(repodir)
+ rungitcmd("tar -xzf %s" % (repofile),d)
+ else:
+ rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d)
- os.chdir(repodir)
- rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
- rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
- rungitcmd("git prune-packed", d)
- # old method of downloading tags
- #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
+ os.chdir(repodir)
+ rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+ rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+ rungitcmd("git prune-packed", d)
+ rungitcmd("git pack-redundant --all | xargs -r rm", d)
+ # Remove all but the .git directory
+ rungitcmd("rm * -Rf", d)
+ # old method of downloading tags
+ #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d)
- os.chdir(repodir)
- bb.note("Creating tarball of git repository")
- rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
+ os.chdir(repodir)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
+ rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
- if os.path.exists(codir):
- prunedir(codir)
+ if os.path.exists(codir):
+ prunedir(codir)
- bb.mkdirhier(codir)
- os.chdir(repodir)
- rungitcmd("git read-tree %s" % (tag),d)
- rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
+ bb.mkdirhier(codir)
+ os.chdir(repodir)
+ rungitcmd("git read-tree %s" % (ud.tag),d)
+ rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
- os.chdir(codir)
- bb.note("Creating tarball of git checkout")
- rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
+ os.chdir(codir)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
+ rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d)
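With the above, the git fetcher recognises the 'protocol' (default rsync) and
'tag' (default master) parameters and keys its cached tarball on host, path and
tag; an illustrative URL and the resulting local file name:

    url = 'git://git.example.com/myproject.git;protocol=rsync;tag=master'
    # localpath() stores the checkout roughly as:
    #   ${DL_DIR}/git_git.example.com.myproject.git_master.tar.gz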
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 51938f823e..5224976704 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -31,15 +31,13 @@ from bb import data
from bb.fetch import Fetch
class Local(Fetch):
- def supports(url, d):
- """Check to see if a given url can be fetched in the local filesystem.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, urldata, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['file','patch']
- supports = staticmethod(supports)
+        Check to see if a given url can be fetched in the local filesystem.
+ """
+ return urldata.type in ['file','patch']
- def localpath(url, d):
+ def localpath(self, url, urldata, d):
"""Return the local filename of a given url assuming a successful fetch.
"""
path = url.split("://")[1]
@@ -52,10 +50,10 @@ class Local(Fetch):
filesdir = data.getVar('FILESDIR', d, 1)
if filesdir:
newpath = os.path.join(filesdir, path)
+ # We don't set localfile as for this fetcher the file is already local!
return newpath
- localpath = staticmethod(localpath)
- def go(self, urls = []):
+ def go(self, url, urldata, d):
"""Fetch urls (no-op for Local method)"""
-# no need to fetch local files, we'll deal with them in place.
+ # no need to fetch local files, we'll deal with them in place.
return 1
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
new file mode 100644
index 0000000000..88acf69951
--- /dev/null
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+Copyright (C) 2003, 2004 Chris Larson
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation; either version 2 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+Place, Suite 330, Boston, MA 02111-1307 USA.
+
+Based on functions from the base bb module, Copyright 2003 Holger Schurig
+"""
+
+import os, re
+import bb
+from bb import data
+from bb.fetch import Fetch
+from bb.fetch import FetchError
+from bb.fetch import MissingParameterError
+
+class Perforce(Fetch):
+ def supports(self, url, ud, d):
+ return ud.type in ['p4']
+
+ def doparse(url,d):
+ parm=[]
+ path = url.split("://")[1]
+ delim = path.find("@");
+ if delim != -1:
+ (user,pswd,host,port) = path.split('@')[0].split(":")
+ path = path.split('@')[1]
+ else:
+ (host,port) = data.getVar('P4PORT', d).split(':')
+ user = ""
+ pswd = ""
+
+ if path.find(";") != -1:
+ keys=[]
+ values=[]
+ plist = path.split(';')
+ for item in plist:
+ if item.count('='):
+ (key,value) = item.split('=')
+ keys.append(key)
+ values.append(value)
+
+ parm = dict(zip(keys,values))
+ path = "//" + path.split(';')[0]
+ host += ":%s" % (port)
+ parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ return host,path,user,pswd,parm
+ doparse = staticmethod(doparse)
+
+ def getcset(d, depot,host,user,pswd,parm):
+ if "cset" in parm:
+ return parm["cset"];
+ if user:
+ data.setVar('P4USER', user, d)
+ if pswd:
+ data.setVar('P4PASSWD', pswd, d)
+ if host:
+ data.setVar('P4PORT', host, d)
+
+ p4date = data.getVar("P4DATE", d, 1)
+ if "revision" in parm:
+ depot += "#%s" % (parm["revision"])
+ elif "label" in parm:
+ depot += "@%s" % (parm["label"])
+ elif p4date:
+ depot += "@%s" % (p4date)
+
+ p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot))
+ p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot))
+ cset = p4file.readline().strip()
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
+ if not cset:
+ return -1
+
+ return cset.split(' ')[1]
+ getcset = staticmethod(getcset)
+
+ def localpath(self, url, ud, d):
+
+ (host,path,user,pswd,parm) = Perforce.doparse(url,d)
+
+ # If a label is specified, we use that as our filename
+
+ if "label" in parm:
+ ud.localfile = "%s.tar.gz" % (parm["label"])
+ return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+ base = path
+ which = path.find('/...')
+ if which != -1:
+ base = path[:which]
+
+ if base[0] == "/":
+ base = base[1:]
+
+ cset = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+ def go(self, loc, ud, d):
+ """
+ Fetch urls
+ """
+
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
+ return
+
+ (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)
+
+ if depot.find('/...') != -1:
+ path = depot[:depot.find('/...')]
+ else:
+ path = depot
+
+ if "module" in parm:
+ module = parm["module"]
+ else:
+ module = os.path.basename(path)
+
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+
+ # Get the p4 command
+ if user:
+ data.setVar('P4USER', user, localdata)
+
+ if pswd:
+ data.setVar('P4PASSWD', pswd, localdata)
+
+ if host:
+ data.setVar('P4PORT', host, localdata)
+
+ p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+
+ # create temp directory
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ raise FetchError(module)
+
+ if "label" in parm:
+ depot = "%s@%s" % (depot,parm["label"])
+ else:
+ cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+ depot = "%s@%s" % (depot,cset)
+
+ os.chdir(tmpfile)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
+ p4file = os.popen("%s files %s" % (p4cmd, depot))
+
+ if not p4file:
+ bb.error("Fetch: unable to get the P4 files from %s" % (depot))
+ raise FetchError(module)
+
+ count = 0
+
+ for file in p4file:
+ list = file.split()
+
+ if list[2] == "delete":
+ continue
+
+ dest = list[0][len(path)+1:]
+ where = dest.find("#")
+
+ os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0]))
+ count = count + 1
+
+ if count == 0:
+ bb.error("Fetch: No files gathered from the P4 fetch")
+ raise FetchError(module)
+
+ myret = os.system("tar -czf %s %s" % (ud.localpath, module))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(module)
+ # cleanup
+ os.system('rm -rf %s' % tmpfile)
+
+
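
A minimal sketch, not part of the patch: how the new Perforce fetcher's localpath() composes the tarball name it stores under DL_DIR. The host, depot base and changeset values below are hypothetical.

    # Sketch of the name built at the end of Perforce.localpath().
    host = "perforce.example.com:1666"
    base = "depot/project"   # depot path with the leading '/' and trailing '/...' stripped
    cset = "10423"           # changeset number as returned by getcset()

    localfile = '%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset)
    print localfile          # perforce.example.com:1666+depot.project+10423.tar.gz
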
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
index 57874d5ba9..e5f69e33e7 100644
--- a/bitbake/lib/bb/fetch/ssh.py
+++ b/bitbake/lib/bb/fetch/ssh.py
@@ -64,59 +64,55 @@ __pattern__ = re.compile(r'''
class SSH(Fetch):
'''Class to fetch a module or modules via Secure Shell'''
- def supports(self, url, d):
+ def supports(self, url, urldata, d):
return __pattern__.match(url) != None
- def localpath(self, url, d):
+ def localpath(self, url, urldata, d):
m = __pattern__.match(url)
path = m.group('path')
host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, 1), host, os.path.basename(path))
+ lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
return lpath
- def go(self, d, urls = []):
- if not urls:
- urls = self.urls
+ def go(self, url, urldata, d):
+ dldir = data.getVar('DL_DIR', d, 1)
- for url in urls:
- dldir = data.getVar('DL_DIR', d, 1)
+ m = __pattern__.match(url)
+ path = m.group('path')
+ host = m.group('host')
+ port = m.group('port')
+ user = m.group('user')
+ password = m.group('pass')
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
+ ldir = os.path.join(dldir, host)
+ lpath = os.path.join(ldir, os.path.basename(path))
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
+ if not os.path.exists(ldir):
+ os.makedirs(ldir)
- if not os.path.exists(ldir):
- os.makedirs(ldir)
+ if port:
+ port = '-P %s' % port
+ else:
+ port = ''
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
+ if user:
+ fr = user
+ if password:
+ fr += ':%s' % password
+ fr += '@%s' % host
+ else:
+ fr = host
+ fr += ':%s' % path
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
+ import commands
+ cmd = 'scp -B -r %s %s %s/' % (
+ port,
+ commands.mkarg(fr),
+ commands.mkarg(ldir)
+ )
- (exitstatus, output) = commands.getstatusoutput(cmd)
- if exitstatus != 0:
- print output
- raise FetchError('Unable to fetch %s' % url)
+ (exitstatus, output) = commands.getstatusoutput(cmd)
+ if exitstatus != 0:
+ print output
+ raise FetchError('Unable to fetch %s' % url)
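
A minimal sketch, not part of the patch: how the reworked SSH.go() assembles the scp source argument for a single url. The user, password, host and path values are hypothetical.

    user, password, host, path = 'builder', 'secret', 'ssh.example.com', '/srv/tarballs/foo.tar.gz'

    if user:
        fr = user
        if password:
            fr += ':%s' % password
        fr += '@%s' % host
    else:
        fr = host
    fr += ':%s' % path

    print fr    # builder:secret@ssh.example.com:/srv/tarballs/foo.tar.gz
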
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index 19103213cd..29270ab3d8 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -42,112 +42,76 @@ from bb.fetch import MissingParameterError
class Svk(Fetch):
"""Class to fetch a module or modules from svk repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with svk.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['svk']
- supports = staticmethod(supports)
+        Check to see if a given url can be fetched with svk.
+ """
+ return ud.type in ['svk']
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
-
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("svk method needs a 'module' parameter")
else:
- module = parm["module"]
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
+ ud.module = ud.parm["module"]
- date = Fetch.getSRCDate(d)
+ ud.revision = ""
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
- localpath = staticmethod(localpath)
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
- def go(self, d, urls = []):
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
+
+ def go(self, loc, ud, d):
"""Fetch urls"""
- if not urls:
- urls = self.urls
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ return
+
+ svkroot = ud.host + ud.path
+
+        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
+
+        if ud.revision:
+            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
+
+ # create temp directory
localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ raise FetchError(ud.module)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("svk method needs a 'module' parameter")
- else:
- module = parm["module"]
+ # check out sources there
+ os.chdir(tmpfile)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
+ myret = os.system(svkcmd)
+ if myret != 0:
+ try:
+ os.rmdir(tmpfile)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-
-# setup svk options
- options = []
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
-
- date = Fetch.getSRCDate(d)
- tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
- svkroot = host + path
-
- data.setVar('SVKROOT', svkroot, localdata)
- data.setVar('SVKCOOPTS', " ".join(options), localdata)
- data.setVar('SVKMODULE', module, localdata)
- svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
-
- if revision:
- svkcmd = "svk co -r %s/%s" % (revision, svkroot, module)
-
-# create temp directory
- bb.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
-# check out sources there
- os.chdir(tmpfile)
- bb.note("Fetch " + loc)
- bb.debug(1, "Running %s" % svkcmd)
- myret = os.system(svkcmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(module)
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
-# cleanup
- os.system('rm -rf %s' % tmpfile)
- os.chdir(olddir)
- del localdata
+ os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
+ # cleanup
+ os.system('rm -rf %s' % tmpfile)
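
For reference, a minimal sketch, not part of the patch: the stash tarball name the reworked svk fetcher computes in localpath() and reuses via try_mirror(). All values are hypothetical.

    module, host, path = 'trunk/foo', 'svk.example.com', '/repos/bar'
    revision, date = '', '20060601'

    localfile = '%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host,
                                           path.replace('/', '.'), revision, date)
    print localfile   # trunk.foo_svk.example.com_.repos.bar__20060601.tar.gz
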
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index d1a959371b..b95de2a79b 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -26,6 +26,7 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig
"""
import os, re
+import sys
import bb
from bb import data
from bb.fetch import Fetch
@@ -34,136 +35,98 @@ from bb.fetch import MissingParameterError
class Svn(Fetch):
"""Class to fetch a module or modules from svn repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with svn.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['svn']
- supports = staticmethod(supports)
+ Check to see if a given url can be fetched with svn.
+ """
+ return ud.type in ['svn']
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
-
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("svn method needs a 'module' parameter")
else:
- module = parm["module"]
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
+ ud.module = ud.parm["module"]
- date = Fetch.getSRCDate(d)
+ ud.revision = ""
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
- localpath = staticmethod(localpath)
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
+
+ def go(self, loc, ud, d):
+ """Fetch url"""
+
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
+ return
+
+ proto = "svn"
+ if "proto" in ud.parm:
+ proto = ud.parm["proto"]
+
+ svn_rsh = None
+ if proto == "svn+ssh" and "rsh" in ud.parm:
+ svn_rsh = ud.parm["rsh"]
+
+ svnroot = ud.host + ud.path
+
+ # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
+ options = []
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+ elif ud.date != "now":
+ options.append("-r {%s}" % ud.date)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("svn method needs a 'module' parameter")
- else:
- module = parm["module"]
+ data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata)
+ data.setVar('SVNCOOPTS', " ".join(options), localdata)
+ data.setVar('SVNMODULE', ud.module, localdata)
+ svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
+ svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-# if local path contains the svn
-# module, consider the dir above it to be the
-# download directory
-# pos = dlfile.find(module)
-# if pos:
-# dldir = dlfile[:pos]
-# else:
-# dldir = os.path.dirname(dlfile)
+ if svn_rsh:
+ svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+ svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)
-# setup svn options
- options = []
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
+ pkg = data.expand('${PN}', d)
+ pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
+ moddir = os.path.join(pkgdir, ud.module)
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")
- date = Fetch.getSRCDate(d)
-
- if "proto" in parm:
- proto = parm["proto"]
- else:
- proto = "svn"
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in parm:
- svn_rsh = parm["rsh"]
-
- tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
- svnroot = host + path
-
- data.setVar('SVNROOT', svnroot, localdata)
- data.setVar('SVNCOOPTS', " ".join(options), localdata)
- data.setVar('SVNMODULE', module, localdata)
- svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
- svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)
-
- if revision:
- svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
- elif date == "now":
- svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
-# create temp directory
- bb.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
-# check out sources there
- os.chdir(tmpfile)
- bb.note("Fetch " + loc)
- bb.debug(1, "Running %s" % svncmd)
+ if os.access(os.path.join(moddir, '.svn'), os.R_OK):
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ # update sources there
+ os.chdir(moddir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
+ myret = os.system(svnupcmd)
+ else:
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(pkgdir)
+ os.chdir(pkgdir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
myret = os.system(svncmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(module)
- os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
-# cleanup
- os.system('rm -rf %s' % tmpfile)
- os.chdir(olddir)
- del localdata
+ if myret != 0:
+ raise FetchError(ud.module)
+
+ os.chdir(pkgdir)
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
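
A minimal sketch, not part of the patch, of the update-versus-checkout decision the reworked svn fetcher now makes; the directory layout is hypothetical.

    import os

    # If a working copy already exists under SVNDIR/PN/module, UPDATECOMMAND is
    # run inside it; otherwise FETCHCOMMAND performs a fresh checkout.
    svndir, pkg, module = '/home/builder/sources/svn', 'foo', 'trunk/foo'
    moddir = os.path.join(svndir, pkg, module)

    if os.access(os.path.join(moddir, '.svn'), os.R_OK):
        print "Update " + moddir    # UPDATECOMMAND path
    else:
        print "Fetch " + moddir     # FETCHCOMMAND path
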
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index e47a8859be..9c9c1675a1 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -30,138 +30,70 @@ import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
-from bb.fetch import MD5SumError
from bb.fetch import uri_replace
class Wget(Fetch):
"""Class to fetch urls via 'wget'"""
- def supports(url, d):
- """Check to see if a given url can be fetched using wget.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['http','https','ftp']
- supports = staticmethod(supports)
+        Check to see if a given url can be fetched with wget.
+ """
+ return ud.type in ['http','https','ftp']
- def localpath(url, d):
-# strip off parameters
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
- url = bb.encodeurl([type, host, path, user, pswd, {}])
+ def localpath(self, url, ud, d):
- return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url))
- localpath = staticmethod(localpath)
+ url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
+ ud.basename = os.path.basename(ud.path)
+ ud.localfile = data.expand(os.path.basename(url), d)
- def go(self, d, urls = []):
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+ def go(self, uri, ud, d):
"""Fetch urls"""
- def md5_sum(parm, d):
- """
- Return the MD5SUM associated with the to be downloaded
- file.
- It can return None if no md5sum is associated
- """
- try:
- return parm['md5sum']
- except:
- return None
-
- def verify_md5sum(wanted_sum, got_sum):
- """
- Verify the md5sum we wanted with the one we got
- """
- if not wanted_sum:
- return True
-
- return wanted_sum == got_sum
-
- def fetch_uri(uri, basename, dl, md5, parm, d):
- # the MD5 sum we want to verify
- wanted_md5sum = md5_sum(parm, d)
- if os.path.exists(dl):
-# file exists, but we didnt complete it.. trying again..
+ def fetch_uri(uri, ud, d):
+ if os.path.exists(ud.localpath):
+            # file exists, but we didn't complete it.. trying again..
fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
else:
fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
- bb.note("fetch " + uri)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
fetchcmd = fetchcmd.replace("${URI}", uri)
- fetchcmd = fetchcmd.replace("${FILE}", basename)
- bb.debug(2, "executing " + fetchcmd)
+ fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
ret = os.system(fetchcmd)
if ret != 0:
return False
# check if sourceforge did send us to the mirror page
- dl_dir = data.getVar("DL_DIR", d, True)
- if not os.path.exists(dl):
- os.system("rm %s*" % dl) # FIXME shell quote it
- bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
+ if not os.path.exists(ud.localpath):
+ os.system("rm %s*" % ud.localpath) # FIXME shell quote it
+                bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net sent us to the mirror on %s" % ud.basename)
return False
-# supposedly complete.. write out md5sum
- if bb.which(data.getVar('PATH', d), 'md5sum'):
- try:
- md5pipe = os.popen('md5sum ' + dl)
- md5data = (md5pipe.readline().split() or [ "" ])[0]
- md5pipe.close()
- except OSError:
- md5data = ""
-
- # verify the md5sum
- if not verify_md5sum(wanted_md5sum, md5data):
- raise MD5SumError(uri)
-
- md5out = file(md5, 'w')
- md5out.write(md5data)
- md5out.close()
return True
- if not urls:
- urls = self.urls
-
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for uri in urls:
- completed = 0
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
- basename = os.path.basename(path)
- dl = self.localpath(uri, d)
- dl = data.expand(dl, localdata)
- md5 = dl + '.md5'
+ premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
+ for (find, replace) in premirrors:
+ newuri = uri_replace(uri, find, replace, d)
+ if newuri != uri:
+ if fetch_uri(newuri, ud, localdata):
+ return
- if os.path.exists(md5):
-# complete, nothing to see here..
- continue
+ if fetch_uri(uri, ud, localdata):
+ return
- premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
- for (find, replace) in premirrors:
- newuri = uri_replace(uri, find, replace, d)
- if newuri != uri:
- if fetch_uri(newuri, basename, dl, md5, parm, localdata):
- completed = 1
- break
+ # try mirrors
+ mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
+ for (find, replace) in mirrors:
+ newuri = uri_replace(uri, find, replace, d)
+ if newuri != uri:
+ if fetch_uri(newuri, ud, localdata):
+ return
- if completed:
- continue
-
- if fetch_uri(uri, basename, dl, md5, parm, localdata):
- continue
-
-# try mirrors
- mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
- for (find, replace) in mirrors:
- newuri = uri_replace(uri, find, replace, d)
- if newuri != uri:
- if fetch_uri(newuri, basename, dl, md5, parm, localdata):
- completed = 1
- break
-
- if not completed:
- raise FetchError(uri)
-
- del localdata
+ raise FetchError(uri)
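
For context, a minimal sketch, not part of the patch: the PREMIRRORS/MIRRORS value the wget fetcher walks is a newline-separated list of "find replace" pairs. The mirror URLs below are hypothetical.

    premirrors_value = ("ftp://.*/.*/ http://downloads.example.com/sources/\n"
                        "http://.*/.*/ http://mirror.example.org/")

    # Same split the fetcher performs before trying each (find, replace) pair.
    premirrors = [ i.split() for i in premirrors_value.split('\n') if i ]
    for (find, replace) in premirrors:
        print find, '->', replace
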
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
index d7434ed33e..e14986bc19 100644
--- a/bitbake/lib/bb/methodpool.py
+++ b/bitbake/lib/bb/methodpool.py
@@ -61,9 +61,6 @@ def insert_method(modulename, code, fn):
comp = better_compile(code, "", fn )
better_exec(comp, __builtins__, code, fn)
- # hack hack hack XXX
- return
-
# now some instrumentation
code = comp.co_names
for name in code:
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
new file mode 100644
index 0000000000..473851cc72
--- /dev/null
+++ b/bitbake/lib/bb/msg.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'msg' implementation
+
+Message handling infrastructure for bitbake
+
+Copyright (C) 2006 Richard Purdie
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation; either version 2 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+Place, Suite 330, Boston, MA 02111-1307 USA.
+
+"""
+
+import sys, os, re, bb
+from bb import utils
+
+debug_level = {}
+
+verbose = False
+
+domain = bb.utils.Enum(
+ 'Build',
+ 'Cache',
+ 'Collection',
+ 'Data',
+ 'Depends',
+ 'Fetcher',
+ 'Parsing',
+ 'Provider',
+ 'RunQueue',
+ 'TaskData',
+ 'Util')
+
+#
+# Message control functions
+#
+
+def set_debug_level(level):
+ bb.msg.debug_level = {}
+ for domain in bb.msg.domain:
+ bb.msg.debug_level[domain] = level
+ bb.msg.debug_level['default'] = level
+
+def set_verbose(level):
+ bb.msg.verbose = level
+
+def set_debug_domains(domains):
+ for domain in domains:
+ found = False
+ for ddomain in bb.msg.domain:
+ if domain == str(ddomain):
+ bb.msg.debug_level[ddomain] = bb.msg.debug_level[ddomain] + 1
+ found = True
+ if not found:
+ std_warn("Logging domain %s is not valid, ignoring" % domain)
+
+#
+# Message handling functions
+#
+
+def debug(level, domain, msg, fn = None):
+ if debug_level[domain] >= level:
+ print 'DEBUG: ' + msg
+
+def note(level, domain, msg, fn = None):
+ if level == 1 or verbose or debug_level[domain] >= 1:
+ std_note(msg)
+
+def warn(domain, msg, fn = None):
+ std_warn(msg)
+
+def error(domain, msg, fn = None):
+ std_error(msg)
+
+def fatal(domain, msg, fn = None):
+ std_fatal(msg)
+
+#
+# Compatibility functions for the original message interface
+#
+def std_debug(lvl, msg):
+ if debug_level['default'] >= lvl:
+ print 'DEBUG: ' + msg
+
+def std_note(msg):
+ print 'NOTE: ' + msg
+
+def std_warn(msg):
+ print 'WARNING: ' + msg
+
+def std_error(msg):
+ print 'ERROR: ' + msg
+
+def std_fatal(msg):
+ print 'ERROR: ' + msg
+ sys.exit(1)
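
A minimal sketch, not part of the patch, of how the new bb.msg module is meant to be driven, assuming bb.msg is importable the same way the fetchers above use it.

    import bb.msg

    bb.msg.set_debug_level(0)                # every domain starts at level 0
    bb.msg.set_debug_domains(["Fetcher"])    # raise only the Fetcher domain to 1
    bb.msg.set_verbose(True)

    bb.msg.debug(1, bb.msg.domain.Fetcher, "printed: Fetcher is now at level 1")
    bb.msg.debug(1, bb.msg.domain.Data, "suppressed: Data is still at level 0")
    bb.msg.note(1, bb.msg.domain.Build, "printed: level 1 notes always show")
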
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index 58e17d154a..70fdba03b4 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -37,11 +37,16 @@ class SkipPackage(Exception):
__mtime_cache = {}
def cached_mtime(f):
if not __mtime_cache.has_key(f):
- update_mtime(f)
+ __mtime_cache[f] = os.stat(f)[8]
return __mtime_cache[f]
-def update_mtime(f):
- __mtime_cache[f] = os.stat(f)[8]
+def cached_mtime_noerror(f):
+ if not __mtime_cache.has_key(f):
+ try:
+ __mtime_cache[f] = os.stat(f)[8]
+ except OSError:
+ return 0
+ return __mtime_cache[f]
def mark_dependency(d, f):
if f.startswith('./'):
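
For reference, a minimal sketch, not part of the patch, of the behavioural difference between cached_mtime() and the new cached_mtime_noerror(); the file name is hypothetical.

    from bb import parse

    # The noerror variant returns 0 for missing files instead of raising.
    print parse.cached_mtime_noerror("/path/that/does/not/exist.conf")   # 0
    try:
        parse.cached_mtime("/path/that/does/not/exist.conf")
    except OSError:
        print "cached_mtime still lets OSError escape"
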
diff --git a/bitbake/lib/bb/parse/parse_c/BBHandler.py b/bitbake/lib/bb/parse/parse_c/BBHandler.py
index d9f48db17b..b430e1f4e5 100644
--- a/bitbake/lib/bb/parse/parse_c/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_c/BBHandler.py
@@ -5,33 +5,33 @@
Reads a .bb file and obtains its metadata (using a C++ parser)
Copyright (C) 2006 Tim Robert Ansell
- Copyright (C) 2006 Holger Hans Peter Freyther
-
+ Copyright (C) 2006 Holger Hans Peter Freyther
+
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
- SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
- DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
- THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+ SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+ THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
-import os
+import os, sys
# The Module we will use here
import bb
@@ -61,51 +61,126 @@ def supports(fn, data):
return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc" or fn[-5:] == ".conf"
def init(fn, data):
- if not data.getVar('TOPDIR'):
- bb.error('TOPDIR is not set')
- if not data.getVar('BBPATH'):
- bb.error('BBPATH is not set')
+ if not bb.data.getVar('TOPDIR', data):
+ bb.data.setVar('TOPDIR', os.getcwd(), data)
+ if not bb.data.getVar('BBPATH', data):
+ bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake'), data)
+
+def handle_inherit(d):
+ """
+ Handle inheriting of classes. This will load all default classes.
+    It could be faster and could detect infinite inheritance loops, but both
+    are still TODO. The delayed import of bb.parse may also impose a penalty.
+ """
+    from bb import data
+    from bb.parse import handle
+
+ files = (data.getVar('INHERIT', d, True) or "").split()
+    if not "base" in files:
+ files[0:0] = ["base"]
+
+ __inherit_cache = data.getVar('__inherit_cache', d) or []
+ for f in files:
+ file = data.expand(f, d)
+ if file[0] != "/" and file[-8:] != ".bbclass":
+ file = os.path.join('classes', '%s.bbclass' % file)
+
+ if not file in __inherit_cache:
+            bb.msg.debug(2, bb.msg.domain.Parsing, "BB inheriting %s" % file)
+ __inherit_cache.append( file )
+
+ try:
+ handle(file, d, True)
+ except IOError:
+ print "Failed to inherit %s" % file
+ data.setVar('__inherit_cache', __inherit_cache, d)
def handle(fn, d, include):
- print ""
- print "fn: %s" % fn
- print "data: %s" % d
- print dir(d)
- print d.getVar.__doc__
- print "include: %s" % include
+ from bb import data, parse
+
+ (root, ext) = os.path.splitext(os.path.basename(fn))
+ base_name = "%s%s" % (root,ext)
+
+ # initialize with some data
+ init(fn,d)
# check if we include or are the beginning
+    oldfile = None
+    is_conf = False
     if include:
-        oldfile = d.getVar('FILE')
-    else:
-        #d.inheritFromOS()
-        oldfile = None
+        oldfile = d.getVar('FILE', False)
+    elif ext == ".conf":
+        is_conf = True
+        data.inheritFromOS(d)
# find the file
if not os.path.isabs(fn):
- bb.error("No Absolute FILE name")
- abs_fn = bb.which(d.getVar('BBPATH'), fn)
+ abs_fn = bb.which(d.getVar('BBPATH', True), fn)
else:
abs_fn = fn
# check if the file exists
if not os.path.exists(abs_fn):
- raise IOError("file '%(fn)' not found" % locals() )
+ raise IOError("file '%(fn)s' not found" % locals() )
# now we know the file is around mark it as dep
if include:
parse.mark_dependency(d, abs_fn)
+ # manipulate the bbpath
+ if ext != ".bbclass" and ext != ".conf":
+ old_bb_path = data.getVar('BBPATH', d)
+ data.setVar('BBPATH', os.path.dirname(abs_fn) + (":%s" %old_bb_path) , d)
+
+ # handle INHERITS and base inherit
+ if ext != ".bbclass" and ext != ".conf":
+ data.setVar('FILE', fn, d)
+        handle_inherit(d)
+
# now parse this file - by defering it to C++
- parsefile(fn, d)
+ parsefile(abs_fn, d, is_conf)
+
+ # Finish it up
+ if include == 0:
+ data.expandKeys(d)
+ data.update_data(d)
+ #### !!! XXX Finish it up by executing the anonfunc
+
# restore the original FILE
if oldfile:
d.setVar('FILE', oldfile)
+ # restore bbpath
+ if ext != ".bbclass" and ext != ".conf":
+ data.setVar('BBPATH', old_bb_path, d )
+
+
return d
+
+# Needed for BitBake files...
+__pkgsplit_cache__={}
+def vars_from_file(mypkg, d):
+ if not mypkg:
+ return (None, None, None)
+ if mypkg in __pkgsplit_cache__:
+ return __pkgsplit_cache__[mypkg]
+
+ myfile = os.path.splitext(os.path.basename(mypkg))
+ parts = myfile[0].split('_')
+ __pkgsplit_cache__[mypkg] = parts
+ exp = 3 - len(parts)
+ tmplist = []
+ while exp != 0:
+ exp -= 1
+ tmplist.append(None)
+ parts.extend(tmplist)
+ return parts
+
+
+
+
# Inform bitbake that we are a parser
# We need to define all three
from bb.parse import handlers
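
As an aside, a minimal sketch, not part of the patch: the recipe-name splitting vars_from_file() performs, shown as a hypothetical standalone helper so it runs without the compiled parser.

    import os

    def split_recipe_name(mypkg):
        # Mirrors vars_from_file(): split "<pn>_<pv>_<pr>.bb", pad missing parts with None.
        parts = os.path.splitext(os.path.basename(mypkg))[0].split('_')
        parts.extend([None] * (3 - len(parts)))
        return parts

    print split_recipe_name("net-snmp_5.1.2.bb")    # ['net-snmp', '5.1.2', None]
    print split_recipe_name("busybox_1.01_r2.bb")   # ['busybox', '1.01', 'r2']
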
diff --git a/bitbake/lib/bb/parse/parse_c/Makefile b/bitbake/lib/bb/parse/parse_c/Makefile
index 9eb7ce9d08..77daccb72d 100644
--- a/bitbake/lib/bb/parse/parse_c/Makefile
+++ b/bitbake/lib/bb/parse/parse_c/Makefile
@@ -1,6 +1,6 @@
-test: bitbakec.so
- python test.py
+build: bitbakec.so
+ echo "Done"
bitbakescanner.cc: bitbakescanner.l
flex -t bitbakescanner.l > bitbakescanner.cc
@@ -28,9 +28,9 @@ bitbakec.so: bitbakec.o bitbakeparser.o bitbakescanner.o
g++ -shared -fPIC bitbakeparser.o bitbakescanner.o bitbakec.o -o bitbakec.so
clean:
- rm *.out
- rm *.cc
- rm bitbakec.c
- rm bitbakec-processed.c
- rm *.o
- rm *.so
+ rm -f *.out
+ rm -f *.cc
+ rm -f bitbakec.c
+ rm -f bitbakec-processed.c
+ rm -f *.o
+ rm -f *.so
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakec.pyx b/bitbake/lib/bb/parse/parse_c/bitbakec.pyx
index 362cc2021e..c666e9b6b1 100644
--- a/bitbake/lib/bb/parse/parse_c/bitbakec.pyx
+++ b/bitbake/lib/bb/parse/parse_c/bitbakec.pyx
@@ -6,96 +6,107 @@ cdef extern from "stdio.h":
FILE *fopen(char*, char*)
int fclose(FILE *fp)
+cdef extern from "string.h":
+ int strlen(char*)
cdef extern from "lexerc.h":
ctypedef struct lex_t:
void* parser
void* scanner
+ char* name
FILE* file
+ int config
void* data
int lineError
int errorParse
- cdef extern void parse(FILE*, object)
+ cdef extern int parse(FILE*, char*, object, int)
-def parsefile(object file, object data):
- print "parsefile: 1", file, data
+def parsefile(object file, object data, object config):
+ #print "parsefile: 1", file, data
# Open the file
cdef FILE* f
f = fopen(file, "r")
- print "parsefile: 2 opening file"
+ #print "parsefile: 2 opening file"
if (f == NULL):
raise IOError("No such file %s." % file)
- print "parsefile: 3 parse"
- parse(f, data)
+ #print "parsefile: 3 parse"
+ parse(f, file, data, config)
# Close the file
- print "parsefile: 4 closing"
fclose(f)
-
+
cdef public void e_assign(lex_t* container, char* key, char* what):
- print "e_assign", key, what
+ #print "e_assign", key, what
+ if what == NULL:
+ print "FUTURE Warning empty string: use \"\""
+ what = ""
+
d =