Update to bitbake 1.4.2 (latest stable branch release). This includes the caching speedups

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@371 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
Richard Purdie 2006-05-09 15:44:08 +00:00
parent ed234aca98
commit 27dba1e624
28 changed files with 2394 additions and 1637 deletions

View File

@ -1,5 +1,6 @@
Phil Blundell <pb@handhelds.org>
Holger Freyther <zecke@handhelds.org>
Chris Larson <kergoth@handhelds.org>
Mickey Lauer <mickey@Vanille.de>
Richard Purdie <rpurdie@rpsys.net>
Holger Schurig <holgerschurig@gmx.de>
Phil Blundell <pb@handhelds.org>

View File

@ -1,9 +1,26 @@
Changes in BitBake 1.3.x:
Changes in BitBake 1.4.2:
- Send logs to oe.pastebin.com instead of pastebin.com
fixes #856
- Copy the internal bitbake data before building the
dependency graph. This fixes nano not having a
virtual/libc dependency
- Allow multiple TARBALL_STASH entries
- Cache, check if the directory exists before changing
into it
- git speedup cloning by not doing a checkout
- allow to have spaces in filenames (.conf, .bb, .bbclass)
Changes in BitBake 1.4.0:
- Fix to check both RDEPENDS and RDEPENDS_${PN}
- Fix a RDEPENDS parsing bug in utils:explode_deps()
- Update git fetcher behaviour to match git changes
- ASSUME_PROVIDED allowed to include runtime packages
- git fetcher cleanup and efficiency improvements
- Change the format of the cache
- Update usermanual to document the Fetchers
- Major changes to caching with a new strategy
giving a major performance increase when reparsing
with few data changes
Changes in BitBake 1.3.3:
- Create a new Fetcher module to ease the

View File

@ -2,10 +2,12 @@ AUTHORS
ChangeLog
MANIFEST
setup.py
bin/bitdoc
bin/bbimage
bin/bitbake
lib/bb/__init__.py
lib/bb/build.py
lib/bb/cache.py
lib/bb/data.py
lib/bb/data_smart.py
lib/bb/event.py
@ -18,6 +20,7 @@ lib/bb/fetch/svk.py
lib/bb/fetch/svn.py
lib/bb/fetch/wget.py
lib/bb/manifest.py
lib/bb/methodpool.py
lib/bb/parse/__init__.py
lib/bb/parse/parse_py/BBHandler.py
lib/bb/parse/parse_py/ConfHandler.py
@ -30,5 +33,6 @@ doc/manual/html.css
doc/manual/Makefile
doc/manual/usermanual.xml
contrib/bbdev.sh
contrib/vim/syntax/bitbake.vim
conf/bitbake.conf
classes/base.bbclass

View File

@ -24,14 +24,14 @@
import sys, os, getopt, glob, copy, os.path, re, time
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
from bb import utils, data, parse, debug, event, fatal
from bb import utils, data, parse, debug, event, fatal, cache
from sets import Set
import itertools, optparse
parsespin = itertools.cycle( r'|/-\\' )
bbdebug = 0
__version__ = "1.3.3.2"
__version__ = "1.4.3"
#============================================================================#
# BBParsingStatus
@ -44,7 +44,6 @@ class BBParsingStatus:
"""
def __init__(self):
self.cache_dirty = False
self.providers = {}
self.rproviders = {}
self.packages = {}
@ -60,34 +59,35 @@ class BBParsingStatus:
self.pkg_dp = {}
self.pn_provides = {}
self.all_depends = Set()
self.build_all = {}
self.rundeps = {}
self.runrecs = {}
self.stamp = {}
def handle_bb_data(self, file_name, bb_data, cached):
def handle_bb_data(self, file_name, bb_cache, cached):
"""
We will fill the dictionaries with the stuff we
need to build the dependency tree faster
"""
if bb_data == None:
return
if not cached:
self.cache_dirty = True
pn = bb.data.getVar('PN', bb_data, True)
pv = bb.data.getVar('PV', bb_data, True)
pr = bb.data.getVar('PR', bb_data, True)
dp = int(bb.data.getVar('DEFAULT_PREFERENCE', bb_data, True) or "0")
provides = Set([pn] + (bb.data.getVar("PROVIDES", bb_data, 1) or "").split())
depends = (bb.data.getVar("DEPENDS", bb_data, True) or "").split()
packages = (bb.data.getVar('PACKAGES', bb_data, True) or "").split()
packages_dynamic = (bb.data.getVar('PACKAGES_DYNAMIC', bb_data, True) or "").split()
rprovides = (bb.data.getVar("RPROVIDES", bb_data, 1) or "").split()
pn = bb_cache.getVar('PN', file_name, True)
pv = bb_cache.getVar('PV', file_name, True)
pr = bb_cache.getVar('PR', file_name, True)
dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split())
depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split()
packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split()
packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split()
# build PackageName to FileName lookup table
if pn not in self.pkg_pn:
self.pkg_pn[pn] = []
self.pkg_pn[pn].append(file_name)
self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0")
self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True)
# build FileName to PackageName lookup table
self.pkg_fn[file_name] = pn
self.pkg_pvpr[file_name] = (pv,pr)
@ -114,7 +114,7 @@ class BBParsingStatus:
if not package in self.packages:
self.packages[package] = []
self.packages[package].append(file_name)
rprovides += (bb.data.getVar("RPROVIDES_%s" % package, bb_data, 1) or "").split()
rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
for package in packages_dynamic:
if not package in self.packages_dynamic:
@ -126,9 +126,32 @@ class BBParsingStatus:
self.rproviders[rprovide] = []
self.rproviders[rprovide].append(file_name)
# Build hash of runtime depends and recommends
def add_dep(deplist, deps):
for dep in deps:
if not dep in deplist:
deplist[dep] = ""
if not file_name in self.rundeps:
self.rundeps[file_name] = {}
if not file_name in self.runrecs:
self.runrecs[file_name] = {}
for package in packages + [pn]:
if not package in self.rundeps[file_name]:
self.rundeps[file_name][package] = {}
if not package in self.runrecs[file_name]:
self.runrecs[file_name][package] = {}
add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or ""))
add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or ""))
add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
# Collect files we may need for possible world-dep
# calculations
if not bb.data.getVar('BROKEN', bb_data, True) and not bb.data.getVar('EXCLUDE_FROM_WORLD', bb_data, True):
if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True):
self.possible_world.append(file_name)
@ -166,7 +189,6 @@ class BBConfiguration( object ):
def __init__( self, options ):
for key, val in options.__dict__.items():
setattr( self, key, val )
self.data = data.init()
#============================================================================#
# BBCooker
@ -190,8 +212,8 @@ class BBCooker:
self.stats = BBStatistics()
self.status = None
self.pkgdata = None
self.cache = None
self.bb_cache = None
def tryBuildPackage( self, fn, item, the_data ):
"""Build one package"""
@ -226,10 +248,11 @@ class BBCooker:
If build_depends is empty, we're dealing with a runtime depends
"""
the_data = self.pkgdata[fn]
the_data = self.bb_cache.loadDataFull(fn, self)
if not buildAllDeps:
buildAllDeps = bb.data.getVar('BUILD_ALL_DEPS', the_data, True) or False
# Only follow all (runtime) dependencies if doing a build
if not buildAllDeps and self.configuration.cmd is "build":
buildAllDeps = self.status.build_all[fn]
# Error on build time dependency loops
if build_depends and build_depends.count(fn) > 1:
@ -402,12 +425,15 @@ class BBCooker:
print "%-30s %20s %20s" % (p, latest[0][0] + "-" + latest[0][1],
prefstr)
def showEnvironment( self ):
"""Show the outer or per-package environment"""
if self.configuration.buildfile:
self.cb = None
self.bb_cache = bb.cache.init(self)
try:
self.configuration.data, fromCache = self.load_bbfile( self.configuration.buildfile )
self.configuration.data = self.bb_cache.loadDataFull(self.configuration.buildfile, self)
except IOError, e:
fatal("Unable to read %s: %s" % ( self.configuration.buildfile, e ))
except Exception, e:
@ -457,11 +483,10 @@ class BBCooker:
# look to see if one of them is already staged, or marked as preferred.
# if so, bump it to the head of the queue
for p in providers:
the_data = self.pkgdata[p]
pn = bb.data.getVar('PN', the_data, 1)
pv = bb.data.getVar('PV', the_data, 1)
pr = bb.data.getVar('PR', the_data, 1)
stamp = '%s.do_populate_staging' % bb.data.getVar('STAMP', the_data, 1)
pn = self.status.pkg_fn[p]
pv, pr = self.status.pkg_pvpr[p]
stamp = '%s.do_populate_staging' % self.status.stamp[p]
if os.path.exists(stamp):
(newvers, fn) = preferred_versions[pn]
if not fn in eligible:
@ -470,11 +495,11 @@ class BBCooker:
oldver = "%s-%s" % (pv, pr)
newver = '-'.join(newvers)
if (newver != oldver):
extra_chat = "; upgrading from %s to %s" % (oldver, newver)
extra_chat = "%s (%s) already staged but upgrading to %s to satisfy %s" % (pn, oldver, newver, item)
else:
extra_chat = ""
extra_chat = "Selecting already-staged %s (%s) to satisfy %s" % (pn, oldver, item)
if self.configuration.verbose:
bb.note("selecting already-staged %s to satisfy %s%s" % (pn, item, extra_chat))
bb.note("%s" % extra_chat)
eligible.remove(fn)
eligible = [fn] + eligible
discriminated = True
@ -656,20 +681,11 @@ class BBCooker:
rdepends = []
self.rbuild_cache.append(item)
the_data = self.pkgdata[fn]
pn = self.status.pkg_fn[fn]
if (item == pn):
rdepends += bb.utils.explode_deps(bb.data.getVar('RDEPENDS', the_data, True) or "")
rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS', the_data, True) or "")
rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % pn, the_data, True) or "")
rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS_%s' % pn, the_data, True) or "")
else:
packages = (bb.data.getVar('PACKAGES', the_data, 1).split() or "")
for package in packages:
if package == item:
rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % package, the_data, True) or "")
rdepends += bb.utils.explode_deps(bb.data.getVar("RRECOMMENDS_%s" % package, the_data, True) or "")
if fn in self.status.rundeps and item in self.status.rundeps[fn]:
rdepends += self.status.rundeps[fn][item].keys()
if fn in self.status.runrecs and item in self.status.runrecs[fn]:
rdepends += self.status.runrecs[fn][item].keys()
bb.debug(2, "Additional runtime dependencies for %s are: %s" % (item, " ".join(rdepends)))
@ -684,6 +700,9 @@ class BBCooker:
all_depends = self.status.all_depends
pn_provides = self.status.pn_provides
localdata = data.createCopy(self.configuration.data)
bb.data.update_data(localdata)
def calc_bbfile_priority(filename):
for (regex, pri) in self.status.bbfile_config_priorities:
if regex.match(filename):
@ -691,17 +710,22 @@ class BBCooker:
return 0
# Handle PREFERRED_PROVIDERS
for p in (bb.data.getVar('PREFERRED_PROVIDERS', self.configuration.data, 1) or "").split():
for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
(providee, provider) = p.split(':')
if providee in self.preferred and self.preferred[providee] != provider:
bb.error("conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.preferred[providee]))
self.preferred[providee] = provider
# Calculate priorities for each file
for p in self.pkgdata.keys():
for p in self.status.pkg_fn.keys():
self.status.bbfile_priority[p] = calc_bbfile_priority(p)
# Build package list for "bitbake world"
def buildWorldTargetList(self):
"""
Build package list for "bitbake world"
"""
all_depends = self.status.all_depends
pn_provides = self.status.pn_provides
bb.debug(1, "collating packages for \"world\"")
for f in self.status.possible_world:
terminal = True
@ -724,9 +748,10 @@ class BBCooker:
self.status.possible_world = None
self.status.all_depends = None
def myProgressCallback( self, x, y, f, file_data, from_cache ):
def myProgressCallback( self, x, y, f, bb_cache, from_cache ):
# feed the status with new input
self.status.handle_bb_data(f, file_data, from_cache)
self.status.handle_bb_data(f, bb_cache, from_cache)
if bbdebug > 0:
return
@ -755,6 +780,13 @@ class BBCooker:
def parseConfigurationFile( self, afile ):
try:
self.configuration.data = bb.parse.handle( afile, self.configuration.data )
# Add the handlers we inherited by INHERITS
# FIXME: This assumes that we included at least one .inc file
for var in bb.data.keys(self.configuration.data):
if bb.data.getVarFlag(var, 'handler', self.configuration.data):
bb.event.register(var,bb.data.getVar(var,self.configuration.data))
except IOError:
bb.fatal( "Unable to open %s" % afile )
except bb.parse.ParseError, details:
@ -786,6 +818,12 @@ class BBCooker:
def cook( self, configuration, args ):
"""
We are building stuff here. We do the building
from here. By default we try to execute task
build.
"""
self.configuration = configuration
if not self.configuration.cmd:
@ -801,6 +839,13 @@ class BBCooker:
self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )
#
# Special updated configuration we use for firing events
#
self.configuration.event_data = bb.data.createCopy(self.configuration.data)
bb.data.update_data(self.configuration.event_data)
if self.configuration.show_environment:
self.showEnvironment()
sys.exit( 0 )
@ -876,18 +921,18 @@ class BBCooker:
print "Requested parsing .bb files only. Exiting."
return
bb.data.update_data( self.configuration.data )
self.buildDepgraph()
if self.configuration.show_versions:
self.showVersions()
sys.exit( 0 )
if 'world' in pkgs_to_build:
self.buildWorldTargetList()
pkgs_to_build.remove('world')
for t in self.status.world_target:
pkgs_to_build.append(t)
bb.event.fire(bb.event.BuildStarted(buildname, pkgs_to_build, self.configuration.data))
bb.event.fire(bb.event.BuildStarted(buildname, pkgs_to_build, self.configuration.event_data))
failures = 0
for k in pkgs_to_build:
@ -905,7 +950,7 @@ class BBCooker:
if self.configuration.abort:
sys.exit(1)
bb.event.fire(bb.event.BuildCompleted(buildname, pkgs_to_build, self.configuration.data, failures))
bb.event.fire(bb.event.BuildCompleted(buildname, pkgs_to_build, self.configuration.event_data, failures))
sys.exit( self.stats.show() )
@ -932,77 +977,12 @@ class BBCooker:
return []
return finddata.readlines()
def deps_clean(self, d):
depstr = data.getVar('__depends', d)
if depstr:
deps = depstr.split(" ")
for dep in deps:
(f,old_mtime_s) = dep.split("@")
old_mtime = int(old_mtime_s)
new_mtime = parse.cached_mtime(f)
if (new_mtime > old_mtime):
return False
return True
def load_bbfile( self, bbfile ):
"""Load and parse one .bb build file"""
if not self.cache in [None, '']:
# get the times
cache_mtime = data.init_db_mtime(self.cache, bbfile)
file_mtime = parse.cached_mtime(bbfile)
if file_mtime > cache_mtime:
#print " : '%s' dirty. reparsing..." % bbfile
pass
else:
#print " : '%s' clean. loading from cache..." % bbfile
cache_data = data.init_db( self.cache, bbfile, False )
if self.deps_clean(cache_data):
return cache_data, True
topdir = data.getVar('TOPDIR', self.configuration.data)
if not topdir:
topdir = os.path.abspath(os.getcwd())
# set topdir to here
data.setVar('TOPDIR', topdir, self.configuration)
bbfile = os.path.abspath(bbfile)
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
# expand tmpdir to include this topdir
data.setVar('TMPDIR', data.getVar('TMPDIR', self.configuration.data, 1) or "", self.configuration.data)
# set topdir to location of .bb file
topdir = bbfile_loc
#data.setVar('TOPDIR', topdir, cfg)
# go there
oldpath = os.path.abspath(os.getcwd())
os.chdir(topdir)
bb = data.init_db(self.cache,bbfile, True, self.configuration.data)
try:
parse.handle(bbfile, bb) # read .bb data
if not self.cache in [None, '']:
bb.commit(parse.cached_mtime(bbfile)) # write cache
os.chdir(oldpath)
return bb, False
finally:
os.chdir(oldpath)
def collect_bbfiles( self, progressCallback ):
"""Collect all available .bb build files"""
self.cb = progressCallback
parsed, cached, skipped, masked = 0, 0, 0, 0
self.cache = bb.data.getVar( "CACHE", self.configuration.data, 1 )
self.pkgdata = data.pkgdata( not self.cache in [None, ''], self.cache, self.configuration.data )
self.bb_cache = bb.cache.init(self)
if not self.cache in [None, '']:
if self.cb is not None:
print "NOTE: Using cache in '%s'" % self.cache
try:
os.stat( self.cache )
except OSError:
bb.mkdirhier( self.cache )
else:
if self.cb is not None:
print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
data.setVar("BBFILES", " ".join(files), self.configuration.data)
@ -1037,43 +1017,49 @@ class BBCooker:
# read a file's metadata
try:
bb_data, fromCache = self.load_bbfile(f)
if fromCache: cached += 1
fromCache, skip = self.bb_cache.loadData(f, self)
if skip:
skipped += 1
#bb.note("Skipping %s" % f)
self.bb_cache.skip(f)
continue
elif fromCache: cached += 1
else: parsed += 1
deps = None
if bb_data is not None:
# allow metadata files to add items to BBFILES
#data.update_data(self.pkgdata[f])
addbbfiles = data.getVar('BBFILES', bb_data) or None
if addbbfiles:
for aof in addbbfiles.split():
if not files.count(aof):
if not os.path.isabs(aof):
aof = os.path.join(os.path.dirname(f),aof)
files.append(aof)
for var in bb_data.keys():
if data.getVarFlag(var, "handler", bb_data) and data.getVar(var, bb_data):
event.register(data.getVar(var, bb_data))
self.pkgdata[f] = bb_data
# allow metadata files to add items to BBFILES
#data.update_data(self.pkgdata[f])
addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
if addbbfiles:
for aof in addbbfiles.split():
if not files.count(aof):
if not os.path.isabs(aof):
aof = os.path.join(os.path.dirname(f),aof)
files.append(aof)
# now inform the caller
if self.cb is not None:
self.cb( i + 1, len( newfiles ), f, bb_data, fromCache )
self.cb( i + 1, len( newfiles ), f, self.bb_cache, fromCache )
except IOError, e:
self.bb_cache.remove(f)
bb.error("opening %s: %s" % (f, e))
pass
except bb.parse.SkipPackage:
skipped += 1
pass
except KeyboardInterrupt:
self.bb_cache.sync()
raise
except Exception, e:
self.bb_cache.remove(f)
bb.error("%s while parsing %s" % (e, f))
except:
self.bb_cache.remove(f)
raise
if self.cb is not None:
print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ),
self.bb_cache.sync()
#============================================================================#
# main
#============================================================================#

Binary file not shown.

View File

@ -51,5 +51,5 @@ T = "${WORKDIR}/temp"
TARGET_ARCH = "${BUILD_ARCH}"
TMPDIR = "${TOPDIR}/tmp"
UPDATECOMMAND = ""
UPDATECOMMAND_cvs = "/usr/bin/env cvs update ${CVSCOOPTS}"
UPDATECOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} update ${CVSCOOPTS}"
WORKDIR = "${TMPDIR}/work/${PF}"

View File

@ -23,7 +23,7 @@ this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA.
"""
__version__ = "1.3.3.4"
__version__ = "1.4.3"
__all__ = [
@ -60,7 +60,9 @@ __all__ = [
"event",
"build",
"fetch",
"manifest"
"manifest",
"methodpool",
"cache",
]
whitespace = '\t\n\x0b\x0c\r '

306
bitbake/lib/bb/cache.py Normal file
View File

@ -0,0 +1,306 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Cache' implementation
Caching of bitbake variables before task execution
# Copyright (C) 2006 Richard Purdie
# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA.
"""
import os, re
import bb.data
import bb.utils
try:
import cPickle as pickle
except ImportError:
import pickle
print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
# __cache_version__ = "123"
__cache_version__ = "124" # changes the __depends structure
class Cache:
"""
BitBake Cache implementation
"""
def __init__(self, cooker):
self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
self.clean = {}
self.depends_cache = {}
self.data = None
self.data_fn = None
if self.cachedir in [None, '']:
self.has_cache = False
if cooker.cb is not None:
print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
else:
self.has_cache = True
self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
if cooker.cb is not None:
print "NOTE: Using cache in '%s'" % self.cachedir
try:
os.stat( self.cachedir )
except OSError:
bb.mkdirhier( self.cachedir )
if self.has_cache and (self.mtime(self.cachefile)):
try:
p = pickle.Unpickler( file(self.cachefile,"rb"))
self.depends_cache, version_data = p.load()
if version_data['CACHE_VER'] != __cache_version__:
raise ValueError, 'Cache Version Mismatch'
if version_data['BITBAKE_VER'] != bb.__version__:
raise ValueError, 'Bitbake Version Mismatch'
except (ValueError, KeyError):
bb.note("Invalid cache found, rebuilding...")
self.depends_cache = {}
if self.depends_cache:
for fn in self.depends_cache.keys():
self.clean[fn] = ""
self.cacheValidUpdate(fn)
def getVar(self, var, fn, exp = 0):
"""
Gets the value of a variable
(similar to getVar in the data class)
There are two scenarios:
1. We have cached data - serve from depends_cache[fn]
2. We're learning what data to cache - serve from data
backend but add a copy of the data to the cache.
"""
if fn in self.clean:
return self.depends_cache[fn][var]
if not fn in self.depends_cache:
self.depends_cache[fn] = {}
if fn != self.data_fn:
# We're trying to access data in the cache which doesn't exist
# yet setData hasn't been called to setup the right access. Very bad.
bb.error("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
result = bb.data.getVar(var, self.data, exp)
self.depends_cache[fn][var] = result
return result
def setData(self, fn, data):
"""
Called to prime bb_cache ready to learn which variables to cache.
Will be followed by calls to self.getVar which aren't cached
but can be fulfilled from self.data.
"""
self.data_fn = fn
self.data = data
# Make sure __depends makes the depends_cache
self.getVar("__depends", fn, True)
self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
def loadDataFull(self, fn, cooker):
"""
Return a complete set of data for fn.
To do this, we need to parse the file.
"""
bb_data, skipped = self.load_bbfile(fn, cooker)
return bb_data
def loadData(self, fn, cooker):
"""
Load a subset of data for fn.
If the cached data is valid we do nothing,
To do this, we need to parse the file and set the system
to record the variables accessed.
Return the cache status and whether the file was skipped when parsed
"""
if self.cacheValid(fn):
if "SKIPPED" in self.depends_cache[fn]:
return True, True
return True, False
bb_data, skipped = self.load_bbfile(fn, cooker)
self.setData(fn, bb_data)
return False, skipped
def cacheValid(self, fn):
"""
Is the cache valid for fn?
Fast version, no timestamps checked.
"""
# Is cache enabled?
if not self.has_cache:
return False
if fn in self.clean:
return True
return False
def cacheValidUpdate(self, fn):
"""
Is the cache valid for fn?
Make thorough (slower) checks including timestamps.
"""
# Is cache enabled?
if not self.has_cache:
return False
# Check file still exists
if self.mtime(fn) == 0:
bb.debug(2, "Cache: %s not longer exists" % fn)
self.remove(fn)
return False
# File isn't in depends_cache
if not fn in self.depends_cache:
bb.debug(2, "Cache: %s is not cached" % fn)
self.remove(fn)
return False
# Check the file's timestamp
if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
bb.debug(2, "Cache: %s changed" % fn)
self.remove(fn)
return False
# Check dependencies are still valid
depends = self.getVar("__depends", fn, True)
for f,old_mtime in depends:
new_mtime = bb.parse.cached_mtime(f)
if (new_mtime > old_mtime):
bb.debug(2, "Cache: %s's dependency %s changed" % (fn, f))
self.remove(fn)
return False
bb.debug(2, "Depends Cache: %s is clean" % fn)
if not fn in self.clean:
self.clean[fn] = ""
return True
def skip(self, fn):
"""
Mark a fn as skipped
Called from the parser
"""
if not fn in self.depends_cache:
self.depends_cache[fn] = {}
self.depends_cache[fn]["SKIPPED"] = "1"
def remove(self, fn):
"""
Remove a fn from the cache
Called from the parser in error cases
"""
bb.debug(1, "Removing %s from cache" % fn)
if fn in self.depends_cache:
del self.depends_cache[fn]
if fn in self.clean:
del self.clean[fn]
def sync(self):
"""
Save the cache
Called from the parser when complete (or exitting)
"""
if not self.has_cache:
return
version_data = {}
version_data['CACHE_VER'] = __cache_version__
version_data['BITBAKE_VER'] = bb.__version__
p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
p.dump([self.depends_cache, version_data])
def mtime(self, cachefile):
try:
return os.stat(cachefile)[8]
except OSError:
return 0
def load_bbfile( self, bbfile , cooker):
"""
Load and parse one .bb build file
Return the data and whether parsing resulted in the file being skipped
"""
import bb
from bb import utils, data, parse, debug, event, fatal
topdir = data.getVar('TOPDIR', cooker.configuration.data)
if not topdir:
topdir = os.path.abspath(os.getcwd())
# set topdir to here
data.setVar('TOPDIR', topdir, cooker.configuration)
bbfile = os.path.abspath(bbfile)
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
# expand tmpdir to include this topdir
data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data)
# set topdir to location of .bb file
topdir = bbfile_loc
#data.setVar('TOPDIR', topdir, cfg)
# go there
oldpath = os.path.abspath(os.getcwd())
if self.mtime(topdir):
os.chdir(topdir)
bb_data = data.init_db(cooker.configuration.data)
try:
parse.handle(bbfile, bb_data) # read .bb data
os.chdir(oldpath)
return bb_data, False
except bb.parse.SkipPackage:
os.chdir(oldpath)
return bb_data, True
except:
os.chdir(oldpath)
raise
def init(cooker):
    """
    Construct the cooker's Cache object.

    The goal is to cache the minimum amount of data possible while still
    reaching the package-build stage (tryBuild) without reparsing any .bb
    file: getVar calls are intercepted and only the variables actually
    accessed are stored.  Per cached file we track its mtime, the mtimes
    of all its dependencies, and whether parsing raised SkipPackage;
    files that fail to parse are evicted from the cache.
    """
    cache = Cache(cooker)
    return cache

View File

@ -7,6 +7,18 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
The expandData and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
applying the skills from the not yet passed 'Entwurf und
Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.
This is a trade-off between speed and memory again but
the speed is more critical here.
Copyright (C) 2003, 2004 Chris Larson
Copyright (C) 2005 Holger Hans Peter Freyther
@ -36,88 +48,15 @@ sys.path.insert(0,path)
from bb import note, debug, data_smart
_dict_type = data_smart.DataSmart
_dict_p_type = data_smart.DataSmartPackage
class DataDictFull(dict):
"""
This implements our Package Data Storage Interface.
setDirty is a no op as all items are held in memory
"""
def setDirty(self, bbfile, data):
"""
No-Op we assume data was manipulated as some sort of
reference
"""
if not bbfile in self:
raise Exception("File %s was not in dictionary before" % bbfile)
self[bbfile] = data
class DataDictCache:
"""
Databacked Dictionary implementation
"""
def __init__(self, cache_dir, config):
self.cache_dir = cache_dir
self.files = []
self.dirty = {}
self.config = config
def has_key(self,key):
return key in self.files
def keys(self):
return self.files
def __setitem__(self, key, data):
"""
Add the key to the list of known files and
place the data in the cache?
"""
if key in self.files:
return
self.files.append(key)
def __getitem__(self, key):
if not key in self.files:
return None
# if it was dirty we will
if key in self.dirty:
return self.dirty[key]
# not cached yet
return _dict_p_type(self.cache_dir, key,False,self.config)
def setDirty(self, bbfile, data):
"""
Only already added items can be declared dirty!!!
"""
if not bbfile in self.files:
raise Exception("File %s was not in dictionary before" % bbfile)
self.dirty[bbfile] = data
def init():
return _dict_type()
def init_db(cache,name,clean,parent = None):
return _dict_p_type(cache,name,clean,parent)
def init_db_mtime(cache,cache_bbfile):
return _dict_p_type.mtime(cache,cache_bbfile)
def pkgdata(use_cache, cache, config = None):
"""
Return some sort of dictionary to lookup parsed dictionaires
"""
if use_cache:
return DataDictCache(cache, config)
return DataDictFull()
def init_db(parent = None):
if parent:
return parent.createCopy()
else:
return _dict_type()
def createCopy(source):
"""Link the source set to the destination
@ -273,6 +212,27 @@ def setData(newData, d):
"""Sets the data object to the supplied value"""
d = newData
##
## Cookie Monsters' query functions
##
def _get_override_vars(d, override):
"""
Internal!!!
Get the Names of Variables that have a specific
override. This function returns a iterable
Set or an empty list
"""
return []
def _get_var_flags_triple(d):
"""
Internal!!!
"""
return []
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
@ -303,43 +263,7 @@ def expand(s, d, varname = None):
>>> print expand('${SRC_URI}', d)
http://somebug.${TARGET_MOO}
"""
def var_sub(match):
key = match.group()[2:-1]
if varname and key:
if varname == key:
raise Exception("variable %s references itself!" % varname)
var = getVar(key, d, 1)
if var is not None:
return var
else:
return match.group()
def python_sub(match):
import bb
code = match.group()[3:-1]
locals()['d'] = d
s = eval(code)
if type(s) == types.IntType: s = str(s)
return s
if type(s) is not types.StringType: # sanity check
return s
while s.find('$') != -1:
olds = s
try:
s = __expand_var_regexp__.sub(var_sub, s)
s = __expand_python_regexp__.sub(python_sub, s)
if s == olds: break
if type(s) is not types.StringType: # sanity check
import bb
bb.error('expansion of %s returned non-string %s' % (olds, s))
except KeyboardInterrupt:
raise
except:
note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
raise
return s
return d.expand(s, varname)
def expandKeys(alterdata, readdata = None):
if readdata == None:
@ -356,7 +280,7 @@ def expandKeys(alterdata, readdata = None):
# setVarFlags(ekey, copy.copy(getVarFlags(key, readdata)), alterdata)
setVar(ekey, val, alterdata)
for i in ('_append', '_prepend', '_delete'):
for i in ('_append', '_prepend'):
dest = getVarFlag(ekey, i, alterdata) or []
src = getVarFlag(key, i, readdata) or []
dest.extend(src)
@ -507,67 +431,76 @@ def update_data(d):
>>> print getVar('TEST', d)
local
"""
debug(2, "update_data()")
# can't do delete env[...] while iterating over the dictionary, so remember them
dodel = []
# now ask the cookie monster for help
#print "Cookie Monster"
#print "Append/Prepend %s" % d._special_values
#print "Overrides %s" % d._seen_overrides
overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or []
def applyOverrides(var, d):
if not overrides:
debug(1, "OVERRIDES not defined, nothing to do")
return
val = getVar(var, d)
for o in overrides:
if var.endswith("_" + o):
l = len(o)+1
name = var[:-l]
#
# Well let us see what breaks here. We used to iterate
# over each variable and apply the override and then
# do the line expanding.
# If we have bad luck - which we will have - the keys
# where in some order that is so important for this
# method which we don't have anymore.
# Anyway we will fix that and write test cases this
# time.
#
# First we apply all overrides
# Then we will handle _append and _prepend
#
for o in overrides:
# calculate '_'+override
l = len(o)+1
# see if one should even try
if not o in d._seen_overrides:
continue
vars = d._seen_overrides[o]
for var in vars:
name = var[:-l]
try:
d[name] = d[var]
except:
note ("Untracked delVar")
for s in keys(d):
applyOverrides(s, d)
sval = getVar(s, d) or ""
# Handle line appends:
for (a, o) in getVarFlag(s, '_append', d) or []:
# maybe the OVERRIDE was not yet added so keep the append
if (o and o in overrides) or not o:
delVarFlag(s, '_append', d)
if o:
if not o in overrides:
# now on to the appends and prepends
if '_append' in d._special_values:
appends = d._special_values['_append'] or []
for append in appends:
for (a, o) in getVarFlag(append, '_append', d) or []:
# maybe the OVERRIDE was not yet added so keep the append
if (o and o in overrides) or not o:
delVarFlag(append, '_append', d)
if o and not o in overrides:
continue
sval+=a
setVar(s, sval, d)
# Handle line prepends
for (a, o) in getVarFlag(s, '_prepend', d) or []:
# maybe the OVERRIDE was not yet added so keep the append
if (o and o in overrides) or not o:
delVarFlag(s, '_prepend', d)
if o:
if not o in overrides:
sval = getVar(append,d) or ""
sval+=a
setVar(append, sval, d)
if '_prepend' in d._special_values:
prepends = d._special_values['_prepend'] or []
for prepend in prepends:
for (a, o) in getVarFlag(prepend, '_prepend', d) or []:
# maybe the OVERRIDE was not yet added so keep the prepend
if (o and o in overrides) or not o:
delVarFlag(prepend, '_prepend', d)
if o and not o in overrides:
continue
sval=a+sval
setVar(s, sval, d)
# Handle line deletions
name = s + "_delete"
nameval = getVar(name, d)
if nameval:
sval = getVar(s, d)
if sval:
new = ''
pattern = nameval.replace('\n','').strip()
for line in sval.split('\n'):
if line.find(pattern) == -1:
new = new + '\n' + line
setVar(s, new, d)
dodel.append(name)
sval = a + (getVar(prepend,d) or "")
setVar(prepend, sval, d)
# delete all environment vars no longer needed
for s in dodel:
delVar(s, d)
def inherits_class(klass, d):
val = getVar('__inherit_cache', d) or ""

View File

@ -8,7 +8,7 @@ BitBake build tools.
Copyright (C) 2003, 2004 Chris Larson
Copyright (C) 2004, 2005 Seb Frankengul
Copyright (C) 2005 Holger Hans Peter Freyther
Copyright (C) 2005, 2006 Holger Hans Peter Freyther
Copyright (C) 2005 Uli Luckas
Copyright (C) 2005 ROAD GmbH
@ -29,7 +29,8 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig
"""
import copy, os, re, sys, time, types
from bb import note, debug, fatal, utils
from bb import note, debug, error, fatal, utils, methodpool
from sets import Set
try:
import cPickle as pickle
@ -37,9 +38,8 @@ except ImportError:
import pickle
print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
__setvar_keyword__ = ["_append","_prepend","_delete"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_delete)(_(?P<add>.*))?')
__setvar_keyword__ = ["_append","_prepend"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
@ -48,6 +48,10 @@ class DataSmart:
def __init__(self):
self.dict = {}
# cookie monster tribute
self._special_values = {}
self._seen_overrides = {}
def expand(self,s, varname):
def var_sub(match):
key = match.group()[2:-1]
@ -78,8 +82,7 @@ class DataSmart:
s = __expand_python_regexp__.sub(python_sub, s)
if s == olds: break
if type(s) is not types.StringType: # sanity check
import bb
bb.error('expansion of %s returned non-string %s' % (olds, s))
error('expansion of %s returned non-string %s' % (olds, s))
except KeyboardInterrupt:
raise
except:
@ -91,18 +94,6 @@ class DataSmart:
if not var in self.dict:
self.dict[var] = {}
def pickle_prep(self, cfg):
if "_data" in self.dict:
if self.dict["_data"] == cfg:
self.dict["_data"] = "cfg";
else: # this is an unknown array for the moment
pass
def unpickle_prep(self, cfg):
if "_data" in self.dict:
if self.dict["_data"] == "cfg":
self.dict["_data"] = cfg;
def _findVar(self,var):
_dest = self.dict
@ -116,14 +107,6 @@ class DataSmart:
return _dest[var]
return None
def _copyVar(self,var,name):
local_var = self._findVar(var)
if local_var:
self.dict[name] = copy.copy(local_var)
else:
debug(1,"Warning, _copyVar %s to %s, %s does not exists" % (var,name,var))
def _makeShadowCopy(self, var):
if var in self.dict:
return
@ -142,11 +125,20 @@ class DataSmart:
keyword = match.group("keyword")
override = match.group('add')
l = self.getVarFlag(base, keyword) or []
if override == 'delete':
if l.count([value, None]):
del l[l.index([value, None])]
l.append([value, override])
self.setVarFlag(base, match.group("keyword"), l)
self.setVarFlag(base, keyword, l)
# pay the cookie monster
try:
self._special_values[keyword].add( base )
except:
self._special_values[keyword] = Set()
self._special_values[keyword].add( base )
# SRC_URI_append_simpad is both a flag and a override
#if not override in self._seen_overrides:
# self._seen_overrides[override] = Set()
#self._seen_overrides[override].add( base )
return
if not var in self.dict:
@ -155,6 +147,13 @@ class DataSmart:
self.delVarFlag(var, 'matchesenv')
self.setVarFlag(var, 'export', 1)
# more cookies for the cookie monster
if '_' in var:
override = var[var.rfind('_')+1:]
if not override in self._seen_overrides:
self._seen_overrides[override] = Set()
self._seen_overrides[override].add( var )
# setting var
self.dict[var]["content"] = value
@ -237,6 +236,8 @@ class DataSmart:
# we really want this to be a DataSmart...
data = DataSmart()
data.dict["_data"] = self.dict
data._seen_overrides = copy.deepcopy(self._seen_overrides)
data._special_values = copy.deepcopy(self._special_values)
return data
@ -254,98 +255,11 @@ class DataSmart:
return keytab.keys()
def __getitem__(self,item):
start = self.dict
while start:
if item in start:
return start[item]
elif "_data" in start:
start = start["_data"]
else:
start = None
return None
#print "Warning deprecated"
return self.getVar(item, False)
def __setitem__(self,var,data):
self._makeShadowCopy(var)
self.dict[var] = data
#print "Warning deprecated"
self.setVar(var,data)
class DataSmartPackage(DataSmart):
"""
Persistent Data Storage
"""
def sanitize_filename(bbfile):
return bbfile.replace( '/', '_' )
sanitize_filename = staticmethod(sanitize_filename)
def unpickle(self):
"""
Restore the dict from memory
"""
cache_bbfile = self.sanitize_filename(self.bbfile)
p = pickle.Unpickler( file("%s/%s"%(self.cache,cache_bbfile),"rb"))
self.dict = p.load()
self.unpickle_prep()
funcstr = self.getVar('__functions__', 0)
if funcstr:
comp = utils.better_compile(funcstr, "<pickled>", self.bbfile)
utils.better_exec(comp, __builtins__, funcstr, self.bbfile)
def linkDataSet(self):
if not self.parent == None:
# assume parent is a DataSmartInstance
self.dict["_data"] = self.parent.dict
def __init__(self,cache,name,clean,parent):
"""
Construct a persistent data instance
"""
#Initialize the dictionary
DataSmart.__init__(self)
self.cache = cache
self.bbfile = os.path.abspath( name )
self.parent = parent
# Either unpickle the data or do copy on write
if clean:
self.linkDataSet()
else:
self.unpickle()
def commit(self, mtime):
"""
Save the package to a permanent storage
"""
self.pickle_prep()
cache_bbfile = self.sanitize_filename(self.bbfile)
p = pickle.Pickler(file("%s/%s" %(self.cache,cache_bbfile), "wb" ), -1 )
p.dump( self.dict )
self.unpickle_prep()
def mtime(cache,bbfile):
cache_bbfile = DataSmartPackage.sanitize_filename(bbfile)
try:
return os.stat( "%s/%s" % (cache,cache_bbfile) )[8]
except OSError:
return 0
mtime = staticmethod(mtime)
def pickle_prep(self):
"""
If self.dict contains a _data key and it is a configuration
we will remember we had a configuration instance attached
"""
if "_data" in self.dict:
if self.dict["_data"] == self.parent:
dest["_data"] = "cfg"
def unpickle_prep(self):
"""
If we had a configuration instance attached, we will reattach it
"""
if "_data" in self.dict:
if self.dict["_data"] == "cfg":
self.dict["_data"] = self.parent

View File

@ -44,7 +44,13 @@ class Event:
NotHandled = 0
Handled = 1
handlers = []
Registered = 10
AlreadyRegistered = 14
# Internal
_handlers = []
_handlers_dict = {}
def tmpHandler(event):
"""Default handler for code events"""
@ -57,7 +63,7 @@ def defaultTmpHandler():
def fire(event):
"""Fire off an Event"""
for h in handlers:
for h in _handlers:
if type(h).__name__ == "code":
exec(h)
if tmpHandler(event) == Handled:
@ -67,15 +73,22 @@ def fire(event):
return Handled
return NotHandled
def register(handler):
def register(name, handler):
"""Register an Event handler"""
# already registered
if name in _handlers_dict:
return AlreadyRegistered
if handler is not None:
# handle string containing python code
if type(handler).__name__ == "str":
return _registerCode(handler)
# prevent duplicate registration
if not handler in handlers:
handlers.append(handler)
_registerCode(handler)
else:
_handlers.append(handler)
_handlers_dict[name] = 1
return Registered
def _registerCode(handlerStr):
"""Register a 'code' Event.
@ -88,24 +101,23 @@ def _registerCode(handlerStr):
tmp = "def tmpHandler(e):\n%s" % handlerStr
comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
# prevent duplicate registration
if not comp in handlers:
handlers.append(comp)
_handlers.append(comp)
def remove(handler):
def remove(name, handler):
"""Remove an Event handler"""
for h in handlers:
if type(handler).__name__ == "str":
return _removeCode(handler)
if handler is h:
handlers.remove(handler)
_handlers_dict.pop(name)
if type(handler).__name__ == "str":
return _removeCode(handler)
else:
_handlers.remove(handler)
def _removeCode(handlerStr):
"""Remove a 'code' Event handler
Deprecated interface; call remove instead."""
tmp = "def tmpHandler(e):\n%s" % handlerStr
comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._removeCode")
handlers.remove(comp)
_handlers.remove(comp)
def getName(e):
"""Returns the name of a class or class instance"""

View File

@ -168,10 +168,6 @@ class Fetch(object):
d Is a bb.data instance
tarfn is the name of the tarball
"""
tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
if os.access(tarpath, os.R_OK):
return True
pn = data.getVar('PN', d, True)
src_tarball_stash = None
if pn:

View File

@ -129,7 +129,7 @@ class Git(Fetch):
os.chdir(repodir)
rungitcmd("tar -xzf %s" % (repofile),d)
else:
rungitcmd("git clone %s://%s%s %s" % (proto, host, path, repodir),d)
rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
os.chdir(repodir)
rungitcmd("git pull %s://%s%s" % (proto, host, path),d)

View File

@ -0,0 +1,101 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
#
# Copyright (C) 2006 Holger Hans Peter Freyther
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name Holger Hans Peter Freyther nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
What is a method pool?
BitBake has a global method scope where .bb, .inc and .bbclass
files can install methods. These methods are parsed from strings.
To avoid recompiling and executing these string we introduce
a method pool to do this task.
This pool will be used to compile and execute the functions. It
will be smart enough to avoid compiling and executing the same
module code more than once.
"""
from bb.utils import better_compile, better_exec
from bb import error
# A dict of modules we have handled
# it is the number of .bbclasses + x in size
_parsed_methods = { }
_parsed_fns = { }
def insert_method(modulename, code, fn):
    """
    Compile and execute the python code of a module, installing its
    methods into the global method scope.

    No duplicate checking is performed here; callers that want
    "insert only once" semantics should use check_insert_method().

    Variables:
        @modulename a short name e.g. base.bbclass
        @code       the actual python code of the module
        @fn         the filename of the outer file (used in tracebacks)
    """
    comp = better_compile(code, "<bb>", fn )
    better_exec(comp, __builtins__, code, fn)
    # NOTE(review): a disabled instrumentation pass (recording each
    # compiled function name in _parsed_fns and warning on duplicates)
    # used to live here, but it sat after an unconditional `return` and
    # was unreachable; it has been removed as dead code.
def check_insert_method(modulename, code, fn):
    """
    Add the code only if it wasn't added before, keyed by module name.

    Variables:
        @modulename a short name e.g. base.bbclass
        @code The actual python code
        @fn The filename from the outer file
    """
    # Record the module before inserting so that subsequent calls (and
    # parsed_module()) see it as handled. Previously nothing ever
    # populated _parsed_methods, so the "was it added before" check
    # could never trigger and every call recompiled the code.
    if not modulename in _parsed_methods:
        _parsed_methods[modulename] = 1
        return insert_method(modulename, code, fn)
def parsed_module(modulename):
    """
    Return whether the module with the given name has been
    parsed (i.e. recorded in the method pool) already.
    """
    return _parsed_methods.has_key(modulename)
def get_parsed_dict():
    """
    Shortcut: return the module-name bookkeeping dictionary.

    Note: this returns the live internal dict, not a copy, so callers
    share (and can mutate) the pool's state.
    """
    return _parsed_methods

View File

@ -46,9 +46,9 @@ def update_mtime(f):
def mark_dependency(d, f):
if f.startswith('./'):
f = "%s/%s" % (os.getcwd(), f[2:])
deps = (bb.data.getVar('__depends', d) or "").split()
deps.append("%s@%s" % (f, cached_mtime(f)))
bb.data.setVar('__depends', " ".join(deps), d)
deps = bb.data.getVar('__depends', d) or []
deps.append( (f, cached_mtime(f)) )
bb.data.setVar('__depends', deps, d)
def supports(fn, data):
"""Returns true if we have a handler for this file, false otherwise"""

View File

@ -1,29 +1,42 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2006 Holger Hans Peter Freyther
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
# THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""class for handling .bb files (using a C++ parser)
from bb import data
from bb.parse import ParseError
Reads a .bb file and obtains its metadata (using a C++ parser)
Copyright (C) 2006 Tim Robert Ansell
Copyright (C) 2006 Holger Hans Peter Freyther
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
# The Module we will use here
import bb
from bitbakec import parsefile
#
# This is the Python Part of the Native Parser Implementation.
@ -34,29 +47,64 @@ from bb.parse import ParseError
#
# The rest of the methods are internal implementation details.
#
# internal
#
def _init(fn, d):
"""
Initialize the data implementation with values of
the environment and data from the file.
"""
pass
#
# public
#
def supports(fn, data):
return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc"
return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc" or fn[-5:] == ".conf"
def init(fn, data):
print "Init"
if not data.getVar('TOPDIR'):
bb.error('TOPDIR is not set')
if not data.getVar('BBPATH'):
bb.error('BBPATH is not set')
def handle(fn, data, include):
def handle(fn, d, include):
print ""
print "fn: %s" % fn
print "data: %s" % data
print "data: %s" % d
print dir(d)
print d.getVar.__doc__
print "include: %s" % include
pass
# check if we include or are the beginning
if include:
oldfile = d.getVar('FILE')
else:
#d.inheritFromOS()
oldfile = None
# find the file
if not os.path.isabs(fn):
bb.error("No Absolute FILE name")
abs_fn = bb.which(d.getVar('BBPATH'), fn)
else:
abs_fn = fn
# check if the file exists
if not os.path.exists(abs_fn):
raise IOError("file '%(fn)' not found" % locals() )
# now we know the file is around mark it as dep
if include:
parse.mark_dependency(d, abs_fn)
# now parse this file - by defering it to C++
parsefile(fn, d)
# restore the original FILE
if oldfile:
d.setVar('FILE', oldfile)
return d
# Inform bitbake that we are a parser
# We need to define all three

View File

@ -0,0 +1,36 @@
# Build and test the native (flex/lemon/Pyrex) bitbake parser module.
test: bitbakec.so
	python test.py

# Generate the scanner from the flex description.
bitbakescanner.cc: bitbakescanner.l
	flex -t bitbakescanner.l > bitbakescanner.cc

# Generate the parser from the lemon grammar.
bitbakeparser.cc: bitbakeparser.y python_output.h
	lemon bitbakeparser.y
	mv bitbakeparser.c bitbakeparser.cc

# Translate the Pyrex source to C.
bitbakec.c: bitbakec.pyx
	pyrexc bitbakec.pyx

# Strip the Pyrex name mangling so the C callbacks keep their e_* names.
bitbakec-processed.c: bitbakec.c
	cat bitbakec.c | sed -e"s/__pyx_f_8bitbakec_//" > bitbakec-processed.c

bitbakec.o: bitbakec-processed.c
	gcc -c bitbakec-processed.c -o bitbakec.o -fPIC -I/usr/include/python2.4

bitbakeparser.o: bitbakeparser.cc
	g++ -c bitbakeparser.cc -fPIC -I/usr/include/python2.4

bitbakescanner.o: bitbakescanner.cc
	g++ -c bitbakescanner.cc -fPIC -I/usr/include/python2.4

# Link everything into the python extension module.
bitbakec.so: bitbakec.o bitbakeparser.o bitbakescanner.o
	g++ -shared -fPIC bitbakeparser.o bitbakescanner.o bitbakec.o -o bitbakec.so

# NOTE(review): these rm commands fail if the artifacts do not exist.
clean:
	rm *.out
	rm *.cc
	rm bitbakec.c
	rm bitbakec-processed.c
	rm *.o
	rm *.so

View File

@ -0,0 +1,180 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
cdef extern from "stdio.h":
ctypedef int FILE
FILE *fopen(char*, char*)
int fclose(FILE *fp)
cdef extern from "lexerc.h":
ctypedef struct lex_t:
void* parser
void* scanner
FILE* file
void* data
int lineError
int errorParse
cdef extern void parse(FILE*, object)
# Parse the given file with the native C scanner/parser, feeding the
# results into *data* through the e_* callbacks below.
# Raises IOError if the file cannot be opened.
def parsefile(object file, object data):
    print "parsefile: 1", file, data

    # Open the file via C stdio; parse() consumes the FILE* directly.
    cdef FILE* f
    f = fopen(file, "r")
    print "parsefile: 2 opening file"
    if (f == NULL):
        raise IOError("No such file %s." % file)

    print "parsefile: 3 parse"
    parse(f, data)

    # Close the file
    print "parsefile: 4 closing"
    fclose(f)
# Callback for a plain assignment: KEY = "value".
cdef public void e_assign(lex_t* container, char* key, char* what):
    print "e_assign", key, what
    data = <object>container.data
    data.setVar(key, what)
# Callback for: export KEY -- mark the named variable for export.
# Reference (python implementation):
#   bb.data.setVarFlag(key, "export", 1, data)
cdef public void e_export(lex_t* c, char* what):
    print "e_export", what
    # Bug fix: the original body referenced the undefined names
    # 'container' and 'key'; use the actual parameters (c, what).
    d = <object>c.data
    d.setVarFlag(what, "export", 1)
# Callback for: KEY := "value" -- expand immediately at parse time.
cdef public void e_immediate(lex_t* c, char* key, char* what):
    print "e_immediate", key, what
    #colon:
    # val = bb.data.expand(groupd["value"], data)
    d = <object>c.data
    # NOTE(review): DataSmart.expand() appears to take (s, varname);
    # confirm the single-argument call works for this data object.
    d.setVar(key, d.expand(what))
# Callback for: KEY ?= "value" -- assign only when not already set.
cdef public void e_cond(lex_t* c, char* key, char* what):
    print "e_cond", key, what
    #ques:
    # val = bb.data.getVar(key, data)
    # if val == None:
    #    val = groupd["value"]
    d = <object>c.data
    # NOTE: `or` also replaces falsy existing values (e.g. ""), not
    # only unset ones.
    d.setVar(key, (d.getVar(key) or what))
# Callback for the prepend operator: new value goes in front of the
# existing one, separated by a single space.
cdef public void e_prepend(lex_t* c, char* key, char* what):
    print "e_prepend", key, what
    #prepend:
    # val = "%s %s" % (groupd["value"], (bb.data.getVar(key, data) or ""))
    d = <object>c.data
    d.setVar(key, what + " " + (d.getVar(key) or ""))
# Callback for the append operator: new value goes after the existing
# one, separated by a single space.
cdef public void e_append(lex_t* c, char* key, char* what):
    print "e_append", key, what
    #append:
    # val = "%s %s" % ((bb.data.getVar(key, data) or ""), groupd["value"])
    d = <object>c.data
    d.setVar(key, (d.getVar(key) or "") + " " + what)
# Callback for "predot": prepend WITHOUT a separating space
# (presumably the =. operator -- confirm against the grammar).
cdef public void e_precat(lex_t* c, char* key, char* what):
    print "e_precat", key, what
    #predot:
    # val = "%s%s" % (groupd["value"], (bb.data.getVar(key, data) or ""))
    d = <object>c.data
    d.setVar(key, what + (d.getVar(key) or ""))
# Callback for "postdot": append WITHOUT a separating space
# (presumably the .= operator -- confirm against the grammar).
cdef public void e_postcat(lex_t* c, char* key, char* what):
    print "e_postcat", key, what
    #postdot:
    # val = "%s%s" % ((bb.data.getVar(key, data) or ""), groupd["value"])
    d = <object>c.data
    d.setVar(key, (d.getVar(key) or "") + what)
# Callback for: addtask NAME [before T] [after T].
# Reference (python implementation):
#   data.setVarFlag(var, "task", 1, d)
#   if after is not None:
#       # set up deps for function
#       data.setVarFlag(var, "deps", after.split(), d)
#   if before is not None:
#       # set up things that depend on this func
#       data.setVarFlag(var, "postdeps", before.split(), d)
cdef public void e_addtask(lex_t* c, char* name, char* before, char* after):
    print "e_addtask", name, before, after
    do = "do_%s" % name
    d = <object>c.data
    d.setVarFlag(do, "task", 1)

    # Bug fix: the original tested strlen(before) to store the *after*
    # list, and stored the *before* list under "deps" as well (second
    # write clobbering the first). Match the reference implementation:
    # "after" tasks become our deps, "before" tasks become postdeps.
    if strlen(after) > 0:
        # set up deps for this task
        d.setVarFlag(do, "deps", ("%s" % after).split())
    if strlen(before) > 0:
        # set up the tasks that depend on this one
        d.setVarFlag(do, "postdeps", ("%s" % before).split())
# Callback for: addhandler H -- flag the function as an event handler.
cdef public void e_addhandler(lex_t* c, char* h):
    print "e_addhandler", h
    # data.setVarFlag(h, "handler", 1, d)
    d = <object>c.data
    d.setVarFlag(h, "handler", 1)
# Callback for EXPORT_FUNCTIONS -- not implemented yet (stub).
cdef public void e_export_func(lex_t* c, char* function):
    print "e_export_func", function
    pass
# Callback for: inherit FILE -- not implemented yet (stub).
cdef public void e_inherit(lex_t* c, char* file):
    print "e_inherit", file
    pass
# Callback for: include FILE -- a missing include is not fatal.
cdef public void e_include(lex_t* c, char* file):
    print "e_include", file
    d = <object>c.data
    # NOTE(review): the expand() result is discarded, so the unexpanded
    # filename is what actually gets parsed -- confirm this is intended.
    d.expand(file)
    try:
        parsefile(file, d)
    except IOError:
        print "Could not include required file %s" % file
# Callback for: require FILE -- a missing file is a parse error.
cdef public void e_require(lex_t* c, char* file):
    print "e_require", file
    d = <object>c.data
    # NOTE(review): the expand() result is discarded, so the unexpanded
    # filename is what actually gets parsed -- confirm this is intended.
    d.expand(file)
    try:
        parsefile(file, d)
    except IOError:
        # NOTE(review): CParseError is not defined in this module;
        # confirm it is provided by the build before relying on it.
        raise CParseError("Could not include required file %s" % file)
# Callback for a shell function body -- not implemented yet (stub).
cdef public void e_proc(lex_t* c, char* key, char* what):
    print "e_proc", key, what
    pass
# Callback for a python function body -- not implemented yet (stub).
cdef public void e_proc_python(lex_t* c, char* key, char* what):
    print "e_proc_python", key, what
    pass
# Callback for a fakeroot function body -- not implemented yet (stub).
# NOTE(review): the debug print says "e_fakeroot", not the actual
# function name e_proc_fakeroot.
cdef public void e_proc_fakeroot(lex_t* c, char* key, char* what):
    print "e_fakeroot", key, what
    pass
# Callback for a python `def` block -- not implemented yet (stub).
cdef public void e_def(lex_t* c, char* a, char* b, char* d):
    # Bug fix: the original debug print referenced the undefined names
    # 'key' and 'what'; print the actual parameters instead.
    print "e_def", a, b, d
    pass
# Callback invoked by the C parser when it hits a syntax error.
cdef public void e_parse_error(lex_t* c):
    # Bug fix: lineError and errorParse are fields of the lex_t struct
    # (see the extern declaration above); the original referenced them
    # as unqualified names, which are undefined in this scope.
    print "e_parse_error", "line:", c.lineError, "parse:", c.errorParse
    # NOTE(review): CParseError is not defined in this module; confirm
    # it is provided by the build before relying on it.
    raise CParseError("There was an parse error, sorry unable to give more information at the current time.")

View File

@ -59,22 +59,22 @@
** defined, then do no error processing.
*/
#define YYCODETYPE unsigned char
#define YYNOCODE 42
#define YYNOCODE 44
#define YYACTIONTYPE unsigned char
#define bbparseTOKENTYPE token_t
typedef union {
bbparseTOKENTYPE yy0;
int yy83;
int yy87;
} YYMINORTYPE;
#define YYSTACKDEPTH 100
#define bbparseARG_SDECL lex_t* lex;
#define bbparseARG_PDECL ,lex_t* lex
#define bbparseARG_FETCH lex_t* lex = yypParser->lex
#define bbparseARG_STORE yypParser->lex = lex
#define YYNSTATE 74
#define YYNRULE 41
#define YYERRORSYMBOL 28
#define YYERRSYMDT yy83
#define YYNSTATE 82
#define YYNRULE 45
#define YYERRORSYMBOL 30
#define YYERRSYMDT yy87
#define YY_NO_ACTION (YYNSTATE+YYNRULE+2)
#define YY_ACCEPT_ACTION (YYNSTATE+YYNRULE+1)
#define YY_ERROR_ACTION (YYNSTATE+YYNRULE)
@ -127,53 +127,63 @@ typedef union {
** yy_default[] Default action for each state.
*/
static const YYACTIONTYPE yy_action[] = {
/* 0 */ 28, 47, 5, 57, 33, 58, 30, 25, 24, 37,
/* 10 */ 45, 14, 2, 29, 41, 3, 16, 4, 23, 39,
/* 20 */ 69, 8, 11, 17, 26, 48, 47, 32, 21, 42,
/* 30 */ 31, 57, 57, 73, 44, 10, 66, 7, 34, 38,
/* 40 */ 57, 51, 72, 116, 1, 62, 6, 49, 52, 35,
/* 50 */ 36, 59, 54, 9, 20, 64, 43, 22, 40, 50,
/* 60 */ 46, 71, 67, 60, 15, 65, 61, 70, 53, 56,
/* 70 */ 27, 12, 68, 63, 84, 55, 18, 84, 13, 84,
/* 80 */ 84, 84, 84, 84, 84, 84, 84, 84, 84, 84,
/* 90 */ 84, 19,
/* 0 */ 82, 3, 7, 8, 38, 22, 39, 24, 26, 32,
/* 10 */ 34, 28, 30, 128, 1, 40, 53, 70, 55, 5,
/* 20 */ 60, 65, 67, 2, 21, 36, 69, 77, 9, 7,
/* 30 */ 11, 6, 13, 15, 17, 19, 12, 52, 50, 4,
/* 40 */ 74, 42, 46, 59, 57, 10, 64, 62, 38, 14,
/* 50 */ 73, 16, 38, 38, 76, 81, 18, 20, 23, 25,
/* 60 */ 27, 29, 31, 33, 35, 37, 56, 51, 90, 54,
/* 70 */ 58, 71, 41, 43, 63, 45, 44, 47, 72, 48,
/* 80 */ 75, 78, 80, 61, 90, 49, 66, 90, 90, 68,
/* 90 */ 90, 90, 90, 90, 90, 79,
};
static const YYCODETYPE yy_lookahead[] = {
/* 0 */ 1, 2, 3, 21, 4, 23, 6, 7, 8, 9,
/* 10 */ 31, 32, 13, 14, 1, 16, 39, 18, 19, 20,
/* 20 */ 37, 38, 22, 24, 25, 1, 2, 4, 10, 6,
/* 30 */ 7, 21, 21, 23, 23, 22, 35, 36, 11, 12,
/* 40 */ 21, 5, 23, 29, 30, 33, 34, 5, 5, 10,
/* 50 */ 12, 10, 5, 22, 39, 15, 40, 11, 10, 5,
/* 60 */ 26, 17, 17, 10, 32, 35, 33, 17, 5, 5,
/* 70 */ 1, 22, 37, 1, 41, 5, 39, 41, 27, 41,
/* 80 */ 41, 41, 41, 41, 41, 41, 41, 41, 41, 41,
/* 90 */ 41, 39,
/* 0 */ 0, 1, 2, 3, 23, 4, 25, 6, 7, 8,
/* 10 */ 9, 10, 11, 31, 32, 15, 16, 1, 18, 42,
/* 20 */ 20, 21, 22, 33, 34, 24, 26, 27, 1, 2,
/* 30 */ 4, 28, 6, 7, 8, 9, 5, 35, 36, 29,
/* 40 */ 24, 13, 14, 37, 38, 34, 39, 40, 23, 5,
/* 50 */ 25, 5, 23, 23, 25, 25, 5, 5, 5, 5,
/* 60 */ 5, 5, 5, 5, 5, 41, 17, 35, 43, 1,
/* 70 */ 37, 24, 12, 12, 39, 12, 14, 12, 41, 13,
/* 80 */ 41, 1, 41, 19, 43, 12, 19, 43, 43, 19,
/* 90 */ 43, 43, 43, 43, 43, 24,
};
#define YY_SHIFT_USE_DFLT (-19)
#define YY_SHIFT_MAX 43
#define YY_SHIFT_USE_DFLT (-20)
static const signed char yy_shift_ofst[] = {
/* 0 */ -19, -1, 18, 40, 45, 24, 18, 40, 45, -19,
/* 10 */ -19, -19, -19, -19, 0, 23, -18, 13, 19, 10,
/* 20 */ 11, 27, 53, 50, 63, 64, 69, 49, 51, 72,
/* 30 */ 70, 36, 42, 43, 39, 38, 41, 47, 48, 44,
/* 40 */ 46, 31, 54, 34,
/* 0 */ -20, 0, -20, 10, -20, 3, -20, -20, 27, -20,
/* 10 */ 26, 31, -20, 44, -20, 46, -20, 51, -20, 52,
/* 20 */ -20, 1, 53, -20, 54, -20, 55, -20, 56, -20,
/* 30 */ 57, -20, 58, -20, 59, -20, -20, -19, -20, -20,
/* 40 */ 60, 28, 61, 62, 63, -20, 65, 66, 73, -20,
/* 50 */ 60, -20, -20, 68, -20, 49, -20, 49, -20, -20,
/* 60 */ 64, -20, 64, -20, -20, 67, -20, 70, -20, 16,
/* 70 */ 47, -20, 25, -20, -20, 29, -20, 80, 71, -20,
/* 80 */ 30, -20,
};
#define YY_REDUCE_USE_DFLT (-24)
#define YY_REDUCE_MAX 13
static const signed char yy_reduce_ofst[] = {
/* 0 */ 14, -21, 12, 1, -17, 32, 33, 30, 35, 37,
/* 10 */ 52, -23, 15, 16,
/* 0 */ -18, -10, -24, -24, -23, -24, -24, -24, 11, -24,
/* 10 */ -24, -24, -24, -24, -24, -24, -24, -24, -24, -24,
/* 20 */ -24, -24, -24, -24, -24, -24, -24, -24, -24, -24,
/* 30 */ -24, -24, -24, -24, -24, -24, 24, -24, -24, -24,
/* 40 */ 2, -24, -24, -24, -24, -24, -24, -24, -24, -24,
/* 50 */ 32, -24, -24, -24, -24, 6, -24, 33, -24, -24,
/* 60 */ 7, -24, 35, -24, -24, -24, -24, -24, -24, -24,
/* 70 */ -24, 37, -24, -24, 39, -24, -24, -24, -24, 41,
/* 80 */ -24, -24,
};
static const YYACTIONTYPE yy_default[] = {
/* 0 */ 76, 74, 115, 115, 115, 115, 94, 99, 103, 107,
/* 10 */ 107, 107, 107, 113, 115, 115, 115, 115, 115, 115,
/* 20 */ 115, 89, 115, 115, 115, 115, 115, 115, 77, 115,
/* 30 */ 115, 115, 115, 115, 115, 90, 115, 115, 115, 115,
/* 40 */ 91, 115, 115, 114, 111, 75, 112, 78, 77, 79,
/* 50 */ 80, 81, 82, 83, 84, 85, 86, 106, 108, 87,
/* 60 */ 88, 92, 93, 95, 96, 97, 98, 100, 101, 102,
/* 70 */ 104, 105, 109, 110,
/* 0 */ 84, 127, 83, 85, 125, 126, 124, 86, 127, 85,
/* 10 */ 127, 127, 87, 127, 88, 127, 89, 127, 90, 127,
/* 20 */ 91, 127, 127, 92, 127, 93, 127, 94, 127, 95,
/* 30 */ 127, 96, 127, 97, 127, 98, 119, 127, 118, 120,
/* 40 */ 127, 101, 127, 102, 127, 99, 127, 103, 127, 100,
/* 50 */ 106, 104, 105, 127, 107, 127, 108, 111, 109, 110,
/* 60 */ 127, 112, 115, 113, 114, 127, 116, 127, 117, 127,
/* 70 */ 127, 119, 127, 121, 119, 127, 122, 127, 127, 119,
/* 80 */ 127, 123,
};
#define YY_SZ_ACTTAB (sizeof(yy_action)/sizeof(yy_action[0]))
@ -260,16 +270,16 @@ void bbparseTrace(FILE *TraceFILE, char *zTracePrompt){
** are required. The following table supplies these names */
static const char *const yyTokenName[] = {
"$", "SYMBOL", "VARIABLE", "EXPORT",
"OP_ASSIGN", "STRING", "OP_IMMEDIATE", "OP_COND",
"OP_PREPEND", "OP_APPEND", "TSYMBOL", "BEFORE",
"AFTER", "ADDTASK", "ADDHANDLER", "FSYMBOL",
"EXPORT_FUNC", "ISYMBOL", "INHERIT", "INCLUDE",
"REQUIRE", "PROC_BODY", "PROC_OPEN", "PROC_CLOSE",
"PYTHON", "FAKEROOT", "DEF_BODY", "DEF_ARGS",
"error", "program", "statements", "statement",
"variable", "task", "tasks", "func",
"funcs", "inherit", "inherits", "proc_body",
"def_body",
"OP_ASSIGN", "STRING", "OP_PREDOT", "OP_POSTDOT",
"OP_IMMEDIATE", "OP_COND", "OP_PREPEND", "OP_APPEND",
"TSYMBOL", "BEFORE", "AFTER", "ADDTASK",
"ADDHANDLER", "FSYMBOL", "EXPORT_FUNC", "ISYMBOL",
"INHERIT", "INCLUDE", "REQUIRE", "PROC_BODY",
"PROC_OPEN", "PROC_CLOSE", "PYTHON", "FAKEROOT",
"DEF_BODY", "DEF_ARGS", "error", "program",
"statements", "statement", "variable", "task",
"tasks", "func", "funcs", "inherit",
"inherits", "proc_body", "def_body",
};
#endif /* NDEBUG */
@ -283,41 +293,45 @@ static const char *const yyRuleName[] = {
/* 3 */ "variable ::= SYMBOL",
/* 4 */ "variable ::= VARIABLE",
/* 5 */ "statement ::= EXPORT variable OP_ASSIGN STRING",
/* 6 */ "statement ::= EXPORT variable OP_IMMEDIATE STRING",
/* 7 */ "statement ::= EXPORT variable OP_COND STRING",
/* 8 */ "statement ::= variable OP_ASSIGN STRING",
/* 9 */ "statement ::= variable OP_PREPEND STRING",
/* 10 */ "statement ::= variable OP_APPEND STRING",
/* 11 */ "statement ::= variable OP_IMMEDIATE STRING",
/* 12 */ "statement ::= variable OP_COND STRING",
/* 13 */ "task ::= TSYMBOL BEFORE TSYMBOL AFTER TSYMBOL",
/* 14 */ "task ::= TSYMBOL AFTER TSYMBOL BEFORE TSYMBOL",
/* 15 */ "task ::= TSYMBOL",
/* 16 */ "task ::= TSYMBOL BEFORE TSYMBOL",
/* 17 */ "task ::= TSYMBOL AFTER TSYMBOL",
/* 18 */ "tasks ::= tasks task",
/* 19 */ "tasks ::= task",
/* 20 */ "statement ::= ADDTASK tasks",
/* 21 */ "statement ::= ADDHANDLER SYMBOL",
/* 22 */ "func ::= FSYMBOL",
/* 23 */ "funcs ::= funcs func",
/* 24 */ "funcs ::= func",
/* 25 */ "statement ::= EXPORT_FUNC funcs",
/* 26 */ "inherit ::= ISYMBOL",
/* 27 */ "inherits ::= inherits inherit",
/* 28 */ "inherits ::= inherit",
/* 29 */ "statement ::= INHERIT inherits",
/* 30 */ "statement ::= INCLUDE ISYMBOL",
/* 31 */ "statement ::= REQUIRE ISYMBOL",
/* 32 */ "proc_body ::= proc_body PROC_BODY",
/* 33 */ "proc_body ::=",
/* 34 */ "statement ::= variable PROC_OPEN proc_body PROC_CLOSE",
/* 35 */ "statement ::= PYTHON SYMBOL PROC_OPEN proc_body PROC_CLOSE",
/* 36 */ "statement ::= PYTHON PROC_OPEN proc_body PROC_CLOSE",
/* 37 */ "statement ::= FAKEROOT SYMBOL PROC_OPEN proc_body PROC_CLOSE",
/* 38 */ "def_body ::= def_body DEF_BODY",
/* 39 */ "def_body ::=",
/* 40 */ "statement ::= SYMBOL DEF_ARGS def_body",
/* 6 */ "statement ::= EXPORT variable OP_PREDOT STRING",
/* 7 */ "statement ::= EXPORT variable OP_POSTDOT STRING",
/* 8 */ "statement ::= EXPORT variable OP_IMMEDIATE STRING",
/* 9 */ "statement ::= EXPORT variable OP_COND STRING",
/* 10 */ "statement ::= variable OP_ASSIGN STRING",
/* 11 */ "statement ::= variable OP_PREDOT STRING",
/* 12 */ "statement ::= variable OP_POSTDOT STRING",
/* 13 */ "statement ::= variable OP_PREPEND STRING",
/* 14 */ "statement ::= variable OP_APPEND STRING",
/* 15 */ "statement ::= variable OP_IMMEDIATE STRING",
/* 16 */ "statement ::= variable OP_COND STRING",
/* 17 */ "task ::= TSYMBOL BEFORE TSYMBOL AFTER TSYMBOL",
/* 18 */ "task ::= TSYMBOL AFTER TSYMBOL BEFORE TSYMBOL",
/* 19 */ "task ::= TSYMBOL",
/* 20 */ "task ::= TSYMBOL BEFORE TSYMBOL",
/* 21 */ "task ::= TSYMBOL AFTER TSYMBOL",
/* 22 */ "tasks ::= tasks task",
/* 23 */ "tasks ::= task",
/* 24 */ "statement ::= ADDTASK tasks",
/* 25 */ "statement ::= ADDHANDLER SYMBOL",
/* 26 */ "func ::= FSYMBOL",
/* 27 */ "funcs ::= funcs func",
/* 28 */ "funcs ::= func",
/* 29 */ "statement ::= EXPORT_FUNC funcs",
/* 30 */ "inherit ::= ISYMBOL",
/* 31 */ "inherits ::= inherits inherit",
/* 32 */ "inherits ::= inherit",
/* 33 */ "statement ::= INHERIT inherits",
/* 34 */ "statement ::= INCLUDE ISYMBOL",
/* 35 */ "statement ::= REQUIRE ISYMBOL",
/* 36 */ "proc_body ::= proc_body PROC_BODY",
/* 37 */ "proc_body ::=",
/* 38 */ "statement ::= variable PROC_OPEN proc_body PROC_CLOSE",
/* 39 */ "statement ::= PYTHON SYMBOL PROC_OPEN proc_body PROC_CLOSE",
/* 40 */ "statement ::= PYTHON PROC_OPEN proc_body PROC_CLOSE",
/* 41 */ "statement ::= FAKEROOT SYMBOL PROC_OPEN proc_body PROC_CLOSE",
/* 42 */ "def_body ::= def_body DEF_BODY",
/* 43 */ "def_body ::=",
/* 44 */ "statement ::= SYMBOL DEF_ARGS def_body",
};
#endif /* NDEBUG */
@ -402,9 +416,11 @@ static void yy_destructor(YYCODETYPE yymajor, YYMINORTYPE *yypminor){
case 25:
case 26:
case 27:
case 28:
case 29:
#line 50 "bitbakeparser.y"
{ (yypminor->yy0).release_this (); }
#line 409 "bitbakeparser.c"
#line 425 "bitbakeparser.c"
break;
default: break; /* If no destructor action specified: do nothing */
}
@ -473,7 +489,9 @@ static int yy_find_shift_action(
int i;
int stateno = pParser->yystack[pParser->yyidx].stateno;
if( stateno>YY_SHIFT_MAX || (i = yy_shift_ofst[stateno])==YY_SHIFT_USE_DFLT ){
/* if( pParser->yyidx<0 ) return YY_NO_ACTION; */
i = yy_shift_ofst[stateno];
if( i==YY_SHIFT_USE_DFLT ){
return yy_default[stateno];
}
if( iLookAhead==YYNOCODE ){
@ -515,8 +533,8 @@ static int yy_find_reduce_action(
int i;
/* int stateno = pParser->yystack[pParser->yyidx].stateno; */
if( stateno>YY_REDUCE_MAX ||
(i = yy_reduce_ofst[stateno])==YY_REDUCE_USE_DFLT ){
i = yy_reduce_ofst[stateno];
if( i==YY_REDUCE_USE_DFLT ){
return yy_default[stateno];
}
if( iLookAhead==YYNOCODE ){
@ -578,47 +596,51 @@ static const struct {
YYCODETYPE lhs; /* Symbol on the left-hand side of the rule */
unsigned char nrhs; /* Number of right-hand side symbols in the rule */
} yyRuleInfo[] = {
{ 29, 1 },
{ 30, 2 },
{ 30, 0 },
{ 32, 1 },
{ 32, 1 },
{ 31, 4 },
{ 31, 4 },
{ 31, 4 },
{ 31, 3 },
{ 31, 3 },
{ 31, 3 },
{ 31, 3 },
{ 31, 3 },
{ 33, 5 },
{ 33, 5 },
{ 33, 1 },
{ 33, 3 },
{ 33, 3 },
{ 34, 2 },
{ 31, 1 },
{ 32, 2 },
{ 32, 0 },
{ 34, 1 },
{ 31, 2 },
{ 31, 2 },
{ 34, 1 },
{ 33, 4 },
{ 33, 4 },
{ 33, 4 },
{ 33, 4 },
{ 33, 4 },
{ 33, 3 },
{ 33, 3 },
{ 33, 3 },
{ 33, 3 },
{ 33, 3 },
{ 33, 3 },
{ 33, 3 },
{ 35, 5 },
{ 35, 5 },
{ 35, 1 },
{ 35, 3 },
{ 35, 3 },
{ 36, 2 },
{ 36, 1 },
{ 31, 2 },
{ 33, 2 },
{ 33, 2 },
{ 37, 1 },
{ 38, 2 },
{ 38, 1 },
{ 31, 2 },
{ 31, 2 },
{ 31, 2 },
{ 39, 2 },
{ 39, 0 },
{ 31, 4 },
{ 31, 5 },
{ 31, 4 },
{ 31, 5 },
{ 33, 2 },
{ 39, 1 },
{ 40, 2 },
{ 40, 0 },
{ 31, 3 },
{ 40, 1 },
{ 33, 2 },
{ 33, 2 },
{ 33, 2 },
{ 41, 2 },
{ 41, 0 },
{ 33, 4 },
{ 33, 5 },
{ 33, 4 },
{ 33, 5 },
{ 42, 2 },
{ 42, 0 },
{ 33, 3 },
};
static void yy_accept(yyParser*); /* Forward Declaration */
@ -672,7 +694,7 @@ static void yy_reduce(
{ yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() );
yymsp[0].minor.yy0.assignString( 0 );
yymsp[0].minor.yy0.release_this(); }
#line 677 "bitbakeparser.c"
#line 699 "bitbakeparser.c"
break;
case 4:
#line 64 "bitbakeparser.y"
@ -680,7 +702,7 @@ static void yy_reduce(
yygotominor.yy0.assignString( (char*)yymsp[0].minor.yy0.string() );
yymsp[0].minor.yy0.assignString( 0 );
yymsp[0].minor.yy0.release_this(); }
#line 685 "bitbakeparser.c"
#line 707 "bitbakeparser.c"
break;
case 5:
#line 70 "bitbakeparser.y"
@ -689,191 +711,223 @@ static void yy_reduce(
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor);
yy_destructor(4,&yymsp[-1].minor);
}
#line 694 "bitbakeparser.c"
#line 716 "bitbakeparser.c"
break;
case 6:
#line 74 "bitbakeparser.y"
{ e_immediate ( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
{ e_precat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
e_export( lex, yymsp[-2].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor);
yy_destructor(6,&yymsp[-1].minor);
}
#line 703 "bitbakeparser.c"
#line 725 "bitbakeparser.c"
break;
case 7:
#line 78 "bitbakeparser.y"
{ e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
{ e_postcat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
e_export( lex, yymsp[-2].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor);
yy_destructor(7,&yymsp[-1].minor);
}
#line 711 "bitbakeparser.c"
#line 734 "bitbakeparser.c"
break;
case 8:
#line 82 "bitbakeparser.y"
{ e_immediate ( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
e_export( lex, yymsp[-2].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor);
yy_destructor(8,&yymsp[-1].minor);
}
#line 743 "bitbakeparser.c"
break;
case 9:
#line 86 "bitbakeparser.y"
{ e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(3,&yymsp[-3].minor);
yy_destructor(9,&yymsp[-1].minor);
}
#line 751 "bitbakeparser.c"
break;
case 10:
#line 90 "bitbakeparser.y"
{ e_assign( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(4,&yymsp[-1].minor);
}
#line 718 "bitbakeparser.c"
break;
case 9:
#line 85 "bitbakeparser.y"
{ e_prepend( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(8,&yymsp[-1].minor);
}
#line 725 "bitbakeparser.c"
break;
case 10:
#line 88 "bitbakeparser.y"
{ e_append( lex, yymsp[-2].minor.yy0.string() , yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(9,&yymsp[-1].minor);
}
#line 732 "bitbakeparser.c"
#line 758 "bitbakeparser.c"
break;
case 11:
#line 91 "bitbakeparser.y"
{ e_immediate( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
#line 93 "bitbakeparser.y"
{ e_precat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(6,&yymsp[-1].minor);
}
#line 739 "bitbakeparser.c"
#line 765 "bitbakeparser.c"
break;
case 12:
#line 94 "bitbakeparser.y"
{ e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
#line 96 "bitbakeparser.y"
{ e_postcat( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(7,&yymsp[-1].minor);
}
#line 746 "bitbakeparser.c"
#line 772 "bitbakeparser.c"
break;
case 13:
#line 98 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-4].minor.yy0.string(), yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(11,&yymsp[-3].minor);
yy_destructor(12,&yymsp[-1].minor);
#line 99 "bitbakeparser.y"
{ e_prepend( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(10,&yymsp[-1].minor);
}
#line 754 "bitbakeparser.c"
#line 779 "bitbakeparser.c"
break;
case 14:
#line 101 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-4].minor.yy0.string(), yymsp[0].minor.yy0.string(), yymsp[-2].minor.yy0.string());
yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(12,&yymsp[-3].minor);
yy_destructor(11,&yymsp[-1].minor);
}
#line 762 "bitbakeparser.c"
break;
case 15:
#line 104 "bitbakeparser.y"
{ e_addtask( lex, yymsp[0].minor.yy0.string(), NULL, NULL);
yymsp[0].minor.yy0.release_this();}
#line 768 "bitbakeparser.c"
break;
case 16:
#line 107 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string(), NULL);
#line 102 "bitbakeparser.y"
{ e_append( lex, yymsp[-2].minor.yy0.string() , yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(11,&yymsp[-1].minor);
}
#line 775 "bitbakeparser.c"
#line 786 "bitbakeparser.c"
break;
case 17:
#line 110 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-2].minor.yy0.string(), NULL, yymsp[0].minor.yy0.string());
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(12,&yymsp[-1].minor);
case 15:
#line 105 "bitbakeparser.y"
{ e_immediate( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(8,&yymsp[-1].minor);
}
#line 782 "bitbakeparser.c"
break;
case 21:
#line 117 "bitbakeparser.y"
{ e_addhandler( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this (); yy_destructor(14,&yymsp[-1].minor);
}
#line 788 "bitbakeparser.c"
break;
case 22:
#line 119 "bitbakeparser.y"
{ e_export_func( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this(); }
#line 793 "bitbakeparser.c"
break;
case 26:
case 16:
#line 108 "bitbakeparser.y"
{ e_cond( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(9,&yymsp[-1].minor);
}
#line 800 "bitbakeparser.c"
break;
case 17:
#line 112 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-4].minor.yy0.string(), yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string() );
yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-3].minor);
yy_destructor(14,&yymsp[-1].minor);
}
#line 808 "bitbakeparser.c"
break;
case 18:
#line 115 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-4].minor.yy0.string(), yymsp[0].minor.yy0.string(), yymsp[-2].minor.yy0.string());
yymsp[-4].minor.yy0.release_this(); yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-3].minor);
yy_destructor(13,&yymsp[-1].minor);
}
#line 816 "bitbakeparser.c"
break;
case 19:
#line 118 "bitbakeparser.y"
{ e_addtask( lex, yymsp[0].minor.yy0.string(), NULL, NULL);
yymsp[0].minor.yy0.release_this();}
#line 822 "bitbakeparser.c"
break;
case 20:
#line 121 "bitbakeparser.y"
{ e_addtask( lex, yymsp[-2].minor.yy0.string(), yymsp[0].minor.yy0.string(), NULL);
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(13,&yymsp[-1].minor);
}
#line 829 "bitbakeparser.c"
break;
case 21:
#line 124 "bitbakeparser.y"
{ e_inherit( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this (); }
#line 798 "bitbakeparser.c"
{ e_addtask( lex, yymsp[-2].minor.yy0.string(), NULL, yymsp[0].minor.yy0.string());
yymsp[-2].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); yy_destructor(14,&yymsp[-1].minor);
}
#line 836 "bitbakeparser.c"
break;
case 25:
#line 131 "bitbakeparser.y"
{ e_addhandler( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this (); yy_destructor(16,&yymsp[-1].minor);
}
#line 842 "bitbakeparser.c"
break;
case 26:
#line 133 "bitbakeparser.y"
{ e_export_func( lex, yymsp[0].minor.yy0.string()); yymsp[0].minor.yy0.release_this(); }
#line 847 "bitbakeparser.c"
break;
case 30:
#line 130 "bitbakeparser.y"
{ e_include( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(19,&yymsp[-1].minor);
}
#line 804 "bitbakeparser.c"
#line 138 "bitbakeparser.y"
{ e_inherit( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this (); }
#line 852 "bitbakeparser.c"
break;
case 31:
#line 133 "bitbakeparser.y"
{ e_require( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(20,&yymsp[-1].minor);
case 34:
#line 144 "bitbakeparser.y"
{ e_include( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(21,&yymsp[-1].minor);
}
#line 810 "bitbakeparser.c"
#line 858 "bitbakeparser.c"
break;
case 32:
#line 136 "bitbakeparser.y"
case 35:
#line 147 "bitbakeparser.y"
{ e_require( lex, yymsp[0].minor.yy0.string() ); yymsp[0].minor.yy0.release_this(); yy_destructor(22,&yymsp[-1].minor);
}
#line 864 "bitbakeparser.c"
break;
case 36:
#line 150 "bitbakeparser.y"
{ /* concatenate body lines */
yygotominor.yy0.assignString( token_t::concatString(yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()) );
yymsp[-1].minor.yy0.release_this ();
yymsp[0].minor.yy0.release_this ();
}
#line 819 "bitbakeparser.c"
break;
case 33:
#line 141 "bitbakeparser.y"
{ yygotominor.yy0.assignString(0); }
#line 824 "bitbakeparser.c"
break;
case 34:
#line 143 "bitbakeparser.y"
{ e_proc( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(22,&yymsp[-2].minor);
yy_destructor(23,&yymsp[0].minor);
}
#line 832 "bitbakeparser.c"
break;
case 35:
#line 146 "bitbakeparser.y"
{ e_proc_python ( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(24,&yymsp[-4].minor);
yy_destructor(22,&yymsp[-2].minor);
yy_destructor(23,&yymsp[0].minor);
}
#line 841 "bitbakeparser.c"
break;
case 36:
#line 149 "bitbakeparser.y"
{ e_proc_python( lex, NULL, yymsp[-1].minor.yy0.string());
yymsp[-1].minor.yy0.release_this (); yy_destructor(24,&yymsp[-3].minor);
yy_destructor(22,&yymsp[-2].minor);
yy_destructor(23,&yymsp[0].minor);
}
#line 850 "bitbakeparser.c"
#line 873 "bitbakeparser.c"
break;
case 37:
#line 153 "bitbakeparser.y"
{ e_proc_fakeroot( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this (); yymsp[-1].minor.yy0.release_this (); yy_destructor(25,&yymsp[-4].minor);
yy_destructor(22,&yymsp[-2].minor);
yy_destructor(23,&yymsp[0].minor);
}
#line 859 "bitbakeparser.c"
#line 155 "bitbakeparser.y"
{ yygotominor.yy0.assignString(0); }
#line 878 "bitbakeparser.c"
break;
case 38:
#line 157 "bitbakeparser.y"
{ e_proc( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(24,&yymsp[-2].minor);
yy_destructor(25,&yymsp[0].minor);
}
#line 886 "bitbakeparser.c"
break;
case 39:
#line 160 "bitbakeparser.y"
{ e_proc_python ( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yy_destructor(26,&yymsp[-4].minor);
yy_destructor(24,&yymsp[-2].minor);
yy_destructor(25,&yymsp[0].minor);
}
#line 895 "bitbakeparser.c"
break;
case 40:
#line 163 "bitbakeparser.y"
{ e_proc_python( lex, NULL, yymsp[-1].minor.yy0.string());
yymsp[-1].minor.yy0.release_this (); yy_destructor(26,&yymsp[-3].minor);
yy_destructor(24,&yymsp[-2].minor);
yy_destructor(25,&yymsp[0].minor);
}
#line 904 "bitbakeparser.c"
break;
case 41:
#line 167 "bitbakeparser.y"
{ e_proc_fakeroot( lex, yymsp[-3].minor.yy0.string(), yymsp[-1].minor.yy0.string() );
yymsp[-3].minor.yy0.release_this (); yymsp[-1].minor.yy0.release_this (); yy_destructor(27,&yymsp[-4].minor);
yy_destructor(24,&yymsp[-2].minor);
yy_destructor(25,&yymsp[0].minor);
}
#line 913 "bitbakeparser.c"
break;
case 42:
#line 171 "bitbakeparser.y"
{ /* concatenate body lines */
yygotominor.yy0.assignString( token_t::concatString(yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string()) );
yymsp[-1].minor.yy0.release_this (); yymsp[0].minor.yy0.release_this ();
}
#line 867 "bitbakeparser.c"
#line 921 "bitbakeparser.c"
break;
case 39:
#line 161 "bitbakeparser.y"
case 43:
#line 175 "bitbakeparser.y"
{ yygotominor.yy0.assignString( 0 ); }
#line 872 "bitbakeparser.c"
#line 926 "bitbakeparser.c"
break;
case 40:
#line 163 "bitbakeparser.y"
case 44:
#line 177 "bitbakeparser.y"
{ e_def( lex, yymsp[-2].minor.yy0.string(), yymsp[-1].minor.yy0.string(), yymsp[0].minor.yy0.string());
yymsp[-2].minor.yy0.release_this(); yymsp[-1].minor.yy0.release_this(); yymsp[0].minor.yy0.release_this(); }
#line 878 "bitbakeparser.c"
#line 932 "bitbakeparser.c"
break;
};
yygoto = yyRuleInfo[yyruleno].lhs;
@ -932,7 +986,7 @@ static void yy_syntax_error(
#define TOKEN (yyminor.yy0)
#line 52 "bitbakeparser.y"
e_parse_error( lex );
#line 938 "bitbakeparser.c"
#line 992 "bitbakeparser.c"
bbparseARG_STORE; /* Suppress warning about unused %extra_argument variable */
}

View File

@ -3,25 +3,27 @@
#define T_EXPORT 3
#define T_OP_ASSIGN 4
#define T_STRING 5
#define T_OP_IMMEDIATE 6
#define T_OP_COND 7
#define T_OP_PREPEND 8
#define T_OP_APPEND 9
#define T_TSYMBOL 10
#define T_BEFORE 11
#define T_AFTER 12
#define T_ADDTASK 13
#define T_ADDHANDLER 14
#define T_FSYMBOL 15
#define T_EXPORT_FUNC 16
#define T_ISYMBOL 17
#define T_INHERIT 18
#define T_INCLUDE 19
#define T_REQUIRE 20
#define T_PROC_BODY 21
#define T_PROC_OPEN 22
#define T_PROC_CLOSE 23
#define T_PYTHON 24
#define T_FAKEROOT 25
#define T_DEF_BODY 26
#define T_DEF_ARGS 27
#define T_OP_PREDOT 6
#define T_OP_POSTDOT 7
#define T_OP_IMMEDIATE 8
#define T_OP_COND 9
#define T_OP_PREPEND 10
#define T_OP_APPEND 11
#define T_TSYMBOL 12
#define T_BEFORE 13
#define T_AFTER 14
#define T_ADDTASK 15
#define T_ADDHANDLER 16
#define T_FSYMBOL 17
#define T_EXPORT_FUNC 18
#define T_ISYMBOL 19
#define T_INHERIT 20
#define T_INCLUDE 21
#define T_REQUIRE 22
#define T_PROC_BODY 23
#define T_PROC_OPEN 24
#define T_PROC_CLOSE 25
#define T_PYTHON 26
#define T_FAKEROOT 27
#define T_DEF_BODY 28
#define T_DEF_ARGS 29

View File

@ -70,6 +70,14 @@ statement ::= EXPORT variable(s) OP_ASSIGN STRING(v).
{ e_assign( lex, s.string(), v.string() );
e_export( lex, s.string() );
s.release_this(); v.release_this(); }
statement ::= EXPORT variable(s) OP_PREDOT STRING(v).
{ e_precat( lex, s.string(), v.string() );
e_export( lex, s.string() );
s.release_this(); v.release_this(); }
statement ::= EXPORT variable(s) OP_POSTDOT STRING(v).
{ e_postcat( lex, s.string(), v.string() );
e_export( lex, s.string() );
s.release_this(); v.release_this(); }
statement ::= EXPORT variable(s) OP_IMMEDIATE STRING(v).
{ e_immediate ( lex, s.string(), v.string() );
e_export( lex, s.string() );
@ -81,6 +89,12 @@ statement ::= EXPORT variable(s) OP_COND STRING(v).
statement ::= variable(s) OP_ASSIGN STRING(v).
{ e_assign( lex, s.string(), v.string() );
s.release_this(); v.release_this(); }
statement ::= variable(s) OP_PREDOT STRING(v).
{ e_precat( lex, s.string(), v.string() );
s.release_this(); v.release_this(); }
statement ::= variable(s) OP_POSTDOT STRING(v).
{ e_postcat( lex, s.string(), v.string() );
s.release_this(); v.release_this(); }
statement ::= variable(s) OP_PREPEND STRING(v).
{ e_prepend( lex, s.string(), v.string() );
s.release_this(); v.release_this(); }

File diff suppressed because it is too large Load Diff

View File

@ -83,6 +83,7 @@ extern void bbparseTrace(FILE *TraceFILE, char *zTracePrompt);
//static const char* rgbInput;
//static size_t cbInput;
extern "C" {
int lineError;
int errorParse;
@ -93,6 +94,8 @@ enum {
errorUnsupportedFeature,
};
}
#define YY_EXTRA_TYPE lex_t*
/* Read from buffer */
@ -112,6 +115,8 @@ static const char* fixup_escapes (const char* sz);
C_SP [ \t]
COMMENT #.*\n
OP_ASSIGN "="
OP_PREDOT ".="
OP_POSTDOT "=."
OP_IMMEDIATE ":="
OP_PREPEND "=+"
OP_APPEND "+="
@ -166,6 +171,10 @@ PROC \({C_SP}*\)
yyextra->accept (T_OP_IMMEDIATE); }
{OP_ASSIGN} { BEGIN S_RVALUE;
yyextra->accept (T_OP_ASSIGN); }
{OP_PREDOT} { BEGIN S_RVALUE;
yyextra->accept (T_OP_PREDOT); }
{OP_POSTDOT} { BEGIN S_RVALUE;
yyextra->accept (T_OP_POSTDOT); }
{OP_COND} { BEGIN S_RVALUE;
yyextra->accept (T_OP_COND); }
@ -254,35 +263,55 @@ void lex_t::accept (int token, const char* sz)
parse (parser, token, t, this);
}
void lex_t::input (char *buf, int *result, int max_size)
{
printf("lex_t::input %p %d\n", buf, max_size);
*result = fread(buf, 1, max_size, file);
printf("lex_t::input result %d\n", *result);
}
int lex_t::line ()const
{
printf("lex_t::line\n");
return yyget_lineno (scanner);
}
void parse (FILE* file, PyObject* data)
{
void* parser = bbparseAlloc (malloc);
yyscan_t scanner;
lex_t lex;
yylex_init (&scanner);
extern "C" {
lex.parser = parser;
lex.scanner = scanner;
lex.file = file;
lex.data = data;
lex.parse = bbparse;
yyset_extra (&lex, scanner);
void parse (FILE* file, PyObject* data)
{
printf("parse bbparseAlloc\n");
void* parser = bbparseAlloc (malloc);
yyscan_t scanner;
lex_t lex;
printf("parse yylex_init\n");
yylex_init (&scanner);
int result = yylex (scanner);
lex.parser = parser;
lex.scanner = scanner;
lex.file = file;
lex.data = data;
lex.parse = bbparse;
printf("parse yyset_extra\n");
yyset_extra (&lex, scanner);
lex.accept (0);
bbparseTrace (NULL, NULL);
printf("parse yylex\n");
int result = yylex (scanner);
printf("parse result %d\n", result);
if (result != T_EOF)
printf ("premature end of file\n");
lex.accept (0);
printf("parse lex.accept\n");
bbparseTrace (NULL, NULL);
printf("parse bbparseTrace\n");
if (result != T_EOF)
printf ("premature end of file\n");
yylex_destroy (scanner);
bbparseFree (parser, free);
}
yylex_destroy (scanner);
bbparseFree (parser, free);
}

View File

@ -24,30 +24,23 @@ THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#ifndef LEXER_H
#define LEXER_H
/*
* The PyObject Token. Likely to be
* a bb.data implementation
*/
struct PyObject;
#include "Python.h"
/**
* This is used by the Parser and Scanner
* of BitBake.
* The implementation and creation is done
* in the scanner.
*/
extern "C" {
struct lex_t {
void *parser;
void *scanner;
FILE *file;
void* parser;
void* scanner;
FILE* file;
PyObject *data;
void* (*parse)(void*, int, token_t, lex_t*);
void accept(int token, const char* string = 0);
void accept(int token, const char* sz = NULL);
void input(char *buf, int *result, int max_size);
int line()const;
};
}
#endif

View File

@ -0,0 +1,17 @@
#ifndef LEXERC_H
#define LEXERC_H
#include <stdio.h>
extern int lineError;
extern int errorParse;
typedef struct {
void *parser;
void *scanner;
FILE *file;
PyObject *data;
} lex_t;
#endif

View File

@ -27,15 +27,19 @@ This is the glue:
*/
extern "C" {
struct lex_t;
extern void e_assign(lex_t*, const char*, const char*);
extern void e_export(lex_t*, const char*);
extern void e_immediate(lex_t*, const char*, const char*);
extern void e_cond(lex_t*, const char*, const char*);
extern void e_assign(lex_t*, const char*, const char*);
extern void e_prepend(lex_t*, const char*, const char*);
extern void e_append(lex_t*, const char*, const char*);
extern void e_precat(lex_t*, const char*, const char*);
extern void e_postcat(lex_t*, const char*, const char*);
extern void e_addtask(lex_t*, const char*, const char*, const char*);
extern void e_addhandler(lex_t*,const char*);
extern void e_export_func(lex_t*, const char*);
@ -48,4 +52,5 @@ extern void e_proc_fakeroot(lex_t*, const char*, const char*);
extern void e_def(lex_t*, const char*, const char*, const char*);
extern void e_parse_error(lex_t*);
}
#endif // PYTHON_OUTPUT_H

View File

@ -21,9 +21,9 @@
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA."""
import re, bb, os, sys
import re, bb, os, sys, time
import bb.fetch, bb.build, bb.utils
from bb import debug, data, fetch, fatal
from bb import debug, data, fetch, fatal, methodpool
from ConfHandler import include, localpath, obtain, init
from bb.parse import ParseError
@ -44,6 +44,8 @@ __bbpath_found__ = 0
__classname__ = ""
classes = [ None, ]
__parsed_methods__ = methodpool.get_parsed_dict()
def supports(fn, d):
localfn = localpath(fn, d)
return localfn[-3:] == ".bb" or localfn[-8:] == ".bbclass" or localfn[-4:] == ".inc"
@ -78,6 +80,7 @@ def handle(fn, d, include = 0):
debug(2, "BB " + fn + ": handle(data, include)")
(root, ext) = os.path.splitext(os.path.basename(fn))
base_name = "%s%s" % (root,ext)
init(d)
if ext == ".bbclass":
@ -126,10 +129,10 @@ def handle(fn, d, include = 0):
s = f.readline()
if not s: break
s = s.rstrip()
feeder(lineno, s, fn, d)
feeder(lineno, s, fn, base_name, d)
if __inpython__:
# add a blank line to close out any python definition
feeder(lineno + 1, "", fn, d)
feeder(lineno + 1, "", fn, base_name, d)
if ext == ".bbclass":
classes.remove(__classname__)
else:
@ -156,9 +159,15 @@ def handle(fn, d, include = 0):
set_additional_vars(fn, d, include)
data.update_data(d)
all_handlers = {}
for var in data.keys(d):
# try to add the handler
# if we added it remember the choiche
if data.getVarFlag(var, 'handler', d):
bb.event.register(data.getVar(var, d))
handler = data.getVar(var,d)
if bb.event.register(var,handler) == bb.event.Registered:
all_handlers[var] = handler
continue
if not data.getVarFlag(var, 'task', d):
@ -172,12 +181,22 @@ def handle(fn, d, include = 0):
pdeps.append(var)
data.setVarFlag(p, 'deps', pdeps, d)
bb.build.add_task(p, pdeps, d)
# now add the handlers
if not len(all_handlers) == 0:
data.setVar('__all_handlers__', all_handlers, d)
bbpath.pop(0)
if oldfile:
bb.data.setVar("FILE", oldfile, d)
# we have parsed the bb class now
if ext == ".bbclass" or ext == ".inc":
__parsed_methods__[base_name] = 1
return d
def feeder(lineno, s, fn, d):
def feeder(lineno, s, fn, root, d):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, __bbpath_found__, classes, bb, __residue__
if __infunc__:
if s == '}':
@ -205,13 +224,22 @@ def feeder(lineno, s, fn, d):
__body__.append(s)
return
else:
text = '\n'.join(__body__)
comp = bb.utils.better_compile(text, "<bb>", fn )
bb.utils.better_exec(comp, __builtins__, text, fn)
# Note we will add root to parsedmethods after having parse
# 'this' file. This means we will not parse methods from
# bb classes twice
if not root in __parsed_methods__:
text = '\n'.join(__body__)
methodpool.insert_method( root, text, fn )
funcs = data.getVar('__functions__', d) or {}
if not funcs.has_key( root ):
funcs[root] = text
else:
funcs[root] = "%s\n%s" % (funcs[root], text)
data.setVar('__functions__', funcs, d)
__body__ = []
__inpython__ = False
funcs = data.getVar('__functions__', d) or ""
data.setVar('__functions__', "%s\n%s" % (funcs, text), d)
# fall through
if s == '' or s[0] == '#': return # skip comments and empty lines

View File

@ -3,7 +3,8 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
##########################################################################
#
# Copyright (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>, Vanille Media
# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
# Copyright (C) 2005-2006 Vanille Media
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@ -59,7 +60,7 @@ import sys, os, imp, readline, socket, httplib, urllib, commands, popen2, copy,
imp.load_source( "bitbake", os.path.dirname( sys.argv[0] )+"/bitbake" )
from bb import data, parse, build, fatal
__version__ = "0.5.3"
__version__ = "0.5.3.1"
__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
Type 'help' for more information, press CTRL-D to exit.""" % __version__
@ -263,9 +264,10 @@ class BitBakeShellCommands:
bbfile = params[0]
print "SHELL: Parsing '%s'" % bbfile
parse.update_mtime( bbfile )
bb_data, fromCache = cooker.load_bbfile( bbfile )
cooker.pkgdata[bbfile] = bb_data
if fromCache:
cooker.bb_cache.cacheValidUpdate(bbfile)
fromCache = cooker.bb_cache.loadData(bbfile, cooker)
cooker.bb_cache.sync()
if False: #from Cache
print "SHELL: File has not been updated, not reparsing"
else:
print "SHELL: Parsed"
@ -307,7 +309,7 @@ class BitBakeShellCommands:
what, globexpr = params
if what == "files":
self._checkParsed()
for key in globfilter( cooker.pkgdata.keys(), globexpr ): print key
for key in globfilter( cooker.status.pkg_fn.keys(), globexpr ): print key
elif what == "providers":
self._checkParsed()
for key in globfilter( cooker.status.pkg_pn.keys(), globexpr ): print key
@ -374,7 +376,7 @@ SRC_URI = ""
pasteBin.usage = "<index>"
def pasteLog( self, params ):
"""Send the last event exception error log (if there is one) to http://pastebin.com"""
"""Send the last event exception error log (if there is one) to http://oe.pastebin.com"""
if last_exception is None:
print "SHELL: No Errors yet (Phew)..."
else:
@ -432,7 +434,8 @@ SRC_URI = ""
name, var = params
bbfile = self._findProvider( name )
if bbfile is not None:
value = cooker.pkgdata[bbfile].getVar( var, 1 )
the_data = cooker.bb_cache.loadDataFull(bbfile, cooker)
value = the_data.getVar( var, 1 )
print value
else:
print "ERROR: Nothing provides '%s'" % name
@ -442,13 +445,14 @@ SRC_URI = ""
"""Set contents of variable defined in providee's metadata"""
name, var, value = params
bbfile = self._findProvider( name )
d = cooker.pkgdata[bbfile]
if bbfile is not None:
data.setVar( var, value, d )
print "ERROR: Sorry, this functionality is currently broken"
#d = cooker.pkgdata[bbfile]
#data.setVar( var, value, d )
# mark the change semi persistant
cooker.pkgdata.setDirty(bbfile, d)
print "OK"
#cooker.pkgdata.setDirty(bbfile, d)
#print "OK"
else:
print "ERROR: Nothing provides '%s'" % name
poke.usage = "<providee> <variable> <value>"
@ -458,7 +462,7 @@ SRC_URI = ""
what = params[0]
if what == "files":
self._checkParsed()
for key in cooker.pkgdata.keys(): print key
for key in cooker.status.pkg_fn.keys(): print key
elif what == "providers":
self._checkParsed()
for key in cooker.status.providers.keys(): print key
@ -555,14 +559,14 @@ SRC_URI = ""
def completeFilePath( bbfile ):
"""Get the complete bbfile path"""
if not cooker.pkgdata: return bbfile
for key in cooker.pkgdata.keys():
if not cooker.status.pkg_fn: return bbfile
for key in cooker.status.pkg_fn.keys():
if key.endswith( bbfile ):
return key
return bbfile
def sendToPastebin( content ):
"""Send content to http://www.pastebin.com"""
"""Send content to http://oe.pastebin.com"""
mydata = {}
mydata["parent_pid"] = ""
mydata["format"] = "bash"
@ -572,7 +576,7 @@ def sendToPastebin( content ):
params = urllib.urlencode( mydata )
headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
conn = httplib.HTTPConnection( "pastebin.com:80" )
conn = httplib.HTTPConnection( "oe.pastebin.com:80" )
conn.request("POST", "/", params, headers )
response = conn.getresponse()
@ -594,10 +598,10 @@ def completer( text, state ):
if u == "<variable>":
allmatches = cooker.configuration.data.keys()
elif u == "<bbfile>":
if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
else: allmatches = [ x.split("/")[-1] for x in cooker.pkgdata.keys() ]
if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn.keys() ]
elif u == "<providee>":
if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
else: allmatches = cooker.status.providers.iterkeys()
else: allmatches = [ "(No tab completion available for this command)" ]
else: allmatches = [ "(No tab completion available for this command)" ]