# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
from __future__ import absolute_import
from __future__ import print_function
import os, re
import logging
import bb
from bb import data
from bb import persist_data
from bb import utils

__version__ = "2"

logger = logging.getLogger("BitBake.Fetcher")
class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url):
        msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        # args must be a tuple for Exception compatibility (e.g. pickling)
        self.args = (url,)

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        msg = "Unpack failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, msg)
        self.args = (missing, url)

class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class MD5SumError(BBFetchException):
    """Exception raised when the MD5 checksum of a downloaded file does not match the expected one"""
    def __init__(self, path, wanted, got, url):
        msg = "File: '%s' has md5 sum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
        self.url = url
        self.path = path
        self.wanted = wanted
        self.got = got
        BBFetchException.__init__(self, msg)
        self.args = (path, wanted, got, url)

class SHA256SumError(MD5SumError):
    """Exception raised when the SHA256 checksum of a downloaded file does not match the expected one"""
def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            s1, s2 = s.split('=')
            p[s1] = s2

    return (type, host, path, user, pswd, p)

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    (type, host, path, user, pswd, p) = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    url = '%s://' % type
    if user and type != "file":
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host and type != "file":
        url += "%s" % host
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
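# Illustrative round trip (not part of the original module; the URL is
# hypothetical): decodeurl() splits a SRC_URI entry into tokens and
# encodeurl() reassembles them.
#
#   tokens = decodeurl("http://www.example.org/software/pkg-1.0.tar.gz;name=pkg")
#   # tokens == ('http', 'www.example.org', '/software/pkg-1.0.tar.gz',
#   #            '', '', {'name': 'pkg'})
#   assert encodeurl(tokens) == "http://www.example.org/software/pkg-1.0.tar.gz;name=pkg"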
def uri_replace(ud, uri_find, uri_replace, d):
    if not ud.url or not uri_find or not uri_replace:
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    if ud.mirrortarball:
                        result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.mirrortarball))
                    elif ud.localpath:
                        result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.localpath))
            else:
                return ud.url
    return encodeurl(result_decoded)
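# Illustrative mirror mapping (hypothetical host): each token of the find
# URL is treated as a regex against the corresponding token of ud.url, e.g.
# a (find, replace) pair of
#   ("git://.*/.*", "http://downloads.example.com/mirror/")
# rewrites a matching git:// URL so its mirror tarball is fetched over HTTP
# instead of from the upstream SCM.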
methods = []
urldata_cache = {}
saved_headrevs = {}
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.persist(d)
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        try:
            bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
        except:
            pass
        del pd['BB_URI_HEADREVS']
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the current values and
    return True/False on whether they've changed.
    """

    pd = persist_data.persist(d)
    data = dict(pd['BB_URI_HEADREVS'].items())
    data2 = dict(bb.fetch2.saved_headrevs)

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
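# Illustrative example (hypothetical host): PREMIRRORS/MIRRORS values are
# whitespace-separated (find, replace) pairs with a literal "\n" between
# entries, so
#   mirror_from_string("git://.*/.* http://downloads.example.com/mirror/ \\n "
#                      "ftp://.*/.* http://downloads.example.com/mirror/")
# returns
#   [['git://.*/.*', 'http://downloads.example.com/mirror/'],
#    ['ftp://.*/.*', 'http://downloads.example.com/mirror/']]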
def verify_checksum(u, ud, d):
    """
    Verify the MD5 and SHA256 checksums for downloaded source.

    Raises MD5SumError/SHA256SumError on a mismatch. If a checksum is missing
    from the recipe, BB_STRICT_CHECKSUM decides the outcome: with
    BB_STRICT_CHECKSUM = "1" a FetchError is raised, otherwise only a
    warning is logged.
    """

    if not ud.type in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.md5_expected is None or ud.sha256_expected is None:
        logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data,
                    ud.sha256_name, sha256data)
        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
            raise FetchError("No checksum specified for %s." % u, u)
        return

    if ud.md5_expected != md5data:
        raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)

    if ud.sha256_expected != sha256data:
        raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
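# The expected checksums live as flags on SRC_URI, e.g. in a recipe
# (illustrative values):
#   SRC_URI[md5sum] = "<32 hex digits>"
#   SRC_URI[sha256sum] = "<64 hex digits>"
# With a ;name=foo URL parameter the flag names become foo.md5sum and
# foo.sha256sum, matching FetchData.md5_name / sha256_name below.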
def subprocess_setup():
    import signal
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def get_autorev(d):
    # Do not cache the srcrev in the autorev case, unless the cache policy
    # explicitly asks for it
    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)
    return "AUTOINC"
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    scms = []
    fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            rev = ud.method.sortable_revision(scm, ud, d, name)
            format = format.replace(name, rev)

    return format
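# Illustrative multi-SCM example (names are hypothetical): with two SRC_URI
# entries carrying ;name=machine and ;name=meta, setting
#   SRCREV_FORMAT = "machine_meta"
# makes get_srcrev() replace each name in the format string with that entry's
# sortable revision, yielding a combined version string such as
# "3+<machine-rev>_7+<meta-rev>".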
def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)
def runfetchcmd(cmd, d, quiet = False, cleanup = []):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    # popen close() returns a wait() style status: the low byte is the signal
    # number, the high byte is the exit status
    signal = status & 0xff
    exitstatus = status >> 8

    if signal or exitstatus != 0:
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        if signal:
            raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
        elif exitstatus != 0:
            raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))

    return output
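# Illustrative call (command and variable are hypothetical): fetchers wrap
# their SCM binaries with runfetchcmd so the proxy/PATH exports and error
# handling above are shared, e.g.
#   output = runfetchcmd("git ls-remote %s" % repourl, d, quiet=True)
# On failure a FetchError carrying the captured output is raised, after
# removing anything listed in cleanup.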
def check_network_access(d, info = ""):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
        raise FetchError("BB_NO_NETWORK is set, but the fetcher code attempted network access with the command %s" % info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s", info)
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be called automatically before the fetchers go.

    d is a bb.data instance
    origud is the FetchData for the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()
    for (find, replace) in mirrors:
        newuri = uri_replace(origud, find, replace, ld)
        if not newuri or newuri == origud.url:
            continue
        try:
            ud = FetchData(newuri, ld)
            ud.setup_localpath(ld)

            if check:
                found = ud.method.checkstatus(newuri, ud, ld)
                if found:
                    return found
                continue

            if ud.method.need_update(newuri, ud, ld):
                ud.method.download(newuri, ud, ld)
                if hasattr(ud.method, "build_mirror_data"):
                    ud.method.build_mirror_data(newuri, ud, ld)

            if not ud.localpath or not os.path.exists(ud.localpath):
                continue

            if ud.localpath == origud.localpath:
                return ud.localpath

            # We may be obtaining a mirror tarball which needs further processing by the real fetcher
            # If that tarball is a local file:// we need to provide a symlink to it
            dldir = ld.getVar("DL_DIR", True)
            if not ud.localpath.startswith(dldir):
                if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
                    os.symlink(ud.localpath, os.path.join(dldir, os.path.basename(ud.localpath)))
                return None
            # Otherwise the result is a local file:// and we symlink to it
            if not os.path.exists(origud.localpath):
                os.symlink(ud.localpath, origud.localpath)
            return ud.localpath

        except bb.fetch2.BBFetchException:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)", newuri, origud.url)
            try:
                if os.path.isfile(ud.localpath):
                    bb.utils.remove(ud.localpath)
            except UnboundLocalError:
                pass
            continue
    return None
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified
    """

    if 'rev' in ud.parm:
        return ud.parm['rev']

    if 'tag' in ud.parm:
        return ud.parm['tag']

    rev = None
    if name != '':
        pn = data.getVar("PN", d, True)
        rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
        if not rev:
            rev = data.getVar("SRCREV_%s" % name, d, True)
    if not rev:
        rev = data.getVar("SRCREV", d, True)
    if rev == "INVALID":
        raise FetchError("Please set SRCREV to a valid value", ud.url)
    if rev == "AUTOINC":
        rev = ud.method.latest_revision(ud.url, ud, d, name)

    return rev
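# Resolution order, illustrated for a hypothetical recipe "mypkg" and a
# SRC_URI entry named "meta":
#   1. the ;rev= or ;tag= URL parameter
#   2. SRCREV_meta_pn-mypkg
#   3. SRCREV_meta
#   4. SRCREV
# A value of "AUTOINC" then asks the fetcher for the latest upstream revision.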
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

        self.names = self.parm.get("name", 'default').split(',')

        self.method = None
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if self.method.supports_srcrev():
            self.revisions = {}
            for name in self.names:
                self.revisions[name] = srcrev_internal_helper(self, d, name)

            # add compatibility code for the case where no name is specified
            if len(self.names) == 1:
                self.revision = self.revisions[self.names[0]]

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self.url, self, d)

        if self.localfile and self.localpath:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.donestamp = basepath + '.done'
            self.lockfile = basepath + '.lock'
    def setup_localpath(self, d):
        if not self.localpath:
            self.localpath = self.method.localpath(self.url, self, d)
    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data datastore
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = data.getVar("PN", d, True)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, True) or data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)

        return data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
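# Example of the multiple-srcrev support (taken from the linux-yocto recipes):
#   SRC_URI = "git://git.pokylinux.org/linux-yocto-2.6.37;protocol=git;branch=${KBRANCH},meta;name=machine,meta"
# produces a FetchData with
#   names     = ['machine', 'meta']
#   revisions = {'machine': <rev>, 'meta': <rev>}
# and per-name revisions can then be pinned via SRCREV_machine / SRCREV_meta.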
class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")
def need_update(self, url, ud, d):
|
2011-01-10 14:23:36 +00:00
|
|
|
"""
|
|
|
|
Force a fetch, even if localpath exists?
|
|
|
|
"""
|
2011-02-07 12:08:32 +00:00
|
|
|
if os.path.exists(ud.localpath):
|
|
|
|
return False
|
|
|
|
return True
|
2011-01-10 14:23:36 +00:00
|
|
|
|
|
|
|
def supports_srcrev(self):
|
|
|
|
"""
|
|
|
|
The fetcher supports auto source revisions (SRCREV)
|
|
|
|
"""
|
|
|
|
return False
|
|
|
|
|
2011-01-18 15:03:53 +00:00
|
|
|
def download(self, url, urldata, d):
|
2011-01-10 14:23:36 +00:00
|
|
|
"""
|
|
|
|
Fetch urls
|
|
|
|
Assumes localpath was called first
|
|
|
|
"""
|
2011-02-04 10:26:21 +00:00
|
|
|
raise NoMethodError(url)
|
2011-01-10 14:23:36 +00:00
|
|
|
|
2011-01-18 14:08:09 +00:00
|
|
|
    def unpack(self, urldata, rootdir, data):
        import subprocess
        iterate = False
        file = urldata.localpath
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z']:
            efile = os.path.join(bb.data.getVar('WORKDIR', data, True), os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None

        if file.endswith('.tar'):
            cmd = 'tar x --no-same-owner -f %s' % file
        elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --no-same-owner -f %s' % file
        elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
            cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
        elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
            cmd = 'gzip -dc %s > %s' % (file, efile)
        elif file.endswith('.bz2'):
            cmd = 'bzip2 -dc %s > %s' % (file, efile)
        elif file.endswith('.tar.xz'):
            cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
        elif file.endswith('.xz'):
            cmd = 'xz -dc %s > %s' % (file, efile)
        elif file.endswith('.zip') or file.endswith('.jar'):
            cmd = 'unzip -q -o'
            if 'dos' in urldata.parm:
                cmd = '%s -a' % cmd
            cmd = "%s '%s'" % (cmd, file)
        elif file.endswith('.src.rpm') or file.endswith('.srpm'):
            if 'unpack' in urldata.parm:
                unpack_file = ("%s" % urldata.parm['unpack'])
                cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file)
                iterate = True
                iterate_file = unpack_file
            else:
                cmd = 'rpm2cpio.sh %s | cpio -i' % (file)
        else:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
                    destdir = "."
                    if file[0:len(filesdir)] == filesdir:
                        destdir = file[len(filesdir):file.rfind('/')]
                        destdir = destdir.strip('/')
                        if len(destdir) < 1:
                            destdir = "."
                        elif not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
                            os.makedirs("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
                else:
                    if 'patch' not in urldata.parm:
                        # The "destdir" handling was specifically done for FILESPATH
                        # items. So, only do so for file:// entries.
                        if urldata.type == "file" and urldata.path.find("/") != -1:
                            destdir = urldata.path.rsplit("/", 1)[0]
                        else:
                            destdir = "."
                        bb.mkdirhier("%s/%s" % (rootdir, destdir))
                        cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)

        if not cmd:
            return

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        os.chdir(rootdir)
        if 'subdir' in urldata.parm:
            newdir = ("%s/%s" % (rootdir, urldata.parm['subdir']))
            bb.mkdirhier(newdir)
            os.chdir(newdir)

        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return
    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)
    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        return True
    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def localcount_internal_helper(ud, d, name):
        """
        Return:
        a) a locked localcount if specified
        b) None otherwise
        """

        localcount = None
        if name != '':
            pn = data.getVar("PN", d, True)
            localcount = data.getVar("LOCALCOUNT_" + name, d, True)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, True)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)
    def latest_revision(self, url, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)

        pd = persist_data.persist(d)
        revs = pd['BB_URI_HEADREVS']
        key = self.generate_revision_key(url, ud, d, name)
        rev = revs[key]
        if rev is not None:
            return str(rev)

        revs[key] = rev = self._latest_revision(url, ud, d, name)
        return rev
    def sortable_revision(self, url, ud, d, name):
        """
        Return a sortable revision for the URL. SCMs whose raw revisions do
        not sort (e.g. hashes) are mapped to "<localcount>+<revision>".
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.persist(d)
        localcounts = pd['BB_URI_LOCALCOUNT']
        key = self.generate_revision_key(url, ud, d, name)

        latest_rev = self._build_revision(url, ud, d, name)
        last_rev = localcounts[key + '_rev']
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = FetchMethod.localcount_internal_helper(ud, d, name)
        if count is None:
            count = localcounts[key + '_count'] or "0"

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d, name):
        key = self._revision_key(url, ud, d, name)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
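# Illustrative result (revision value is hypothetical): for a git URL the
# sortable revision looks like "4+abc123", where the persisted local count
# is bumped each time the underlying revision changes, keeping the string
# monotonically sortable even though the hash itself is not.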
class Fetch(object):
    def __init__(self, urls, d, cache = True):
        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        fn = bb.data.getVar('FILE', d, True)
        if cache and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, d)

        if cache:
            urldata_cache[fn] = self.ud
    def localpath(self, url):
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return bb.data.expand(self.ud[url].localpath, self.d)
    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local
    def download(self, urls = []):
        """
        Fetch all urls
        """
        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            if not ud.localfile:
                continue

            lf = bb.utils.lockfile(ud.lockfile)

            try:
                if not m.need_update(u, ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(u, ud, self.d):
                    mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) is not None:
                    bb.data.setVar("BB_NO_NETWORK", "1", self.d)

                if not localpath and m.need_update(u, ud, self.d):
                    try:
                        m.download(u, ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(u, ud, self.d)
                        localpath = ud.localpath

                    except BBFetchException:
                        # Remove any incomplete fetch
                        if os.path.isfile(ud.localpath):
                            bb.utils.remove(ud.localpath)
                        mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
                        localpath = try_mirrors(self.d, ud, mirrors)

                if not localpath or not os.path.exists(localpath):
                    raise FetchError("Unable to fetch URL %s from any source." % u, u)

                if os.path.exists(ud.donestamp):
                    # Touch the done stamp file to show active use of the download
                    try:
                        os.utime(ud.donestamp, None)
                    except:
                        # Errors aren't fatal here
                        pass
                else:
                    # Only check the checksums if we've not seen this item before, then create the stamp
                    verify_checksum(u, ud, self.d)
                    open(ud.donestamp, 'w').close()

            finally:
                bb.utils.unlockfile(lf)
    def checkstatus(self, urls = []):
        """
        Check all urls exist upstream
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(u, ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)
    def unpack(self, root, urls = []):
        """
        Unpack all urls into the directory root
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if bb.data.expand(ud.localpath, self.d) is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)
def clean(self, urls = []):
|
|
|
|
"""
|
|
|
|
Clean files that the fetcher gets or places
|
|
|
|
"""
|
|
|
|
|
|
|
|
if len(urls) == 0:
|
|
|
|
urls = self.urls
|
|
|
|
|
|
|
|
for url in urls:
|
|
|
|
if url not in self.ud:
|
|
|
|
self.ud[url] = FetchData(url, d)
|
|
|
|
ud = self.ud[url]
|
|
|
|
ud.setup_localpath(self.d)
|
|
|
|
|
|
|
|
if not ud.localfile or self.localpath is None:
|
|
|
|
continue
|
|
|
|
|
|
|
|
if ud.lockfile:
|
|
|
|
lf = bb.utils.lockfile(ud.lockfile)
|
|
|
|
|
|
|
|
ud.method.clean(ud, self.d)
|
|
|
|
if ud.donestamp:
|
|
|
|
bb.utils.remove(ud.donestamp)
|
|
|
|
|
|
|
|
if ud.lockfile:
|
|
|
|
bb.utils.unlockfile(lf)
|
|
|
|
|
2011-01-10 14:23:36 +00:00
|
|
|
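# Typical driver usage, following the pattern used elsewhere in this module
# (get_srcrev and the module-level localpath helper); workdir is hypothetical:
#   fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
#   fetcher.download()
#   fetcher.unpack(workdir)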
from . import cvs
from . import git
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())