Hi,
On Sun, 20 May 2007 16:04:21 +0100, Richard Purdie wrote:
> Has there been any progress on a bitbake 1.8 version of this patch? I
> think you had a partially complete but inefficient version?
Sorry for being away so long. When I last worked on this, my frustration
kept growing until at some point something snapped and I threw the code
into a corner. I then had some other things to deal with, but I am back now.
I faced several problems and will detail the solutions I found. One
problem still remains with bitbake's caching algorithm: it doesn't
properly pick up remote changes because it uses a cached copy of the
generated version identifier.
What I did: I added a function to the fetcher module that exports
variables of the form SRCREV_refname for all URLs that have a refname
parameter set. I work with two concepts here: revision_counter and
revision_identifier. The latter is opaque and need not be comparable
(e.g. git object hashes), while the former must be monotonically
increasing.
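To illustrate the intended use (this mirrors the uboot-gta01 patch
further down; the refname labels are free-form):

  SRC_URI = "git://www.denx.de/git/u-boot.git/;protocol=git;refname=git1 \
             svn://svn.openmoko.org/trunk/src/target/u-boot;module=patches;proto=http;refname=svn1"
  PV = "1.2.1+svn${SRCREV_svn1}+git${SRCREV_git1}"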
For svn both are the same ('Last Changed Rev' from svn info); for git
the revision_identifier is the object hash and the revision_counter is
generated locally. To make this possible I introduced something called
fetcher_state (a pickle'd file in FETCHERSTATEDIR, a variable which I
also added). (Note that I should perhaps replace the fetcher_state code
with the runcache code described below, which handles locking better.)
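As a sketch of what ends up on disk: the state file for a git URL maps
a key to the last identifier seen together with the local counter, e.g.
(hash value illustrative):

  { "REVISION_COUNTER": ("<git object hash>", 3) }

get_revision_counter() then returns "local3" and only increments the
counter when the remote hash changes.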
In order not to hit the network for every revision_identifier lookup, I
initially just cached it in the bb.data object that is passed around
everywhere. Didn't work. So I then tried to cache it in the bb.fetch
module (like the already existing urldata). That did work, somewhat,
but left me wondering for hours why it sometimes just did not work,
with no indication of the problem whatsoever. I then found out that you
sneaked an os.fork() into bb.runqueue, which of course makes all
in-memory caching impossible.
So I need to do my revision caching on disk, even though I actually
only want to cache for the duration of a bitbake run. I therefore
introduced something I call "runcache" (really a "per-run cache", but
that's just too long): a pickle'd cache file that is cleared at the
beginning of each run, with proper locking of course.
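The access pattern is always lock, access, unlock; e.g. the revision
lookup in the fetch/__init__.py hunk below does:

  runcache = bb.runcache.Runcache.from_data(d)
  try:
      runcache.lock_shared()
      ri = runcache.get(url, None)
  finally:
      runcache.unlock()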
I have attached a patch against current bitbake svn, as well as a patch
against some older OpenMoko svn to show how to use the new
functionality.
In the OpenMoko patch you will see another necessary workaround when
setting SRCREV_foo: in order to be able to call
bb.fetch.export_srcrevs() one must first call bb.fetch.init(), which
calls initdata(), which might expand FILESPATH, which might contain PV,
which might contain ${SRCREV_foo}, closing the circle. The workaround
therefore temporarily overrides getVar on a copy of the datastore so
that any ${SRCREV_*} reference expands to the empty string while
bb.fetch.init() runs.
--
Henryk Plötz
Greetings from Berlin
~ Help Microsoft fight software piracy: Give Linux to a friend today! ~
Index: classes/base.bbclass
===================================================================
--- classes/base.bbclass (Revision 2345)
+++ classes/base.bbclass (working copy)
@@ -679,48 +679,6 @@
bb.data.setVar(key, sdata[key], d)
}
-def base_get_revisions(d):
- import bb, sys
- localdata = bb.data.createCopy(d)
-
- src_uri = bb.data.getVar('SRC_URI', localdata, 1)
- if not src_uri:
- ## FIXME Emit a warning here? Should an empty SRC_URI be allowed?
- return []
-
- import time
- FALLBACKREVS = [ "0", time.strftime('%Y%m%d%H%M', time.gmtime()) ]
- # The idea here is that a zero for the first revision component should be smaller
- # than any real revision so that real revisions always override the date-fallback
-
- if not hasattr(bb.fetch, "compute_revisions"):
- bb.note( "This BitBake doesn't support revision fetching, falling back to current date" )
- return FALLBACKREVS
-
- try:
- bb.fetch.init(src_uri.split(),d)
- except bb.fetch.NoMethodError:
- (type, value, traceback) = sys.exc_info()
- raise bb.build.FuncFailed("No method: %s" % value)
-
- revs = []
- try:
- revs = bb.fetch.compute_revisions(localdata)
- except bb.fetch.MissingParameterError:
- (type, value, traceback) = sys.exc_info()
- raise bb.build.FuncFailed("Missing parameters: %s" % value)
- except bb.fetch.FetchError:
- (type, value, traceback) = sys.exc_info()
- raise bb.build.FuncFailed("Fetch failed: %s" % value)
- except bb.fetch.QueryError:
- (type, value, traceback) = sys.exc_info()
- bb.error( "Revision fetching failed: '%s', falling back to current date." % value)
- return FALLBACKREVS
-
- bb.note("Retrieved remote revisions: %r" % revs)
-
- return revs
-
def base_after_parse_two(d):
import bb
import exceptions
@@ -746,17 +704,9 @@
srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
if srcdate != None:
bb.data.setVar('SRCDATE', srcdate, d)
-
- if srcdate == 'now':
- revisions = base_get_revisions(d)
-
- pr = bb.data.getVar('PR', d, 1)
- if pr:
- revisions.insert( 0, pr )
- elif revisions:
- revisions[0] = "r" + revisions[0]
-
- bb.data.setVar('PR', '_'.join(revisions), d)
+ #if srcdate == 'now':
+ # import time
+ # bb.data.setVar('PR', time.strftime('%Y%m%d%H%M', time.gmtime()), d)
use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
if use_nls != None:
@@ -787,8 +737,40 @@
bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
return
+def base_after_parse_three(d):
+ import bb, sys
+
+ pv_nonex = bb.data.getVar('PV', d, 0)
+ if not pv_nonex or "${SRCREV" not in pv_nonex: # FIXME Is there a better way to find out whether PV contains something like ${SRCREV_foo}?
+ return # Nothing to do for us
+ src_uri = bb.data.getVar('SRC_URI', d, 1)
+ if not src_uri:
+ return # or should we fail?
+
+ if not hasattr(bb.fetch.Fetch, "get_revision_identifier"):
+ raise bb.build.FuncFailed( "This BitBake doesn't support revision fetching, failing" )
+
+ localdata = bb.data.createCopy(d)
+ ## Sorry, this is purely self-defense
+ ## bb.fetch.init() calls initdata(), which might expand FILESPATH which might contain PV which might contain ${SRCREV_foo}
+ ## This ad-hoc construction overrides getVar to replace all occurrences of ${SRCREV_*} with ""
+ import re
+ srcrev_re = re.compile(r'\$\{SRCREV[^}]*\}')
+ localdata.myOldGetVar = localdata.getVar
+ localdata.getVar = lambda var, exp=0: localdata.myOldGetVar(var, exp) and srcrev_re.sub("", localdata.myOldGetVar(var, exp))
+ bb.note("getVar override on %s" % localdata)
+
+ try:
+ bb.fetch.init(src_uri.split(),localdata)
+ except bb.fetch.NoMethodError:
+ (type, value, traceback) = sys.exc_info()
+ raise bb.build.FuncFailed("No method: %s" % value)
+
+ bb.fetch.export_srcrevs(d)
+
python () {
+ base_after_parse_three(d)
base_after_parse_two(d)
base_after_parse(d)
}
Index: packages/uboot/uboot-gta01_svn.bb
===================================================================
--- packages/uboot/uboot-gta01_svn.bb (Revision 2345)
+++ packages/uboot/uboot-gta01_svn.bb (working copy)
@@ -3,16 +3,17 @@
LICENSE = "GPL"
SECTION = "bootloader"
PRIORITY = "optional"
-PV = "1.2.0+svn${SRCDATE}"
+PV = "1.2.1+svn${SRCREV_svn1}+git${SRCREV_git1}+svn${SRCREV_svn2}"
PR = "r10"
PROVIDES = "virtual/bootloader"
S = "${WORKDIR}/git"
-SRC_URI = "git://www.denx.de/git/u-boot.git/;protocol=git \
- svn://svn.openmoko.org/trunk/src/target/u-boot;module=patches;proto=http \
+SRC_URI = "git://www.denx.de/git/u-boot.git/;protocol=git;refname=git1 \
+ svn://svn.openmoko.org/trunk/src/target/u-boot;module=patches;proto=http;refname=svn1 \
file://uboot-20070311-tools_makefile_ln_sf.patch;patch=1 \
- "
+ svn:///home/henryk/SVN/test/;module=a;proto=file;refname=svn2 \
+ "
EXTRA_OEMAKE = "CROSS_COMPILE=${TARGET_PREFIX}"
TARGET_LDFLAGS = ""
Index: conf/bitbake.conf
===================================================================
--- conf/bitbake.conf (Revision 835)
+++ conf/bitbake.conf (working copy)
@@ -28,6 +28,7 @@
FETCHCOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} co ${CVSCOOPTS} ${CVSMODULE}"
FETCHCOMMAND_svn = "/usr/bin/env svn co ${SVNCOOPTS} ${SVNROOT} ${SVNMODULE}"
FETCHCOMMAND_wget = "/usr/bin/env wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
+FETCHERSTATEDIR = "${DL_DIR}/fetcherstate"
FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}"
FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}"
Index: lib/bb/runcache.py
===================================================================
--- lib/bb/runcache.py (Revision 0)
+++ lib/bb/runcache.py (Revision 0)
@@ -0,0 +1,183 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# This class implements a file based cache to be used to give
+# multiple processes access to the cached data
+
+# Copyright (C) 2007 Henryk Ploetz
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import fcntl
+from fcntl import LOCK_UN, LOCK_SH, LOCK_EX
+import UserDict, os
+
+class StateError(Exception):
+ pass
+
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+
+class Runcache(UserDict.DictMixin):
+ """This represents a cache object, similar to a dictionary object.
+ However, you must acquire a lock before doing any read or write operation.
+ (Write operations are cached until you call unlock() or flush()).
+ """
+
+ def __init__(self, fname):
+ """Create a new runcache object.
+ These objects must not be shared between several threads (e.g.
+ each thread must create its own object). Using an already existing
+ object in a forked process should be fine as long as the object was
+ in the unlocked state."""
+ self.fname = fname
+ self.fp = None
+ self.state = LOCK_UN
+ self.data = {}
+ self.dirty = False
+
+ def _open_and_lock(self, openmode, fdopenmode, lockmode):
+ "Open the file and set a lock on it"
+ fp, self.fp = 0, None
+ try:
+ fp = os.open(self.fname, openmode)
+ self.fp = os.fdopen(fp, fdopenmode)
+ r = fcntl.flock(self.fp, lockmode)
+ if r:
+ raise EnvironmentError, "fcntl.flock() returned %s" % r
+ except:
+ if fp and not self.fp: os.close(fp)
+ elif self.fp:
+ self.fp.close()
+ self.fp = None
+ raise
+
+ def _read_data(self):
+ "Read the data from the file."
+ try:
+ self.data = pickle.load(self.fp)
+ self.dirty = False
+ except (EnvironmentError, EOFError):
+ self.data = {}
+ self.dirty = True
+
+ def lock_shared(self):
+ """Create a shared, read-only lock for the cache.
+ Will block until the lock has been acquired."""
+ if self.state == LOCK_SH: return
+ if self.state == LOCK_EX: self.unlock()
+
+ if self.fname is not None:
+ self._open_and_lock(os.O_RDONLY | os.O_CREAT, "rb", LOCK_SH)
+ self._read_data()
+ self.state = LOCK_SH
+
+ def lock_exclusive(self, dontload=False):
+ """Create an exclusive, read-write lock for the cache.
+ Will block until the lock has been acquired.
+ If dontload is True then the cache will not be loaded from disk,
+ overwriting the on-disk version on the next unlock or flush."""
+ if self.state == LOCK_EX: return
+ if self.state == LOCK_SH: self.unlock()
+
+ if self.fname is not None:
+ self._open_and_lock(os.O_RDWR | os.O_CREAT, "r+b", LOCK_EX)
+ if dontload:
+ self.dirty = True
+ else:
+ self._read_data()
+ self.state = LOCK_EX
+
+ def unlock(self):
+ """Free the previously acquired lock (and possibly commit all write-operations)."""
+ if self.state == LOCK_UN: return
+ if self.state == LOCK_EX: self.flush()
+
+ if self.fname is not None:
+ r = fcntl.flock(self.fp, LOCK_UN)
+ if r:
+ raise EnvironmentError, "fcntl.flock() returned %s" % r
+
+ self.fp.close()
+ self.fp = None
+
+ self.state = LOCK_UN
+
+ def flush(self, force=False):
+ """Commit all write operations."""
+ if self.state != LOCK_EX: raise StateError, "flush() called without a lock being held."
+ if self.fname is None: return
+ if not self.dirty and not force: return
+ self.fp.seek(0)
+ self.fp.truncate(0)
+ pickle.dump(self.data, self.fp)
+ self.dirty = False
+
+ def clean(self):
+ """Empty the cache.
+ (Note that for convenience you may call this with or without a lock being held.
+ It will be automatically acquired and released if necessary.)"""
+ oldstate = self.state
+
+ if oldstate != LOCK_EX:
+ self.lock_exclusive(dontload=True)
+
+ self.data = {}
+
+ if oldstate == LOCK_UN:
+ self.unlock()
+ elif oldstate == LOCK_SH:
+ self.lock_shared()
+
+ ## Interface for UserDict.DictMixin which in turn will provide a regular dict interface
+ def __getitem__(self, key):
+ if self.state not in (LOCK_EX, LOCK_SH):
+ raise StateError, "__getitem__() called without a lock being held."
+ return self.data[key]
+
+ def __setitem__(self, key, value):
+ if self.state not in (LOCK_EX, ):
+ raise StateError, "__setitem__() called without an exclusive lock being held."
+ self.dirty = True
+ self.data[key] = value
+
+ def __delitem__(self, key):
+ if self.state not in (LOCK_EX, ):
+ raise StateError, "__delitem__() called without an exclusive lock being held."
+ self.dirty = True
+ del self.data[key]
+
+ def keys(self):
+ if self.state not in (LOCK_EX, LOCK_SH):
+ raise StateError, "keys() called without a lock being held."
+ return self.data.keys()
+
+ ## Convenience interface
+ def from_data(cls, data, basename="runcache.dat"):
+ """Creates a Runcache object pointing to a file in the directory indicated by the CACHE variable in d (which must be a bb.data).
+ If CACHE is not set then a warning is emitted and a dummy object returned with all file accesses disabled.
+ """
+ import bb
+ cachedir = bb.data.getVar("CACHE", data, True)
+ if cachedir:
+ bb.mkdirhier(cachedir)
+ return cls(os.path.join(cachedir, basename))
+ else:
+ bb.note("WARNING: CACHE variable is not set. Per-run cache disabled.")
+ return cls(None)
+ from_data = classmethod(from_data)
+
Index: lib/bb/__init__.py
===================================================================
--- lib/bb/__init__.py (Revision 835)
+++ lib/bb/__init__.py (working copy)
@@ -61,6 +61,7 @@
"manifest",
"methodpool",
"cache",
+ "runcache",
"runqueue",
"taskdata",
"providers",
Index: lib/bb/utils.py
===================================================================
--- lib/bb/utils.py (Revision 835)
+++ lib/bb/utils.py (working copy)
@@ -110,6 +110,10 @@
max_line = min(line+4,len(body)-1)
for i in range(min_line,max_line+1):
bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (i, body[i-1]) )
+
+ # FIXME DEBUG Remove this
+ import traceback
+ traceback.print_exc()
def better_compile(text, file, realfile):
Index: lib/bb/fetch/__init__.py
===================================================================
--- lib/bb/fetch/__init__.py (Revision 835)
+++ lib/bb/fetch/__init__.py (working copy)
@@ -28,6 +28,11 @@
import bb
from bb import data
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
class FetchError(Exception):
"""Exception raised when a download fails"""
@@ -127,6 +132,23 @@
if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d)
+def export_srcrevs(d):
+ """Export variables of the form SRCREV_foo for all URLs"""
+ fn = bb.data.getVar('FILE', d, 1)
+ for m in methods:
+ for u in m.urls:
+ ud = urldata[fn][u]
+ if not ud.parm.has_key("refname"):
+ continue
+
+ varname = 'SRCREV_%s' % ud.parm["refname"]
+ if bb.data.getVar(varname, d, 0) is not None:
+ continue
+
+ rc = m.get_revision_counter(u, ud, d)
+ bb.debug(1,"Setting srcrev %s to %s" % (ud.parm["refname"], rc))
+ bb.data.setVar(varname, rc, d)
+
def localpaths(d):
"""Return a list of the local filenames, assuming successful fetch"""
local = []
@@ -167,7 +189,126 @@
and duplicate code execution)
"""
return url
+
+ def _get_revision_identifier(self, url, urldata, d):
+ raise NoMethodError("_get_revision_identifier() not implemented for class %s" % self.__class__)
+ def get_revision_identifier(self, url, urldata, d):
+ """Get a revision identifier for the given URL.
+
+ A revision identifier is hereby defined as a string which is guaranteed to change when the
+ source pointed to by the URL changes. Revision identifiers must be regarded as opaque strings
+ that can only be compared for (in-)equality. No ordering is guaranteed. No claims are made as
+ to how revision identifiers for different URLs compare. Revision identifiers for the same URL
+ from the same package should be globally comparable between different developers.
+
+ It is expected that it will be possible to get something suitable as a revision identifier for
+ an URL without downloading its complete contents first.
+
+ Note: This is a wrapper for _get_revision_identifier(), do not override in derived classes."""
+
+ if hasattr(urldata, "revision_identifier"):
+ ri = urldata.revision_identifier
+ bb.debug(1, "Using cached revision identifier %s for URL %s" % (ri, url))
+ return ri
+
+ runcache = bb.runcache.Runcache.from_data(d)
+ ri = None
+ try:
+ runcache.lock_shared()
+ ri = runcache.get(url, None)
+ finally:
+ runcache.unlock()
+
+ if ri is None:
+ try:
+ ri = self._get_revision_identifier(url, urldata, d)
+ except NoMethodError:
+ import sys, time
+ bb.error("NoMethodError: %s, falling back to current date" % sys.exc_info()[1])
+ ri = time.strftime('%Y%m%d%H%M', time.gmtime())
+
+ try:
+ runcache.lock_exclusive()
+ runcache[url] = ri
+ finally:
+ runcache.unlock()
+ else: bb.debug(1, "Got revision identifier %s from runcache for url %s" % (ri,url))
+
+ urldata.revision_identifier = ri
+ bb.debug(1, "Retrieved revision identifier %s for URL %s" % (ri, url))
+ return ri
+
+ def _get_state_file_name(self, url, urldata, d):
+ """Return a filename (without path component) for storing local state.
+ May be overridden by individual fetchers."""
+ return url.replace("/", ".")
+
+ def load_fetcher_state(self, url, urldata, d, key):
+ "Load the saved local state for this fetcher given by key"
+ fetcherstatedir = data.getVar("FETCHERSTATEDIR", d, 1)
+ bb.mkdirhier(fetcherstatedir)
+ statefilename = os.path.join(fetcherstatedir, self._get_state_file_name(url, urldata, d))
+
+ # FIXME Locking?
+ fp = file(statefilename, "r")
+ obj = pickle.load(fp)
+ fp.close()
+
+ return obj[key]
+
+ def save_fetcher_state(self, url, urldata, d, key, value):
+ "Save some local state for this fetcher under a key"
+ fetcherstatedir = data.getVar("FETCHERSTATEDIR", d, 1)
+ bb.mkdirhier(fetcherstatedir)
+ statefilename = os.path.join(fetcherstatedir, self._get_state_file_name(url, urldata, d))
+
+ # FIXME Locking?
+ try:
+ fp = file(statefilename, "r")
+ obj = pickle.load(fp)
+ fp.close()
+ except (EnvironmentError, EOFError):
+ obj = {}
+
+ obj[key] = value
+
+ fp = file(statefilename, "w")
+ pickle.dump(obj,fp)
+ fp.close()
+
+ def get_revision_counter(self, url, urldata, d):
+ """Get a revision counter for the given URL.
+
+ A revision counter is similar to a revision identifier in that it changes when the source
+ changes. However, it must also behave monotonically increasing with regard to 'natural' sort
+ ordering.
+
+ The default implementation falls back to using get_revision_identifier(). It keeps a local
+ file with the last result from get_revision_identifier() that was seen and a local
+ revision counter which is increased by 1 each time that the returned revision identifier
+ changes.
+
+ Derived classes are encouraged to override the default implementation."""
+
+ ri = self.get_revision_identifier(url, urldata, d)
+
+ try:
+ last_known_value = self.load_fetcher_state(url, urldata, d, "REVISION_COUNTER")
+ except (EnvironmentError, EOFError, KeyError):
+ last_known_value = (None, 0)
+
+ last_ri, last_rc = last_known_value
+
+ if ri != last_ri:
+ rc = last_rc+1
+ self.save_fetcher_state(url, urldata, d, "REVISION_COUNTER", (ri, rc) )
+ else:
+ rc = last_rc
+
+ bb.debug(1, "Computed local revision counter %s" % rc)
+ return "local%s" % rc
+
def setUrls(self, urls):
self.__urls = urls
Index: lib/bb/fetch/git.py
===================================================================
--- lib/bb/fetch/git.py (Revision 835)
+++ lib/bb/fetch/git.py (working copy)
@@ -35,7 +35,9 @@
for name in dirs:
os.rmdir(os.path.join(root, name))
-def rungitcmd(cmd,d):
+def rungitcmd(cmd, d, capture_output = False):
+ """Run a git command, optionally capturing the output.
+ If capture_output is False then os.system() will be used, otherwise os.popen() will be used."""
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
@@ -43,10 +45,17 @@
# rather than host provided
pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
- myret = os.system(pathcmd)
-
- if myret != 0:
+ if not capture_output:
+ myret = os.system(pathcmd)
+ else:
+ import commands
+ myret, result = commands.getstatusoutput(pathcmd)
+
+ if myret:
raise FetchError("Git: %s failed" % pathcmd)
+
+ if capture_output:
+ return result
class Git(Fetch):
"""Class to fetch a module or modules from git repositories"""
@@ -56,6 +65,12 @@
"""
return ud.type in ['git']
+ def _get_revision_identifier(self, url, urldata, d):
+ bb.note("GIT fetcher to network for revision identifier")
+ output = rungitcmd("git ls-remote %s://%s%s %s" % (urldata.proto, urldata.host, urldata.path, urldata.tag), d, capture_output=True)
+ ri = output.split()[0]
+ return ri
+
def localpath(self, url, ud, d):
ud.proto = "rsync"
@@ -66,7 +81,7 @@
if 'tag' in ud.parm:
ud.tag = ud.parm['tag']
- ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
+ ud.localfile = data.expand('git_%s%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag, self.get_revision_identifier(url, ud, d)), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
Index: lib/bb/fetch/svn.py
===================================================================
--- lib/bb/fetch/svn.py (Revision 835)
+++ lib/bb/fetch/svn.py (working copy)
@@ -44,6 +44,74 @@
"""
return ud.type in ['svn']
+ def _svnoptions(ud):
+ ## Common code from _get_revision_identifier() and go()
+
+ # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
+ options = []
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+ elif ud.date != "now":
+ options.append("-r {%s}" % ud.date)
+
+ if ud.user:
+ options.append("--username %s" % ud.user)
+
+ if ud.pswd:
+ options.append("--password %s" % ud.pswd)
+
+ return options
+ _svnoptions = staticmethod(_svnoptions)
+
+ def _get_revision_identifier(self, url, urldata, d):
+ proto = "svn"
+ if "proto" in urldata.parm:
+ proto = urldata.parm["proto"]
+
+ svn_rsh = None
+ if proto == "svn+ssh" and "rsh" in urldata.parm:
+ svn_rsh = urldata.parm["rsh"]
+
+ svnroot = urldata.host + urldata.path
+
+ svn_url = "%s://%s/%s" % (proto, svnroot, urldata.module)
+ opts = " ".join(self._svnoptions(urldata))
+
+ svncmd = "LANG= LC_ALL= svn info %s %s" % (svn_url, opts)
+
+ if svn_rsh:
+ svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+
+ stdout_handle = os.popen(svncmd, "r")
+ output = stdout_handle.read()
+
+ revision = None
+ for line in output.splitlines():
+ if "Last Changed Rev" in line:
+ revision = line.split(":")[1].strip()
+
+ bb.note("SVN fetcher to network for revision identifier")
+ ## Returns None on success instead of 0
+ status = stdout_handle.close() or 0
+ exitstatus, signal = status >> 8, status & 0xff
+ if signal and (revision is None or not revision.isdigit()):
+ ## svn was killed before returning the needed information
+ raise FetchError, "Command '%s' was killed with signal %i before returning the revision information" % (svncmd, signal)
+ elif signal and revision is not None and revision.isdigit():
+ ## svn was killed, but the output is probably OK up to the "Last Changed Rev" line
+ pass
+ elif status != 0:
+ raise FetchError, "Nonzero exit from '%s': Either your svn binary doesn't support remote querying or failed for some other reason (e.g. network error)" % svncmd
+
+ if revision is None:
+ raise FetchError, "Output from '%s' not understood" % svncmd
+
+ return revision
+
+ def get_revision_counter(self, url, urldata, d):
+ "For SVN: revision counter == revision identifier == 'Last Changed Rev' from `svn info`"
+ return self.get_revision_identifier(url, urldata, d)
+
def localpath(self, url, ud, d):
if not "module" in ud.parm:
raise MissingParameterError("svn method needs a 'module' parameter")
@@ -56,8 +124,12 @@
if ud.revision:
ud.date = ""
+
+ ri = ud.date
+ if ri == "now" and not ud.revision:
+ ri = self.get_revision_identifier(url, ud, d)
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ri), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
@@ -84,19 +156,8 @@
svnroot = ud.host + ud.path
- # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
- options = []
- if ud.revision:
- options.append("-r %s" % ud.revision)
- elif ud.date != "now":
- options.append("-r {%s}" % ud.date)
+ options = self._svnoptions(ud)
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
Index: lib/bb/cooker.py
===================================================================
--- lib/bb/cooker.py (Revision 835)
+++ lib/bb/cooker.py (working copy)
@@ -24,7 +24,7 @@
import sys, os, getopt, glob, copy, os.path, re, time
import bb
-from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
+from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, runcache
from sets import Set
import itertools, sre_constants
@@ -490,6 +490,9 @@
if self.configuration.buildfile is not None:
return self.buildFile(self.configuration.buildfile)
+
+ # Flush the per-run cache
+ bb.runcache.Runcache.from_data(self.configuration.data).clean()
# initialise the parsing status now we know we will need deps
self.updateCache()