1 # ex:ts=4:sw=4:sts=4:et
2 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""
10 # Copyright (C) 2003, 2004 Chris Larson
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License version 2 as
14 # published by the Free Software Foundation.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License along
22 # with this program; if not, write to the Free Software Foundation, Inc.,
23 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
import re
import sys
import cPickle as pickle

import bb
from bb import data
from bb import persist_data
class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when a MD5SUM of a file does not match the expected one"""
def uri_replace(uri, uri_find, uri_replace, d):
    """
    Rewrite uri: match each component of uri against the corresponding
    regular-expression component of uri_find and, where it matches,
    substitute the component from uri_replace (used for mirror rewriting).
    Returns the re-encoded URI, or the input unchanged when a string
    component fails to match.
    """
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    # (type, host, path, user, password, parameters)
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        import types
        if type(i) == types.StringType:
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    # Path component: keep the local file name the fetcher
                    # would use for the original URI.
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
                # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
            else:
                # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
                return uri
        # FIXME: apply replacements against options (the parameter dict case)
    return bb.encodeurl(result_decoded)

# Registry of fetcher implementations, populated at the bottom of this module.
methods = []
# Per-recipe cache of FetchData objects, keyed by the recipe file name.
urldata_cache = {}
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.PersistData(d)
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        pd.delDomain("BB_URI_HEADREVS")
    else:
        bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")
# Function call order is usually:
#   1. init
#   2. go
#   3. localpaths
# localpath can be called at any time

def init(urls, d, setup = True):
    """
    Build (and cache per recipe file) the FetchData object for each url,
    optionally running setup_localpath on entries not yet set up.
    Returns a dict mapping url -> FetchData.
    """
    urldata = {}
    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata
def go(d):
    """
    Fetch all urls.
    init must have previously been called.
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
            # File already present along with md5 stamp file
            # Touch md5 file to show activity
            os.utime(ud.md5, None)
            continue
        m.go(u, ud, d)
        if ud.localfile and not m.forcefetch(u, ud, d):
            # Record an md5 stamp so the next run can skip the fetch
            Fetch.write_md5sum(u, ud, d)
def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch.
    init must have previously been called.
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    Raises ParameterError when no SCM is present or when multiple SCMs are
    used (SRCREV_FORMAT support is not implemented).
    """
    scms = []
    # Only call setup_localpath on URIs which suppports_srcrev()
    # (sic: method name keeps its historical spelling for compatibility)
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.suppports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    # Multiple SCMs would need a value built from SRCREV_FORMAT
    bb.msg.error(bb.msg.domain.Fetcher, "Sorry, support for SRCREV_FORMAT still needs to be written")
    raise ParameterError
def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE e.g. patch.bbclass

    NOTE(review): the 'cache' parameter is currently unused here; kept for
    interface compatibility with existing callers.
    """
    ud = init([url], d)
    return ud[url].localpath
def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)

    stdout_handle = os.popen(pathcmd, "r")
    lines = []

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            sys.stdout.write(line)
        lines.append(line)

    # join once rather than growing a string in the loop
    output = "".join(lines)

    status = stdout_handle.close() or 0
    # close() returns a wait()-style status: the low 7 bits hold the
    # terminating signal number, the high byte the exit code.
    signal = status & 0x7f
    exitstatus = status >> 8

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (pathcmd, signal, output))
    elif exitstatus:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (pathcmd, exitstatus, output))

    return output
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        # No local file is known until a fetcher method computes one
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        self.setup = False
        # Find the first registered fetcher implementation that handles
        # this url; stays None when nothing supports it.
        self.method = None
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                break

    def setup_localpath(self, d):
        """
        Work out and record the local download path for this URI.
        """
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        else:
            self.localpath = self.method.localpath(self.url, self, d)
        self.md5 = self.localpath + '.md5'
class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        # NOTE(review): the urls argument is currently ignored; the list
        # starts empty and is managed through the 'urls' property.
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def suppports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        (sic: name keeps its historical spelling for compatibility)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        src_tarball_stash = None
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        for stash in src_tarball_stash:
            fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
            uri = stash + tarfn
            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
                return True
        return False
    try_mirror = staticmethod(try_mirror)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = None
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        if not wanted_sum:
            # No expected checksum supplied: nothing to verify
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        """
        Compute and record the md5 stamp file for a fetched url.
        Raises MD5SumError when an expected md5sum does not match.
        """
        # Initialise so a missing md5sum binary cannot leave this unbound
        md5data = ""
        if bb.which(data.getVar('PATH', d), 'md5sum'):
            try:
                md5pipe = os.popen('md5sum ' + ud.localpath)
                md5data = (md5pipe.readline().split() or [ "" ])[0]
                md5pipe.close()
            except OSError:
                md5data = ""

        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = open(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d)
        key = self._revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev != None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision string of the form "<count>+<rev>",
        keeping a monotonically increasing local count per URI so revisions
        sort correctly even when the SCM's own ids do not.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d)
        key = self._revision_key(url, ud, d)
        latest_rev = self.latest_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        if count is None:
            count = "0"
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)
# Import the concrete fetcher implementations and register one instance of
# each in the module-level 'methods' registry (consulted by FetchData).
# Implicit relative imports, Python 2 style, matching the rest of this module.
import cvs
import git
import local
import svn
import wget
import svk
import ssh
import perforce

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())