#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Event' implementation

Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA.
"""
# NOTE(review): the base imports below were lost from this corrupted span and
# are reconstructed from usage in the visible code (os.path/os.stat, bb.msg,
# bb.data.getVar, bb.utils.explode_deps, bb.parse.cached_mtime, Set) —
# verify against VCS history.
import os
import bb
import bb.data
import bb.utils
# Python 2.3-era sets module; Set is used by handle_data() and CacheData.
from sets import Set

# Prefer the C pickle implementation; fall back to the pure-Python one
# (much slower) if cPickle is unavailable.
try:
    import cPickle as pickle
except ImportError:
    import pickle
    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")

# Bumped whenever the on-disk cache format changes; a mismatch at load time
# causes the cache to be discarded and rebuilt (see Cache.__init__).
__cache_version__ = "125"
class Cache:
    """
    BitBake Cache implementation

    Caches parsed .bb file variables so subsequent runs can avoid reparsing.
    """
50 def __init__(self, cooker):
53 self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
55 self.depends_cache = {}
59 if self.cachedir in [None, '']:
60 self.has_cache = False
61 bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
64 self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
66 bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
68 os.stat( self.cachedir )
70 bb.mkdirhier( self.cachedir )
72 if self.has_cache and (self.mtime(self.cachefile)):
74 p = pickle.Unpickler( file(self.cachefile,"rb"))
75 self.depends_cache, version_data = p.load()
76 if version_data['CACHE_VER'] != __cache_version__:
77 raise ValueError, 'Cache Version Mismatch'
78 if version_data['BITBAKE_VER'] != bb.__version__:
79 raise ValueError, 'Bitbake Version Mismatch'
80 except (ValueError, KeyError):
81 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
82 self.depends_cache = {}
84 if self.depends_cache:
85 for fn in self.depends_cache.keys():
87 self.cacheValidUpdate(fn)
89 def getVar(self, var, fn, exp = 0):
91 Gets the value of a variable
92 (similar to getVar in the data class)
94 There are two scenarios:
95 1. We have cached data - serve from depends_cache[fn]
96 2. We're learning what data to cache - serve from data
97 backend but add a copy of the data to the cache.
101 return self.depends_cache[fn][var]
103 if not fn in self.depends_cache:
104 self.depends_cache[fn] = {}
106 if fn != self.data_fn:
107 # We're trying to access data in the cache which doesn't exist
108 # yet setData hasn't been called to setup the right access. Very bad.
109 bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
111 result = bb.data.getVar(var, self.data, exp)
112 self.depends_cache[fn][var] = result
115 def setData(self, fn, data):
117 Called to prime bb_cache ready to learn which variables to cache.
118 Will be followed by calls to self.getVar which aren't cached
119 but can be fulfilled from self.data.
124 # Make sure __depends makes the depends_cache
125 self.getVar("__depends", fn, True)
126 self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
128 def loadDataFull(self, fn, cfgData):
130 Return a complete set of data for fn.
131 To do this, we need to parse the file.
133 bb_data, skipped = self.load_bbfile(fn, cfgData)
136 def loadData(self, fn, cfgData):
138 Load a subset of data for fn.
139 If the cached data is valid we do nothing,
140 To do this, we need to parse the file and set the system
141 to record the variables accessed.
142 Return the cache status and whether the file was skipped when parsed
144 if self.cacheValid(fn):
145 if "SKIPPED" in self.depends_cache[fn]:
149 bb_data, skipped = self.load_bbfile(fn, cfgData)
150 self.setData(fn, bb_data)
151 return False, skipped
153 def cacheValid(self, fn):
155 Is the cache valid for fn?
156 Fast version, no timestamps checked.
159 if not self.has_cache:
165 def cacheValidUpdate(self, fn):
167 Is the cache valid for fn?
168 Make thorough (slower) checks including timestamps.
171 if not self.has_cache:
174 # Check file still exists
175 if self.mtime(fn) == 0:
176 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
180 # File isn't in depends_cache
181 if not fn in self.depends_cache:
182 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
186 # Check the file's timestamp
187 if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
188 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
192 # Check dependencies are still valid
193 depends = self.getVar("__depends", fn, True)
194 for f,old_mtime in depends:
195 # Check if file still exists
196 if self.mtime(f) == 0:
199 new_mtime = bb.parse.cached_mtime(f)
200 if (new_mtime > old_mtime):
201 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
205 bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
206 if not fn in self.clean:
214 Called from the parser
216 if not fn in self.depends_cache:
217 self.depends_cache[fn] = {}
218 self.depends_cache[fn]["SKIPPED"] = "1"
220 def remove(self, fn):
222 Remove a fn from the cache
223 Called from the parser in error cases
225 bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
226 if fn in self.depends_cache:
227 del self.depends_cache[fn]
234 Called from the parser when complete (or exiting)
237 if not self.has_cache:
241 version_data['CACHE_VER'] = __cache_version__
242 version_data['BITBAKE_VER'] = bb.__version__
244 p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
245 p.dump([self.depends_cache, version_data])
247 def mtime(self, cachefile):
249 return os.stat(cachefile)[8]
253 def handle_data(self, file_name, cacheData):
255 Save data we need into the cache
258 pn = self.getVar('PN', file_name, True)
259 pv = self.getVar('PV', file_name, True)
260 pr = self.getVar('PR', file_name, True)
261 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
262 provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
263 depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
264 packages = (self.getVar('PACKAGES', file_name, True) or "").split()
265 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
266 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
268 cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
269 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
271 # build PackageName to FileName lookup table
272 if pn not in cacheData.pkg_pn:
273 cacheData.pkg_pn[pn] = []
274 cacheData.pkg_pn[pn].append(file_name)
276 cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
278 # build FileName to PackageName lookup table
279 cacheData.pkg_fn[file_name] = pn
280 cacheData.pkg_pvpr[file_name] = (pv,pr)
281 cacheData.pkg_dp[file_name] = dp
283 # Build forward and reverse provider hashes
284 # Forward: virtual -> [filenames]
285 # Reverse: PN -> [virtuals]
286 if pn not in cacheData.pn_provides:
287 cacheData.pn_provides[pn] = Set()
288 cacheData.pn_provides[pn] |= provides
290 for provide in provides:
291 if provide not in cacheData.providers:
292 cacheData.providers[provide] = []
293 cacheData.providers[provide].append(file_name)
295 cacheData.deps[file_name] = Set()
297 cacheData.all_depends.add(dep)
298 cacheData.deps[file_name].add(dep)
300 # Build reverse hash for PACKAGES, so runtime dependencies
301 # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
302 for package in packages:
303 if not package in cacheData.packages:
304 cacheData.packages[package] = []
305 cacheData.packages[package].append(file_name)
306 rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
308 for package in packages_dynamic:
309 if not package in cacheData.packages_dynamic:
310 cacheData.packages_dynamic[package] = []
311 cacheData.packages_dynamic[package].append(file_name)
313 for rprovide in rprovides:
314 if not rprovide in cacheData.rproviders:
315 cacheData.rproviders[rprovide] = []
316 cacheData.rproviders[rprovide].append(file_name)
318 # Build hash of runtime depends and rececommends
320 def add_dep(deplist, deps):
322 if not dep in deplist:
325 if not file_name in cacheData.rundeps:
326 cacheData.rundeps[file_name] = {}
327 if not file_name in cacheData.runrecs:
328 cacheData.runrecs[file_name] = {}
330 for package in packages + [pn]:
331 if not package in cacheData.rundeps[file_name]:
332 cacheData.rundeps[file_name][package] = {}
333 if not package in cacheData.runrecs[file_name]:
334 cacheData.runrecs[file_name][package] = {}
336 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
337 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
338 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
339 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
341 # Collect files we may need for possible world-dep
343 if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
344 cacheData.possible_world.append(file_name)
347 def load_bbfile( self, bbfile , config):
349 Load and parse one .bb build file
350 Return the data and whether parsing resulted in the file being skipped
354 from bb import utils, data, parse, debug, event, fatal
356 # expand tmpdir to include this topdir
357 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
358 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
359 oldpath = os.path.abspath(os.getcwd())
360 if self.mtime(bbfile_loc):
362 bb_data = data.init_db(config)
364 bb_data = parse.handle(bbfile, bb_data) # read .bb data
366 return bb_data, False
367 except bb.parse.SkipPackage:
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.
    """
    return Cache(cooker)
395 #============================================================================#
397 #============================================================================#
class CacheData:
    """
    The data structures we compile from the cached data
    """

    def __init__(self):
        # Direct cache variables (populated by Cache.handle_data).
        # NOTE(review): several initializations below were lost in the
        # corrupted paste and are reconstructed from their use in
        # Cache.handle_data — verify against VCS history.
        self.providers = {}           # virtual/provide name -> [filenames]
        self.rproviders = {}          # runtime provide -> [filenames]
        self.packages = {}            # package name -> [filenames]
        self.packages_dynamic = {}
        self.possible_world = []      # candidate files for world builds
        self.pkg_pn = {}              # PN -> [filenames]
        self.pkg_fn = {}              # filename -> PN
        self.pkg_pvpr = {}            # filename -> (PV, PR)
        self.pkg_dp = {}              # filename -> DEFAULT_PREFERENCE
        self.pn_provides = {}         # PN -> Set of provides
        self.all_depends = Set()
        self.deps = {}                # filename -> Set of build depends
        self.rundeps = {}             # filename -> package -> RDEPENDS
        self.runrecs = {}             # filename -> package -> RRECOMMENDS
        self.task_queues = {}         # filename -> task graph
        self.task_deps = {}           # filename -> task dependency data
        self.stamp = {}               # filename -> STAMP value

        # Indirect Cache variables (set elsewhere by the cooker).
        self.ignored_dependencies = []
        self.world_target = Set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []