1 # ex:ts=4:sw=4:sts=4:et
2 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4 # BitBake 'Event' implementation
6 # Caching of bitbake variables before task execution
8 # Copyright (C) 2006 Richard Purdie
10 # but small sections based on code from bin/bitbake:
11 # Copyright (C) 2003, 2004 Chris Larson
12 # Copyright (C) 2003, 2004 Phil Blundell
13 # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
14 # Copyright (C) 2005 Holger Hans Peter Freyther
15 # Copyright (C) 2005 ROAD GmbH
17 # This program is free software; you can redistribute it and/or modify
18 # it under the terms of the GNU General Public License version 2 as
19 # published by the Free Software Foundation.
21 # This program is distributed in the hope that it will be useful,
22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 # GNU General Public License for more details.
26 # You should have received a copy of the GNU General Public License along
27 # with this program; if not, write to the Free Software Foundation, Inc.,
28 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
37 import cPickle as pickle
40 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
# On-disk cache format version. Bump whenever the pickled layout changes;
# __init__ discards and rebuilds any cache whose stored CACHE_VER differs.
__cache_version__ = "127"
46 BitBake Cache implementation
    def __init__(self, cooker):
        """
        Set up the variable cache: locate/create the cache directory from
        the CACHE config variable, and load a previously saved cache file
        if one exists and its version stamps match.

        NOTE(review): lines tagged [restored] were missing from this paste
        and were reconstructed from the visible control flow — confirm
        against upstream bitbake before relying on them.
        """

        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        self.clean = {}                     # [restored] fn -> "" marker set; consulted by cacheValid*
        self.depends_cache = {}
        self.data = None                    # [restored] live datastore primed by setData()
        self.data_fn = None                 # [restored] filename the live datastore belongs to

        # No CACHE directory configured: run with caching disabled.
        if self.cachedir in [None, '']:
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
        else:                               # [restored]
            self.has_cache = True           # [restored]
            self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")

            bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
            try:                            # [restored]
                os.stat( self.cachedir )
            except OSError:                 # [restored]
                bb.mkdirhier( self.cachedir )

        # Load the previous cache if present. A version mismatch or a
        # truncated/corrupt pickle falls through to an empty (rebuilt) cache.
        if self.has_cache and (self.mtime(self.cachefile)):
            try:                            # [restored]
                p = pickle.Unpickler( file(self.cachefile,"rb"))
                self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError, 'Cache Version Mismatch'
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError, 'Bitbake Version Mismatch'
            except EOFError:                # [restored]
                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except (ValueError, KeyError):
                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                self.depends_cache = {}

        # Thoroughly validate every loaded entry up front (timestamps etc.).
        if self.depends_cache:
            for fn in self.depends_cache.keys():
                self.clean[fn] = ""         # [restored] assume clean; cacheValidUpdate may remove it
                self.cacheValidUpdate(fn)
    def getVar(self, var, fn, exp = 0):
        """
        Gets the value of a variable
        (similar to getVar in the data class)

        There are two scenarios:
          1. We have cached data - serve from depends_cache[fn]
          2. We're learning what data to cache - serve from data
             backend but add a copy of the data to the cache.

        NOTE(review): lines tagged [restored] were missing from this paste.
        """
        # Known-clean file: answer straight from the persisted cache.
        if fn in self.clean:                # [restored]
            return self.depends_cache[fn][var]

        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}

        if fn != self.data_fn:
            # We're trying to access data in the cache which doesn't exist
            # yet setData hasn't been called to setup the right access. Very bad.
            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))

        # Learning mode: read from the live datastore and remember the value.
        result = bb.data.getVar(var, self.data, exp)
        self.depends_cache[fn][var] = result
        return result                       # [restored]
    def setData(self, fn, data):
        """
        Called to prime bb_cache ready to learn which variables to cache.
        Will be followed by calls to self.getVar which aren't cached
        but can be fulfilled from self.data.

        NOTE(review): lines tagged [restored] were missing from this paste.
        """
        self.data_fn = fn                   # [restored]
        self.data = data                    # [restored]

        # Make sure __depends makes the depends_cache
        self.getVar("__depends", fn, True)
        # Record the parse-time mtime so cacheValidUpdate can detect edits.
        self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
    def loadDataFull(self, fn, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        # Always reparses; the skipped flag is deliberately discarded here.
        bb_data, skipped = self.load_bbfile(fn, cfgData)
        return bb_data                      # NOTE(review): [restored] — missing from this paste
    def loadData(self, fn, cfgData):
        """
        Load a subset of data for fn.
        If the cached data is valid we do nothing,
        To do this, we need to parse the file and set the system
        to record the variables accessed.
        Return the cache status and whether the file was skipped when parsed

        NOTE(review): lines tagged [restored] were missing from this paste.
        """
        # Cache hit: report cached-ness plus the recorded SKIPPED flag
        # (set by skip() when parsing raised SkipPackage).
        if self.cacheValid(fn):
            if "SKIPPED" in self.depends_cache[fn]:
                return True, True           # [restored]
            return True, False              # [restored]

        # Cache miss: parse the file and prime learning mode via setData().
        bb_data, skipped = self.load_bbfile(fn, cfgData)
        self.setData(fn, bb_data)
        return False, skipped
    def cacheValid(self, fn):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.

        NOTE(review): lines tagged [restored] were missing from this paste —
        reconstructed from cacheValidUpdate's use of self.clean; verify.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False                    # [restored]
        # Membership in self.clean is maintained by cacheValidUpdate().
        if fn in self.clean:                # [restored]
            return True                     # [restored]
        return False                        # [restored]
    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        NOTE(review): lines tagged [restored] were missing from this paste
        and were reconstructed from the visible check/evict pattern; verify.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False                    # [restored]

        # Check file still exists
        if self.mtime(fn) == 0:
            # NOTE(review): message typo ("not longer") left as-is — it is a
            # runtime string, out of scope for a comment-only change.
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
            self.remove(fn)                 # [restored]
            return False                    # [restored]

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
            self.remove(fn)                 # [restored]
            return False                    # [restored]

        # Check the file's timestamp
        if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
            self.remove(fn)                 # [restored]
            return False                    # [restored]

        # Check dependencies are still valid
        depends = self.getVar("__depends", fn, True)
        for f,old_mtime in depends:
            # Check if file still exists
            if self.mtime(f) == 0:
                self.remove(fn)             # [restored]
                return False                # [restored]

            new_mtime = bb.parse.cached_mtime(f)
            if (new_mtime > old_mtime):
                bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
                self.remove(fn)             # [restored]
                return False                # [restored]

        # All checks passed: record fn as clean for the fast cacheValid() path.
        bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
        if not fn in self.clean:
            self.clean[fn] = ""             # [restored]

        return True                         # [restored]
    def skip(self, fn):                     # NOTE(review): [restored] def line missing from paste
        """
        Mark fn as skipped.
        Called from the parser
        """
        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}
        # The "SKIPPED" flag is what loadData() checks on later cache hits.
        self.depends_cache[fn]["SKIPPED"] = "1"
    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
        if fn in self.depends_cache:
            del self.depends_cache[fn]
        # NOTE(review): upstream also drops fn from self.clean here; those
        # lines appear to be missing from this paste — restored, verify:
        if fn in self.clean:                # [restored]
            del self.clean[fn]              # [restored]
    def sync(self):                         # NOTE(review): [restored] def line missing from paste
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        # Nothing to persist when caching is disabled.
        if not self.has_cache:
            return                          # [restored]

        # Stamp the pickle with version info so a later __init__ can detect
        # stale caches and rebuild.
        version_data = {}                   # [restored]
        version_data['CACHE_VER'] = __cache_version__
        version_data['BITBAKE_VER'] = bb.__version__

        # protocol -1: use the newest pickle protocol available.
        p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
        p.dump([self.depends_cache, version_data])
248 def mtime(self, cachefile):
249 return bb.parse.cached_mtime_noerror(cachefile)
    def handle_data(self, file_name, cacheData):
        """
        Save data we need into the cache
        Populates cacheData's lookup tables (providers, packages, runtime
        deps, task graphs, ...) from the cached variables of file_name.

        NOTE(review): lines tagged [restored] were missing from this paste
        and were reconstructed from the surrounding logic; verify.
        """

        # Core identity/version variables for this recipe.
        pn = self.getVar('PN', file_name, True)
        pe = self.getVar('PE', file_name, True) or "0"
        pv = self.getVar('PV', file_name, True)
        pr = self.getVar('PR', file_name, True)
        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
        provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

        cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)

        # build PackageName to FileName lookup table
        if pn not in cacheData.pkg_pn:
            cacheData.pkg_pn[pn] = []
        cacheData.pkg_pn[pn].append(file_name)

        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)

        # build FileName to PackageName lookup table
        cacheData.pkg_fn[file_name] = pn
        cacheData.pkg_pepvpr[file_name] = (pe,pv,pr)
        cacheData.pkg_dp[file_name] = dp

        # Build forward and reverse provider hashes
        # Forward: virtual -> [filenames]
        # Reverse: PN -> [virtuals]
        if pn not in cacheData.pn_provides:
            cacheData.pn_provides[pn] = Set()
        cacheData.pn_provides[pn] |= provides

        cacheData.fn_provides[file_name] = Set()
        for provide in provides:
            if provide not in cacheData.providers:
                cacheData.providers[provide] = []
            cacheData.providers[provide].append(file_name)
            cacheData.fn_provides[file_name].add(provide)

        # Record build-time dependencies, per-file and globally.
        cacheData.deps[file_name] = Set()
        for dep in depends:                 # [restored]
            cacheData.all_depends.add(dep)
            cacheData.deps[file_name].add(dep)

        # Build reverse hash for PACKAGES, so runtime dependencies
        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
        for package in packages:
            if not package in cacheData.packages:
                cacheData.packages[package] = []
            cacheData.packages[package].append(file_name)
            # Per-package RPROVIDES extend the file-level list gathered above.
            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()

        for package in packages_dynamic:
            if not package in cacheData.packages_dynamic:
                cacheData.packages_dynamic[package] = []
            cacheData.packages_dynamic[package].append(file_name)

        for rprovide in rprovides:
            if not rprovide in cacheData.rproviders:
                cacheData.rproviders[rprovide] = []
            cacheData.rproviders[rprovide].append(file_name)

        # Build hash of runtime depends and recommends

        # Merge each dep name into deplist (a dict used as an ordered set).
        def add_dep(deplist, deps):
            for dep in deps:                # [restored]
                if not dep in deplist:
                    deplist[dep] = ""       # [restored]

        if not file_name in cacheData.rundeps:
            cacheData.rundeps[file_name] = {}
        if not file_name in cacheData.runrecs:
            cacheData.runrecs[file_name] = {}

        # Global RDEPENDS/RRECOMMENDS apply to every package (and PN itself);
        # the _%s-suffixed variants add per-package runtime deps.
        for package in packages + [pn]:
            if not package in cacheData.rundeps[file_name]:
                cacheData.rundeps[file_name][package] = {}
            if not package in cacheData.runrecs[file_name]:
                cacheData.runrecs[file_name][package] = {}

            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))

        # Collect files we may need for possible world-dep
        # calculations
        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
            cacheData.possible_world.append(file_name)
    def load_bbfile( self, bbfile , config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped

        NOTE(review): lines tagged [restored] were missing from this paste
        and were reconstructed from the visible chdir/return flow; verify.
        """

        import bb                           # [restored]
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        # Parse from the bbfile's own directory so relative paths resolve.
        if self.mtime(bbfile_loc):
            os.chdir(bbfile_loc)            # [restored]
        bb_data = data.init_db(config)
        try:                                # [restored]
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            os.chdir(oldpath)               # [restored]
            return bb_data, False
        except bb.parse.SkipPackage:
            # Deliberately skipped recipe: still return its data, flagged.
            os.chdir(oldpath)               # [restored]
            return bb_data, True            # [restored]
        except:                             # [restored] restore cwd, then re-raise parse errors
            os.chdir(oldpath)               # [restored]
            raise                           # [restored]
377 The Objective: Cache the minimum amount of data possible yet get to the
378 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
380 To do this, we intercept getVar calls and only cache the variables we see
381 being accessed. We rely on the cache getVar calls being made for all
382 variables bitbake might need to use to reach this stage. For each cached
383 file we need to track:
386 * The mtimes of all its dependencies
387 * Whether it caused a parse.SkipPackage exception
389 Files causing parsing errors are evicted from the cache.
#============================================================================#
# CacheData                                 # [restored] section title line
#============================================================================#
class CacheData:                            # NOTE(review): [restored] class header missing from paste
    """
    The data structures we compile from the cached data
    """

    def __init__(self):                     # [restored] def line missing from paste
        """
        Direct cache variables
        (from Cache.handle_data)
        """
        # NOTE(review): attributes tagged [restored] were absent from this
        # paste; reconstructed from what Cache.handle_data writes — verify.
        self.providers = {}                 # [restored] provide name -> [filenames]
        self.rproviders = {}                # [restored] runtime provide -> [filenames]
        self.packages = {}                  # [restored] package name -> [filenames]
        self.packages_dynamic = {}          # PACKAGES_DYNAMIC entry -> [filenames]
        self.possible_world = []            # candidate filenames for world builds
        self.pkg_pn = {}                    # [restored] PN -> [filenames]
        self.pkg_fn = {}                    # [restored] filename -> PN
        self.pkg_pepvpr = {}                # [restored] filename -> (PE, PV, PR)
        self.pkg_dp = {}                    # [restored] filename -> DEFAULT_PREFERENCE
        self.pn_provides = {}               # PN -> Set of provides
        self.fn_provides = {}               # filename -> Set of provides
        self.all_depends = Set()            # union of all DEPENDS entries seen
        self.deps = {}                      # [restored] filename -> Set of DEPENDS
        self.rundeps = {}                   # [restored] filename -> package -> rdeps dict
        self.runrecs = {}                   # [restored] filename -> package -> rrecs dict
        self.task_queues = {}               # filename -> _task_graph
        self.task_deps = {}                 # [restored] filename -> _task_deps
        self.stamp = {}                     # [restored] filename -> STAMP

        """
        Indirect Cache variables
        """
        self.ignored_dependencies = []
        self.world_target = Set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []