2 # ex:ts=4:sw=4:sts=4:et
3 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
BitBake 'Cache' implementation
7 Caching of bitbake variables before task execution
9 # Copyright (C) 2006 Richard Purdie
11 # but small sections based on code from bin/bitbake:
12 # Copyright (C) 2003, 2004 Chris Larson
13 # Copyright (C) 2003, 2004 Phil Blundell
14 # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
15 # Copyright (C) 2005 Holger Hans Peter Freyther
16 # Copyright (C) 2005 ROAD GmbH
18 This program is free software; you can redistribute it and/or modify it under
19 the terms of the GNU General Public License as published by the Free Software
20 Foundation; either version 2 of the License, or (at your option) any later
23 This program is distributed in the hope that it will be useful, but WITHOUT
24 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
25 FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
27 You should have received a copy of the GNU General Public License along with
28 this program; if not, write to the Free Software Foundation, Inc., 59 Temple
29 Place, Suite 330, Boston, MA 02111-1307 USA.
39 import cPickle as pickle
42 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
44 __cache_version__ = "125"
48 BitBake Cache implementation
    def __init__(self, cooker):
        """
        Set up the cache: locate the cache file under the CACHE directory,
        load any previously pickled depends_cache, and validate it against
        the cache format version and the running bitbake version.

        NOTE(review): this file looks like an incomplete extract -- several
        statements (the 'else:' branch setting self.has_cache = True, the
        'try:' matching the 'except' below, and the stat/mkdir try/except)
        appear to be missing; confirm against the full source.
        """
        # CACHE is a directory path taken from the configuration metadata.
        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        # fn -> {var: value} mapping of every variable served through getVar().
        self.depends_cache = {}
        if self.cachedir in [None, '']:
            # No CACHE configured: run without a persistent cache.
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
        # presumably an 'else:' setting self.has_cache = True belongs here -- TODO confirm
        self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
        bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
        # Ensure the cache directory exists before use.
        os.stat( self.cachedir )
        bb.mkdirhier( self.cachedir )

        if self.has_cache and (self.mtime(self.cachefile)):
            # NOTE(review): a 'try:' introducing the 'except' below seems to
            # be missing from this extract.
            p = pickle.Unpickler( file(self.cachefile,"rb"))
            self.depends_cache, version_data = p.load()
            # Reject caches written by a different cache format or release.
            if version_data['CACHE_VER'] != __cache_version__:
                raise ValueError, 'Cache Version Mismatch'
            if version_data['BITBAKE_VER'] != bb.__version__:
                raise ValueError, 'Bitbake Version Mismatch'
        except (ValueError, KeyError):
            # Any mismatch or unpickling problem: start from an empty cache.
            bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
            self.depends_cache = {}

        if self.depends_cache:
            # Thoroughly validate every cached entry up front (mtime checks).
            for fn in self.depends_cache.keys():
                self.cacheValidUpdate(fn)
    def getVar(self, var, fn, exp = 0):
        """
        Gets the value of a variable
        (similar to getVar in the data class)

        There are two scenarios:
          1. We have cached data - serve from depends_cache[fn]
          2. We're learning what data to cache - serve from data
             backend but add a copy of the data to the cache.
        """
        # NOTE(review): the guard for scenario 1 (checking that fn and var
        # are already in self.depends_cache) appears to be missing from this
        # extract; as written, the return below is unconditional.
        return self.depends_cache[fn][var]

        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}

        if fn != self.data_fn:
            # We're trying to access data in the cache which doesn't exist
            # yet setData hasn't been called to setup the right access. Very bad.
            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))

        # Learning mode: read from the live data store and remember the value.
        result = bb.data.getVar(var, self.data, exp)
        self.depends_cache[fn][var] = result
        # NOTE(review): a final 'return result' is expected here -- TODO confirm
    def setData(self, fn, data):
        """
        Called to prime bb_cache ready to learn which variables to cache.
        Will be followed by calls to self.getVar which aren't cached
        but can be fulfilled from self.data.
        """
        # NOTE(review): assignments 'self.data_fn = fn' and 'self.data = data'
        # appear to be missing from this extract; getVar() relies on both.

        # Make sure __depends makes the depends_cache
        self.getVar("__depends", fn, True)
        # Record the file's mtime so cacheValidUpdate() can detect changes.
        self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
    def loadDataFull(self, fn, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        bb_data, skipped = self.load_bbfile(fn, cfgData)
        # NOTE(review): a 'return bb_data' is expected here -- TODO confirm
    def loadData(self, fn, cfgData):
        """
        Load a subset of data for fn.
        If the cached data is valid we do nothing,
        To do this, we need to parse the file and set the system
        to record the variables accessed.
        Return the cache status and whether the file was skipped when parsed
        """
        if self.cacheValid(fn):
            if "SKIPPED" in self.depends_cache[fn]:
                # NOTE(review): the cache-hit return paths (True plus the
                # skipped flag) appear to be missing from this extract.

        # Cache miss: parse the file and start recording accessed variables.
        bb_data, skipped = self.load_bbfile(fn, cfgData)
        self.setData(fn, bb_data)
        return False, skipped
    def cacheValid(self, fn):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        # Caching must be enabled for any entry to be considered valid.
        if not self.has_cache:
            # NOTE(review): the rest of this method is truncated in this
            # extract -- an early 'return False' and a membership check in
            # self.clean are expected; confirm against the full source.
    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        if not self.has_cache:
            # NOTE(review): an early 'return False' appears to be missing here.

        # Check file still exists
        if self.mtime(fn) == 0:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
            # NOTE(review): removal of the stale entry and 'return False'
            # appear to be missing from this extract.

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)

        # Check the file's timestamp
        if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)

        # Check dependencies are still valid
        depends = self.getVar("__depends", fn, True)
        for f,old_mtime in depends:
            # Check if file still exists
            if self.mtime(f) == 0:
            # NOTE(review): the body of the check above is truncated here.
            new_mtime = bb.parse.cached_mtime(f)
            if (new_mtime > old_mtime):
                bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))

        # All checks passed: the entry is clean.
        bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
        if not fn in self.clean:
            # NOTE(review): marking the entry clean and a final 'return True'
            # appear to be missing from this extract.
        # NOTE(review): the enclosing 'def skip(self, fn):' header appears to
        # be missing from this extract; the lines below mark fn as skipped so
        # loadData() can report the recipe was skipped at parse time.
        """
        Called from the parser
        """
        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}
        self.depends_cache[fn]["SKIPPED"] = "1"
220 def remove(self, fn):
222 Remove a fn from the cache
223 Called from the parser in error cases
225 bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
226 if fn in self.depends_cache:
227 del self.depends_cache[fn]
        # NOTE(review): the enclosing 'def sync(self):' header and docstring
        # opener appear to be missing from this extract; this code pickles
        # the in-memory cache plus version stamps out to self.cachefile.
        """
        Called from the parser when complete (or exiting)
        """
        if not self.has_cache:
            # NOTE(review): an early 'return' is expected here -- TODO confirm

        # NOTE(review): 'version_data = {}' appears to be missing here.
        version_data['CACHE_VER'] = __cache_version__
        version_data['BITBAKE_VER'] = bb.__version__

        # Highest pickle protocol (-1) for a compact, fast cache file.
        p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
        p.dump([self.depends_cache, version_data])
247 def mtime(self, cachefile):
248 return bb.parse.cached_mtime_noerror(cachefile)
    def handle_data(self, file_name, cacheData):
        """
        Save data we need into the cache
        """
        # Core identity of the recipe: name, version, revision, preference.
        pn = self.getVar('PN', file_name, True)
        pv = self.getVar('PV', file_name, True)
        pr = self.getVar('PR', file_name, True)
        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
        provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

        # Task graph/dependency information recorded by the parser.
        cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)

        # build PackageName to FileName lookup table
        if pn not in cacheData.pkg_pn:
            cacheData.pkg_pn[pn] = []
        cacheData.pkg_pn[pn].append(file_name)

        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)

        # build FileName to PackageName lookup table
        cacheData.pkg_fn[file_name] = pn
        cacheData.pkg_pvpr[file_name] = (pv,pr)
        cacheData.pkg_dp[file_name] = dp

        # Build forward and reverse provider hashes
        # Forward: virtual -> [filenames]
        # Reverse: PN -> [virtuals]
        if pn not in cacheData.pn_provides:
            cacheData.pn_provides[pn] = Set()
        cacheData.pn_provides[pn] |= provides

        for provide in provides:
            if provide not in cacheData.providers:
                cacheData.providers[provide] = []
            cacheData.providers[provide].append(file_name)

        cacheData.deps[file_name] = Set()
        # NOTE(review): a 'for dep in depends:' loop header appears to be
        # missing here in this extract -- the two indented lines below
        # clearly belong to it.
            cacheData.all_depends.add(dep)
            cacheData.deps[file_name].add(dep)

        # Build reverse hash for PACKAGES, so runtime dependencies
        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
        for package in packages:
            if not package in cacheData.packages:
                cacheData.packages[package] = []
            cacheData.packages[package].append(file_name)
            # Per-package RPROVIDES_<pkg> also feed the rproviders table below.
            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()

        for package in packages_dynamic:
            if not package in cacheData.packages_dynamic:
                cacheData.packages_dynamic[package] = []
            cacheData.packages_dynamic[package].append(file_name)

        for rprovide in rprovides:
            if not rprovide in cacheData.rproviders:
                cacheData.rproviders[rprovide] = []
            cacheData.rproviders[rprovide].append(file_name)

        # Build hash of runtime depends and recommends

        def add_dep(deplist, deps):
            # Merge each dependency name into deplist (a dict used as a set).
            # NOTE(review): the 'for dep in deps:' header and the assignment
            # into deplist appear to be missing from this extract.
            if not dep in deplist:

        if not file_name in cacheData.rundeps:
            cacheData.rundeps[file_name] = {}
        if not file_name in cacheData.runrecs:
            cacheData.runrecs[file_name] = {}

        # Record runtime depends/recommends per package (plus the recipe pn),
        # merging the global and per-package variable forms.
        for package in packages + [pn]:
            if not package in cacheData.rundeps[file_name]:
                cacheData.rundeps[file_name][package] = {}
            if not package in cacheData.runrecs[file_name]:
                cacheData.runrecs[file_name][package] = {}

            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))

        # Collect files we may need for possible world-dep
        # calculations
        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
            cacheData.possible_world.append(file_name)
    def load_bbfile( self, bbfile , config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        # Function-level imports, as in the original module.
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        if self.mtime(bbfile_loc):
            # NOTE(review): an 'os.chdir(bbfile_loc)' is expected here so the
            # file parses relative to its own directory -- truncated in this
            # extract.
        bb_data = data.init_db(config)
        # NOTE(review): a 'try:' wrapping the parse, an 'os.chdir(oldpath)'
        # restore, and the SkipPackage return path appear truncated below.
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            return bb_data, False
        except bb.parse.SkipPackage:
            # NOTE(review): handler body (restore cwd, 'return bb_data, True')
            # appears to be missing from this extract.
373 The Objective: Cache the minimum amount of data possible yet get to the
374 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
376 To do this, we intercept getVar calls and only cache the variables we see
377 being accessed. We rely on the cache getVar calls being made for all
378 variables bitbake might need to use to reach this stage. For each cached
379 file we need to track:
382 * The mtimes of all its dependencies
383 * Whether it caused a parse.SkipPackage exception
385 Files causing parsing errors are evicted from the cache.
392 #============================================================================#
394 #============================================================================#
    # NOTE(review): the enclosing 'class CacheData' header and its
    # '__init__' definition appear to be missing from this extract; the
    # assignments below initialise the aggregated lookup tables that
    # Cache.handle_data() populates.
    """
    The data structures we compile from the cached data
    """
        """
        Direct cache variables
        (from Cache.handle_data)
        """
        # package name -> [filenames] for dynamically-produced packages
        self.packages_dynamic = {}
        # candidate recipes for world builds (see handle_data)
        self.possible_world = []
        # PN -> set of virtuals the recipe provides
        self.pn_provides = {}
        # union of every build-time dependency seen
        self.all_depends = Set()
        # filename -> recorded _task_graph
        self.task_queues = {}
        """
        Indirect Cache variables
        """
        self.ignored_dependencies = []
        self.world_target = Set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []