# Default task bitbake runs when none is given on the command line.
# "?=" assigns only if the variable is not already set elsewhere.
1 BB_DEFAULT_TASK ?= "build"
3 # like os.path.join but doesn't treat absolute RHS specially
# NOTE(review): this listing is fragmentary -- the body and return of
# base_path_join are missing here (only the def line and one condition are
# visible). From what is shown it appears to accumulate components into a
# running 'path' string -- confirm against the complete source before editing.
4 def base_path_join(a, *p):
7 if path == '' or path.endswith('/'):
13 # for MD5/SHA handling
# Load the checksums ini file at config_path into a ConfigParser instance.
# Raises Exception (after a bb.note) when the file cannot be read.
# NOTE(review): the trailing line(s) of this function (presumably
# "return parser") are missing from this fragmentary listing.
# Python 2 module name "ConfigParser" -- this file predates Python 3.
14 def base_chk_load_parser(config_path):
15 import ConfigParser, os, bb
16 parser = ConfigParser.ConfigParser()
# parser.read() returns the list of files successfully parsed; anything
# other than exactly one parsed file is treated as a failure.
17 if not len(parser.read(config_path)) == 1:
18 bb.note("Can not open the '%s' ini file" % config_path)
19 raise Exception("Can not open the '%s'" % config_path)
# Verify the md5/sha256 sums of a fetched file (localpath) against the
# entries held by 'parser' (loaded from conf/checksums.ini). Sections are
# looked up most-specific first: "PN-PV-SRC_URI", then "PN-SRC_URI", then
# plain "SRC_URI". When no entry exists, the computed sums are appended to
# ${TMPDIR}/checksums.ini instead of failing. Mismatches raise Exception.
# NOTE(review): this listing is fragmentary -- the lines setting
# no_checksum, checking the md5/sha pipe exit status, and the final return
# are missing from view; comments below only describe visible lines.
23 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
26 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
27 # we rely on the get method to create errors
28 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
29 pn_src = "%s-%s" % (pn,src_uri)
30 if parser.has_section(pn_pv_src):
31 md5 = parser.get(pn_pv_src, "md5")
32 sha256 = parser.get(pn_pv_src, "sha256")
33 elif parser.has_section(pn_src):
34 md5 = parser.get(pn_src, "md5")
35 sha256 = parser.get(pn_src, "sha256")
36 elif parser.has_section(src_uri):
37 md5 = parser.get(src_uri, "md5")
38 sha256 = parser.get(src_uri, "sha256")
42 # md5 and sha256 should be valid now
43 if not os.path.exists(localpath):
44 bb.note("The localpath does not exist '%s'" % localpath)
45 raise Exception("The path does not exist '%s'" % localpath)
48 # call md5(sum) and shasum
# NOTE(review): localpath is interpolated unquoted into a shell command --
# paths containing spaces or shell metacharacters will break (or worse).
50 md5pipe = os.popen('md5sum ' + localpath)
51 md5data = (md5pipe.readline().split() or [ "" ])[0]
54 raise Exception("Executing md5sum failed")
# sha256 is computed via the external oe_sha256sum helper found on PATH.
57 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
58 shadata = (shapipe.readline().split() or [ "" ])[0]
61 raise Exception("Executing shasum failed")
63 if no_checksum == True: # we do not have conf/checksums.ini entry
65 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
70 raise Exception("Creating checksums.ini failed")
72 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
76 if not md5 == md5data:
77 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
78 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
80 if not sha256 == shadata:
81 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
82 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
# Compute the dependency string prepended to every recipe's DEPENDS
# (used by the DEPENDS_prepend assignment later in this file). Always adds
# shasum-native (needed by do_fetch's checksum verification) except when
# building shasum-native itself; adds cross toolchain deps when host and
# build systems differ and INHIBIT_DEFAULT_DEPS is unset.
# NOTE(review): fragmentary listing -- the return statement and the
# early-exit body for the shasum-native case are not visible here.
87 def base_dep_prepend(d):
90 # Ideally this will check a flag so we will operate properly in
91 # the case where host == build == target, for now we don't work in
94 deps = "shasum-native "
# Avoid a recipe depending on itself.
95 if bb.data.getVar('PN', d, True) == "shasum-native":
98 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
99 # we need that built is the responsibility of the patch function / class, not
101 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
102 if (bb.data.getVar('HOST_SYS', d, 1) !=
103 bb.data.getVar('BUILD_SYS', d, 1)):
104 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Read a file and return its stripped contents; returns "" if the file
# cannot be opened. NOTE(review): fragmentary listing -- the "try:" line
# guarding the open is missing from view (the except on the next visible
# line implies it). Python 2 idioms: file() builtin, "except E, name".
107 def base_read_file(filename):
110 f = file( filename, "r" )
111 except IOError, reason:
112 return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
114 return f.read().strip()
# --- Small conditional helpers used from ${@...} inline-Python expansions ---
# Each returns truevalue or falsevalue depending on a test against a
# metadata variable. NOTE(review): fragmentary listing -- the return
# statements of these helpers are not visible here; only the tests are.
117 def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
119 if bb.data.getVar(variable,d,1) == checkvalue:
# Numeric comparison (float) of a variable against checkvalue.
124 def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
126 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
# Version-string comparison via bb.vercmp.
131 def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
133 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
# True branch only when ALL checkvalues occur as substrings of the variable.
139 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
142 if type(checkvalues).__name__ == "str":
143 checkvalues = [checkvalues]
144 for value in checkvalues:
145 if bb.data.getVar(variable,d,1).find(value) != -1:
146 matches = matches + 1
147 if matches == len(checkvalues):
# True branch when checkvalue is a substring of BOTH variables.
151 def base_both_contain(variable1, variable2, checkvalue, d):
153 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
# Prepend the base dependencies computed by base_dep_prepend() (above)
# to every recipe's DEPENDS.
158 DEPENDS_prepend="${@base_dep_prepend(d)} "
# Build the colon-separated FILESPATH search list: for each base path in
# 'path', one entry per OVERRIDES element (plus the bare "" override from
# the trailing ":"). NOTE(review): fragmentary listing -- the filespath
# initialisation and the outer loop over 'path' (binding 'p') are missing.
160 def base_set_filespath(path, d):
164 overrides = bb.data.getVar("OVERRIDES", d, 1) or ""
# Trailing ":" yields an empty override, i.e. the base directory itself.
165 overrides = overrides + ":"
166 for o in overrides.split(":"):
167 filespath.append(os.path.join(p, o))
168 return ":".join(filespath)
# Default search path for local files: most specific (${PF}) first.
170 FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
# Keep only the whitespace-separated words of 'str' matching regex 'f'.
# NOTE(review): 'match' is presumably re.match imported on a line missing
# from this fragmentary listing -- confirm. 'str' shadows the builtin.
172 def oe_filter(f, str, d):
174 return " ".join(filter(lambda x: match(f, x, 0), str.split()))
# Complement of oe_filter: drop the words matching regex 'f'.
176 def oe_filter_out(f, str, d):
178 return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
# --- Shell helper fragments (function headers missing from this listing) ---
# oedebug: print a message when $OEDEBUG is at least the requested level.
199 echo "Usage: oedebug level \"message\""
203 test ${OEDEBUG:-0} -ge $1 && {
# oe_runmake: run ${MAKE} (default "make") with EXTRA_OEMAKE and the given
# arguments, dying on failure.
210 if [ x"$MAKE" = x ]; then MAKE=make; fi
211 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
212 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
216 # Purpose: Install shared library file and
217 # create the necessary links
# NOTE(review): the opening "name() {" line of this shell function is
# missing from this fragmentary listing (presumably oe_soinstall, given
# EXPORT-style usage elsewhere -- confirm against the full source).
# $1 = library file to install, $2 = destination directory.
222 #oenote installing shared library $1 to $2
224 libname=`basename $1`
225 install -m 755 $1 $2/$libname
# Extract the DT_SONAME from the ELF dynamic section to create the
# soname symlink, plus an unversioned .so development symlink.
226 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
227 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
228 ln -sf $libname $2/$sonamelink
229 ln -sf $libname $2/$solink
233 # Purpose: Install a library, in all its forms
# NOTE(review): this is a fragmentary view of oe_libinstall -- the opening
# "oe_libinstall() {" line, the option-parsing cases, and several other
# interior lines are missing from this listing. Comments below describe
# only what the visible lines show.
236 # oe_libinstall libltdl ${STAGING_LIBDIR}/
237 # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
# Option parsing loop (cases not visible here).
244 while [ "$#" -gt 0 ]; do
260 oefatal "oe_libinstall: unknown option: $1"
272 if [ -z "$destpath" ]; then
273 oefatal "oe_libinstall: no destination path specified"
# Installing into the staging library dir changes .la rewriting below.
275 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
281 if [ -z "$silent" ]; then
282 echo >&2 "oe_libinstall: $*"
# Locate the directory containing the libtool .lai file when -C not given.
287 if [ -z "$dir" ]; then
291 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
297 # If such file doesn't exist, try to cut version suffix
298 if [ ! -f "$lafile" ]; then
299 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
301 if [ -f "$lafile1" ]; then
# Prefer the library_names recorded in the .la file; otherwise fall back
# to a glob pattern covering .so* and Windows import libs.
307 if [ -f "$lafile" ]; then
309 eval `cat $lafile|grep "^library_names="`
312 library_names="$libname.so* $libname.dll.a"
315 __runcmd install -d $destpath/
# Static archive (.a), installed when present or explicitly required.
317 if [ -f "$dota" -o -n "$require_static" ]; then
318 __runcmd install -m 0644 $dota $destpath/
320 if [ -f "$dotlai" -a -n "$libtool" ]; then
321 if test -n "$staging_install"
323 # stop libtool using the final directory name for libraries
325 __runcmd rm -f $destpath/$libname.la
# Mark as not-installed and rewrite WORKDIR paths in dependency_libs so
# the staged .la points at STAGING_LIBDIR instead of the build tree.
326 __runcmd sed -e 's/^installed=yes$/installed=no/' \
327 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
328 $dotlai >$destpath/$libname.la
330 __runcmd install -m 0644 $dotlai $destpath/$libname.la
# Install each shared-library file/symlink named by library_names.
334 for name in $library_names; do
335 files=`eval echo $name`
337 if [ ! -e "$f" ]; then
338 if [ -n "$libtool" ]; then
339 oefatal "oe_libinstall: $dir/$f not found."
341 elif [ -L "$f" ]; then
342 __runcmd cp -P "$f" $destpath/
343 elif [ ! -L "$f" ]; then
345 __runcmd install -m 0755 $libfile $destpath/
350 if [ -z "$libfile" ]; then
351 if [ -n "$require_shared" ]; then
352 oefatal "oe_libinstall: unable to locate shared library"
354 elif [ -z "$libtool" ]; then
355 # special case hack for non-libtool .so.#.#.# links
356 baselibfile=`basename "$libfile"`
357 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
# Recreate soname and unversioned .so symlinks from the ELF DT_SONAME.
358 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
359 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
360 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
361 __runcmd ln -sf $baselibfile $destpath/$sonamelink
363 __runcmd ln -sf $baselibfile $destpath/$solink
367 __runcmd cd "$olddir"
371 # Purpose: Install machine dependent files, if available
372 # If not available, check if there is a default
373 # If no default, just touch the destination
# NOTE(review): the opening "oe_machinstall() {" line and some interior
# lines are missing from this fragmentary listing.
# Usage: $1 $2 = install flags (e.g. -m 0644), $3 = source, $4 = dest.
376 # oe_machinstall -m 0644 fstab ${D}/etc/fstab
378 # TODO: Check argument number?
380 filename=`basename $3`
# Search each OVERRIDES element for an override-specific copy of the file.
383 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
384 if [ -e $dirname/$o/$filename ]; then
385 oenote $dirname/$o/$filename present, installing to $4
386 install $1 $2 $dirname/$o/$filename $4
390 # oenote overrides specific file NOT present, trying default=$3...
392 oenote $3 present, installing to $4
395 oenote $3 NOT present, touching empty $4
# List every defined task (variables flagged 'task') on stdout.
# nostamp: always re-run, never stamped as done.
401 do_listtasks[nostamp] = "1"
402 python do_listtasks() {
404 # emit variables and shell functions
405 #bb.data.emit_env(sys.__stdout__, d)
406 # emit the metadata which isn't valid shell
# NOTE(review): the loop line binding 'e' (iterating the datastore keys)
# is missing from this fragmentary listing.
408 if bb.data.getVarFlag(e, 'task', d):
409 sys.__stdout__.write("%s\n" % e)
# do_clean: remove the recipe's WORKDIR and its stamp files.
413 do_clean[dirs] = "${TOPDIR}"
414 do_clean[nostamp] = "1"
415 python base_do_clean() {
416 """clear the build and temp directories"""
417 dir = bb.data.expand("${WORKDIR}", d)
# Guard against an unexpanded/empty WORKDIR which would rm -rf the root.
418 if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
419 bb.note("removing " + dir)
420 os.system('rm -rf ' + dir)
# Also remove all stamp files for this recipe (STAMP.* glob).
422 dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
423 bb.note("removing " + dir)
424 os.system('rm -f '+ dir)
427 #Uncomment this for bitbake 1.8.12
428 #addtask rebuild after do_${BB_DEFAULT_TASK}
# do_rebuild: force a clean + re-run of the default task. On bitbake
# >= 1.8.11 the stamp-removal event handler (see base_eventhandler)
# handles this instead, so the explicit clean is version-gated below.
430 do_rebuild[dirs] = "${TOPDIR}"
431 do_rebuild[nostamp] = "1"
432 python base_do_rebuild() {
433 """rebuild a package"""
434 from bb import __version__
# NOTE(review): the try/except wrapping this import is not visible in this
# fragmentary listing; the fallback stub below implies it.
436 from distutils.version import LooseVersion
438 def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
439 if (LooseVersion(__version__) < LooseVersion('1.8.11')):
440 bb.build.exec_func('do_clean', d)
441 bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
# do_mrproper: like do_clean but additionally removes the downloaded
# sources in DL_DIR.
445 do_mrproper[dirs] = "${TOPDIR}"
446 do_mrproper[nostamp] = "1"
447 python base_do_mrproper() {
448 """clear downloaded sources, build and temp directories"""
449 dir = bb.data.expand("${DL_DIR}", d)
# NOTE(review): this FuncFailed is constructed but never raised -- the
# guard is ineffective as written (compare base_do_clean, which raises).
450 if dir == '/': bb.build.FuncFailed("wrong DATADIR")
451 bb.debug(2, "removing " + dir)
452 os.system('rm -rf ' + dir)
453 bb.build.exec_func('do_clean', d)
# do_fetch: download all SRC_URI entries via bb.fetch, then verify
# md5/sha256 sums of http/https/ftp/ftps downloads against
# conf/checksums.ini (using base_chk_load_parser/base_chk_file above).
# Depends on shasum-native for the oe_sha256sum helper.
457 do_fetch[dirs] = "${DL_DIR}"
458 do_fetch[depends] = "shasum-native:do_populate_staging"
459 python base_do_fetch() {
462 localdata = bb.data.createCopy(d)
463 bb.data.update_data(localdata)
465 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# NOTE(review): the try: lines wrapping init/go are missing from this
# fragmentary listing; the except clauses below imply them.
470 bb.fetch.init(src_uri.split(),d)
471 except bb.fetch.NoMethodError:
472 (type, value, traceback) = sys.exc_info()
473 raise bb.build.FuncFailed("No method: %s" % value)
476 bb.fetch.go(localdata)
477 except bb.fetch.MissingParameterError:
478 (type, value, traceback) = sys.exc_info()
479 raise bb.build.FuncFailed("Missing parameters: %s" % value)
480 except bb.fetch.FetchError:
481 (type, value, traceback) = sys.exc_info()
482 raise bb.build.FuncFailed("Fetch failed: %s" % value)
483 except bb.fetch.MD5SumError:
484 (type, value, traceback) = sys.exc_info()
485 raise bb.build.FuncFailed("MD5 failed: %s" % value)
487 (type, value, traceback) = sys.exc_info()
488 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
491 # Verify the SHA and MD5 sums we have in OE against what was downloaded
493 check_sum = bb.which(bb.data.getVar('BBPATH', d, True), "conf/checksums.ini")
495 bb.note("No conf/checksums.ini found, not checking checksums")
499 parser = base_chk_load_parser(check_sum)
501 bb.note("Creating the CheckSum parser failed")
504 pv = bb.data.getVar('PV', d, True)
505 pn = bb.data.getVar('PN', d, True)
# Check each remote URL's local file against its checksums.ini entry.
508 for url in src_uri.split():
509 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
510 (type,host,path,_,_,_) = bb.decodeurl(url)
511 uri = "%s://%s%s" % (type,host,path)
513 if type == "http" or type == "https" or type == "ftp" or type == "ftps":
514 if not base_chk_file(parser, pn, pv,uri, localpath, d):
515 bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
517 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Recursive convenience tasks: run do_fetch / do_build for this recipe
# and (via recrdeptask) everything in its runtime dependency tree.
520 addtask fetchall after do_fetch
521 do_fetchall[recrdeptask] = "do_fetch"
526 addtask buildall after do_build
527 do_buildall[recrdeptask] = "do_build"
# Unpack a single fetched file into the current directory, choosing the
# extraction command from the file extension (tar/gzip/bzip2/zip), or
# copying it (preserving a FILESDIR- or url-derived destdir for plain
# files and directories). NOTE(review): fragmentary listing -- several
# interior lines (else branches, the zip command construction, the final
# os.system call and return) are missing from view. 'file' shadows the
# Python 2 builtin.
533 def oe_unpack_file(file, data, url = None):
536 url = "file://%s" % file
537 dots = file.split(".")
# efile: target name for single-file decompression (extension stripped).
538 if dots[-1] in ['gz', 'bz2', 'Z']:
539 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
543 if file.endswith('.tar'):
544 cmd = 'tar x --no-same-owner -f %s' % file
545 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
546 cmd = 'tar xz --no-same-owner -f %s' % file
547 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
548 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
549 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
550 cmd = 'gzip -dc %s > %s' % (file, efile)
551 elif file.endswith('.bz2'):
552 cmd = 'bzip2 -dc %s > %s' % (file, efile)
553 elif file.endswith('.zip'):
555 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
558 cmd = '%s %s' % (cmd, file)
# Directories under FILESDIR are copied preserving their relative subdir.
559 elif os.path.isdir(file):
560 filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
562 if file[0:len(filesdir)] == filesdir:
563 destdir = file[len(filesdir):file.rfind('/')]
564 destdir = destdir.strip('/')
567 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
568 os.makedirs("%s/%s" % (os.getcwd(), destdir))
569 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
571 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
572 if not 'patch' in parm:
573 # The "destdir" handling was specifically done for FILESPATH
574 # items. So, only do so for file:// entries.
576 destdir = bb.decodeurl(url)[1] or "."
579 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
580 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
# Skip the copy when source and destination are already the same file.
585 dest = os.path.join(os.getcwd(), os.path.basename(file))
586 if os.path.exists(dest):
587 if os.path.samefile(file, dest):
590 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
591 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
# do_unpack: resolve each SRC_URI entry to its local download path and
# unpack it into WORKDIR via oe_unpack_file (above).
595 addtask unpack after do_fetch
596 do_unpack[dirs] = "${WORKDIR}"
597 python base_do_unpack() {
600 localdata = bb.data.createCopy(d)
601 bb.data.update_data(localdata)
603 src_uri = bb.data.getVar('SRC_URI', localdata)
606 src_uri = bb.data.expand(src_uri, localdata)
607 for url in src_uri.split():
# NOTE(review): the try: line guarding localpath is missing from this
# fragmentary listing; the except on MalformedUrl implies it.
609 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
610 except bb.MalformedUrl, e:
611 raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
612 local = os.path.realpath(local)
613 ret = oe_unpack_file(local, localdata, url)
615 raise bb.build.FuncFailed()
# Global event handler: produces human-readable notes for Pkg*/Task*/
# Build*/UnsatisfiedDep events, prints the build-configuration banner on
# BuildStarted, validates required variables, removes stamps for
# do_rebuild on StampUpdate, and appends messages to EVENTLOG if set.
# NOTE(review): fragmentary listing -- several interior lines (msg
# initialisation, try/except around the monotone read, etc.) are missing.
619 addhandler base_eventhandler
620 python base_eventhandler() {
621 from bb import note, error, data
622 from bb.event import Handled, NotHandled, getName
# Map event-name suffixes to past-tense message words.
626 messages["Completed"] = "completed"
627 messages["Succeeded"] = "completed"
628 messages["Started"] = "started"
629 messages["Failed"] = "failed"
633 if name.startswith("Pkg"):
634 msg += "package %s: " % data.getVar("P", e.data, 1)
635 msg += messages.get(name[3:]) or name[3:]
636 elif name.startswith("Task"):
637 msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
638 msg += messages.get(name[4:]) or name[4:]
639 elif name.startswith("Build"):
640 msg += "build %s: " % e.name
641 msg += messages.get(name[5:]) or name[5:]
642 elif name == "UnsatisfiedDep":
643 msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
# On BuildStarted: record bitbake/metadata versions and print the banner.
647 if name.startswith("BuildStarted"):
648 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
649 path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
650 path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
651 monotone_revision = "<unknown>"
# Best-effort read of the monotone VCS revision of the metadata tree.
653 monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
654 if monotone_revision.startswith( "format_version" ):
655 monotone_revision_words = monotone_revision.split()
656 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
659 bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
660 statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
661 statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
662 statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
# Abort early when required configuration variables are unset/INVALID.
665 needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
667 for v in needed_vars:
668 val = bb.data.getVar(v, e.data, 1)
669 if not val or val == 'INVALID':
672 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
675 # Handle removing stamps for 'rebuild' task
677 if name.startswith("StampUpdate"):
678 for (fn, task) in e.targets:
679 #print "%s %s" % (task, fn)
680 if task == "do_rebuild":
681 dir = "%s.*" % e.stampPrefix[fn]
682 bb.note("Removing stamps: " + dir)
683 os.system('rm -f '+ dir)
# Append the message to EVENTLOG when configured and event data present.
685 if not data in e.__dict__:
688 log = data.getVar("EVENTLOG", e.data, 1)
690 logfile = file(log, "a")
691 logfile.write("%s\n" % msg)
# --- Core build task chain: configure -> compile -> install ->
# --- populate_staging -> build ---
697 addtask configure after do_unpack do_patch
698 do_configure[dirs] = "${S} ${B}"
# deptask: every build dependency must have staged before we configure.
699 do_configure[deptask] = "do_populate_staging"
700 base_do_configure() {
704 addtask compile after do_configure
705 do_compile[dirs] = "${S} ${B}"
# Default compile: run make if a Makefile exists, else nothing to do.
707 if [ -e Makefile -o -e makefile ]; then
708 oe_runmake || die "make failed"
710 oenote "nothing to compile"
# Pre-create the staging directory layout before do_stage runs.
718 do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DIR_TARGET}/${layout_libdir} \
719 ${STAGING_DIR_TARGET}/${layout_includedir} \
720 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
721 ${STAGING_INCDIR_NATIVE} \
725 # Could be compile but populate_staging and do_install shouldn't run at the same time
726 addtask populate_staging after do_install
728 python do_populate_staging () {
729 bb.build.exec_func('do_stage', d)
732 addtask install after do_compile
733 do_install[dirs] = "${D} ${S} ${B}"
734 # Remove and re-create ${D} so that is it guaranteed to be empty
735 do_install[cleandirs] = "${D}"
745 addtask build after do_populate_staging
749 # Functions that update metadata based on files outputted
750 # during the build process.
# NOTE(review): orphan fragment -- the function enclosing this line is not
# visible in this fragmentary listing.
764 r[-1] += ' ' + ' '.join(j)
# True if this package's pkgdata has a '.packaged' marker file.
# NOTE(review): the interior line (likely "import os, bb") is missing.
769 def packaged(pkg, d):
771 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
# --- pkgdata helpers: locate and parse the per-package metadata files ---
# Parse a pkgdata file of "Key: value" lines into a dict, string-unescaping
# values. NOTE(review): fragmentary listing -- the imports, loop over
# lines, and return are missing from view for this and the helpers below.
773 def read_pkgdatafile(fn):
778 c = codecs.getdecoder("string_escape")
782 if os.access(fn, os.R_OK):
785 lines = f.readlines()
787 r = re.compile("([^:]+):\s*(.*)")
791 pkgdata[m.group(1)] = decode(m.group(2))
# Find the runtime pkgdata file for 'pkg': first match under the legacy
# per-arch staging layout, falling back to ${PKGDATA_DIR}/runtime/.
795 def get_subpkgedata_fn(pkg, d):
797 archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
799 pkgdata = bb.data.expand('${STAGING_DIR}/pkgdata/', d)
800 targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
802 fn = pkgdata + arch + targetdir + pkg
803 if os.path.exists(fn):
805 return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
# True if runtime pkgdata exists for this package.
807 def has_subpkgdata(pkg, d):
809 return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
# Parse the runtime pkgdata for this package into a dict.
811 def read_subpkgdata(pkg, d):
813 return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
# True if top-level pkgdata exists for recipe 'pn'.
815 def has_pkgdata(pn, d):
817 fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
818 return os.access(fn, os.R_OK)
# Parse the top-level pkgdata for recipe 'pn' into a dict.
820 def read_pkgdata(pn, d):
822 fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
823 return read_pkgdatafile(fn)
# Load previously-written pkgdata (recipe-level and per-package) back into
# the current datastore, e.g. for packaging tasks run in a later session.
825 python read_subpackage_metadata () {
827 data = read_pkgdata(bb.data.getVar('PN', d, 1), d)
829 for key in data.keys():
830 bb.data.setVar(key, data[key], d)
832 for pkg in bb.data.getVar('PACKAGES', d, 1).split():
833 sdata = read_subpkgdata(pkg, d)
834 for key in sdata.keys():
835 bb.data.setVar(key, sdata[key], d)
# --- Keep OE-internal variables out of the environment of spawned tasks ---
838 # Make sure MACHINE isn't exported
839 # (breaks binutils at least)
840 MACHINE[unexport] = "1"
842 # Make sure TARGET_ARCH isn't exported
843 # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
844 # in them, undocumented)
845 TARGET_ARCH[unexport] = "1"
847 # Make sure DISTRO isn't exported
848 # (breaks sysvinit at least)
849 DISTRO[unexport] = "1"
# Post-parse hook run for every recipe: skips incompatible host/machine
# recipes, applies per-recipe SRCDATE/USE_NLS overrides, adds git-native
# to do_fetch deps for git:// URIs, and promotes PACKAGE_ARCH to
# MACHINE_ARCH when machine-specific files are found in SRC_URI.
# NOTE(review): fragmentary listing -- guard/else/continue lines are
# missing between several of the visible statements.
852 def base_after_parse(d):
853 import bb, os, exceptions
# COMPATIBLE_HOST / COMPATIBLE_MACHINE checks are skipped for source
# mirror fetch runs, where building is not the goal.
855 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
856 if not source_mirror_fetch:
857 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
860 this_host = bb.data.getVar('HOST_SYS', d, 1)
861 if not re.match(need_host, this_host):
862 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
864 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
867 this_machine = bb.data.getVar('MACHINE', d, 1)
868 if this_machine and not re.match(need_machine, this_machine):
869 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
871 pn = bb.data.getVar('PN', d, 1)
873 # OBSOLETE in bitbake 1.7.4
874 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
876 bb.data.setVar('SRCDATE', srcdate, d)
878 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
880 bb.data.setVar('USE_NLS', use_nls, d)
882 # Git packages should DEPEND on git-native
883 srcuri = bb.data.getVar('SRC_URI', d, 1)
884 if "git://" in srcuri:
885 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
886 depends = depends + " git-native:do_populate_staging"
887 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
# Nothing to do if the recipe is already machine-specific.
889 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
890 old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
891 if (old_arch == mach_arch):
896 # We always try to scan SRC_URI for urls with machine overrides
897 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
899 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# Collect the ${MACHINE}-specific FILESPATH directories that exist.
904 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
905 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
906 if os.path.isdir(path):
# If any file:// SRC_URI entry resolves inside one of those machine
# directories, the package contents are machine-specific.
911 for s in srcuri.split():
912 if not s.startswith("file://"):
914 local = bb.data.expand(bb.fetch.localpath(s, d), d)
916 if local.startswith(mp):
917 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
918 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
# NOTE(review): fragment of the anonymous "python () {" post-parse block
# (its opening line is missing from this fragmentary listing). On bitbake
# >= 1.8.11 it wires do_rebuild's deps to the default task, replacing the
# explicit clean performed by base_do_rebuild on older versions.
923 from bb import __version__
926 # Remove this for bitbake 1.8.12
928 from distutils.version import LooseVersion
930 def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
931 if (LooseVersion(__version__) >= LooseVersion('1.8.11')):
932 deps = bb.data.getVarFlag('do_rebuild', 'deps', d) or []
933 deps.append('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1))
934 bb.data.setVarFlag('do_rebuild', 'deps', deps, d)
def check_app_exists(app, d):
    """Return True when the (expanded) application name is found on PATH."""
    from bb import which, data

    candidate = data.expand(app, d)
    search_path = data.getVar('PATH', d, 1)
    found = which(search_path, candidate)
    return len(found) > 0
# Search PATH for any known gcc 3.x executable name; used by recipes that
# require a gcc3 to build. NOTE(review): fragmentary listing -- the return
# statements (found name / failure value) are missing from view.
944 def check_gcc3(data):
946 gcc3_versions = 'gcc-3.4 gcc34 gcc-3.4.4 gcc-3.4.6 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
948 for gcc3 in gcc3_versions.split():
949 if check_app_exists(gcc3, data):
957 # Configuration data from site files
958 # Move to autotools.bbclass?
# Expose the base_do_* implementations as the default do_* task functions,
# overridable by classes/recipes that EXPORT their own.
961 EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild do_fetchall
965 ${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
966 ${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
967 ${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
968 ${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
969 ${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
970 ${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
971 ${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
972 ${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
973 ${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
974 ${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
975 ${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
976 ${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
977 ${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
978 ${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
979 ${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
980 ${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
981 ${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
982 ${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
983 ${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
984 ${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
985 ${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
986 ${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
987 ${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
988 ${KERNELORG_MIRROR} http://www.kernel.org/pub
989 ${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
990 ${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
991 ${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
992 ${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
993 ${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
994 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
995 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
996 ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
997 ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
998 ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
999 ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1000 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1001 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1002 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1003 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1004 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1005 http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1006 http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1007 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1008 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
1009 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1010 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1011 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
1012 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1013 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1014 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1015 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1016 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
1017 http://www.apache.org/dist http://archive.apache.org/dist