1 BB_DEFAULT_TASK ?= "build"
3 # like os.path.join but doesn't treat absolute RHS specially
4 def base_path_join(a, *p):
7 if path == '' or path.endswith('/'):
# for MD5/SHA handling
def base_chk_load_parser(config_path):
    """Load the checksums ini file at *config_path* into a ConfigParser.

    Raises Exception (after a bb.note) when the file cannot be read.
    """
    import ConfigParser, os, bb
    parser = ConfigParser.ConfigParser()
    # parser.read returns the list of files successfully parsed; anything
    # other than exactly one means the ini file was unreadable.
    if not len(parser.read(config_path)) == 1:
        bb.note("Can not open the '%s' ini file" % config_path)
        raise Exception("Can not open the '%s'" % config_path)

    # NOTE(review): the tail of this function was missing from the excerpt;
    # returning the parser is the only contract consistent with the callers
    # visible below (base_do_fetch) — confirm against upstream base.bbclass.
    return parser
23 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
26 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
27 # we rely on the get method to create errors
28 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
29 pn_src = "%s-%s" % (pn,src_uri)
30 if parser.has_section(pn_pv_src):
31 md5 = parser.get(pn_pv_src, "md5")
32 sha256 = parser.get(pn_pv_src, "sha256")
33 elif parser.has_section(pn_src):
34 md5 = parser.get(pn_src, "md5")
35 sha256 = parser.get(pn_src, "sha256")
36 elif parser.has_section(src_uri):
37 md5 = parser.get(src_uri, "md5")
38 sha256 = parser.get(src_uri, "sha256")
42 # md5 and sha256 should be valid now
43 if not os.path.exists(localpath):
44 bb.note("The localpath does not exist '%s'" % localpath)
45 raise Exception("The path does not exist '%s'" % localpath)
48 # call md5(sum) and shasum
50 md5pipe = os.popen('md5sum ' + localpath)
51 md5data = (md5pipe.readline().split() or [ "" ])[0]
54 raise Exception("Executing md5sum failed")
57 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
58 shadata = (shapipe.readline().split() or [ "" ])[0]
61 raise Exception("Executing shasum failed")
63 if no_checksum == True: # we do not have conf/checksums.ini entry
65 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
70 raise Exception("Creating checksums.ini failed")
72 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
76 if not md5 == md5data:
77 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
78 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
80 if not sha256 == shadata:
81 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
82 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
87 def base_dep_prepend(d):
90 # Ideally this will check a flag so we will operate properly in
91 # the case where host == build == target, for now we don't work in
94 deps = "shasum-native "
95 if bb.data.getVar('PN', d, True) == "shasum-native":
98 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
99 # we need that built is the responsibility of the patch function / class, not
101 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
102 if (bb.data.getVar('HOST_SYS', d, 1) !=
103 bb.data.getVar('BUILD_SYS', d, 1)):
104 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
def base_read_file(filename):
    """Return the whitespace-stripped contents of *filename*.

    Returns "" when the file cannot be opened.
    """
    try:
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(filename, "r") as f:
            return f.read().strip()
    except IOError:
        # WARNING: can't raise an error now because of the new RDEPENDS
        # handling. This is a bit ugly. :M:
        return ""
117 def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
119 if bb.data.getVar(variable,d,1) == checkvalue:
124 def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
126 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
131 def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
133 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
139 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
142 if type(checkvalues).__name__ == "str":
143 checkvalues = [checkvalues]
144 for value in checkvalues:
145 if bb.data.getVar(variable,d,1).find(value) != -1:
146 matches = matches + 1
147 if matches == len(checkvalues):
151 def base_both_contain(variable1, variable2, checkvalue, d):
153 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
158 DEPENDS_prepend="${@base_dep_prepend(d)} "
160 def base_set_filespath(path, d):
164 overrides = bb.data.getVar("OVERRIDES", d, 1) or ""
165 overrides = overrides + ":"
166 for o in overrides.split(":"):
167 filespath.append(os.path.join(p, o))
168 return ":".join(filespath)
170 FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
def oe_filter(f, str, d):
    """Return the space-separated words of *str* whose start matches regex *f*."""
    from re import match
    # re.match anchors at the start of each word; 0 = no extra flags.
    return " ".join(filter(lambda x: match(f, x, 0), str.split()))
def oe_filter_out(f, str, d):
    """Return the space-separated words of *str* whose start does NOT match regex *f*."""
    from re import match
    # Inverse of oe_filter: keep only the words that fail the match.
    return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
199 echo "Usage: oedebug level \"message\""
203 test ${OEDEBUG:-0} -ge $1 && {
210 if [ x"$MAKE" = x ]; then MAKE=make; fi
211 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
212 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
216 # Purpose: Install shared library file and
217 # create the necessary links
222 #oenote installing shared library $1 to $2
224 libname=`basename $1`
225 install -m 755 $1 $2/$libname
226 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
227 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
228 ln -sf $libname $2/$sonamelink
229 ln -sf $libname $2/$solink
233 # Purpose: Install a library, in all its forms
236 # oe_libinstall libltdl ${STAGING_LIBDIR}/
237 # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
244 while [ "$#" -gt 0 ]; do
260 oefatal "oe_libinstall: unknown option: $1"
272 if [ -z "$destpath" ]; then
273 oefatal "oe_libinstall: no destination path specified"
275 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
281 if [ -z "$silent" ]; then
282 echo >&2 "oe_libinstall: $*"
287 if [ -z "$dir" ]; then
291 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
297 # If such file doesn't exist, try to cut version suffix
298 if [ ! -f "$lafile" ]; then
299 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
301 if [ -f "$lafile1" ]; then
307 if [ -f "$lafile" ]; then
309 eval `cat $lafile|grep "^library_names="`
312 library_names="$libname.so* $libname.dll.a"
315 __runcmd install -d $destpath/
317 if [ -f "$dota" -o -n "$require_static" ]; then
318 __runcmd install -m 0644 $dota $destpath/
320 if [ -f "$dotlai" -a -n "$libtool" ]; then
321 if test -n "$staging_install"
323 # stop libtool using the final directory name for libraries
325 __runcmd rm -f $destpath/$libname.la
326 __runcmd sed -e 's/^installed=yes$/installed=no/' \
327 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
328 -e "/^dependency_libs=/s,\([[:space:]']+\)${libdir},\1${STAGING_LIBDIR},g" \
329 $dotlai >$destpath/$libname.la
331 __runcmd install -m 0644 $dotlai $destpath/$libname.la
335 for name in $library_names; do
336 files=`eval echo $name`
338 if [ ! -e "$f" ]; then
339 if [ -n "$libtool" ]; then
340 oefatal "oe_libinstall: $dir/$f not found."
342 elif [ -L "$f" ]; then
343 __runcmd cp -P "$f" $destpath/
344 elif [ ! -L "$f" ]; then
346 __runcmd install -m 0755 $libfile $destpath/
351 if [ -z "$libfile" ]; then
352 if [ -n "$require_shared" ]; then
353 oefatal "oe_libinstall: unable to locate shared library"
355 elif [ -z "$libtool" ]; then
356 # special case hack for non-libtool .so.#.#.# links
357 baselibfile=`basename "$libfile"`
358 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
359 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
360 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
361 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
362 __runcmd ln -sf $baselibfile $destpath/$sonamelink
364 __runcmd ln -sf $baselibfile $destpath/$solink
368 __runcmd cd "$olddir"
def package_stagefile(file, d):
    """Mirror *file* into the packaged-staging tree when staging is active.

    When PSTAGING_ACTIVE is "1", the TMPDIR prefix of *file* is rewritten to
    PSTAGE_TMPDIR_STAGE and the file is copied there (directories created as
    needed). Otherwise this is a no-op.
    """
    import bb, os

    if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
        destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
        bb.mkdirhier(os.path.dirname(destfile))
        #print "%s to %s" % (file, destfile)
        bb.copyfile(file, destfile)
380 package_stagefile_shell() {
381 if [ "$PSTAGING_ACTIVE" = "1" ]; then
383 destfile=`echo $srcfile | sed s#${TMPDIR}#${PSTAGE_TMPDIR_STAGE}#`
384 destdir=`dirname $destfile`
386 cp -dp $srcfile $destfile
391 # Purpose: Install machine dependent files, if available
392 # If not available, check if there is a default
393 # If no default, just touch the destination
396 # oe_machinstall -m 0644 fstab ${D}/etc/fstab
398 # TODO: Check argument number?
400 filename=`basename $3`
403 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
404 if [ -e $dirname/$o/$filename ]; then
405 oenote $dirname/$o/$filename present, installing to $4
406 install $1 $2 $dirname/$o/$filename $4
410 # oenote overrides specific file NOT present, trying default=$3...
412 oenote $3 present, installing to $4
415 oenote $3 NOT present, touching empty $4
421 do_listtasks[nostamp] = "1"
422 python do_listtasks() {
424 # emit variables and shell functions
425 #bb.data.emit_env(sys.__stdout__, d)
426 # emit the metadata which isnt valid shell
428 if bb.data.getVarFlag(e, 'task', d):
429 sys.__stdout__.write("%s\n" % e)
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# A WORKDIR of '//' means the variables it is built from were empty.
	if dir == '//':
		raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	os.system('rm -rf ' + dir)

	# Remove every stamp file for this recipe as well.
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
447 #Uncomment this for bitbake 1.8.12
448 #addtask rebuild after do_${BB_DEFAULT_TASK}
450 do_rebuild[dirs] = "${TOPDIR}"
451 do_rebuild[nostamp] = "1"
452 python base_do_rebuild() {
453 """rebuild a package"""
454 from bb import __version__
456 from distutils.version import LooseVersion
458 def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
459 if (LooseVersion(__version__) < LooseVersion('1.8.11')):
460 bb.build.exec_func('do_clean', d)
461 bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
python base_do_mrproper() {
	"""clear downloaded sources, build and temp directories"""
	dir = bb.data.expand("${DL_DIR}", d)
	# BUGFIX: the original constructed FuncFailed without raising it, so a
	# bogus DL_DIR of '/' would have been rm -rf'd. Raise, as do_clean does.
	if dir == '/':
		raise bb.build.FuncFailed("wrong DATADIR")
	bb.debug(2, "removing " + dir)
	os.system('rm -rf ' + dir)
	bb.build.exec_func('do_clean', d)
}
477 do_fetch[dirs] = "${DL_DIR}"
478 do_fetch[depends] = "shasum-native:do_populate_staging"
479 python base_do_fetch() {
482 localdata = bb.data.createCopy(d)
483 bb.data.update_data(localdata)
485 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
490 bb.fetch.init(src_uri.split(),d)
491 except bb.fetch.NoMethodError:
492 (type, value, traceback) = sys.exc_info()
493 raise bb.build.FuncFailed("No method: %s" % value)
496 bb.fetch.go(localdata)
497 except bb.fetch.MissingParameterError:
498 (type, value, traceback) = sys.exc_info()
499 raise bb.build.FuncFailed("Missing parameters: %s" % value)
500 except bb.fetch.FetchError:
501 (type, value, traceback) = sys.exc_info()
502 raise bb.build.FuncFailed("Fetch failed: %s" % value)
503 except bb.fetch.MD5SumError:
504 (type, value, traceback) = sys.exc_info()
505 raise bb.build.FuncFailed("MD5 failed: %s" % value)
507 (type, value, traceback) = sys.exc_info()
508 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
511 # Verify the SHA and MD5 sums we have in OE and check what do
513 check_sum = bb.which(bb.data.getVar('BBPATH', d, True), "conf/checksums.ini")
515 bb.note("No conf/checksums.ini found, not checking checksums")
519 parser = base_chk_load_parser(check_sum)
521 bb.note("Creating the CheckSum parser failed")
524 pv = bb.data.getVar('PV', d, True)
525 pn = bb.data.getVar('PN', d, True)
528 for url in src_uri.split():
529 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
530 (type,host,path,_,_,_) = bb.decodeurl(url)
531 uri = "%s://%s%s" % (type,host,path)
533 if type == "http" or type == "https" or type == "ftp" or type == "ftps":
534 if not base_chk_file(parser, pn, pv,uri, localpath, d):
535 bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
537 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
540 addtask fetchall after do_fetch
541 do_fetchall[recrdeptask] = "do_fetch"
546 addtask buildall after do_build
547 do_buildall[recrdeptask] = "do_build"
553 def oe_unpack_file(file, data, url = None):
556 url = "file://%s" % file
557 dots = file.split(".")
558 if dots[-1] in ['gz', 'bz2', 'Z']:
559 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
563 if file.endswith('.tar'):
564 cmd = 'tar x --no-same-owner -f %s' % file
565 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
566 cmd = 'tar xz --no-same-owner -f %s' % file
567 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
568 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
569 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
570 cmd = 'gzip -dc %s > %s' % (file, efile)
571 elif file.endswith('.bz2'):
572 cmd = 'bzip2 -dc %s > %s' % (file, efile)
573 elif file.endswith('.zip'):
575 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
578 cmd = '%s %s' % (cmd, file)
579 elif os.path.isdir(file):
580 filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
582 if file[0:len(filesdir)] == filesdir:
583 destdir = file[len(filesdir):file.rfind('/')]
584 destdir = destdir.strip('/')
587 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
588 os.makedirs("%s/%s" % (os.getcwd(), destdir))
589 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
591 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
592 if not 'patch' in parm:
593 # The "destdir" handling was specifically done for FILESPATH
594 # items. So, only do so for file:// entries.
596 destdir = bb.decodeurl(url)[1] or "."
599 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
600 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
605 dest = os.path.join(os.getcwd(), os.path.basename(file))
606 if os.path.exists(dest):
607 if os.path.samefile(file, dest):
610 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
611 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
615 addtask unpack after do_fetch
616 do_unpack[dirs] = "${WORKDIR}"
617 python base_do_unpack() {
620 localdata = bb.data.createCopy(d)
621 bb.data.update_data(localdata)
623 src_uri = bb.data.getVar('SRC_URI', localdata)
626 src_uri = bb.data.expand(src_uri, localdata)
627 for url in src_uri.split():
629 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
630 except bb.MalformedUrl, e:
631 raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
632 local = os.path.realpath(local)
633 ret = oe_unpack_file(local, localdata, url)
635 raise bb.build.FuncFailed()
639 addhandler base_eventhandler
640 python base_eventhandler() {
641 from bb import note, error, data
642 from bb.event import Handled, NotHandled, getName
646 messages["Completed"] = "completed"
647 messages["Succeeded"] = "completed"
648 messages["Started"] = "started"
649 messages["Failed"] = "failed"
653 if name.startswith("Pkg"):
654 msg += "package %s: " % data.getVar("P", e.data, 1)
655 msg += messages.get(name[3:]) or name[3:]
656 elif name.startswith("Task"):
657 msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
658 msg += messages.get(name[4:]) or name[4:]
659 elif name.startswith("Build"):
660 msg += "build %s: " % e.name
661 msg += messages.get(name[5:]) or name[5:]
662 elif name == "UnsatisfiedDep":
663 msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
667 if name.startswith("BuildStarted"):
668 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
669 path_to_bbfiles = bb.data.getVar( 'BBFILES', e.data, 1 )
670 path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
671 monotone_revision = "<unknown>"
673 monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
674 if monotone_revision.startswith( "format_version" ):
675 monotone_revision_words = monotone_revision.split()
676 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
679 bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
680 statusvars = ['BB_VERSION', 'OE_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
681 statuslines = ["%-14s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
682 statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
685 needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
687 for v in needed_vars:
688 val = bb.data.getVar(v, e.data, 1)
689 if not val or val == 'INVALID':
692 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
695 # Handle removing stamps for 'rebuild' task
697 if name.startswith("StampUpdate"):
698 for (fn, task) in e.targets:
699 #print "%s %s" % (task, fn)
700 if task == "do_rebuild":
701 dir = "%s.*" % e.stampPrefix[fn]
702 bb.note("Removing stamps: " + dir)
703 os.system('rm -f '+ dir)
705 if not data in e.__dict__:
708 log = data.getVar("EVENTLOG", e.data, 1)
710 logfile = file(log, "a")
711 logfile.write("%s\n" % msg)
717 addtask configure after do_unpack do_patch
718 do_configure[dirs] = "${S} ${B}"
719 do_configure[deptask] = "do_populate_staging"
720 base_do_configure() {
724 addtask compile after do_configure
725 do_compile[dirs] = "${S} ${B}"
727 if [ -e Makefile -o -e makefile ]; then
728 oe_runmake || die "make failed"
730 oenote "nothing to compile"
738 do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DIR_TARGET}/${layout_libdir} \
739 ${STAGING_DIR_TARGET}/${layout_includedir} \
740 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
741 ${STAGING_INCDIR_NATIVE} \
745 # Could be compile but populate_staging and do_install shouldn't run at the same time
746 addtask populate_staging after do_install
python do_populate_staging () {
	# do_stage does the real work; this wrapper exists so staging can be
	# scheduled and stamped as its own task.
	bb.build.exec_func('do_stage', d)
}
752 addtask install after do_compile
753 do_install[dirs] = "${D} ${S} ${B}"
754 # Remove and re-create ${D} so that is it guaranteed to be empty
755 do_install[cleandirs] = "${D}"
765 addtask build after do_populate_staging
769 # Functions that update metadata based on files outputted
770 # during the build process.
784 r[-1] += ' ' + ' '.join(j)
def packaged(pkg, d):
    """Return True if subpackage *pkg* has already been packaged (its
    pkgdata '.packaged' marker file is readable)."""
    import os, bb
    return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
793 def read_pkgdatafile(fn):
798 c = codecs.getdecoder("string_escape")
802 if os.access(fn, os.R_OK):
805 lines = f.readlines()
807 r = re.compile("([^:]+):\s*(.*)")
811 pkgdata[m.group(1)] = decode(m.group(2))
815 def get_subpkgedata_fn(pkg, d):
817 archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
819 pkgdata = bb.data.expand('${STAGING_DIR}/pkgdata/', d)
820 targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
822 fn = pkgdata + arch + targetdir + pkg
823 if os.path.exists(fn):
825 return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
def has_subpkgdata(pkg, d):
    """Return True if a readable pkgdata file exists for subpackage *pkg*."""
    import bb, os
    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
def read_subpkgdata(pkg, d):
    """Parse and return the pkgdata dictionary for subpackage *pkg*."""
    import bb
    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
def has_pkgdata(pn, d):
    """Return True if a readable top-level pkgdata file exists for recipe *pn*."""
    import bb, os
    fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
    return os.access(fn, os.R_OK)
def read_pkgdata(pn, d):
    """Parse and return the top-level pkgdata dictionary for recipe *pn*."""
    import bb
    fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
    return read_pkgdatafile(fn)
python read_subpackage_metadata () {
	# Import the recipe-level pkgdata, then each subpackage's pkgdata,
	# back into the current datastore.
	data = read_pkgdata(bb.data.getVar('PN', d, 1), d)

	for key in data.keys():
		bb.data.setVar(key, data[key], d)

	for pkg in bb.data.getVar('PACKAGES', d, 1).split():
		sdata = read_subpkgdata(pkg, d)
		for key in sdata.keys():
			bb.data.setVar(key, sdata[key], d)
}
858 # Make sure MACHINE isn't exported
859 # (breaks binutils at least)
860 MACHINE[unexport] = "1"
862 # Make sure TARGET_ARCH isn't exported
863 # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
864 # in them, undocumented)
865 TARGET_ARCH[unexport] = "1"
867 # Make sure DISTRO isn't exported
868 # (breaks sysvinit at least)
869 DISTRO[unexport] = "1"
872 def base_after_parse(d):
873 import bb, os, exceptions
875 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
876 if not source_mirror_fetch:
877 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
880 this_host = bb.data.getVar('HOST_SYS', d, 1)
881 if not re.match(need_host, this_host):
882 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
884 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
887 this_machine = bb.data.getVar('MACHINE', d, 1)
888 if this_machine and not re.match(need_machine, this_machine):
889 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
891 pn = bb.data.getVar('PN', d, 1)
893 # OBSOLETE in bitbake 1.7.4
894 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
896 bb.data.setVar('SRCDATE', srcdate, d)
898 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
900 bb.data.setVar('USE_NLS', use_nls, d)
902 # Git packages should DEPEND on git-native
903 srcuri = bb.data.getVar('SRC_URI', d, 1)
904 if "git://" in srcuri:
905 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
906 depends = depends + " git-native:do_populate_staging"
907 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
909 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
910 old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
911 if (old_arch == mach_arch):
916 # We always try to scan SRC_URI for urls with machine overrides
917 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
919 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
924 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
925 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
926 if os.path.isdir(path):
931 for s in srcuri.split():
932 if not s.startswith("file://"):
934 local = bb.data.expand(bb.fetch.localpath(s, d), d)
936 if local.startswith(mp):
937 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
938 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
943 from bb import __version__
946 # Remove this for bitbake 1.8.12
948 from distutils.version import LooseVersion
950 def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
951 if (LooseVersion(__version__) >= LooseVersion('1.8.11')):
952 deps = bb.data.getVarFlag('do_rebuild', 'deps', d) or []
953 deps.append('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1))
954 bb.data.setVarFlag('do_rebuild', 'deps', deps, d)
def check_app_exists(app, d):
    """Return True if *app* (after bitbake variable expansion) is found on PATH."""
    from bb import which, data

    app = data.expand(app, d)
    path = data.getVar('PATH', d, 1)
    # bb.which returns "" when nothing is found.
    return len(which(path, app)) != 0
964 def check_gcc3(data):
966 gcc3_versions = 'gcc-3.4 gcc34 gcc-3.4.4 gcc-3.4.6 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
968 for gcc3 in gcc3_versions.split():
969 if check_app_exists(gcc3, data):
977 # Configuration data from site files
978 # Move to autotools.bbclass?
981 EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild do_fetchall
985 ${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
986 ${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
987 ${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
988 ${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
989 ${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
990 ${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
991 ${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
992 ${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
993 ${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
994 ${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
995 ${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
996 ${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
997 ${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
998 ${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
999 ${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
1000 ${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
1001 ${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
1002 ${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
1003 ${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
1004 ${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
1005 ${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
1006 ${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
1007 ${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
1008 ${KERNELORG_MIRROR} http://www.kernel.org/pub
1009 ${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
1010 ${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
1011 ${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
1012 ${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
1013 ${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
1014 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
1015 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
1016 ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
1017 ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
1018 ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
1019 ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1020 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1021 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1022 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1023 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1024 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1025 http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1026 http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1027 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1028 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
1029 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1030 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1031 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
1032 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1033 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1034 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1035 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1036 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
1037 http://www.apache.org/dist http://archive.apache.org/dist