Update drivers
[vuplus_openembedded] / classes / base.bbclass
index b50bc86..f9e9e27 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -10,18 +10,97 @@ def base_path_join(a, *p):
             path += '/' + b
     return path
 
+def base_path_relative(src, dest):
+    """ Return a relative path from src to dest.
+
+    >>> base_path_relative("/usr/bin", "/tmp/foo/bar")
+    '../../tmp/foo/bar'
+
+    >>> base_path_relative("/usr/bin", "/usr/lib")
+    '../lib'
+
+    >>> base_path_relative("/tmp", "/tmp/foo/bar")
+    'foo/bar'
+    """
+    from os.path import sep, pardir, normpath, commonprefix
+
+    destlist = normpath(dest).split(sep)
+    srclist = normpath(src).split(sep)
+
+    # Find common section of the path
+    common = commonprefix([destlist, srclist])
+    commonlen = len(common)
+
+    # Climb back to the point where they differentiate
+    relpath = [ pardir ] * (len(srclist) - commonlen)
+    if commonlen < len(destlist):
+        # Add remaining portion
+        relpath += destlist[commonlen:]
+
+    return sep.join(relpath)
+
+def base_path_out(path, d):
+    """ Prepare a path for display to the user. """
+    rel = base_path_relative(d.getVar("TOPDIR", 1), path)
+    if len(rel) > len(path):
+        return path
+    else:
+        return rel
+
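Note: base_path_relative() computes the same result as the later stdlib
os.path.relpath(dest, src), and base_path_out() simply picks whichever of
the relative or absolute spelling is shorter for log output. A minimal
sketch of the intended behaviour (the TOPDIR value is hypothetical):

    >>> base_path_relative("/home/user/oe", "/home/user/oe/tmp/work")
    'tmp/work'
    # With TOPDIR = "/home/user/oe":
    #   base_path_out("/home/user/oe/tmp/work", d)  -> "tmp/work"
    #   base_path_out("/usr/bin", d)                -> "/usr/bin"
    #   (its relative form "../../../usr/bin" is longer, so the
    #    absolute path is kept)
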
 # for MD5/SHA handling
-def base_chk_load_parser(config_path):
-    import ConfigParser, os, bb
+def base_chk_load_parser(config_paths):
+    import ConfigParser
     parser = ConfigParser.ConfigParser()
-    if not len(parser.read(config_path)) == 1:
-        bb.note("Can not open the '%s' ini file" % config_path)
-        raise Exception("Can not open the '%s'" % config_path)
+    if len(parser.read(config_paths)) < 1:
+        raise ValueError("no ini files could be found")
 
     return parser
 
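For reference, base_chk_load_parser() consumes conf/checksums.ini stanzas
of the form written out by base_chk_file() further down (the URL is
hypothetical; the digests are the well-known empty-file sums, used here
purely as placeholders):

    [http://example.com/foo-1.0.tar.gz]
    md5=d41d8cd98f00b204e9800998ecf8427e
    sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
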
+def base_chk_file_vars(parser, localpath, params, data):
+    try:
+        name = params["name"]
+    except KeyError:
+        return False
+    flagName = "%s.md5sum" % name
+    want_md5sum = bb.data.getVarFlag("SRC_URI", flagName, data)
+    flagName = "%s.sha256sum" % name
+    want_sha256sum = bb.data.getVarFlag("SRC_URI", flagName, data)
+
+    if (want_sha256sum == None and want_md5sum == None):
+        # no checksums to check, nothing to do
+        return False
+
+    if not os.path.exists(localpath):
+        localpath = base_path_out(localpath, data)
+        bb.note("The localpath does not exist '%s'" % localpath)
+        raise Exception("The path does not exist '%s'" % localpath)
+
+    if want_md5sum:
+        try:
+            md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
+            md5data = (md5pipe.readline().split() or [ "" ])[0]
+            md5pipe.close()
+        except OSError, e:
+            raise Exception("Executing md5sum failed")
+        if want_md5sum != md5data:
+            bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (want_md5sum, md5data))
+            raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (want_md5sum, md5data))
+
+    if want_sha256sum:
+        try:
+            shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
+            sha256data = (shapipe.readline().split() or [ "" ])[0]
+            shapipe.close()
+        except OSError, e:
+            raise Exception("Executing shasum failed")
+        if want_sha256sum != sha256data:
+            bb.note("The SHA256Sums did not match. Wanted: '%s' and Got: '%s'" % (want_sha256sum, sha256data))
+            raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (want_sha256sum, sha256data))
+
+    return True
+
+
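The flags read above correspond to per-URI checksum entries in a recipe.
A hypothetical recipe snippet (URL and digests are placeholders) that
this function would verify:

    SRC_URI = "http://example.com/foo-1.0.tar.gz;name=foo"
    SRC_URI[foo.md5sum] = "d41d8cd98f00b204e9800998ecf8427e"
    SRC_URI[foo.sha256sum] = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
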
 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
-    import os, bb
     no_checksum = False
     # Try PN-PV-SRC_URI first and then try PN-SRC_URI
     # we rely on the get method to create errors
@@ -41,13 +120,14 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
 
     # md5 and sha256 should be valid now
     if not os.path.exists(localpath):
+        localpath = base_path_out(localpath, data)
         bb.note("The localpath does not exist '%s'" % localpath)
         raise Exception("The path does not exist '%s'" % localpath)
 
 
     # call md5(sum) and shasum
     try:
-        md5pipe = os.popen('md5sum ' + localpath)
+        md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
         md5data = (md5pipe.readline().split() or [ "" ])[0]
         md5pipe.close()
     except OSError:
@@ -71,7 +151,31 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
         
         file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
         file.close()
-        return False
+
+        from string import maketrans
+        trtable = maketrans("", "")
+        uname = src_uri.split("/")[-1].translate(trtable, "-+._")
+
+        try:
+            ufile = open("%s/%s.sum" % (bb.data.getVar("TMPDIR", data, 1), uname), "wt")
+        except:
+            return False
+
+        if not ufile:
+            raise Exception("Creating %s.sum failed" % uname)
+
+        ufile.write("SRC_URI = \"%s;name=%s\"\nSRC_URI[%s.md5sum] = \"%s\"\nSRC_URI[%s.sha256sum] = \"%s\"\n" % (src_uri, uname, uname, md5data, uname, shadata))
+        ufile.close()
+
+        if not bb.data.getVar("OE_STRICT_CHECKSUMS",data, True):
+            bb.note("This package has no entry in checksums.ini, please add one")
+            bb.note("\n[%s]\nmd5=%s\nsha256=%s" % (src_uri, md5data, shadata))
+            bb.note("This package has no checksums in corresponding recipe, please add")
+            bb.note("SRC_URI = \"%s;name=%s\"\nSRC_URI[%s.md5sum] = \"%s\"\nSRC_URI[%s.sha256sum] = \"%s\"\n" % (src_uri, uname, uname, md5data, uname, shadata))
+            return True
+        else:
+            bb.note("Missing checksum")
+            return False
 
     if not md5 == md5data:
         bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
@@ -85,15 +189,16 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
 
 
 def base_dep_prepend(d):
-       import bb
        #
        # Ideally this will check a flag so we will operate properly in
        # the case where host == build == target, for now we don't work in
        # that case though.
        #
-       deps = "shasum-native "
-       if bb.data.getVar('PN', d, True) == "shasum-native":
+       deps = "shasum-native coreutils-native"
+       if bb.data.getVar('PN', d, True) == "shasum-native" or bb.data.getVar('PN', d, True) == "stagemanager-native":
                deps = ""
+       if bb.data.getVar('PN', d, True) == "coreutils-native":
+               deps = "shasum-native"
 
        # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or  not
        # we need that built is the responsibility of the patch function / class, not
@@ -105,7 +210,6 @@ def base_dep_prepend(d):
        return deps
 
 def base_read_file(filename):
-       import bb
        try:
                f = file( filename, "r" )
        except IOError, reason:
@@ -114,22 +218,25 @@ def base_read_file(filename):
                return f.read().strip()
        return None
 
+def base_ifelse(condition, iftrue = True, iffalse = False):
+    if condition:
+        return iftrue
+    else:
+        return iffalse
+
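A sketch of how base_ifelse() is meant to be used inline (variable name
and values hypothetical):

    FOO = "${@base_ifelse(d.getVar('MYFLAG', 1) == 'yes', 'with-foo', 'without-foo')}"
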
 def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
-       import bb
        if bb.data.getVar(variable,d,1) == checkvalue:
                return truevalue
        else:
                return falsevalue
 
 def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
-       import bb
        if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
                return truevalue
        else:
                return falsevalue
 
 def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
-    import bb
     result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
     if result <= 0:
         return truevalue
@@ -137,28 +244,39 @@ def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
         return falsevalue
 
 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
-       import bb
+       val = bb.data.getVar(variable,d,1)
+       if not val:
+               return falsevalue
        matches = 0
        if type(checkvalues).__name__ == "str":
                checkvalues = [checkvalues]
        for value in checkvalues:
-               if bb.data.getVar(variable,d,1).find(value) != -1:      
+               if val.find(value) != -1:
                        matches = matches + 1
        if matches == len(checkvalues):
-               return truevalue                
+               return truevalue
        return falsevalue
 
 def base_both_contain(variable1, variable2, checkvalue, d):
-       import bb
        if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
                return checkvalue
        else:
                return ""
 
 DEPENDS_prepend="${@base_dep_prepend(d)} "
+DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
+DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
+
+def base_prune_suffix(var, suffixes, d):
+    # See if var ends with any of the suffixes listed and 
+    # remove it if found
+    for suffix in suffixes:
+        if var.endswith(suffix):
+            return var[:-len(suffix)]
+    return var
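
A usage sketch (values hypothetical):

    # base_prune_suffix("dropbear-native", ["-native", "-cross"], d)
    #     -> "dropbear"
    # base_prune_suffix("dropbear", ["-native", "-cross"], d)
    #     -> "dropbear" (unchanged, no listed suffix matches)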
 
 def base_set_filespath(path, d):
-       import os, bb
+       bb.note("base_set_filespath usage is deprecated, %s should be fixed" % d.getVar("P", 1))
        filespath = []
        # The ":" ensures we have an 'empty' override
        overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
@@ -167,8 +285,6 @@ def base_set_filespath(path, d):
                        filespath.append(os.path.join(p, o))
        return ":".join(filespath)
 
-FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
-
 def oe_filter(f, str, d):
        from re import match
        return " ".join(filter(lambda x: match(f, x, 0), str.split()))
@@ -378,7 +494,6 @@ oe_libinstall() {
 }
 
 def package_stagefile(file, d):
-    import bb, os
 
     if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
         destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
@@ -445,29 +560,25 @@ python base_do_clean() {
        """clear the build and temp directories"""
        dir = bb.data.expand("${WORKDIR}", d)
        if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
-       bb.note("removing " + dir)
+       bb.note("removing " + base_path_out(dir, d))
        os.system('rm -rf ' + dir)
 
        dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
-       bb.note("removing " + dir)
+       bb.note("removing " + base_path_out(dir, d))
        os.system('rm -f '+ dir)
 }
 
-#Uncomment this for bitbake 1.8.12
-#addtask rebuild after do_${BB_DEFAULT_TASK}
-addtask rebuild
+python do_cleanall() {
+    pass
+}
+do_cleanall[recrdeptask] = "do_clean"
+addtask cleanall after do_clean
+
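The recrdeptask flag above makes do_cleanall schedule do_clean for the
recipe and, recursively, for everything in its dependency tree, e.g.:

    # run do_clean for foo and, recursively, all of its dependencies
    bitbake -c cleanall foo
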
+addtask rebuild after do_${BB_DEFAULT_TASK}
 do_rebuild[dirs] = "${TOPDIR}"
 do_rebuild[nostamp] = "1"
 python base_do_rebuild() {
        """rebuild a package"""
-       from bb import __version__
-       try:
-               from distutils.version import LooseVersion
-       except ImportError:
-               def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
-       if (LooseVersion(__version__) < LooseVersion('1.8.11')):
-               bb.build.exec_func('do_clean', d)
-               bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
 }
 
 addtask mrproper
@@ -482,6 +593,37 @@ python base_do_mrproper() {
        bb.build.exec_func('do_clean', d)
 }
 
+addtask distclean
+do_distclean[dirs] = "${TOPDIR}"
+do_distclean[nostamp] = "1"
+python base_do_distclean() {
+       """clear downloaded sources, build and temp directories"""
+
+       bb.build.exec_func('do_clean', d)
+
+       src_uri = bb.data.getVar('SRC_URI', d, 1)
+       if not src_uri:
+               return
+
+       for uri in src_uri.split():
+               if bb.decodeurl(uri)[0] == "file":
+                       continue
+
+               try:
+                       local = bb.data.expand(bb.fetch.localpath(uri, d), d)
+               except bb.MalformedUrl, e:
+                       bb.debug(1, 'Unable to generate local path for malformed uri: %s' % e)
+               else:
+                       bb.note("removing %s" % base_path_out(local, d))
+                       try:
+                               if os.path.exists(local + ".md5"):
+                                       os.remove(local + ".md5")
+                               if os.path.exists(local):
+                                       os.remove(local)
+                       except OSError, e:
+                               bb.note("Error in removal: %s" % e)
+}
+
 SCENEFUNCS += "base_scenefunction"
 
 python base_do_setscene () {
@@ -518,6 +660,9 @@ python base_do_fetch() {
        except bb.fetch.NoMethodError:
                (type, value, traceback) = sys.exc_info()
                raise bb.build.FuncFailed("No method: %s" % value)
+       except bb.MalformedUrl:
+               (type, value, traceback) = sys.exc_info()
+               raise bb.build.FuncFailed("Malformed URL: %s" % value)
 
        try:
                bb.fetch.go(localdata)
@@ -537,15 +682,20 @@ python base_do_fetch() {
 
 	# Verify the SHA and MD5 sums we have in OE and check what to
 	# do
-       check_sum = bb.which(bb.data.getVar('BBPATH', d, True), "conf/checksums.ini")
-       if not check_sum:
-               bb.note("No conf/checksums.ini found, not checking checksums")
-               return
+       checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")
+
+       # reverse the list to give precedence to directories that
+       # appear first in BBPATH
+       checksum_paths.reverse()
 
+       checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
        try:
-               parser = base_chk_load_parser(check_sum)
+               parser = base_chk_load_parser(checksum_files)
+       except ValueError:
+               bb.note("No conf/checksums.ini found, not checking checksums")
+               return
        except:
-               bb.note("Creating the CheckSum parser failed")
+               bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
                return
 
        pv = bb.data.getVar('PV', d, True)
@@ -554,15 +704,15 @@ python base_do_fetch() {
        # Check each URI
        for url in src_uri.split():
                localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
-               (type,host,path,_,_,_) = bb.decodeurl(url)
+               (type,host,path,_,_,params) = bb.decodeurl(url)
                uri = "%s://%s%s" % (type,host,path)
                try:
-                       if type == "http" or type == "https" or type == "ftp" or type == "ftps":
-                               if not base_chk_file(parser, pn, pv,uri, localpath, d):
-                                       if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS",d, True):
-                                               bb.fatal("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
+                       if type in [ "http", "https", "ftp", "ftps" ]:
+                               if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
+                                       if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
+                                               bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
                                        else:
-                                               bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
+                                               bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
                except Exception:
                        raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
 }
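
A note on the lookup order established above: ConfigParser.read()
processes the given files in sequence, with later files overriding
earlier ones for any stanza both define. Reversing BBPATH therefore
gives the first BBPATH entry the last word. Sketch, with a hypothetical
BBPATH:

    # BBPATH = "/work/overlay:/work/org.openembedded.dev"
    # read order after reverse():
    #   /work/org.openembedded.dev/conf/checksums.ini
    #   /work/overlay/conf/checksums.ini   <- wins on conflicts
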
@@ -618,13 +768,18 @@ base_do_buildall() {
        :
 }
 
+def subprocess_setup():
+       import signal
+       # Python installs a SIGPIPE handler by default. This is usually not what
+       # non-Python subprocesses expect.
+       signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 
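Python starts up with SIGPIPE set to SIG_IGN, and child processes inherit
that, so shell pipelines (like the xz/lzip ones below) would see EPIPE
write errors instead of terminating quietly. A minimal standalone
demonstration of the reset (not part of the class):

    import signal
    import subprocess

    def restore_sigpipe():
        # undo Python's SIG_IGN so children die cleanly on a closed pipe
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    # without preexec_fn, "yes" reports a broken-pipe write error here
    subprocess.call("yes | head -n 1", shell=True, preexec_fn=restore_sigpipe)
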
 def oe_unpack_file(file, data, url = None):
-       import bb, os
+       import subprocess
        if not url:
                url = "file://%s" % file
        dots = file.split(".")
-       if dots[-1] in ['gz', 'bz2', 'Z']:
+       if dots[-1] in ['gz', 'bz2', 'Z', 'xz', 'lz']:
                efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
        else:
                efile = file
@@ -639,6 +794,14 @@ def oe_unpack_file(file, data, url = None):
                cmd = 'gzip -dc %s > %s' % (file, efile)
        elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
+       elif file.endswith('.tar.xz'):
+               cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+       elif file.endswith('.xz'):
+               cmd = 'xz -dc %s > %s' % (file, efile)
+       elif file.endswith('.tar.lz'):
+               cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+       elif file.endswith('.lz'):
+               cmd = 'lzip -dc %s > %s' % (file, efile)
        elif file.endswith('.zip') or file.endswith('.jar'):
                cmd = 'unzip -q -o'
                (type, host, path, user, pswd, parm) = bb.decodeurl(url)
@@ -646,15 +809,18 @@ def oe_unpack_file(file, data, url = None):
                        cmd = '%s -a' % cmd
                cmd = '%s %s' % (cmd, file)
        elif os.path.isdir(file):
-               filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
                destdir = "."
-               if file[0:len(filesdir)] == filesdir:
-                       destdir = file[len(filesdir):file.rfind('/')]
-                       destdir = destdir.strip('/')
-                       if len(destdir) < 1:
-                               destdir = "."
-                       elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
-                               os.makedirs("%s/%s" % (os.getcwd(), destdir))
+               filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
+               for fp in filespath:
+                       if file[0:len(fp)] == fp:
+                               destdir = file[len(fp):file.rfind('/')]
+                               destdir = destdir.strip('/')
+                               if len(destdir) < 1:
+                                       destdir = "."
+                               elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
+                                       os.makedirs("%s/%s" % (os.getcwd(), destdir))
+                               break
+
                cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
        else:
                (type, host, path, user, pswd, parm) = bb.decodeurl(url)
@@ -676,15 +842,26 @@ def oe_unpack_file(file, data, url = None):
                if os.path.samefile(file, dest):
                        return True
 
+       # Change to subdir before executing command
+       save_cwd = os.getcwd()
+       parm = bb.decodeurl(url)[5]
+       if 'subdir' in parm:
+               newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
+               bb.mkdirhier(newdir)
+               os.chdir(newdir)
+
        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
-       bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
-       ret = os.system(cmd)
+       bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
+       ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+       os.chdir(save_cwd)
+
        return ret == 0
 
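The subdir handling above lets a recipe unpack a given URI somewhere
other than the top of ${WORKDIR}. A hypothetical SRC_URI entry:

    SRC_URI = "http://example.com/foo-1.0.tar.gz;subdir=foo-src"
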
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 python base_do_unpack() {
-       import re, os
+       import re
 
        localdata = bb.data.createCopy(d)
        bb.data.update_data(localdata)
@@ -697,22 +874,53 @@ python base_do_unpack() {
                try:
                        local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
                except bb.MalformedUrl, e:
-                       raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
+                       raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
+               if not local:
+                       raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
                local = os.path.realpath(local)
                ret = oe_unpack_file(local, localdata, url)
                if not ret:
                        raise bb.build.FuncFailed()
 }
 
-def base_get_scmbasepath(d):
-       import bb
-       path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
-       return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
+METADATA_SCM = "${@base_get_scm(d)}"
+METADATA_REVISION = "${@base_get_scm_revision(d)}"
+METADATA_BRANCH = "${@base_get_scm_branch(d)}"
+
+def base_get_scm(d):
+       from bb import which
+       baserepo = os.path.dirname(os.path.dirname(which(d.getVar("BBPATH", 1), "classes/base.bbclass")))
+       for (scm, scmpath) in {"svn": ".svn",
+                              "git": ".git",
+                              "monotone": "_MTN"}.iteritems():
+               if os.path.exists(os.path.join(baserepo, scmpath)):
+                       return "%s %s" % (scm, baserepo)
+       return "<unknown> %s" % baserepo
+
+def base_get_scm_revision(d):
+       (scm, path) = d.getVar("METADATA_SCM", 1).split()
+       try:
+               if scm != "<unknown>":
+                       return globals()["base_get_metadata_%s_revision" % scm](path, d)
+               else:
+                       return scm
+       except KeyError:
+               return "<unknown>"
 
-def base_get_metadata_monotone_branch(d):
+def base_get_scm_branch(d):
+       (scm, path) = d.getVar("METADATA_SCM", 1).split()
+       try:
+               if scm != "<unknown>":
+                       return globals()["base_get_metadata_%s_branch" % scm](path, d)
+               else:
+                       return scm
+       except KeyError:
+               return "<unknown>"
+
+def base_get_metadata_monotone_branch(path, d):
        monotone_branch = "<unknown>"
        try:
-               monotone_branch = file( "%s/_MTN/options" % base_get_scmbasepath(d) ).read().strip()
+               monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
                if monotone_branch.startswith( "database" ):
                        monotone_branch_words = monotone_branch.split()
                        monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
@@ -720,10 +928,10 @@ def base_get_metadata_monotone_branch(d):
                pass
        return monotone_branch
 
-def base_get_metadata_monotone_revision(d):
+def base_get_metadata_monotone_revision(path, d):
        monotone_revision = "<unknown>"
        try:
-               monotone_revision = file( "%s/_MTN/revision" % base_get_scmbasepath(d) ).read().strip()
+               monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
                if monotone_revision.startswith( "format_version" ):
                        monotone_revision_words = monotone_revision.split()
                        monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
@@ -731,93 +939,56 @@ def base_get_metadata_monotone_revision(d):
                pass
        return monotone_revision
 
-def base_get_metadata_svn_revision(d):
+def base_get_metadata_svn_revision(path, d):
        revision = "<unknown>"
        try:
-               revision = file( "%s/.svn/entries" % base_get_scmbasepath(d) ).readlines()[3].strip()
+               revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
        except IOError:
                pass
        return revision
 
-def base_get_metadata_git_branch(d):
-       import os
-       branch = os.popen('cd %s; git-branch | grep "^* " | tr -d "* "' % base_get_scmbasepath(d)).read()
+def base_get_metadata_git_branch(path, d):
+       branch = os.popen('cd %s; PATH=%s git symbolic-ref HEAD 2>/dev/null' % (path, d.getVar("PATH", 1))).read().rstrip()
 
        if len(branch) != 0:
-               return branch
+               return branch.replace("refs/heads/", "")
        return "<unknown>"
 
-def base_get_metadata_git_revision(d):
-       import os
-       rev = os.popen("cd %s; git-log -n 1 --pretty=oneline --" % base_get_scmbasepath(d)).read().split(" ")[0]
+def base_get_metadata_git_revision(path, d):
+       rev = os.popen("cd %s; PATH=%s git show-ref HEAD 2>/dev/null" % (path, d.getVar("PATH", 1))).read().split(" ")[0].rstrip()
        if len(rev) != 0:
                return rev
        return "<unknown>"
 
-def base_detect_revision(d):
-       scms = [base_get_metadata_monotone_revision, \
-                       base_get_metadata_svn_revision, \
-                       base_get_metadata_git_revision]
-
-       for scm in scms:
-               rev = scm(d)
-               if rev <> "<unknown>":
-                       return rev
-
-       return "<unknown>"      
-
-def base_detect_branch(d):
-       scms = [base_get_metadata_monotone_branch, \
-                       base_get_metadata_git_branch]
-
-       for scm in scms:
-               rev = scm(d)
-               if rev <> "<unknown>":
-                       return rev.strip()
-
-       return "<unknown>"      
-       
-       
-
-METADATA_BRANCH ?= "${@base_detect_branch(d)}"
-METADATA_REVISION ?= "${@base_detect_revision(d)}"
 
 addhandler base_eventhandler
 python base_eventhandler() {
        from bb import note, error, data
        from bb.event import Handled, NotHandled, getName
-       import os
 
-       messages = {}
-       messages["Completed"] = "completed"
-       messages["Succeeded"] = "completed"
-       messages["Started"] = "started"
-       messages["Failed"] = "failed"
 
        name = getName(e)
-       msg = ""
-       if name.startswith("Pkg"):
-               msg += "package %s: " % data.getVar("P", e.data, 1)
-               msg += messages.get(name[3:]) or name[3:]
-       elif name.startswith("Task"):
-               msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
-               msg += messages.get(name[4:]) or name[4:]
-       elif name.startswith("Build"):
-               msg += "build %s: " % e.name
-               msg += messages.get(name[5:]) or name[5:]
+       if name == "TaskCompleted":
+               msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
        elif name == "UnsatisfiedDep":
-               msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
-       if msg:
-               note(msg)
+               msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
+       else:
+               return NotHandled
+
+       # Only need to output when using 1.8 or lower, the UI code handles it
+       # otherwise
+       if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
+               if msg:
+                       note(msg)
 
        if name.startswith("BuildStarted"):
                bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
-               statusvars = ['BB_VERSION', 'METADATA_BRANCH', 'METADATA_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
+               statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
                statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
-               statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
+               statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
                print statusmsg
 
-               needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
+               needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
                pesteruser = []
                for v in needed_vars:
                        val = bb.data.getVar(v, e.data, 1)
@@ -867,12 +1038,87 @@ base_do_compile() {
        fi
 }
 
-base_do_stage () {
-       :
+
+sysroot_stage_dir() {
+       src="$1"
+       dest="$2"
+       # This will remove empty directories so we can ignore them
+       rmdir "$src" 2> /dev/null || true
+       if [ -d "$src" ]; then
+               mkdir -p "$dest"
+               cp -fpPR "$src"/* "$dest"
+       fi
 }
 
-do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DIR_TARGET}/${layout_libdir} \
-                            ${STAGING_DIR_TARGET}/${layout_includedir} \
+sysroot_stage_libdir() {
+       src="$1"
+       dest="$2"
+
+       olddir=`pwd`
+       cd $src
+       las=$(find . -name \*.la -type f)
+       cd $olddir
+       echo "Found la files: $las"              
+       for i in $las
+       do
+               sed -e 's/^installed=yes$/installed=no/' \
+                   -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
+                   -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
+                   -i $src/$i
+       done
+       sysroot_stage_dir $src $dest
+}
+
+sysroot_stage_dirs() {
+       from="$1"
+       to="$2"
+
+       sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
+       if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
+               sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
+               sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
+               sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
+               sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
+               sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
+               if [ "${prefix}/lib" != "${libdir}" ]; then
+                       # python puts its files in here, make sure they are staged as well
+                       sysroot_stage_dir $from${prefix}/lib $to${STAGING_DIR_HOST}${prefix}/lib
+               fi
+       fi
+       if [ -d $from${libdir} ]
+       then
+               sysroot_stage_libdir $from${libdir} $to${STAGING_LIBDIR}
+       fi
+       if [ -d $from${base_libdir} ]
+       then
+               sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
+       fi
+       sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
+}
+
+sysroot_stage_all() {
+       sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
+}
+
+def is_legacy_staging(d):
+    stagefunc = bb.data.getVar('do_stage', d, True)
+    legacy = True
+    if stagefunc is None:
+        legacy = False
+    elif stagefunc.strip() == "autotools_stage_all":
+        legacy = False
+    elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
+        legacy = False
+    elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
+        legacy = False
+    if bb.data.getVar('PSTAGE_BROKEN_DESTDIR', d, 1) == "1":
+        legacy = True
+    if bb.data.getVar('FORCE_LEGACY_STAGING', d, 1) == "1":
+        legacy = True
+    return legacy
+
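The knobs is_legacy_staging() recognises are all plain recipe variables.
Hypothetical recipe-side sketches:

    # new-style: do_install output is usable, no do_stage needed
    NATIVE_INSTALL_WORKS = "1"

    # escape hatches while migrating a problematic recipe
    PSTAGE_BROKEN_DESTDIR = "1"
    FORCE_LEGACY_STAGING = "1"
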
+do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
+                            ${STAGING_DIR_TARGET}/${includedir} \
                             ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
                             ${STAGING_INCDIR_NATIVE} \
                             ${STAGING_DATADIR} \
@@ -881,8 +1127,61 @@ do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DI
 # Could be compile but populate_staging and do_install shouldn't run at the same time
 addtask populate_staging after do_install
 
+SYSROOT_PREPROCESS_FUNCS ?= ""
+SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
+SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
+
+python populate_staging_prehook () {
+       return
+}
+
+python populate_staging_posthook () {
+       return
+}
+
+packagedstaging_fastpath () {
+       :
+}
+
 python do_populate_staging () {
-    bb.build.exec_func('do_stage', d)
+    #
+    # if do_stage exists, we're legacy. In that case run the do_stage,
+    # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
+    # functions against staging directly.
+    #
+    # Otherwise setup a destdir, copy the results from do_install
+    # and run the staging preprocess against that
+    #
+    pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
+    lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
+    stagefunc = bb.data.getVar('do_stage', d, True)
+    legacy = is_legacy_staging(d)
+    if legacy:
+        bb.data.setVar("SYSROOT_DESTDIR", "", d)
+        bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
+        if bb.data.getVarFlags('do_stage', d) is None:
+            bb.fatal("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
+        lock = bb.utils.lockfile(lockfile)
+        bb.build.exec_func('populate_staging_prehook', d)
+        bb.build.exec_func('do_stage', d)
+        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
+            bb.build.exec_func(f, d)
+        bb.build.exec_func('populate_staging_posthook', d)
+        bb.utils.unlockfile(lock)
+    else:
+        dest = bb.data.getVar('D', d, True)
+        sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
+        bb.mkdirhier(sysrootdest)
+
+        bb.build.exec_func("sysroot_stage_all", d)
+        #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
+        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
+            bb.build.exec_func(f, d)
+        bb.build.exec_func("packagedstaging_fastpath", d)
+
+        lock = bb.utils.lockfile(lockfile)
+        os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
+        bb.utils.unlockfile(lock)
 }
 
 addtask install after do_compile
@@ -902,6 +1201,8 @@ addtask build after do_populate_staging
 do_build = ""
 do_build[func] = "1"
 
+inherit packagedata
+
 # Functions that update metadata based on files outputted
 # during the build process.
 
@@ -922,75 +1223,6 @@ def explode_deps(s):
                        r.append(i)
        return r
 
-def packaged(pkg, d):
-       import os, bb
-       return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
-
-def read_pkgdatafile(fn):
-       pkgdata = {}
-
-       def decode(str):
-               import codecs
-               c = codecs.getdecoder("string_escape")
-               return c(str)[0]
-
-       import os
-       if os.access(fn, os.R_OK):
-               import re
-               f = file(fn, 'r')
-               lines = f.readlines()
-               f.close()
-               r = re.compile("([^:]+):\s*(.*)")
-               for l in lines:
-                       m = r.match(l)
-                       if m:
-                               pkgdata[m.group(1)] = decode(m.group(2))
-
-       return pkgdata
-
-def get_subpkgedata_fn(pkg, d):
-       import bb, os
-       archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
-       archs.reverse()
-       pkgdata = bb.data.expand('${TMPDIR}/pkgdata/', d)
-       targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
-       for arch in archs:
-               fn = pkgdata + arch + targetdir + pkg
-               if os.path.exists(fn):
-                       return fn
-       return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
-
-def has_subpkgdata(pkg, d):
-       import bb, os
-       return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
-
-def read_subpkgdata(pkg, d):
-       import bb
-       return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
-
-def has_pkgdata(pn, d):
-       import bb, os
-       fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
-       return os.access(fn, os.R_OK)
-
-def read_pkgdata(pn, d):
-       import bb
-       fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
-       return read_pkgdatafile(fn)
-
-python read_subpackage_metadata () {
-       import bb
-       data = read_pkgdata(bb.data.getVar('PN', d, 1), d)
-
-       for key in data.keys():
-               bb.data.setVar(key, data[key], d)
-
-       for pkg in bb.data.getVar('PACKAGES', d, 1).split():
-               sdata = read_subpkgdata(pkg, d)
-               for key in sdata.keys():
-                       bb.data.setVar(key, sdata[key], d)
-}
-
 # Make sure MACHINE isn't exported
 # (breaks binutils at least)
 MACHINE[unexport] = "1"
@@ -1006,7 +1238,7 @@ DISTRO[unexport] = "1"
 
 
 def base_after_parse(d):
-    import bb, os, exceptions
+    import exceptions
 
     source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
     if not source_mirror_fetch:
@@ -1089,19 +1321,12 @@ def base_after_parse(d):
     bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
 
 python () {
-    import bb
-    from bb import __version__
     base_after_parse(d)
+    if is_legacy_staging(d):
+        bb.debug(1, "Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
+        if bb.data.getVarFlags('do_stage', d) is None:
+            bb.error("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
 
-    # Remove this for bitbake 1.8.12
-    try:
-        from distutils.version import LooseVersion
-    except ImportError:
-        def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
-    if (LooseVersion(__version__) >= LooseVersion('1.8.11')):
-        deps = bb.data.getVarFlag('do_rebuild', 'deps', d) or []
-        deps.append('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1))
-        bb.data.setVarFlag('do_rebuild', 'deps', deps, d)
 }
 
 def check_app_exists(app, d):
@@ -1112,8 +1337,11 @@ def check_app_exists(app, d):
        return len(which(path, app)) != 0
 
 def check_gcc3(data):
+       # Primarily used by qemu to make sure we have a workable gcc-3.4.x.
+       # Start by checking for the program name as we build it, as not
+       # all host-provided gcc-3.4s will work.
 
-       gcc3_versions = 'gcc-3.4 gcc34 gcc-3.4.4 gcc-3.4.6 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
+       gcc3_versions = 'gcc-3.4.6 gcc-3.4.4 gcc34 gcc-3.4 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
 
        for gcc3 in gcc3_versions.split():
                if check_app_exists(gcc3, data):
@@ -1128,7 +1356,7 @@ inherit patch
 # Move to autotools.bbclass?
 inherit siteinfo
 
-EXPORT_FUNCTIONS do_setscene do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild do_fetchall
+EXPORT_FUNCTIONS do_setscene do_clean do_mrproper do_distclean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
 
 MIRRORS[func] = "0"
 MIRRORS () {
@@ -1151,7 +1379,6 @@ ${DEBIAN_MIRROR}  ftp://ftp.es.debian.org/debian/pool
 ${DEBIAN_MIRROR}       ftp://ftp.se.debian.org/debian/pool
 ${DEBIAN_MIRROR}       ftp://ftp.tr.debian.org/debian/pool
 ${GNU_MIRROR}  ftp://mirrors.kernel.org/gnu
-${GNU_MIRROR}  ftp://ftp.matrix.com.br/pub/gnu
 ${GNU_MIRROR}  ftp://ftp.cs.ubc.ca/mirror2/gnu
 ${GNU_MIRROR}  ftp://sunsite.ust.hk/pub/gnu
 ${GNU_MIRROR}  ftp://ftp.ayamura.org/pub/gnu
@@ -1185,6 +1412,10 @@ ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://gd.tuwien.ac.at/utils/adm
 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://the.wiretapped.net/pub/security/host-security/lsof/
 http://www.apache.org/dist  http://archive.apache.org/dist
+ftp://.*/.*     http://mirrors.openembedded.org/
+https?://.*/.*  http://mirrors.openembedded.org/
+ftp://.*/.*     http://sources.openembedded.org/
+https?://.*/.*  http://sources.openembedded.org/
 
 }