SCCS merged
author Chris Larson <clarson@kergoth.com>
Thu, 1 Apr 2004 18:25:47 +0000 (18:25 +0000)
committer Chris Larson <clarson@kergoth.com>
Thu, 1 Apr 2004 18:25:47 +0000 (18:25 +0000)
bin/oe/__init__.py
bin/oe/build.py
bin/oemake

bin/oe/__init__.py
index d00984e..3007927 100644
@@ -16,40 +16,41 @@ __version__ = "1.1"
 
 __all__ = [
 
-       "debug",
-       "note",
-       "error",
-       "fatal",
-
-       "mkdirhier",
-       "movefile",
-
-       "tokenize",
-       "evaluate",
-       "flatten",
-       "relparse",
-       "ververify",
-       "isjustname",
-       "isspecific",
-       "pkgsplit",
-       "catpkgsplit",
-       "vercmp",
-       "pkgcmp",
-       "dep_parenreduce",
-       "dep_opconvert",
-       "digraph",
+    "debug",
+    "note",
+    "error",
+    "fatal",
+
+    "mkdirhier",
+    "movefile",
+
+    "tokenize",
+    "evaluate",
+    "flatten",
+    "relparse",
+    "ververify",
+    "isjustname",
+    "isspecific",
+    "pkgsplit",
+    "catpkgsplit",
+    "vercmp",
+    "pkgcmp",
+    "dep_parenreduce",
+    "dep_opconvert",
+    "digraph",
 
 # fetch
-       "decodeurl",
-       "encodeurl",
+    "decodeurl",
+    "encodeurl",
 
 # modules
-       "parse",
-       "data",
-       "event",
-       "build",
-       "fetch",
-       "manifest"
+    "parse",
+    "data",
+    "event",
+    "build",
+    "fetch",
+    "make",
+    "manifest"
  ]
 
 import sys,os,string,types,re
@@ -58,18 +59,18 @@ import sys,os,string,types,re
 # Check for the Python version. A lot of stuff needs Python 2.3 or later
 #
 if sys.version_info[:3] < (2, 3, 0):
-       print "OpenEmbedded needs Python 2.3 or later. Please upgrade."
-       sys.exit(-1)
+    print "OpenEmbedded needs Python 2.3 or later. Please upgrade."
+    sys.exit(-1)
 
 #projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
 projectdir = os.getcwd()
 env = {}
 
 class VarExpandError(Exception):
-       pass
+    pass
 
 class MalformedUrl(Exception):
-       """Exception raised when encountering an invalid url"""
+    """Exception raised when encountering an invalid url"""
 
 
 #######################################################################
@@ -86,18 +87,18 @@ debug_prepend = ''
 
 
 def debug(lvl, *args):
-       if 'OEDEBUG' in os.environ and (os.environ['OEDEBUG'] >= str(lvl)):
-               print debug_prepend + 'DEBUG:', string.join(args, '')
+    if 'OEDEBUG' in env and (env['OEDEBUG'] >= str(lvl)):
+        print debug_prepend + 'DEBUG:', string.join(args, '')
 
 def note(*args):
-       print debug_prepend + 'NOTE:', string.join(args, '')
+    print debug_prepend + 'NOTE:', string.join(args, '')
 
 def error(*args):
-       print debug_prepend + 'ERROR:', string.join(args, '')
+    print debug_prepend + 'ERROR:', string.join(args, '')
 
 def fatal(*args):
-       print debug_prepend + 'ERROR:', string.join(args, '')
-       sys.exit(1)
+    print debug_prepend + 'ERROR:', string.join(args, '')
+    sys.exit(1)
 
 
 #######################################################################
@@ -111,16 +112,16 @@ def fatal(*args):
 #######################################################################
 
 def mkdirhier(dir):
-       """Create a directory like 'mkdir -p', but does not complain if
-       directory already exists like os.makedirs
-       """
+    """Create a directory like 'mkdir -p', but does not complain if
+    directory already exists like os.makedirs
+    """
 
-       debug(3, "mkdirhier(%s)" % dir)
-       try:
-               os.makedirs(dir)
-               debug(2, "created " + dir)
-       except OSError, e:
-               if e.errno != 17: raise e
+    debug(3, "mkdirhier(%s)" % dir)
+    try:
+        os.makedirs(dir)
+        debug(2, "created " + dir)
+    except OSError, e:
+        if e.errno != 17: raise e
 
 
 #######################################################################
@@ -128,101 +129,101 @@ def mkdirhier(dir):
 import stat
 
 def movefile(src,dest,newmtime=None,sstat=None):
-       """Moves a file from src to dest, preserving all permissions and
-       attributes; mtime will be preserved even when moving across
-       filesystems.  Returns true on success and false on failure. Move is
-       atomic.
-       """
-
-       #print "movefile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
-       try:
-               if not sstat:
-                       sstat=os.lstat(src)
-       except Exception, e:
-               print "!!! Stating source file failed... movefile()"
-               print "!!!",e
-               return None
-
-       destexists=1
-       try:
-               dstat=os.lstat(dest)
-       except:
-               dstat=os.lstat(os.path.dirname(dest))
-               destexists=0
-
-       if destexists:
-               if stat.S_ISLNK(dstat[stat.ST_MODE]):
-                       try:
-                               os.unlink(dest)
-                               destexists=0
-                       except Exception, e:
-                               pass
-
-       if stat.S_ISLNK(sstat[stat.ST_MODE]):
-               try:
-                       target=os.readlink(src)
-                       if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
-                               os.unlink(dest)
-                       os.symlink(target,dest)
-#                      os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
-                       os.unlink(src)
-                       return os.lstat(dest)
-               except Exception, e:
-                       print "!!! failed to properly create symlink:"
-                       print "!!!",dest,"->",target
-                       print "!!!",e
-                       return None
-
-       renamefailed=1
-       if sstat[stat.ST_DEV]==dstat[stat.ST_DEV]:
-               try:
-                       ret=os.rename(src,dest)
-                       renamefailed=0
-               except Exception, e:
-                       import errno
-                       if e[0]!=errno.EXDEV:
-                               # Some random error.
-                               print "!!! Failed to move",src,"to",dest
-                               print "!!!",e
-                               return None
-                       # Invalid cross-device-link 'bind' mounted or actually Cross-Device
-
-       if renamefailed:
-               didcopy=0
-               if stat.S_ISREG(sstat[stat.ST_MODE]):
-                       try: # For safety copy then move it over.
-                               shutil.copyfile(src,dest+"#new")
-                               os.rename(dest+"#new",dest)
-                               didcopy=1
-                       except Exception, e:
-                               print '!!! copy',src,'->',dest,'failed.'
-                               print "!!!",e
-                               return None
-               else:
-                       #we don't yet handle special, so we need to fall back to /bin/mv
-                       a=getstatusoutput("/bin/mv -f "+"'"+src+"' '"+dest+"'")
-                       if a[0]!=0:
-                               print "!!! Failed to move special file:"
-                               print "!!! '"+src+"' to '"+dest+"'"
-                               print "!!!",a
-                               return None # failure
-               try:
-                       if didcopy:
-                               missingos.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
-                               os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
-                               os.unlink(src)
-               except Exception, e:
-                       print "!!! Failed to chown/chmod/unlink in movefile()"
-                       print "!!!",dest
-                       print "!!!",e
-                       return None
-
-       if newmtime:
-               os.utime(dest,(newmtime,newmtime))
-       else:
-               os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
-               newmtime=sstat[stat.ST_MTIME]
-       return newmtime
+    """Moves a file from src to dest, preserving all permissions and
+    attributes; mtime will be preserved even when moving across
+    filesystems.  Returns true on success and false on failure. Move is
+    atomic.
+    """
+
+    #print "movefile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
+    try:
+        if not sstat:
+            sstat=os.lstat(src)
+    except Exception, e:
+        print "!!! Stating source file failed... movefile()"
+        print "!!!",e
+        return None
+
+    destexists=1
+    try:
+        dstat=os.lstat(dest)
+    except:
+        dstat=os.lstat(os.path.dirname(dest))
+        destexists=0
+
+    if destexists:
+        if stat.S_ISLNK(dstat[stat.ST_MODE]):
+            try:
+                os.unlink(dest)
+                destexists=0
+            except Exception, e:
+                pass
+
+    if stat.S_ISLNK(sstat[stat.ST_MODE]):
+        try:
+            target=os.readlink(src)
+            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
+                os.unlink(dest)
+            os.symlink(target,dest)
+#            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
+            os.unlink(src)
+            return os.lstat(dest)
+        except Exception, e:
+            print "!!! failed to properly create symlink:"
+            print "!!!",dest,"->",target
+            print "!!!",e
+            return None
+
+    renamefailed=1
+    if sstat[stat.ST_DEV]==dstat[stat.ST_DEV]:
+        try:
+            ret=os.rename(src,dest)
+            renamefailed=0
+        except Exception, e:
+            import errno
+            if e[0]!=errno.EXDEV:
+                # Some random error.
+                print "!!! Failed to move",src,"to",dest
+                print "!!!",e
+                return None
+            # Invalid cross-device-link 'bind' mounted or actually Cross-Device
+
+    if renamefailed:
+        didcopy=0
+        if stat.S_ISREG(sstat[stat.ST_MODE]):
+            try: # For safety copy then move it over.
+                shutil.copyfile(src,dest+"#new")
+                os.rename(dest+"#new",dest)
+                didcopy=1
+            except Exception, e:
+                print '!!! copy',src,'->',dest,'failed.'
+                print "!!!",e
+                return None
+        else:
+            #we don't yet handle special, so we need to fall back to /bin/mv
+            a=getstatusoutput("/bin/mv -f "+"'"+src+"' '"+dest+"'")
+            if a[0]!=0:
+                print "!!! Failed to move special file:"
+                print "!!! '"+src+"' to '"+dest+"'"
+                print "!!!",a
+                return None # failure
+        try:
+            if didcopy:
+                missingos.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
+                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
+                os.unlink(src)
+        except Exception, e:
+            print "!!! Failed to chown/chmod/unlink in movefile()"
+            print "!!!",dest
+            print "!!!",e
+            return None
+
+    if newmtime:
+        os.utime(dest,(newmtime,newmtime))
+    else:
+        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
+        newmtime=sstat[stat.ST_MTIME]
+    return newmtime
 
 
 
@@ -238,112 +239,112 @@ def movefile(src,dest,newmtime=None,sstat=None):
 #######################################################################
 
 def decodeurl(url):
-       """Decodes an URL into the tokens (scheme, network location, path,
-       user, password, parameters). 
-
-       >>> decodeurl("http://www.google.com/index.html")
-       ('http', 'www.google.com', '/index.html', '', '', {})
-
-       CVS url with username, host and cvsroot. The cvs module to check out is in the
-       parameters:
-
-       >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg")
-       ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'})
-
-       Dito, but this time the username has a password part. And we also request a special tag
-       to check out.
-
-       >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81")
-       ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
-       """
-
-       #debug(3, "decodeurl('%s')" % url)
-       m = re.compile('([^:]*):/*(.+@)?([^/]+)(/[^;]+);?(.*)').match(url)
-       if not m:
-               raise MalformedUrl(url)
-
-       type = m.group(1)
-       host = m.group(3)
-       path = m.group(4)
-       user = m.group(2)
-       parm = m.group(5)
-       #print "type:", type
-       #print "host:", host
-       #print "path:", path
-       #print "parm:", parm
-       if user:
-               m = re.compile('([^:]+)(:?(.*))@').match(user)
-               if m:
-                       user = m.group(1)
-                       pswd = m.group(3)
-       else:
-               user = ''
-               pswd = ''
-       #print "user:", user
-       #print "pswd:", pswd
-       #print
-       p = {}
-       if parm:
-               for s in parm.split(';'):
-                       s1,s2 = s.split('=')
-                       p[s1] = s2
-                       
-       return (type, host, path, user, pswd, p)
-               
+    """Decodes an URL into the tokens (scheme, network location, path,
+    user, password, parameters).
+
+    >>> decodeurl("http://www.google.com/index.html")
+    ('http', 'www.google.com', '/index.html', '', '', {})
+
+    CVS url with username, host and cvsroot. The cvs module to check out is in the
+    parameters:
+
+    >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg")
+    ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'})
+
+    Dito, but this time the username has a password part. And we also request a special tag
+    to check out.
+
+    >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81")
+    ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
+    """
+
+    #debug(3, "decodeurl('%s')" % url)
+    m = re.compile('([^:]*):/*(.+@)?([^/]+)(/[^;]+);?(.*)').match(url)
+    if not m:
+        raise MalformedUrl(url)
+
+    type = m.group(1)
+    host = m.group(3)
+    path = m.group(4)
+    user = m.group(2)
+    parm = m.group(5)
+    #print "type:", type
+    #print "host:", host
+    #print "path:", path
+    #print "parm:", parm
+    if user:
+        m = re.compile('([^:]+)(:?(.*))@').match(user)
+        if m:
+            user = m.group(1)
+            pswd = m.group(3)
+    else:
+        user = ''
+        pswd = ''
+    #print "user:", user
+    #print "pswd:", pswd
+    #print
+    p = {}
+    if parm:
+        for s in parm.split(';'):
+            s1,s2 = s.split('=')
+            p[s1] = s2
+
+    return (type, host, path, user, pswd, p)
+
 #######################################################################
 
 def encodeurl(decoded):
-       """Encodes a URL from tokens (scheme, network location, path,
-       user, password, parameters). 
+    """Encodes a URL from tokens (scheme, network location, path,
+    user, password, parameters).
 
-       >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}])
+    >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}])
 
-       "http://www.google.com/index.html"
+    "http://www.google.com/index.html"
 
-       CVS with username, host and cvsroot. The cvs module to check out is in the
-       parameters:
+    CVS with username, host and cvsroot. The cvs module to check out is in the
+    parameters:
 
-       >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}])
+    >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}])
 
-       "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg"
+    "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg"
 
-       Dito, but this time the username has a password part. And we also request a special tag
-       to check out.
+    Dito, but this time the username has a password part. And we also request a special tag
+    to check out.
 
-       >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}])
+    >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}])
 
-       "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81"
-       """
+    "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81"
+    """
 
-       (type, host, path, user, pswd, p) = decoded
+    (type, host, path, user, pswd, p) = decoded
 
-       if not type or not host or not path:
-               fatal("invalid or missing parameters for url encoding")
+    if not type or not host or not path:
+        fatal("invalid or missing parameters for url encoding")
 
-       url = '%s://' % type
-       if user:
-               url += "%s" % user
-               if pswd:
-                       url += ":%s" % pswd
-               url += "@"
-       url += "%s%s" % (host, path)
-       if p:
-               for parm in p.keys():
-                       url += ";%s=%s" % (parm, p[parm])
+    url = '%s://' % type
+    if user:
+        url += "%s" % user
+        if pswd:
+            url += ":%s" % pswd
+        url += "@"
+    url += "%s%s" % (host, path)
+    if p:
+        for parm in p.keys():
+            url += ";%s=%s" % (parm, p[parm])
 
-       return url
+    return url
 
 #######################################################################
 
-def which(path, item, direction = 0):
-       """Useful function for locating a file in a PATH"""
-       found = ""
-       for p in string.split(path or "", ":"):
-               if os.path.exists(os.path.join(p, item)):
-                       found = os.path.join(p, item)
-                       if direction == 0:
-                               break
-       return found
+def which(path, item, direction = 1):
+    """Useful function for locating a file in a PATH"""
+    found = ""
+    for p in string.split(path or "", ":"):
+        if os.path.exists(os.path.join(p, item)):
+            found = os.path.join(p, item)
+            if direction == 0:
+                break
+    return found
 
 #######################################################################
 
@@ -361,187 +362,187 @@ def which(path, item, direction = 0):
 #######################################################################
 
 def tokenize(mystring):
-       """Breaks a string like 'foo? (bar) oni? (blah (blah))' into (possibly embedded) lists:
-
-       >>> tokenize("x")
-       ['x']
-       >>> tokenize("x y")
-       ['x', 'y']
-       >>> tokenize("(x y)")
-       [['x', 'y']]
-       >>> tokenize("(x y) b c")
-       [['x', 'y'], 'b', 'c']
-       >>> tokenize("foo? (bar) oni? (blah (blah))")
-       ['foo?', ['bar'], 'oni?', ['blah', ['blah']]]
-       >>> tokenize("sys-apps/linux-headers nls? (sys-devel/gettext)")
-       ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']]
-       """
-
-       newtokens = []
-       curlist   = newtokens
-       prevlists = []
-       level     = 0
-       accum     = ""
-       for x in mystring:
-               if x=="(":
-                       if accum:
-                               curlist.append(accum)
-                               accum=""
-                       prevlists.append(curlist)
-                       curlist=[]
-                       level=level+1
-               elif x==")":
-                       if accum:
-                               curlist.append(accum)
-                               accum=""
-                       if level==0:
-                               print "!!! tokenizer: Unmatched left parenthesis in:\n'"+mystring+"'"
-                               return None
-                       newlist=curlist
-                       curlist=prevlists.pop()
-                       curlist.append(newlist)
-                       level=level-1
-               elif x in string.whitespace:
-                       if accum:
-                               curlist.append(accum)
-                               accum=""
-               else:
-                       accum=accum+x
-       if accum:
-               curlist.append(accum)
-       if (level!=0):
-               print "!!! tokenizer: Exiting with unterminated parenthesis in:\n'"+mystring+"'"
-               return None
-       return newtokens
+    """Breaks a string like 'foo? (bar) oni? (blah (blah))' into (possibly embedded) lists:
+
+    >>> tokenize("x")
+    ['x']
+    >>> tokenize("x y")
+    ['x', 'y']
+    >>> tokenize("(x y)")
+    [['x', 'y']]
+    >>> tokenize("(x y) b c")
+    [['x', 'y'], 'b', 'c']
+    >>> tokenize("foo? (bar) oni? (blah (blah))")
+    ['foo?', ['bar'], 'oni?', ['blah', ['blah']]]
+    >>> tokenize("sys-apps/linux-headers nls? (sys-devel/gettext)")
+    ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']]
+    """
+
+    newtokens = []
+    curlist   = newtokens
+    prevlists = []
+    level     = 0
+    accum     = ""
+    for x in mystring:
+        if x=="(":
+            if accum:
+                curlist.append(accum)
+                accum=""
+            prevlists.append(curlist)
+            curlist=[]
+            level=level+1
+        elif x==")":
+            if accum:
+                curlist.append(accum)
+                accum=""
+            if level==0:
+                print "!!! tokenizer: Unmatched left parenthesis in:\n'"+mystring+"'"
+                return None
+            newlist=curlist
+            curlist=prevlists.pop()
+            curlist.append(newlist)
+            level=level-1
+        elif x in string.whitespace:
+            if accum:
+                curlist.append(accum)
+                accum=""
+        else:
+            accum=accum+x
+    if accum:
+        curlist.append(accum)
+    if (level!=0):
+        print "!!! tokenizer: Exiting with unterminated parenthesis in:\n'"+mystring+"'"
+        return None
+    return newtokens
 
 
 #######################################################################
 
 def evaluate(tokens,mydefines,allon=0):
-       """Removes tokens based on whether conditional definitions exist or not.
-       Recognizes !
-
-       >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {})
-       ['sys-apps/linux-headers']
-
-       Negate the flag:
-
-       >>> evaluate(['sys-apps/linux-headers', '!nls?', ['sys-devel/gettext']], {})
-       ['sys-apps/linux-headers', ['sys-devel/gettext']]
-
-       Define 'nls':
-
-       >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {"nls":1})
-       ['sys-apps/linux-headers', ['sys-devel/gettext']]
-
-       Turn allon on:
-
-       >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}, True)
-       ['sys-apps/linux-headers', ['sys-devel/gettext']]
-       """
-
-       if tokens == None:
-               return None
-       mytokens = tokens + []          # this copies the list
-       pos = 0
-       while pos < len(mytokens):
-               if type(mytokens[pos]) == types.ListType:
-                       evaluate(mytokens[pos], mydefines)
-                       if not len(mytokens[pos]):
-                               del mytokens[pos]
-                               continue
-               elif mytokens[pos][-1] == "?":
-                       cur = mytokens[pos][:-1]
-                       del mytokens[pos]
-                       if allon:
-                               if cur[0] == "!":
-                                       del mytokens[pos]
-                       else:
-                               if cur[0] == "!":
-                                       if (cur[1:] in mydefines) and (pos < len(mytokens)):
-                                               del mytokens[pos]
-                                               continue
-                               elif (cur not in mydefines) and (pos < len(mytokens)):
-                                       del mytokens[pos]
-                                       continue
-               pos = pos + 1
-       return mytokens
+    """Removes tokens based on whether conditional definitions exist or not.
+    Recognizes !
+
+    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {})
+    ['sys-apps/linux-headers']
+
+    Negate the flag:
+
+    >>> evaluate(['sys-apps/linux-headers', '!nls?', ['sys-devel/gettext']], {})
+    ['sys-apps/linux-headers', ['sys-devel/gettext']]
+
+    Define 'nls':
+
+    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {"nls":1})
+    ['sys-apps/linux-headers', ['sys-devel/gettext']]
+
+    Turn allon on:
+
+    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}, True)
+    ['sys-apps/linux-headers', ['sys-devel/gettext']]
+    """
+
+    if tokens == None:
+        return None
+    mytokens = tokens + []        # this copies the list
+    pos = 0
+    while pos < len(mytokens):
+        if type(mytokens[pos]) == types.ListType:
+            evaluate(mytokens[pos], mydefines)
+            if not len(mytokens[pos]):
+                del mytokens[pos]
+                continue
+        elif mytokens[pos][-1] == "?":
+            cur = mytokens[pos][:-1]
+            del mytokens[pos]
+            if allon:
+                if cur[0] == "!":
+                    del mytokens[pos]
+            else:
+                if cur[0] == "!":
+                    if (cur[1:] in mydefines) and (pos < len(mytokens)):
+                        del mytokens[pos]
+                        continue
+                elif (cur not in mydefines) and (pos < len(mytokens)):
+                    del mytokens[pos]
+                    continue
+        pos = pos + 1
+    return mytokens
 
 
 #######################################################################
 
 def flatten(mytokens):
-       """Converts nested arrays into a flat arrays:
+    """Converts nested arrays into a flat arrays:
 
-       >>> flatten([1,[2,3]])
-       [1, 2, 3]
-       >>> flatten(['sys-apps/linux-headers', ['sys-devel/gettext']])
-       ['sys-apps/linux-headers', 'sys-devel/gettext']
-       """
+    >>> flatten([1,[2,3]])
+    [1, 2, 3]
+    >>> flatten(['sys-apps/linux-headers', ['sys-devel/gettext']])
+    ['sys-apps/linux-headers', 'sys-devel/gettext']
+    """
 
-       newlist=[]
-       for x in mytokens:
-               if type(x)==types.ListType:
-                       newlist.extend(flatten(x))
-               else:
-                       newlist.append(x)
-       return newlist
+    newlist=[]
+    for x in mytokens:
+        if type(x)==types.ListType:
+            newlist.extend(flatten(x))
+        else:
+            newlist.append(x)
+    return newlist
 
 
 #######################################################################
 
-_package_weights_ = {"pre":-2,"p":0,"alpha":-4,"beta":-3,"rc":-1}      # dicts are unordered
-_package_ends_    = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ]                  # so we need ordered list
+_package_weights_ = {"pre":-2,"p":0,"alpha":-4,"beta":-3,"rc":-1}    # dicts are unordered
+_package_ends_    = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ]            # so we need ordered list
 
 def relparse(myver):
-       """Parses the last elements of a version number into a triplet, that can
-       later be compared:
-
-       >>> relparse('1.2_pre3')
-       [1.2, -2, 3.0]
-       >>> relparse('1.2b')
-       [1.2, 98, 0]
-       >>> relparse('1.2')
-       [1.2, 0, 0]
-       """
-
-       number   = 0
-       p1       = 0
-       p2       = 0
-       mynewver = string.split(myver,"_")
-       if len(mynewver)==2:
-               # an _package_weights_
-               number = string.atof(mynewver[0])
-               match = 0
-               for x in _package_ends_:
-                       elen = len(x)
-                       if mynewver[1][:elen] == x:
-                               match = 1
-                               p1 = _package_weights_[x]
-                               try:
-                                       p2 = string.atof(mynewver[1][elen:])
-                               except:
-                                       p2 = 0
-                               break
-               if not match:   
-                       # normal number or number with letter at end
-                       divider = len(myver)-1
-                       if myver[divider:] not in "1234567890":
-                               # letter at end
-                               p1 = ord(myver[divider:])
-                               number = string.atof(myver[0:divider])
-                       else:
-                               number = string.atof(myver)             
-       else:
-               # normal number or number with letter at end
-               divider = len(myver)-1
-               if myver[divider:] not in "1234567890":
-                       #letter at end
-                       p1     = ord(myver[divider:])
-                       number = string.atof(myver[0:divider])
-               else:
-                       number = string.atof(myver)  
-       return [number,p1,p2]
+    """Parses the last elements of a version number into a triplet, that can
+    later be compared:
+
+    >>> relparse('1.2_pre3')
+    [1.2, -2, 3.0]
+    >>> relparse('1.2b')
+    [1.2, 98, 0]
+    >>> relparse('1.2')
+    [1.2, 0, 0]
+    """
+
+    number   = 0
+    p1       = 0
+    p2       = 0
+    mynewver = string.split(myver,"_")
+    if len(mynewver)==2:
+        # an _package_weights_
+        number = string.atof(mynewver[0])
+        match = 0
+        for x in _package_ends_:
+            elen = len(x)
+            if mynewver[1][:elen] == x:
+                match = 1
+                p1 = _package_weights_[x]
+                try:
+                    p2 = string.atof(mynewver[1][elen:])
+                except:
+                    p2 = 0
+                break
+        if not match:
+            # normal number or number with letter at end
+            divider = len(myver)-1
+            if myver[divider:] not in "1234567890":
+                # letter at end
+                p1 = ord(myver[divider:])
+                number = string.atof(myver[0:divider])
+            else:
+                number = string.atof(myver)
+    else:
+        # normal number or number with letter at end
+        divider = len(myver)-1
+        if myver[divider:] not in "1234567890":
+            #letter at end
+            p1     = ord(myver[divider:])
+            number = string.atof(myver[0:divider])
+        else:
+            number = string.atof(myver)
+    return [number,p1,p2]
 
 
 #######################################################################
@@ -549,143 +550,143 @@ def relparse(myver):
 __ververify_cache__ = {}
 
 def ververify(myorigval,silent=1):
-       """Returns 1 if given a valid version string, els 0. Valid versions are in the format
-
-       <v1>.<v2>...<vx>[a-z,_{_package_weights_}[vy]]
-
-       >>> ververify('2.4.20')
-       1
-       >>> ververify('2.4..20')                # two dots
-       0
-       >>> ververify('2.x.20')                 # 'x' is not numeric
-       0
-       >>> ververify('2.4.20a')
-       1
-       >>> ververify('2.4.20cvs')              # only one trailing letter
-       0
-       >>> ververify('1a')
-       1
-       >>> ververify('test_a')                 # no version at all
-       0
-       >>> ververify('2.4.20_beta1')
-       1
-       >>> ververify('2.4.20_beta')
-       1
-       >>> ververify('2.4.20_wrongext')        # _wrongext is no valid trailer
-       0
-       """
-
-       # Lookup the cache first
-       try:
-               return __ververify_cache__[myorigval]
-       except KeyError:
-               pass
-
-       if len(myorigval) == 0:
-               if not silent:
-                       error("package version is empty")
-               __ververify_cache__[myorigval] = 0
-               return 0
-       myval = string.split(myorigval,'.')
-       if len(myval)==0:
-               if not silent:
-                       error("package name has empty version string")
-               __ververify_cache__[myorigval] = 0
-               return 0
-       # all but the last version must be a numeric
-       for x in myval[:-1]:
-               if not len(x):
-                       if not silent:
-                               error("package version has two points in a row")
-                       __ververify_cache__[myorigval] = 0
-                       return 0
-               try:
-                       foo = string.atoi(x)
-               except:
-                       if not silent:
-                               error("package version contains non-numeric '"+x+"'")
-                       __ververify_cache__[myorigval] = 0
-                       return 0
-       if not len(myval[-1]):
-                       if not silent:
-                               error("package version has trailing dot")
-                       __ververify_cache__[myorigval] = 0
-                       return 0
-       try:
-               foo = string.atoi(myval[-1])
-               __ververify_cache__[myorigval] = 1
-               return 1
-       except:
-               pass
-
-       # ok, our last component is not a plain number or blank, let's continue
-       if myval[-1][-1] in string.lowercase:
-               try:
-                       foo = string.atoi(myval[-1][:-1])
-                       return 1
-                       __ververify_cache__[myorigval] = 1
-                       # 1a, 2.0b, etc.
-               except:
-                       pass
-       # ok, maybe we have a 1_alpha or 1_beta2; let's see
-       ep=string.split(myval[-1],"_")
-       if len(ep)!= 2:
-               if not silent:
-                       error("package version has more than one letter at then end")
-               __ververify_cache__[myorigval] = 0
-               return 0
-       try:
-               foo = string.atoi(ep[0])
-       except:
-               # this needs to be numeric, i.e. the "1" in "1_alpha"
-               if not silent:
-                       error("package version must have numeric part before the '_'")
-               __ververify_cache__[myorigval] = 0
-               return 0
-
-       for mye in _package_ends_:
-               if ep[1][0:len(mye)] == mye:
-                       if len(mye) == len(ep[1]):
-                               # no trailing numeric is ok
-                               __ververify_cache__[myorigval] = 1
-                               return 1
-                       else:
-                               try:
-                                       foo = string.atoi(ep[1][len(mye):])
-                                       __ververify_cache__[myorigval] = 1
-                                       return 1
-                               except:
-                                       # if no _package_weights_ work, *then* we return 0
-                                       pass    
-       if not silent:
-               error("package version extension after '_' is invalid")
-       __ververify_cache__[myorigval] = 0
-       return 0
+    """Returns 1 if given a valid version string, els 0. Valid versions are in the format
+
+    <v1>.<v2>...<vx>[a-z,_{_package_weights_}[vy]]
+
+    >>> ververify('2.4.20')
+    1
+    >>> ververify('2.4..20')        # two dots
+    0
+    >>> ververify('2.x.20')            # 'x' is not numeric
+    0
+    >>> ververify('2.4.20a')
+    1
+    >>> ververify('2.4.20cvs')        # only one trailing letter
+    0
+    >>> ververify('1a')
+    1
+    >>> ververify('test_a')            # no version at all
+    0
+    >>> ververify('2.4.20_beta1')
+    1
+    >>> ververify('2.4.20_beta')
+    1
+    >>> ververify('2.4.20_wrongext')    # _wrongext is no valid trailer
+    0
+    """
+
+    # Lookup the cache first
+    try:
+        return __ververify_cache__[myorigval]
+    except KeyError:
+        pass
+
+    if len(myorigval) == 0:
+        if not silent:
+            error("package version is empty")
+        __ververify_cache__[myorigval] = 0
+        return 0
+    myval = string.split(myorigval,'.')
+    if len(myval)==0:
+        if not silent:
+            error("package name has empty version string")
+        __ververify_cache__[myorigval] = 0
+        return 0
+    # all but the last version must be a numeric
+    for x in myval[:-1]:
+        if not len(x):
+            if not silent:
+                error("package version has two points in a row")
+            __ververify_cache__[myorigval] = 0
+            return 0
+        try:
+            foo = string.atoi(x)
+        except:
+            if not silent:
+                error("package version contains non-numeric '"+x+"'")
+            __ververify_cache__[myorigval] = 0
+            return 0
+    if not len(myval[-1]):
+            if not silent:
+                error("package version has trailing dot")
+            __ververify_cache__[myorigval] = 0
+            return 0
+    try:
+        foo = string.atoi(myval[-1])
+        __ververify_cache__[myorigval] = 1
+        return 1
+    except:
+        pass
+
+    # ok, our last component is not a plain number or blank, let's continue
+    if myval[-1][-1] in string.lowercase:
+        try:
+            foo = string.atoi(myval[-1][:-1])
+            return 1
+            __ververify_cache__[myorigval] = 1
+            # 1a, 2.0b, etc.
+        except:
+            pass
+    # ok, maybe we have a 1_alpha or 1_beta2; let's see
+    ep=string.split(myval[-1],"_")
+    if len(ep)!= 2:
+        if not silent:
+            error("package version has more than one letter at then end")
+        __ververify_cache__[myorigval] = 0
+        return 0
+    try:
+        foo = string.atoi(ep[0])
+    except:
+        # this needs to be numeric, i.e. the "1" in "1_alpha"
+        if not silent:
+            error("package version must have numeric part before the '_'")
+        __ververify_cache__[myorigval] = 0
+        return 0
+
+    for mye in _package_ends_:
+        if ep[1][0:len(mye)] == mye:
+            if len(mye) == len(ep[1]):
+                # no trailing numeric is ok
+                __ververify_cache__[myorigval] = 1
+                return 1
+            else:
+                try:
+                    foo = string.atoi(ep[1][len(mye):])
+                    __ververify_cache__[myorigval] = 1
+                    return 1
+                except:
+                    # if no _package_weights_ work, *then* we return 0
+                    pass
+    if not silent:
+        error("package version extension after '_' is invalid")
+    __ververify_cache__[myorigval] = 0
+    return 0
 
 
 def isjustname(mypkg):
-       myparts = string.split(mypkg,'-')
-       for x in myparts:
-               if ververify(x):
-                       return 0
-       return 1
+    myparts = string.split(mypkg,'-')
+    for x in myparts:
+        if ververify(x):
+            return 0
+    return 1
 
 
 _isspecific_cache_={}
 
 def isspecific(mypkg):
-       "now supports packages with no category"
-       try:
-               return __isspecific_cache__[mypkg]
-       except:
-               pass
+    "now supports packages with no category"
+    try:
+        return __isspecific_cache__[mypkg]
+    except:
+        pass
 
-       mysplit = string.split(mypkg,"/")
-       if not isjustname(mysplit[-1]):
-                       __isspecific_cache__[mypkg] = 1
-                       return 1
-       __isspecific_cache__[mypkg] = 0
-       return 0
+    mysplit = string.split(mypkg,"/")
+    if not isjustname(mysplit[-1]):
+            __isspecific_cache__[mypkg] = 1
+            return 1
+    __isspecific_cache__[mypkg] = 0
+    return 0
 
 
 #######################################################################
@@ -694,85 +695,85 @@ __pkgsplit_cache__={}
 
 def pkgsplit(mypkg, silent=1):
 
-       """This function can be used as a package verification function. If
-       it is a valid name, pkgsplit will return a list containing:
-       [pkgname, pkgversion(norev), pkgrev ].
-
-       >>> pkgsplit('')
-       >>> pkgsplit('x')
-       >>> pkgsplit('x-')
-       >>> pkgsplit('-1')
-       >>> pkgsplit('glibc-1.2-8.9-r7')
-       >>> pkgsplit('glibc-2.2.5-r7')
-       ['glibc', '2.2.5', 'r7']
-       >>> pkgsplit('foo-1.2-1')
-       >>> pkgsplit('Mesa-3.0')
-       ['Mesa', '3.0', 'r0']
-       """
-
-       try:
-               return __pkgsplit_cache__[mypkg]
-       except KeyError:
-               pass
-
-       myparts = string.split(mypkg,'-')
-       if len(myparts) < 2:
-               if not silent:
-                       error("package name without name or version part")
-               __pkgsplit_cache__[mypkg] = None
-               return None
-       for x in myparts:
-               if len(x) == 0:
-                       if not silent:
-                               error("package name with empty name or version part")
-                       __pkgsplit_cache__[mypkg] = None
-                       return None
-       # verify rev
-       revok = 0
-       myrev = myparts[-1]
-       ververify(myrev, 0)
-       if len(myrev) and myrev[0] == "r":
-               try:
-                       string.atoi(myrev[1:])
-                       revok = 1
-               except: 
-                       pass
-       if revok:
-               if ververify(myparts[-2]):
-                       if len(myparts) == 2:
-                               __pkgsplit_cache__[mypkg] = None
-                               return None
-                       else:
-                               for x in myparts[:-2]:
-                                       if ververify(x):
-                                               __pkgsplit_cache__[mypkg]=None
-                                               return None
-                                               # names can't have versiony looking parts
-                               myval=[string.join(myparts[:-2],"-"),myparts[-2],myparts[-1]]
-                               __pkgsplit_cache__[mypkg]=myval
-                               return myval
-               else:
-                       __pkgsplit_cache__[mypkg] = None
-                       return None
-
-       elif ververify(myparts[-1],silent):
-               if len(myparts)==1:
-                       if not silent:
-                               print "!!! Name error in",mypkg+": missing name part."
-                       __pkgsplit_cache__[mypkg]=None
-                       return None
-               else:
-                       for x in myparts[:-1]:
-                               if ververify(x):
-                                       if not silent: error("package name has multiple version parts")
-                                       __pkgsplit_cache__[mypkg] = None
-                                       return None
-                       myval = [string.join(myparts[:-1],"-"), myparts[-1],"r0"]
-                       __pkgsplit_cache__[mypkg] = myval
-                       return myval
-       else:
-               __pkgsplit_cache__[mypkg] = None
-               return None
+    """This function can be used as a package verification function. If
+    it is a valid name, pkgsplit will return a list containing:
+    [pkgname, pkgversion(norev), pkgrev ].
+
+    >>> pkgsplit('')
+    >>> pkgsplit('x')
+    >>> pkgsplit('x-')
+    >>> pkgsplit('-1')
+    >>> pkgsplit('glibc-1.2-8.9-r7')
+    >>> pkgsplit('glibc-2.2.5-r7')
+    ['glibc', '2.2.5', 'r7']
+    >>> pkgsplit('foo-1.2-1')
+    >>> pkgsplit('Mesa-3.0')
+    ['Mesa', '3.0', 'r0']
+    """
+
+    try:
+        return __pkgsplit_cache__[mypkg]
+    except KeyError:
+        pass
+
+    myparts = string.split(mypkg,'-')
+    if len(myparts) < 2:
+        if not silent:
+            error("package name without name or version part")
+        __pkgsplit_cache__[mypkg] = None
+        return None
+    for x in myparts:
+        if len(x) == 0:
+            if not silent:
+                error("package name with empty name or version part")
+            __pkgsplit_cache__[mypkg] = None
+            return None
+    # verify rev
+    revok = 0
+    myrev = myparts[-1]
+    ververify(myrev, 0)
+    if len(myrev) and myrev[0] == "r":
+        try:
+            string.atoi(myrev[1:])
+            revok = 1
+        except:
+            pass
+    if revok:
+        if ververify(myparts[-2]):
+            if len(myparts) == 2:
+                __pkgsplit_cache__[mypkg] = None
+                return None
+            else:
+                for x in myparts[:-2]:
+                    if ververify(x):
+                        __pkgsplit_cache__[mypkg]=None
+                        return None
+                        # names can't have versiony looking parts
+                myval=[string.join(myparts[:-2],"-"),myparts[-2],myparts[-1]]
+                __pkgsplit_cache__[mypkg]=myval
+                return myval
+        else:
+            __pkgsplit_cache__[mypkg] = None
+            return None
+
+    elif ververify(myparts[-1],silent):
+        if len(myparts)==1:
+            if not silent:
+                print "!!! Name error in",mypkg+": missing name part."
+            __pkgsplit_cache__[mypkg]=None
+            return None
+        else:
+            for x in myparts[:-1]:
+                if ververify(x):
+                    if not silent: error("package name has multiple version parts")
+                    __pkgsplit_cache__[mypkg] = None
+                    return None
+            myval = [string.join(myparts[:-1],"-"), myparts[-1],"r0"]
+            __pkgsplit_cache__[mypkg] = myval
+            return myval
+    else:
+        __pkgsplit_cache__[mypkg] = None
+        return None
 
 
 #######################################################################
@@ -780,41 +781,41 @@ def pkgsplit(mypkg, silent=1):
 __catpkgsplit_cache__ = {}
 
 def catpkgsplit(mydata,silent=1):
-       """returns [cat, pkgname, version, rev ]
-
-       >>> catpkgsplit('sys-libs/glibc-1.2-r7')
-       ['sys-libs', 'glibc', '1.2', 'r7']
-       >>> catpkgsplit('glibc-1.2-r7')
-       ['null', 'glibc', '1.2', 'r7']
-       """
-
-       try:
-               return __catpkgsplit_cache__[mydata]
-       except KeyError:
-               pass
-
-       cat = os.path.basename(os.path.dirname(mydata))
-       mydata = os.path.join(cat, os.path.basename(mydata))
-#      if mydata[:len(projectdir)] == projectdir:
-#              mydata = mydata[len(projectdir)+1:]
-       if mydata[-3:] == '.oe':
-               mydata = mydata[:-3]
-
-       mysplit = mydata.split("/")
-       p_split = None
-       splitlen = len(mysplit)
-       if splitlen == 1:
-               retval = [None]
-               p_split = pkgsplit(mydata,silent)
-       else:
-               retval = [mysplit[splitlen - 2]]
-               p_split = pkgsplit(mysplit[splitlen - 1],silent)
-       if not p_split:
-               __catpkgsplit_cache__[mydata] = None
-               return None
-       retval.extend(p_split)
-       __catpkgsplit_cache__[mydata] = retval
-       return retval
+    """returns [cat, pkgname, version, rev ]
+
+    >>> catpkgsplit('sys-libs/glibc-1.2-r7')
+    ['sys-libs', 'glibc', '1.2', 'r7']
+    >>> catpkgsplit('glibc-1.2-r7')
+    ['null', 'glibc', '1.2', 'r7']
+    """
+
+    try:
+        return __catpkgsplit_cache__[mydata]
+    except KeyError:
+        pass
+
+    cat = os.path.basename(os.path.dirname(mydata))
+    mydata = os.path.join(cat, os.path.basename(mydata))
+#    if mydata[:len(projectdir)] == projectdir:
+#        mydata = mydata[len(projectdir)+1:]
+    if mydata[-3:] == '.oe':
+        mydata = mydata[:-3]
+
+    mysplit = mydata.split("/")
+    p_split = None
+    splitlen = len(mysplit)
+    if splitlen == 1:
+        retval = [None]
+        p_split = pkgsplit(mydata,silent)
+    else:
+        retval = [mysplit[splitlen - 2]]
+        p_split = pkgsplit(mysplit[splitlen - 1],silent)
+    if not p_split:
+        __catpkgsplit_cache__[mydata] = None
+        return None
+    retval.extend(p_split)
+    __catpkgsplit_cache__[mydata] = retval
+    return retval
 
 
 #######################################################################
@@ -822,397 +823,397 @@ def catpkgsplit(mydata,silent=1):
 __vercmp_cache__ = {}
 
 def vercmp(val1,val2):
-       """This takes two version strings and returns an integer to tell you whether
-       the versions are the same, val1>val2 or val2>val1.
-       
-       >>> vercmp('1', '2')
-       -1.0
-       >>> vercmp('2', '1')
-       1.0
-       >>> vercmp('1', '1.0')
-       0
-       >>> vercmp('1', '1.1')
-       -1.0
-       >>> vercmp('1.1', '1_p2')
-       1.0
-       """
-
-       # quick short-circuit
-       if val1 == val2:
-               return 0
-       valkey = val1+" "+val2
-
-       # cache lookup
-       try:
-               return __vercmp_cache__[valkey]
-               try:
-                       return - __vercmp_cache__[val2+" "+val1]
-               except KeyError:
-                       pass
-       except KeyError:
-               pass
-       
-       # consider 1_p2 vc 1.1
-       # after expansion will become (1_p2,0) vc (1,1)
-       # then 1_p2 is compared with 1 before 0 is compared with 1
-       # to solve the bug we need to convert it to (1,0_p2)
-       # by splitting _prepart part and adding it back _after_expansion
-
-       val1_prepart = val2_prepart = ''
-       if val1.count('_'):
-               val1, val1_prepart = val1.split('_', 1)
-       if val2.count('_'):
-               val2, val2_prepart = val2.split('_', 1)
-
-       # replace '-' by '.'
-       # FIXME: Is it needed? can val1/2 contain '-'?
-
-       val1 = string.split(val1,'-')
-       if len(val1) == 2:
-               val1[0] = val1[0] +"."+ val1[1]
-       val2 = string.split(val2,'-')
-       if len(val2) == 2:
-               val2[0] = val2[0] +"."+ val2[1]
-
-       val1 = string.split(val1[0],'.')
-       val2 = string.split(val2[0],'.')
-
-       # add back decimal point so that .03 does not become "3" !
-       for x in range(1,len(val1)):
-               if val1[x][0] == '0' :
-                       val1[x] = '.' + val1[x]
-       for x in range(1,len(val2)):
-               if val2[x][0] == '0' :
-                       val2[x] = '.' + val2[x]
-
-       # extend varion numbers
-       if len(val2) < len(val1):
-               val2.extend(["0"]*(len(val1)-len(val2)))
-       elif len(val1) < len(val2):
-               val1.extend(["0"]*(len(val2)-len(val1)))
-
-       # add back _prepart tails
-       if val1_prepart:
-               val1[-1] += '_' + val1_prepart
-       if val2_prepart:
-               val2[-1] += '_' + val2_prepart
-       # The above code will extend version numbers out so they
-       # have the same number of digits.
-       for x in range(0,len(val1)):
-               cmp1 = relparse(val1[x])
-               cmp2 = relparse(val2[x])
-               for y in range(0,3):
-                       myret = cmp1[y] - cmp2[y]
-                       if myret != 0:
-                               __vercmp_cache__[valkey] = myret
-                               return myret
-       __vercmp_cache__[valkey] = 0
-       return 0
+    """This takes two version strings and returns an integer to tell you whether
+    the versions are the same, val1>val2 or val2>val1.
+
+    >>> vercmp('1', '2')
+    -1.0
+    >>> vercmp('2', '1')
+    1.0
+    >>> vercmp('1', '1.0')
+    0
+    >>> vercmp('1', '1.1')
+    -1.0
+    >>> vercmp('1.1', '1_p2')
+    1.0
+    """
+
+    # quick short-circuit
+    if val1 == val2:
+        return 0
+    valkey = val1+" "+val2
+
+    # cache lookup
+    try:
+        return __vercmp_cache__[valkey]
+        try:
+            return - __vercmp_cache__[val2+" "+val1]
+        except KeyError:
+            pass
+    except KeyError:
+        pass
+
+    # consider 1_p2 vc 1.1
+    # after expansion will become (1_p2,0) vc (1,1)
+    # then 1_p2 is compared with 1 before 0 is compared with 1
+    # to solve the bug we need to convert it to (1,0_p2)
+    # by splitting _prepart part and adding it back _after_expansion
+
+    val1_prepart = val2_prepart = ''
+    if val1.count('_'):
+        val1, val1_prepart = val1.split('_', 1)
+    if val2.count('_'):
+        val2, val2_prepart = val2.split('_', 1)
+
+    # replace '-' by '.'
+    # FIXME: Is it needed? can val1/2 contain '-'?
+
+    val1 = string.split(val1,'-')
+    if len(val1) == 2:
+        val1[0] = val1[0] +"."+ val1[1]
+    val2 = string.split(val2,'-')
+    if len(val2) == 2:
+        val2[0] = val2[0] +"."+ val2[1]
+
+    val1 = string.split(val1[0],'.')
+    val2 = string.split(val2[0],'.')
+
+    # add back decimal point so that .03 does not become "3" !
+    for x in range(1,len(val1)):
+        if val1[x][0] == '0' :
+            val1[x] = '.' + val1[x]
+    for x in range(1,len(val2)):
+        if val2[x][0] == '0' :
+            val2[x] = '.' + val2[x]
+
+    # extend varion numbers
+    if len(val2) < len(val1):
+        val2.extend(["0"]*(len(val1)-len(val2)))
+    elif len(val1) < len(val2):
+        val1.extend(["0"]*(len(val2)-len(val1)))
+
+    # add back _prepart tails
+    if val1_prepart:
+        val1[-1] += '_' + val1_prepart
+    if val2_prepart:
+        val2[-1] += '_' + val2_prepart
+    # The above code will extend version numbers out so they
+    # have the same number of digits.
+    for x in range(0,len(val1)):
+        cmp1 = relparse(val1[x])
+        cmp2 = relparse(val2[x])
+        for y in range(0,3):
+            myret = cmp1[y] - cmp2[y]
+            if myret != 0:
+                __vercmp_cache__[valkey] = myret
+                return myret
+    __vercmp_cache__[valkey] = 0
+    return 0
 
 
 #######################################################################
 
 def pkgcmp(pkg1,pkg2):
-       """ Compares two packages, which should have been split via
-       pkgsplit(). if the return value val is less than zero, then pkg2 is
-       newer than pkg1, zero if equal and positive if older.
-
-       >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r7'])
-       0
-       >>> pkgcmp(['glibc', '2.2.5', 'r4'], ['glibc', '2.2.5', 'r7'])
-       -1
-       >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r2'])
-       1
-       """
-       
-       mycmp = vercmp(pkg1[1],pkg2[1])
-       if mycmp > 0:
-               return 1
-       if mycmp < 0:
-               return -1
-       r1=string.atoi(pkg1[2][1:])
-       r2=string.atoi(pkg2[2][1:])
-       if r1 > r2:
-               return 1
-       if r2 > r1:
-               return -1
-       return 0
+    """ Compares two packages, which should have been split via
+    pkgsplit(). if the return value val is less than zero, then pkg2 is
+    newer than pkg1, zero if equal and positive if older.
+
+    >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r7'])
+    0
+    >>> pkgcmp(['glibc', '2.2.5', 'r4'], ['glibc', '2.2.5', 'r7'])
+    -1
+    >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r2'])
+    1
+    """
+
+    mycmp = vercmp(pkg1[1],pkg2[1])
+    if mycmp > 0:
+        return 1
+    if mycmp < 0:
+        return -1
+    r1=string.atoi(pkg1[2][1:])
+    r2=string.atoi(pkg2[2][1:])
+    if r1 > r2:
+        return 1
+    if r2 > r1:
+        return -1
+    return 0
 
 
 #######################################################################
 
 def dep_parenreduce(mysplit, mypos=0):
-       """Accepts a list of strings, and converts '(' and ')' surrounded items to sub-lists:
-
-       >>> dep_parenreduce([''])
-       ['']
-       >>> dep_parenreduce(['1', '2', '3'])
-       ['1', '2', '3']
-       >>> dep_parenreduce(['1', '(', '2', '3', ')', '4'])
-       ['1', ['2', '3'], '4']
-       """
-
-       while mypos < len(mysplit): 
-               if mysplit[mypos] == "(":
-                       firstpos = mypos
-                       mypos = mypos + 1
-                       while mypos < len(mysplit):
-                               if mysplit[mypos] == ")":
-                                       mysplit[firstpos:mypos+1] = [mysplit[firstpos+1:mypos]]
-                                       mypos = firstpos
-                                       break
-                               elif mysplit[mypos] == "(":
-                                       # recurse
-                                       mysplit = dep_parenreduce(mysplit,mypos)
-                               mypos = mypos + 1
-               mypos = mypos + 1
-       return mysplit
+    """Accepts a list of strings, and converts '(' and ')' surrounded items to sub-lists:
+
+    >>> dep_parenreduce([''])
+    ['']
+    >>> dep_parenreduce(['1', '2', '3'])
+    ['1', '2', '3']
+    >>> dep_parenreduce(['1', '(', '2', '3', ')', '4'])
+    ['1', ['2', '3'], '4']
+    """
+
+    while mypos < len(mysplit):
+        if mysplit[mypos] == "(":
+            firstpos = mypos
+            mypos = mypos + 1
+            while mypos < len(mysplit):
+                if mysplit[mypos] == ")":
+                    mysplit[firstpos:mypos+1] = [mysplit[firstpos+1:mypos]]
+                    mypos = firstpos
+                    break
+                elif mysplit[mypos] == "(":
+                    # recurse
+                    mysplit = dep_parenreduce(mysplit,mypos)
+                mypos = mypos + 1
+        mypos = mypos + 1
+    return mysplit
 
 
 def dep_opconvert(mysplit, myuse):
-       "Does dependency operator conversion"
-       
-       mypos   = 0
-       newsplit = []
-       while mypos < len(mysplit):
-               if type(mysplit[mypos]) == types.ListType:
-                       newsplit.append(dep_opconvert(mysplit[mypos],myuse))
-                       mypos += 1
-               elif mysplit[mypos] == ")":
-                       # mismatched paren, error
-                       return None
-               elif mysplit[mypos]=="||":
-                       if ((mypos+1)>=len(mysplit)) or (type(mysplit[mypos+1])!=types.ListType):
-                               # || must be followed by paren'd list
-                               return None
-                       try:
-                               mynew = dep_opconvert(mysplit[mypos+1],myuse)
-                       except Exception, e:
-                               error("unable to satisfy OR dependancy: " + string.join(mysplit," || "))
-                               raise e
-                       mynew[0:0] = ["||"]
-                       newsplit.append(mynew)
-                       mypos += 2
-               elif mysplit[mypos][-1] == "?":
-                       # use clause, i.e "gnome? ( foo bar )"
-                       # this is a quick and dirty hack so that repoman can enable all USE vars:
-                       if (len(myuse) == 1) and (myuse[0] == "*"):
-                               # enable it even if it's ! (for repoman) but kill it if it's
-                               # an arch variable that isn't for this arch. XXX Sparc64?
-                               if (mysplit[mypos][:-1] not in settings.usemask) or \
-                                               (mysplit[mypos][:-1]==settings["ARCH"]):
-                                       enabled=1
-                               else:
-                                       enabled=0
-                       else:
-                               if mysplit[mypos][0] == "!":
-                                       myusevar = mysplit[mypos][1:-1]
-                                       enabled = not myusevar in myuse
-                                       #if myusevar in myuse:
-                                       #       enabled = 0
-                                       #else:
-                                       #       enabled = 1
-                               else:
-                                       myusevar=mysplit[mypos][:-1]
-                                       enabled = myusevar in myuse
-                                       #if myusevar in myuse:
-                                       #       enabled=1
-                                       #else:
-                                       #       enabled=0
-                       if (mypos +2 < len(mysplit)) and (mysplit[mypos+2] == ":"):
-                               # colon mode
-                               if enabled:
-                                       # choose the first option
-                                       if type(mysplit[mypos+1]) == types.ListType:
-                                               newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
-                                       else:
-                                               newsplit.append(mysplit[mypos+1])
-                               else:
-                                       # choose the alternate option
-                                       if type(mysplit[mypos+1]) == types.ListType:
-                                               newsplit.append(dep_opconvert(mysplit[mypos+3],myuse))
-                                       else:
-                                               newsplit.append(mysplit[mypos+3])
-                               mypos += 4
-                       else:
-                               # normal use mode
-                               if enabled:
-                                       if type(mysplit[mypos+1]) == types.ListType:
-                                               newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
-                                       else:
-                                               newsplit.append(mysplit[mypos+1])
-                               # otherwise, continue
-                               mypos += 2
-               else:
-                       # normal item
-                       newsplit.append(mysplit[mypos])
-                       mypos += 1
-       return newsplit
+    "Does dependency operator conversion"
+
+    mypos   = 0
+    newsplit = []
+    while mypos < len(mysplit):
+        if type(mysplit[mypos]) == types.ListType:
+            newsplit.append(dep_opconvert(mysplit[mypos],myuse))
+            mypos += 1
+        elif mysplit[mypos] == ")":
+            # mismatched paren, error
+            return None
+        elif mysplit[mypos]=="||":
+            if ((mypos+1)>=len(mysplit)) or (type(mysplit[mypos+1])!=types.ListType):
+                # || must be followed by paren'd list
+                return None
+            try:
+                mynew = dep_opconvert(mysplit[mypos+1],myuse)
+            except Exception, e:
+                error("unable to satisfy OR dependancy: " + string.join(mysplit," || "))
+                raise e
+            mynew[0:0] = ["||"]
+            newsplit.append(mynew)
+            mypos += 2
+        elif mysplit[mypos][-1] == "?":
+            # use clause, e.g. "gnome? ( foo bar )"
+            # this is a quick and dirty hack so that repoman can enable all USE vars:
+            if (len(myuse) == 1) and (myuse[0] == "*"):
+                # enable it even if it's ! (for repoman) but kill it if it's
+                # an arch variable that isn't for this arch. XXX Sparc64?
+                if (mysplit[mypos][:-1] not in settings.usemask) or \
+                        (mysplit[mypos][:-1]==settings["ARCH"]):
+                    enabled=1
+                else:
+                    enabled=0
+            else:
+                if mysplit[mypos][0] == "!":
+                    myusevar = mysplit[mypos][1:-1]
+                    enabled = not myusevar in myuse
+                    #if myusevar in myuse:
+                    #    enabled = 0
+                    #else:
+                    #    enabled = 1
+                else:
+                    myusevar=mysplit[mypos][:-1]
+                    enabled = myusevar in myuse
+                    #if myusevar in myuse:
+                    #    enabled=1
+                    #else:
+                    #    enabled=0
+            if (mypos +2 < len(mysplit)) and (mysplit[mypos+2] == ":"):
+                # colon mode
+                if enabled:
+                    # choose the first option
+                    if type(mysplit[mypos+1]) == types.ListType:
+                        newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
+                    else:
+                        newsplit.append(mysplit[mypos+1])
+                else:
+                    # choose the alternate option
+                    if type(mysplit[mypos+1]) == types.ListType:
+                        newsplit.append(dep_opconvert(mysplit[mypos+3],myuse))
+                    else:
+                        newsplit.append(mysplit[mypos+3])
+                mypos += 4
+            else:
+                # normal use mode
+                if enabled:
+                    if type(mysplit[mypos+1]) == types.ListType:
+                        newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
+                    else:
+                        newsplit.append(mysplit[mypos+1])
+                # otherwise, continue
+                mypos += 2
+        else:
+            # normal item
+            newsplit.append(mysplit[mypos])
+            mypos += 1
+    return newsplit
 
 class digraph:
-       """beautiful directed graph object"""
-
-       def __init__(self):
-               self.dict={}
-               #okeys = keys, in order they were added (to optimize firstzero() ordering)
-               self.okeys=[]
-
-       def __str__(self):
-               str = ""
-               for key in self.okeys:
-                       str += "%s:\t%s\n" % (key, self.dict[key][1])
-               return str
-
-       def addnode(self,mykey,myparent):
-               if not mykey in self.dict:
-                       self.okeys.append(mykey)
-                       if myparent==None:
-                               self.dict[mykey]=[0,[]]
-                       else:
-                               self.dict[mykey]=[0,[myparent]]
-                               self.dict[myparent][0]=self.dict[myparent][0]+1
-                       return
-               if myparent and (not myparent in self.dict[mykey][1]):
-                       self.dict[mykey][1].append(myparent)
-                       self.dict[myparent][0]=self.dict[myparent][0]+1
-       
-       def delnode(self,mykey, ref = 1):
-               """Delete a node
-
-               If ref is 1, remove references to this node from other nodes.
-               If ref is 2, remove nodes that reference this node."""
-               if not mykey in self.dict:
-                       return
-               for x in self.dict[mykey][1]:
-                       self.dict[x][0]=self.dict[x][0]-1
-               del self.dict[mykey]
-               while 1:
-                       try:
-                               self.okeys.remove(mykey)        
-                       except ValueError:
-                               break
-               if ref:
-                       __kill = []
-                       for k in self.okeys:
-                               if mykey in self.dict[k][1]:
-                                       if ref == 1 or ref == 2:
-                                               self.dict[k][1].remove(mykey)
-                                       if ref == 2:
-                                               __kill.append(k)
-                       for l in __kill:
-                               self.delnode(l, ref)
-       
-       def allnodes(self):
-               "returns all nodes in the dictionary"
-               return self.dict.keys()
-       
-       def firstzero(self):
-               "returns first node with zero references, or NULL if no such node exists"
-               for x in self.okeys:
-                       if self.dict[x][0]==0:
-                               return x
-               return None 
-
-       def firstnonzero(self):
-               "returns first node with nonzero references, or NULL if no such node exists"
-               for x in self.okeys:
-                       if self.dict[x][0]!=0:
-                               return x
-               return None 
-
-
-       def allzeros(self):
-               "returns all nodes with zero references, or NULL if no such node exists"
-               zerolist = []
-               for x in self.dict.keys():
-                       if self.dict[x][0]==0:
-                               zerolist.append(x)
-               return zerolist
-
-       def hasallzeros(self):
-               "returns 0/1, Are all nodes zeros? 1 : 0"
-               zerolist = []
-               for x in self.dict.keys():
-                       if self.dict[x][0]!=0:
-                               return 0
-               return 1
-
-       def empty(self):
-               if len(self.dict)==0:
-                       return 1
-               return 0
-
-       def hasnode(self,mynode):
-               return mynode in self.dict
-
-       def getparents(self, item):
-               if not self.hasnode(item):
-                       return []
-               return self.dict[item][1]
-       
-       def getchildren(self, item):
-               if not self.hasnode(item):
-                       return []
-               children = [i for i in self.okeys if item in self.getparents(i)]
-               return children
-       
-       def walkdown(self, item, callback, debug = None):
-               __down_callback_cache = []
-               __recurse_count = 0
-               if not self.hasnode(item):
-                       return 0
-       
-               if __down_callback_cache.count(item):
-                       return 1
-
-               parents = self.getparents(item)
-               children = self.getchildren(item)
-               for p in parents:
-                       if p in children:
-#                              print "%s is both parent and child of %s" % (p, item)
-                               __down_callback_cache.append(p)
-                               ret = callback(self, p)
-                               if ret == 0:
-                                       return 0
-                               continue
-                       if item == p:
-                               print "eek, i'm my own parent!"
-                               return 0
-                       if debug:
-                               print "item: %s, p: %s" % (item, p)
-                       ret = self.walkdown(p, callback, debug)
-                       if ret == 0:
-                               return 0
-
-               __down_callback_cache.append(item)
-               return callback(self, item)
-       
-       def walkup(self, item, callback):
-               if not self.hasnode(item):
-                       return 0
-       
-               parents = self.getparents(item)
-               children = self.getchildren(item)
-               for c in children:
-                       if c in parents:
-                               ret = callback(self, item)
-                               if ret == 0:
-                                       return 0
-                               continue
-                       if item == c:
-                               print "eek, i'm my own child!"
-                               return 0
-                       ret = self.walkup(c, callback)
-                       if ret == 0:
-                               return 0
-               return callback(self, item)
-
-       def copy(self):
-               mygraph=digraph()
-               for x in self.dict.keys():
-                       mygraph.dict[x]=self.dict[x][:]
-                       mygraph.okeys=self.okeys[:]
-               return mygraph
+    """beautiful directed graph object"""
+
+    def __init__(self):
+        self.dict={}
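+        # each value is [reference count, [parents]]; the count tracks how
+        # many other nodes name this node as a parent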
+        #okeys = keys, in order they were added (to optimize firstzero() ordering)
+        self.okeys=[]
+
+    def __str__(self):
+        str = ""
+        for key in self.okeys:
+            str += "%s:\t%s\n" % (key, self.dict[key][1])
+        return str
+
+    def addnode(self,mykey,myparent):
+        if not mykey in self.dict:
+            self.okeys.append(mykey)
+            if myparent==None:
+                self.dict[mykey]=[0,[]]
+            else:
+                self.dict[mykey]=[0,[myparent]]
+                self.dict[myparent][0]=self.dict[myparent][0]+1
+            return
+        if myparent and (not myparent in self.dict[mykey][1]):
+            self.dict[mykey][1].append(myparent)
+            self.dict[myparent][0]=self.dict[myparent][0]+1
+
+    def delnode(self,mykey, ref = 1):
+        """Delete a node
+
+        If ref is 1, remove references to this node from other nodes.
+        If ref is 2, remove nodes that reference this node."""
+        if not mykey in self.dict:
+            return
+        for x in self.dict[mykey][1]:
+            self.dict[x][0]=self.dict[x][0]-1
+        del self.dict[mykey]
+        while 1:
+            try:
+                self.okeys.remove(mykey)
+            except ValueError:
+                break
+        if ref:
+            __kill = []
+            for k in self.okeys:
+                if mykey in self.dict[k][1]:
+                    if ref == 1 or ref == 2:
+                        self.dict[k][1].remove(mykey)
+                    if ref == 2:
+                        __kill.append(k)
+            for l in __kill:
+                self.delnode(l, ref)
+
+    def allnodes(self):
+        "returns all nodes in the dictionary"
+        return self.dict.keys()
+
+    def firstzero(self):
+        "returns first node with zero references, or NULL if no such node exists"
+        for x in self.okeys:
+            if self.dict[x][0]==0:
+                return x
+        return None
+
+    def firstnonzero(self):
+        "returns first node with nonzero references, or NULL if no such node exists"
+        for x in self.okeys:
+            if self.dict[x][0]!=0:
+                return x
+        return None
+
+
+    def allzeros(self):
+        "returns all nodes with zero references, or NULL if no such node exists"
+        zerolist = []
+        for x in self.dict.keys():
+            if self.dict[x][0]==0:
+                zerolist.append(x)
+        return zerolist
+
+    def hasallzeros(self):
+        "returns 0/1, Are all nodes zeros? 1 : 0"
+        zerolist = []
+        for x in self.dict.keys():
+            if self.dict[x][0]!=0:
+                return 0
+        return 1
+
+    def empty(self):
+        if len(self.dict)==0:
+            return 1
+        return 0
+
+    def hasnode(self,mynode):
+        return mynode in self.dict
+
+    def getparents(self, item):
+        if not self.hasnode(item):
+            return []
+        return self.dict[item][1]
+
+    def getchildren(self, item):
+        if not self.hasnode(item):
+            return []
+        children = [i for i in self.okeys if item in self.getparents(i)]
+        return children
+
+    def walkdown(self, item, callback, debug = None):
+        __down_callback_cache = []
+        __recurse_count = 0
+        if not self.hasnode(item):
+            return 0
+
+        if __down_callback_cache.count(item):
+            return 1
+
+        parents = self.getparents(item)
+        children = self.getchildren(item)
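+        # recurse into all parent nodes first, then invoke the callback on
+        # this node itself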
+        for p in parents:
+            if p in children:
+#                print "%s is both parent and child of %s" % (p, item)
+                __down_callback_cache.append(p)
+                ret = callback(self, p)
+                if ret == 0:
+                    return 0
+                continue
+            if item == p:
+                print "eek, i'm my own parent!"
+                return 0
+            if debug:
+                print "item: %s, p: %s" % (item, p)
+            ret = self.walkdown(p, callback, debug)
+            if ret == 0:
+                return 0
+
+        __down_callback_cache.append(item)
+        return callback(self, item)
+
+    def walkup(self, item, callback):
+        if not self.hasnode(item):
+            return 0
+
+        parents = self.getparents(item)
+        children = self.getchildren(item)
+        for c in children:
+            if c in parents:
+                ret = callback(self, item)
+                if ret == 0:
+                    return 0
+                continue
+            if item == c:
+                print "eek, i'm my own child!"
+                return 0
+            ret = self.walkup(c, callback)
+            if ret == 0:
+                return 0
+        return callback(self, item)
+
+    def copy(self):
+        mygraph=digraph()
+        for x in self.dict.keys():
+            mygraph.dict[x]=self.dict[x][:]
+            mygraph.okeys=self.okeys[:]
+        return mygraph
 
 #######################################################################
 #######################################################################
@@ -1220,32 +1221,32 @@ class digraph:
 # SECTION: Config
 #
 # PURPOSE: Reading and handling of system/target-specific/local configuration
-#         reading of package configuration
+#       reading of package configuration
 #
 #######################################################################
 #######################################################################
 
 def reader(cfgfile, feeder):
-       """Generic configuration file reader that opens a file, reads the lines,
-       handles continuation lines, comments, empty lines and feed all read lines
-       into the function feeder(lineno, line).
-       """
-       
-       f = open(cfgfile,'r')
-       lineno = 0
-       while 1:
-               lineno = lineno + 1
-               s = f.readline()
-               if not s: break
-               w = s.strip()
-               if not w: continue              # skip empty lines
-               s = s.rstrip()
-               if s[0] == '#': continue        # skip comments
-               while s[-1] == '\\':
-                       s2 = f.readline()[:-1].strip()
-                       s = s[:-1] + s2
-               feeder(lineno, s)
+    """Generic configuration file reader that opens a file, reads the lines,
+    handles continuation lines, comments, empty lines and feed all read lines
+    into the function feeder(lineno, line).
+    """
+
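+    # lines ending in a backslash are joined with the following line before
+    # being handed to the feeder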
+    f = open(cfgfile,'r')
+    lineno = 0
+    while 1:
+        lineno = lineno + 1
+        s = f.readline()
+        if not s: break
+        w = s.strip()
+        if not w: continue        # skip empty lines
+        s = s.rstrip()
+        if s[0] == '#': continue    # skip comments
+        while s[-1] == '\\':
+            s2 = f.readline()[:-1].strip()
+            s = s[:-1] + s2
+        feeder(lineno, s)
 
 if __name__ == "__main__":
-       import doctest, oe
-       doctest.testmod(oe)
+    import doctest, oe
+    doctest.testmod(oe)
index 3e110f5..5dcaeba 100644 (file)
@@ -11,7 +11,7 @@ Based on functions from the base oe module, Copyright 2003 Holger Schurig
 """
 
 from oe import debug, data, fetch, fatal, error, note, event, mkdirhier
-import oe, os, string
+import oe, os
 
 # data holds flags and function name for a given task
 _task_data = data.init()
@@ -24,348 +24,347 @@ _task_stack = []
 
 # events
 class FuncFailed(Exception):
-       """Executed function failed"""
+    """Executed function failed"""
 
 class EventException(Exception):
-       """Exception which is associated with an Event."""
+    """Exception which is associated with an Event."""
 
-       def __init__(self, msg, event):
-               self.event = event
+    def __init__(self, msg, event):
+        self.event = event
 
-       def getEvent(self):
-               return self._event
+    def getEvent(self):
+        return self._event
 
-       def setEvent(self, event):
-               self._event = event
+    def setEvent(self, event):
+        self._event = event
 
-       event = property(getEvent, setEvent, None, "event property")
+    event = property(getEvent, setEvent, None, "event property")
 
 class TaskBase(event.Event):
-       """Base class for task events"""
+    """Base class for task events"""
 
-       def __init__(self, t, d = {}):
-               self.task = t
-               self.data = d
+    def __init__(self, t, d = {}):
+        self.task = t
+        self.data = d
 
-       def getTask(self):
-               return self._task
+    def getTask(self):
+        return self._task
 
-       def setTask(self, task):
-               self._task = task
+    def setTask(self, task):
+        self._task = task
 
-       task = property(getTask, setTask, None, "task property")
+    task = property(getTask, setTask, None, "task property")
 
-       def getData(self):
-               return self._data
+    def getData(self):
+        return self._data
 
-       def setData(self, data):
-               self._data = data
+    def setData(self, data):
+        self._data = data
 
-       data = property(getData, setData, None, "data property")
+    data = property(getData, setData, None, "data property")
 
 class TaskStarted(TaskBase):
-       """Task execution started"""
-       
+    """Task execution started"""
+
 class TaskSucceeded(TaskBase):
-       """Task execution completed"""
+    """Task execution completed"""
 
 class TaskFailed(TaskBase):
-       """Task execution failed"""
+    """Task execution failed"""
 
 class InvalidTask(TaskBase):
-       """Invalid Task"""
+    """Invalid Task"""
 
 # functions
 
 def init(data):
-       global _task_data, _task_graph, _task_stack
-       _task_data = data.init()
-       _task_graph = oe.digraph()
-       _task_stack = []
+    global _task_data, _task_graph, _task_stack
+    _task_data = data.init()
+    _task_graph = oe.digraph()
+    _task_stack = []
 
 
 def exec_func(func, d, dirs = None):
-       """Execute an OE 'function'"""
-
-       body = data.getVar(func, d)
-       if not body:
-               return
-
-       if not dirs:
-               dirs = string.split(data.getVarFlag(func, 'dirs', d) or "")
-       for adir in dirs:
-               adir = data.expand(adir, d)
-               mkdirhier(adir)
-
-       if len(dirs) > 0:
-               adir = dirs[-1]
-       else:
-               adir = data.getVar('B', d)
-
-       adir = data.expand(adir, d)
-
-       try:
-               prevdir = os.getcwd()
-       except OSError:
-               prevdir = data.expand('${TOPDIR}', d)
-       if adir and os.access(adir, os.F_OK):
-               os.chdir(adir)
-
-       if data.getVarFlag(func, "python", d):
-               exec_func_python(func, d)
-       else:
-               exec_func_shell(func, d)
-       os.chdir(prevdir)
+    """Execute an OE 'function'"""
+
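+    # create any directories listed in the function's 'dirs' flag and chdir
+    # into the last one (falling back to ${S}) before running the function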
+    if not dirs:
+        dirs = (data.getVarFlag(func, 'dirs', d) or "").split()
+    for adir in dirs:
+        adir = data.expand(adir, d)
+        mkdirhier(adir)
+
+    if len(dirs) > 0:
+        adir = dirs[-1]
+    else:
+        adir = data.getVar('S', d)
+
+    adir = data.expand(adir, d)
+
+    try:
+        prevdir = os.getcwd()
+    except OSError:
+        prevdir = data.expand('${TOPDIR}', d)
+    if adir and os.access(adir, os.F_OK):
+        os.chdir(adir)
+
+    if data.getVarFlag(func, "python", d):
+        exec_func_python(func, d)
+    else:
+        exec_func_shell(func, d)
+    os.chdir(prevdir)
 
 def exec_func_python(func, d):
-       """Execute a python OE 'function'"""
-       import re, os
-
-       tmp = "def " + func + "():\n%s" % data.getVar(func, d)
-       comp = compile(tmp + '\n' + func + '()', oe.data.getVar('FILE', d, 1) + ':' + func, "exec")
-       prevdir = os.getcwd()
-       g = {} # globals
-       g['oe'] = oe
-       g['os'] = os
-       g['d'] = d
-       exec comp in g
-       os.chdir(prevdir)
+    """Execute a python OE 'function'"""
+    import re, os
+
+    body = data.getVar(func, d)
+    if not body:
+        return
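+    # wrap the metadata snippet in a function definition, then compile and
+    # exec it with oe, os and the datastore d exposed as globals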
+    tmp = "def " + func + "():\n%s" % body
+    comp = compile(tmp + '\n' + func + '()', oe.data.getVar('FILE', d, 1) + ':' + func, "exec")
+    prevdir = os.getcwd()
+    g = {} # globals
+    g['oe'] = oe
+    g['os'] = os
+    g['d'] = d
+    exec comp in g
+    os.chdir(prevdir)
 
 def exec_func_shell(func, d):
-       """Execute a shell OE 'function' Returns true if execution was successful.
-
-       For this, it creates a bash shell script in the tmp dectory, writes the local
-       data into it and finally executes. The output of the shell will end in a log file and stdout.
-
-       Note on directory behavior.  The 'dirs' varflag should contain a list
-       of the directories you need created prior to execution.  The last
-       item in the list is where we will chdir/cd to.
-       """
-       import sys
-
-       deps = data.getVarFlag(func, 'deps', d)
-       check = data.getVarFlag(func, 'check', d)
-       if check in globals():
-               if globals()[check](func, deps):
-                       return
-
-       global logfile
-       t = data.getVar('T', d, 1)
-       if not t:
-               oe.error("T variable not set")
-               return 0
-       mkdirhier(t)
-       logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
-       runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
-
-       f = open(runfile, "w")
-       f.write("#!/bin/sh -e\n")
-       if data.getVar("OEDEBUG", d): f.write("set -x\n")
-       data.emit_env(f, d)
-
-       f.write("cd %s\n" % os.getcwd())
-       if func: f.write("%s || exit $?\n" % func)
-       f.close()
-       os.chmod(runfile, 0775)
-       if not func:
-               error("Function not specified")
-               raise FuncFailed()
-
-       # open logs
-       si = file('/dev/null', 'r')
-       so = file(logfile, 'a')
-       se = file(logfile, 'a+', 0)
-
-       # dup the existing fds so we dont lose them
-       osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
-       oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
-       ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
-
-       # replace those fds with our own
-       os.dup2(si.fileno(), osi[1])
-       os.dup2(so.fileno(), oso[1])
-       os.dup2(se.fileno(), ose[1])
-
-       # execute function
-       prevdir = os.getcwd()
-       if data.getVarFlag(func, "fakeroot", d):
-               maybe_fakeroot = oe.data.expand("${STAGING_BINDIR}/fakeroot ",d)
-       else:
-               maybe_fakeroot = ''
-       ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile))
-       os.chdir(prevdir)
-
-       # restore the backups
-       os.dup2(osi[0], osi[1])
-       os.dup2(oso[0], oso[1])
-       os.dup2(ose[0], ose[1])
-
-       # close our logs
-       si.close()
-       so.close()
-       se.close()
-
-       # close the backup fds
-       os.close(osi[0])
-       os.close(oso[0])
-       os.close(ose[0])
-
-       if ret==0:
-               if not data.getVar("OEDEBUG"):
-                       os.remove(runfile)
-#                      os.remove(logfile)
-               return
-       else:
-               error("function %s failed" % func)
-               error("see log in %s" % logfile)
-               raise FuncFailed()
+    """Execute a shell OE 'function' Returns true if execution was successful.
+
+    For this, it creates a bash shell script in the tmp dectory, writes the local
+    data into it and finally executes. The output of the shell will end in a log file and stdout.
+
+    Note on directory behavior.  The 'dirs' varflag should contain a list
+    of the directories you need created prior to execution.  The last
+    item in the list is where we will chdir/cd to.
+    """
+    import sys
+
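+    # an optional 'check' varflag names a function that short-circuits
+    # execution when it returns true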
+    deps = data.getVarFlag(func, 'deps', d)
+    check = data.getVarFlag(func, 'check', d)
+    if check in globals():
+        if globals()[check](func, deps):
+            return
+
+    global logfile
+    t = data.getVar('T', d)
+    if not t:
+        return 0
+    t = data.expand(t, d)
+    mkdirhier(t)
+    logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
+    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
+
+    f = open(runfile, "w")
+    f.write("#!/bin/sh -e\n")
+    if data.getVar("OEDEBUG", d): f.write("set -x\n")
+    data.emit_env(f, d)
+
+    f.write("cd %s\n" % os.getcwd())
+    if func: f.write("%s || exit $?\n" % func)
+    f.close()
+    os.chmod(runfile, 0775)
+    if not func:
+        error("Function not specified")
+        raise FuncFailed()
+
+    # open logs
+    si = file('/dev/null', 'r')
+    so = file(logfile, 'a')
+    se = file(logfile, 'a+', 0)
+
+    # dup the existing fds so we dont lose them
+    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
+    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
+    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
+
+    # replace those fds with our own
+    os.dup2(si.fileno(), osi[1])
+    os.dup2(so.fileno(), oso[1])
+    os.dup2(se.fileno(), ose[1])
+
+    # execute function
+    prevdir = os.getcwd()
+    if data.getVarFlag(func, "fakeroot", d):
+        maybe_fakeroot = oe.data.expand("${STAGING_BINDIR}/fakeroot ",d)
+    else:
+        maybe_fakeroot = ''
+    ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile))
+    os.chdir(prevdir)
+
+    # restore the backups
+    os.dup2(osi[0], osi[1])
+    os.dup2(oso[0], oso[1])
+    os.dup2(ose[0], ose[1])
+
+    # close our logs
+    si.close()
+    so.close()
+    se.close()
+
+    # close the backup fds
+    os.close(osi[0])
+    os.close(oso[0])
+    os.close(ose[0])
+
+    if ret==0:
+        if not data.getVar("OEDEBUG"):
+            os.remove(runfile)
+#            os.remove(logfile)
+        return
+    else:
+        error("function %s failed" % func)
+        error("see log in %s" % logfile)
+        raise FuncFailed()
 
 
 _task_cache = []
 
 def exec_task(task, d):
-       """Execute an OE 'task'
-
-          The primary difference between executing a task versus executing
-          a function is that a task exists in the task digraph, and therefore
-          has dependencies amongst other tasks."""
-
-       # check if the task is in the graph..
-       task_graph = data.getVar('_task_graph', d)
-       if not task_graph:
-               task_graph = oe.digraph()
-               data.setVar('_task_graph', task_graph, d)
-       task_cache = data.getVar('_task_cache', d)
-       if not task_cache:
-               task_cache = []
-               data.setVar('_task_cache', task_cache, d)
-       if not task_graph.hasnode(task):
-               raise EventException("", InvalidTask(task, d))
-
-       # check whether this task needs executing..
-       if not data.getVarFlag(task, 'force', d):
-               if stamp_is_current(task, d):
-                       return 1
-
-       # follow digraph path up, then execute our way back down
-       def execute(graph, item):
-               if data.getVarFlag(item, 'task', d):
-                       if item in task_cache:
-                               return 1
-
-                       if task != item:
-                               # deeper than toplevel, exec w/ deps
-                               exec_task(item, d)
-                               return 1
-
-                       try:
-                               debug(1, "Executing task %s" % item)
-                               event.fire(TaskStarted(item, d))
-                               exec_func(item, d)
-                               event.fire(TaskSucceeded(item, d))
-                               task_cache.append(item)
-                       except FuncFailed, reason:
-                               note( "Task failed: %s" % reason )
-                               failedevent = TaskFailed(item, d)
-                               event.fire(failedevent)
-                               raise EventException(None, failedevent)
-
-       # execute
-       task_graph.walkdown(task, execute)
-
-       # make stamp, or cause event and raise exception
-       if not data.getVarFlag(task, 'nostamp', d):
-               mkstamp(task, d)
+    """Execute an OE 'task'
+
+       The primary difference between executing a task and executing
+       a function is that a task exists in the task digraph, and can
+       therefore have dependencies on other tasks."""
+
+    # check if the task is in the graph..
+    task_graph = data.getVar('_task_graph', d)
+    if not task_graph:
+        task_graph = oe.digraph()
+        data.setVar('_task_graph', task_graph, d)
+    task_cache = data.getVar('_task_cache', d)
+    if not task_cache:
+        task_cache = []
+        data.setVar('_task_cache', task_cache, d)
+    if not task_graph.hasnode(task):
+        raise EventException("", InvalidTask(task, d))
+
+    # check whether this task needs executing..
+    if not data.getVarFlag(task, 'force', d):
+        if stamp_is_current(task, d):
+            return 1
+
+    # follow digraph path up, then execute our way back down
+    def execute(graph, item):
+        if data.getVarFlag(item, 'task', d):
+            if item in task_cache:
+                return 1
+
+            if task != item:
+                # deeper than toplevel, exec w/ deps
+                exec_task(item, d)
+                return 1
+
+            try:
+                debug(1, "Executing task %s" % item)
+                event.fire(TaskStarted(item, d))
+                exec_func(item, d)
+                event.fire(TaskSucceeded(item, d))
+                task_cache.append(item)
+            except FuncFailed, reason:
+                note( "Task failed: %s" % reason )
+                failedevent = TaskFailed(item, d)
+                event.fire(failedevent)
+                raise EventException(None, failedevent)
+
+    # execute
+    task_graph.walkdown(task, execute)
+
+    # make stamp, or cause event and raise exception
+    if not data.getVarFlag(task, 'nostamp', d):
+        mkstamp(task, d)
 
 
 def stamp_is_current(task, d, checkdeps = 1):
-       """Check status of a given task's stamp. returns 0 if it is not current and needs updating."""
-       task_graph = data.getVar('_task_graph', d)
-       if not task_graph:
-               task_graph = oe.digraph()
-               data.setVar('_task_graph', task_graph, d)
-       stamp = data.getVar('STAMP', d)
-       if not stamp:
-               return 0
-       stampfile = "%s.%s" % (data.expand(stamp, d), task)
-       if not os.access(stampfile, os.F_OK):
-               return 0
-
-       if checkdeps == 0:
-               return 1
-
-       import stat
-       tasktime = os.stat(stampfile)[stat.ST_MTIME]
-
-       _deps = []
-       def checkStamp(graph, task):
-               # check for existance
-               if data.getVarFlag(task, 'nostamp', d):
-                       return 1
-
-               if not stamp_is_current(task, d, 0):
-                       return 0
-
-               depfile = "%s.%s" % (data.expand(stamp, d), task)
-               deptime = os.stat(depfile)[stat.ST_MTIME]
-               if deptime > tasktime:
-                       return 0
-               return 1
-
-       return task_graph.walkdown(task, checkStamp)
+    """Check status of a given task's stamp. returns 0 if it is not current and needs updating."""
+    task_graph = data.getVar('_task_graph', d)
+    if not task_graph:
+        task_graph = oe.digraph()
+        data.setVar('_task_graph', task_graph, d)
+    stamp = data.getVar('STAMP', d)
+    if not stamp:
+        return 0
+    stampfile = "%s.%s" % (data.expand(stamp, d), task)
+    if not os.access(stampfile, os.F_OK):
+        return 0
+
+    if checkdeps == 0:
+        return 1
+
+    import stat
+    tasktime = os.stat(stampfile)[stat.ST_MTIME]
+
+    _deps = []
+    def checkStamp(graph, task):
+        # check for existence
+        if data.getVarFlag(task, 'nostamp', d):
+            return 1
+
+        if not stamp_is_current(task, d, 0):
+            return 0
+
+        depfile = "%s.%s" % (data.expand(stamp, d), task)
+        deptime = os.stat(depfile)[stat.ST_MTIME]
+        if deptime > tasktime:
+            return 0
+        return 1
+
+    return task_graph.walkdown(task, checkStamp)
 
 
 def md5_is_current(task):
-       """Check if a md5 file for a given task is current""" 
+    """Check if a md5 file for a given task is current"""
 
 
 def mkstamp(task, d):
-       """Creates/updates a stamp for a given task"""
-       mkdirhier(data.expand('${TMPDIR}/stamps', d));
-       stamp = data.getVar('STAMP', d)
-       if not stamp:
-               return
-       stamp = "%s.%s" % (data.expand(stamp, d), task)
-       open(stamp, "w+")
+    """Creates/updates a stamp for a given task"""
+    mkdirhier(data.expand('${TMPDIR}/stamps', d));
+    stamp = data.getVar('STAMP', d)
+    if not stamp:
+        return
+    stamp = "%s.%s" % (data.expand(stamp, d), task)
+    open(stamp, "w+")
 
 
 def add_task(task, deps, d):
-       task_graph = data.getVar('_task_graph', d)
-       if not task_graph:
-               task_graph = oe.digraph()
-               data.setVar('_task_graph', task_graph, d)
-       data.setVarFlag(task, 'task', 1, d)
-       task_graph.addnode(task, None)
-       for dep in deps:
-               if not task_graph.hasnode(dep):
-                       task_graph.addnode(dep, None)
-               task_graph.addnode(task, dep)
+    task_graph = data.getVar('_task_graph', d)
+    if not task_graph:
+        task_graph = oe.digraph()
+        data.setVar('_task_graph', task_graph, d)
+    data.setVarFlag(task, 'task', 1, d)
+    task_graph.addnode(task, None)
+    for dep in deps:
+        if not task_graph.hasnode(dep):
+            task_graph.addnode(dep, None)
+        task_graph.addnode(task, dep)
 
 
 def remove_task(task, kill, d):
-       """Remove an OE 'task'.
+    """Remove an OE 'task'.
 
-          If kill is 1, also remove tasks that depend on this task."""
+       If kill is 1, also remove tasks that depend on this task."""
 
-       task_graph = data.getVar('_task_graph', d)
-       if not task_graph:
-               task_graph = oe.digraph()
-               data.setVar('_task_graph', task_graph, d)
-       if not task_graph.hasnode(task):
-               return
+    task_graph = data.getVar('_task_graph', d)
+    if not task_graph:
+        task_graph = oe.digraph()
+        data.setVar('_task_graph', task_graph, d)
+    if not task_graph.hasnode(task):
+        return
 
-       data.delVarFlag(task, 'task', d)
-       ref = 1
-       if kill == 1:
-               ref = 2
-       task_graph.delnode(task, ref)
+    data.delVarFlag(task, 'task', d)
+    ref = 1
+    if kill == 1:
+        ref = 2
+    task_graph.delnode(task, ref)
 
 def task_exists(task, d):
-       task_graph = data.getVar('_task_graph', d)
-       if not task_graph:
-               task_graph = oe.digraph()
-               data.setVar('_task_graph', task_graph, d)
-       return task_graph.hasnode(task)
+    task_graph = data.getVar('_task_graph', d)
+    if not task_graph:
+        task_graph = oe.digraph()
+        data.setVar('_task_graph', task_graph, d)
+    return task_graph.hasnode(task)
 
 def get_task_data():
-       return _task_data
+    return _task_data
index 03e43e3..0f4ff16 100644 (file)
@@ -6,17 +6,16 @@ import oe
 from oe import *
 
 try:
-       import itertools
+    import itertools
 except ImportError: # itertools appears in Python 2.3
-       from utils import itertools
+    from utils import itertools
 try:
-       import optparse
+    import optparse
 except ImportError: # optparse appears in Python 2.3
-       from utils import optparse
+    from utils import optparse
+parsespin = itertools.cycle( r'|/-\-' )
 
 __version__ = 1.1
-__build_cache_fail = []
-__build_cache = []
 usageinfo = """%prog [options] [package ...]
 
 Builds specified packages, expecting that the .oe files
@@ -25,219 +24,28 @@ Default packages are all packages in OEFILES.
 Default OEFILES are the .oe files in the current directory."""
 
 def usage( ):
-       sys.__stderr__.write(usageinfo.replace('%prog', 'oemake') + "\n")
-       sys.exit(0)
+    sys.__stderr__.write(usageinfo.replace('%prog', 'oemake') + "\n")
+    sys.exit(0)
 
 def handle_options( args ):
-       parser = optparse.OptionParser( version = "OpenEmbedded Build Infrastructure Core version %s, %%prog version %s" % ( oe.__version__, __version__ ), usage = usageinfo )
+    parser = optparse.OptionParser( version = "OpenEmbedded Build Infrastructure Core version %s, %%prog version %s" % ( oe.__version__, __version__ ), usage = usageinfo )
 
-       parser.add_option( "-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
-                          action = "store_false", dest = "abort", default = True )
+    parser.add_option( "-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
+            action = "store_false", dest = "abort", default = True )
 
-       parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
-                          action = "store_true", dest = "force", default = False )
+    parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
+            action = "store_true", dest = "force", default = False )
 
 
-       parser.add_option( "-c", "--cmd", help = "specify command to pass to oebuild",
-                          action = "store", dest = "cmd", default = "build" )
+    parser.add_option( "-c", "--cmd", help = "specify command to pass to oebuild",
+            action = "store", dest = "cmd", default = "build" )
 
-       options, args = parser.parse_args( args )
-       return options, args[1:]
-
-def buildPackage(graph, item):
-       if item in __build_cache:
-               return 1
-       if item in __build_cache_fail:
-               return 0
-       fn = pkgs[item][1]
-       if fn is None:
-               return 1
-       command = options.cmd
-       debug(1, "oebuild %s %s" % (command, fn))
-       event.fire(event.PkgStarted(item, pkgdata[fn]))
-       try:
-               oe.build.exec_task('do_%s' % command, pkgdata[fn])
-               event.fire(event.PkgSucceeded(item, pkgdata[fn]))
-               __build_cache.append(item)
-               del pkgdata[fn]
-               return 1
-       except oe.build.FuncFailed:
-               error("task stack execution failed")
-               event.fire(event.PkgFailed(item, pkgdata[fn]))
-               __build_cache_fail.append(item)
-               del pkgdata[fn]
-               return 0
-       except oe.build.EventException:
-               (type, value, traceback) = sys.exc_info()
-               e = value.event
-               error("%s event exception, aborting" % event.getName(e))
-               event.fire(event.PkgFailed(item, pkgdata[fn]))
-               __build_cache_fail.append(item)
-               del pkgdata[fn]
-               return 0
-       except Exception, e:
-               event.fire(event.PkgFailed(item, pkgdata[fn]))
-               __build_cache_fail.append(item)
-               error("%s" % e)
-               del pkgdata[fn]
-               return 0
-
-def get_oefiles( path = os.getcwd() ):
-       """Get list of default .oe files by reading out the current directory"""
-       contents = os.listdir(path)
-       oefiles = []
-       for f in contents:
-               (root, ext) = os.path.splitext(f)
-               if ext == ".oe":
-                       oefiles.append(os.path.abspath(os.path.join(os.getcwd(),f)))
-       return oefiles
-
-def find_oefiles( path ):
-       """Find all the .oe files in a directory (uses find)"""
-       findcmd = 'find ' + path + ' -name *.oe | grep -v SCCS/'
-       try:
-               finddata = os.popen(findcmd)
-       except OSError:
-               return []
-       return finddata.readlines()
-
-def load_oefile( oefile, cfg ):
-       """Load and parse one .oe build file"""
-       oepath = data.getVar('OEPATH', cfg)
-       topdir = data.getVar('TOPDIR', cfg)
-       if not topdir:
-               topdir = os.path.abspath(os.getcwd())
-               # set topdir to here
-               data.setVar('TOPDIR', topdir, cfg)
-       oefile = os.path.abspath(oefile)
-       oefile_loc = os.path.abspath(os.path.dirname(oefile))
-       # expand tmpdir to include this topdir
-       data.setVar('TMPDIR', data.getVar('TMPDIR', cfg, 1) or "", cfg)
-       # add topdir to oepath
-       oepath += ":%s" % topdir
-       # set topdir to location of .oe file
-       topdir = oefile_loc
-       #data.setVar('TOPDIR', topdir, cfg)
-       # add that topdir to oepath
-       oepath += ":%s" % topdir
-       # go there
-       oldpath = os.path.abspath(os.getcwd())
-       os.chdir(topdir)
-       data.setVar('OEPATH', oepath, cfg)
-       oe = copy.deepcopy(cfg)
-       try:
-               parse.handle(oefile, oe) # read .oe data
-               os.chdir(oldpath)
-               return oe
-       except Exception, e:
-               error("%s" % e)
-               os.chdir(oldpath)
-               return None
-
-def collect_oefiles( cfg ):
-       """Collect all available .oe build files"""
-
-       files = (data.getVar( "OEFILES", cfg, 1 ) or "").split()
-       data.setVar("OEFILES", " ".join(files), cfg)
-
-       if not len(files):
-               files = get_oefiles()
-
-       if not len(files):
-               usage()
-
-       parsespin = itertools.cycle( r'|/-\-' )
-       sys.stdout.write("NOTE: Parsing .oe files: %s" % parsespin.next())
-
-       newfiles = []
-       for f in files:
-               if os.path.isdir(f):
-                       dirfiles = find_oefiles(f)
-                       if dirfiles:
-                               newfiles += dirfiles
-                               continue
-       newfiles += glob.glob(f) or [ f ]
-
-       for f in newfiles:
-               import re
-               oemask = oe.data.getVar('OEMASK', cfg, 1)
-               if oemask:
-                       if re.search(oemask, f):
-                               oe.debug(1, "oemake: skipping %s" % f)
-                               continue
-               # read a file's metadata
-               try:
-                       debug(1, "oemake: parsing %s" % f)
-                       sys.stdout.write("\b \b%s" % parsespin.next())
-                       sys.stdout.flush()
-                       pkgdata[f] = load_oefile(f, cfg)
-                       deps = None
-                       if pkgdata[f] is not None:
-                               # allow metadata files to add items to OEFILES
-                               #data.update_data(pkgdata[f])
-                               addoefiles = data.getVar('OEFILES', pkgdata[f]) or None
-                               if addoefiles:
-                                       for aof in addoefiles.split():
-                                               if not files.count(aof):
-                                                       if not os.path.isabs(aof):
-                                                               aof = os.path.join(os.path.dirname(f),aof)
-                                                       files.append(aof)
-                               for var in pkgdata[f].keys():
-                                       if data.getVarFlag(var, "handler", pkgdata[f]) and data.getVar(var, pkgdata[f]):
-                                               event.register(data.getVar(var, pkgdata[f]))
-                               depstr = data.getVar("DEPENDS", pkgdata[f], 1)
-                               if depstr is not None:
-                                       deps = depstr.split()
-                               pkg = []
-                               pkg.append(data.getVar('CATEGORY', pkgdata[f], 1))
-                               pkg.append(data.getVar('PN', pkgdata[f], 1))
-                               pkg.append(data.getVar('PV', pkgdata[f], 1))
-                               pkg.append(data.getVar('PR', pkgdata[f], 1))
-                               root = "%s/%s-%s-%s" % (pkg[0], pkg[1], pkg[2], pkg[3])
-                               provides = []
-                               providestr = data.getVar("PROVIDES", pkgdata[f], 1)
-                               if providestr is not None:
-                                       provides += providestr.split()
-                               for provide in provides:
-                                       pkgs[provide] = [[root], None]
-                               pkgs[root] = [deps, f]
-               except IOError:
-                       oe.error("opening %s" % f)
-                       pass
-
-       sys.stdout.write("\n")
-
-def build_depgraph( pkgs, graph, cfg ):
-       # add every provide relationship to the dependency graph, depending
-       # on all the packages that provide it
-
-       tokill = []
-       unsatisfied = []
-
-       for pkg in pkgs.keys():
-               graph.addnode(pkg, None)
-
-       for pkg in pkgs.keys():
-               (deps, fn) = pkgs[pkg]
-               if depcmd is not None:
-                       if deps is not None:
-                               for d in deps:
-                                       if not graph.hasnode(d):
-                                               def killitem(graph, item):
-                                                       tokill.append(item)
-                                               graph.walkup(pkg, killitem)
-                                               unsatisfied.append([pkg, d])
-                                               break
-                                       graph.addnode(pkg, d)
-
-       for u in unsatisfied:
-               event.fire(event.UnsatisfiedDep(u[0], pkgdata[pkgs[u[0]][1]], u[1]))
-
-       for k in tokill:
-               def reallykillitem(graph, item):
-                       graph.delnode(item)
-               graph.walkup(k, reallykillitem)
+    options, args = parser.parse_args( args )
+    return options, args[1:]
 
+def myProgressCallback( x, y, f ):
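+    # progress callback handed to make.collect_oefiles: x of y .oe files
+    # parsed so far; the third argument is not used here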
+    sys.stdout.write("\rNOTE: Parsing .oe files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) )
+    sys.stdout.flush()
 
 #
 # main
@@ -245,61 +53,66 @@ def build_depgraph( pkgs, graph, cfg ):
 
 if __name__ == "__main__":
 
-       options, args = handle_options( sys.argv )
-
-       _depcmds = { "clean": None,
-               "mrproper": None,
-               "build": "stage" }
-
-       if not options.cmd:
-               options.cmd = "build"
-
-       if options.cmd in _depcmds:
-               depcmd=_depcmds[options.cmd]
-       else:
-               depcmd=options.cmd
-
-       pkgdata = {}
-       pkgs = {}
-       cfg = {}
-       graph = digraph()
-
-       try:
-               cfg = parse.handle("conf/oe.conf", cfg)
-       except IOError:
-               fatal("Unable to open oe.conf")
-
-       if not data.getVar("BUILDNAME", cfg):
-               data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), cfg)
-
-       buildname = data.getVar("BUILDNAME", cfg)
-
-       collect_oefiles( cfg )
-       build_depgraph( pkgs, graph, cfg )
-       
-       event.fire(event.BuildStarted(buildname, graph.okeys, cfg))
-
-       pkgs_to_build = None
-       if args:
-               if not pkgs_to_build:
-                       pkgs_to_build = []
-               pkgs_to_build.extend(args)
-       if not pkgs_to_build:
-               oepkgs = data.getVar('OEPKGS', cfg, 1)
-               if oepkgs:
-                       pkgs_to_build = string.split(oepkgs)
-       if not pkgs_to_build:
-               pkgs_to_build = graph.okeys
-       debug(1, "building: %s" % pkgs_to_build)
-
-       for k in pkgs_to_build:
-               if k in pkgs:
-                       ret = graph.walkdown(k, buildPackage)
-                       if options.abort and not ret:
-                               fatal("Build of %s failed, aborting." % k)
-               else:
-                       error("Unable to build %s: no .oe file provides it." % k)
-                       if options.abort:
-                               sys.exit(1)
-
-       event.fire(event.BuildCompleted(buildname, graph.okeys, cfg))
+    make.options, args = handle_options( sys.argv )
+
+    _depcmds = { "clean": None,
+        "mrproper": None,
+        "build": "stage" }
+
+    if not make.options.cmd:
+        make.options.cmd = "build"
+
+    if make.options.cmd in _depcmds:
+        depcmd=_depcmds[make.options.cmd]
+    else:
+        depcmd=make.options.cmd
+
+    make.pkgdata = {}
+    make.pkgs = {}
+    make.cfg = {}
+    make.graph = digraph()
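+    # per-build state is kept as attributes on the make module, whose
+    # collect_oefiles, build_depgraph and buildPackage helpers are used below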
+
+    try:
+        make.cfg = parse.handle("conf/oe.conf", make.cfg)
+    except IOError:
+        fatal("Unable to open oe.conf")
+
+    if not data.getVar("BUILDNAME", make.cfg):
+        data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), make.cfg)
+
+    buildname = data.getVar("BUILDNAME", make.cfg)
+
+    try:
+        make.collect_oefiles( myProgressCallback )
+        sys.stdout.write("\n")
+        make.build_depgraph( depcmd )
+
+        event.fire(event.BuildStarted(buildname, make.graph.okeys, make.cfg))
+
+        pkgs_to_build = None
+        if args:
+            if not pkgs_to_build:
+                pkgs_to_build = []
+            pkgs_to_build.extend(args)
+        if not pkgs_to_build:
+            oepkgs = data.getVar('OEPKGS', make.cfg, 1)
+            if oepkgs:
+                pkgs_to_build = string.split(oepkgs)
+        if not pkgs_to_build:
+            pkgs_to_build = make.graph.okeys
+        debug(1, "building: %s" % pkgs_to_build)
+
+        for k in pkgs_to_build:
+            if k in make.pkgs:
+                ret = make.graph.walkdown(k, make.buildPackage)
+                if make.options.abort and not ret:
+                    fatal("Build of %s failed, aborting." % k)
+            else:
+                error("Unable to build %s: no .oe file provides it." % k)
+                if make.options.abort:
+                    sys.exit(1)
+
+        event.fire(event.BuildCompleted(buildname, make.graph.okeys, make.cfg))
+
+    except KeyboardInterrupt:
+        print "\nNOTE: KeyboardInterrupt - Build not completed."