1 BB_DEFAULT_TASK ?= "build"
3 # like os.path.join but doesn't treat absolute RHS specially
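# Illustrative example (not from the original): base_path_join("/usr", "bin", "gcc")
# returns "/usr/bin/gcc"; unlike os.path.join, an absolute right-hand side such as
# "/bin" is appended to the left side rather than replacing everything before it.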
4 def base_path_join(a, *p):
7 if path == '' or path.endswith('/'):
13 def base_path_relative(src, dest):
14 """ Return a relative path from src to dest.
16 >>> base_path_relative("/usr/bin", "/tmp/foo/bar")
19 >>> base_path_relative("/usr/bin", "/usr/lib")
22 >>> base_path_relative("/tmp", "/tmp/foo/bar")
25 from os.path import sep, pardir, normpath, commonprefix
27 destlist = normpath(dest).split(sep)
28 srclist = normpath(src).split(sep)
30 # Find common section of the path
31 common = commonprefix([destlist, srclist])
32 commonlen = len(common)
34 # Climb back to the point where they differentiate
35 relpath = [ pardir ] * (len(srclist) - commonlen)
36 if commonlen < len(destlist):
37 # Add remaining portion
38 relpath += destlist[commonlen:]
40 return sep.join(relpath)
42 def base_path_out(path, d):
43 """ Prepare a path for display to the user. """
44 rel = base_path_relative(d.getVar("TOPDIR", 1), path)
45 if len(rel) > len(path):
50 # for MD5/SHA handling
51 def base_chk_load_parser(config_paths):
53 parser = ConfigParser.ConfigParser()
54 if len(parser.read(config_paths)) < 1:
55 raise ValueError("no ini files could be found")
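# base_chk_file_vars() below looks checksums up as SRC_URI varflags keyed by the
# ;name= parameter of the URL. A recipe-side entry looks roughly like this
# (illustrative values only):
#
#   SRC_URI = "http://example.org/foo-1.0.tar.gz;name=foo"
#   SRC_URI[foo.md5sum] = "d41d8cd98f00b204e9800998ecf8427e"
#   SRC_URI[foo.sha256sum] = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"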
59 def base_chk_file_vars(parser, localpath, params, data):
64 flagName = "%s.md5sum" % name
65 want_md5sum = bb.data.getVarFlag("SRC_URI", flagName, data)
66 flagName = "%s.sha256sum" % name
67 want_sha256sum = bb.data.getVarFlag("SRC_URI", flagName, data)
69 if (want_sha256sum == None and want_md5sum == None):
70 # no checksums to check, nothing to do
73 if not os.path.exists(localpath):
74 localpath = base_path_out(localpath, data)
75 bb.note("The localpath does not exist '%s'" % localpath)
76 raise Exception("The path does not exist '%s'" % localpath)
80 md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
81 md5data = (md5pipe.readline().split() or [ "" ])[0]
84 raise Exception("Executing md5sum failed")
85 if want_md5sum != md5data:
86 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (want_md5sum, md5data))
87 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (want_md5sum, md5data))
91 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
92 sha256data = (shapipe.readline().split() or [ "" ])[0]
95 raise Exception("Executing shasum failed")
96 if want_sha256sum != sha256data:
97 bb.note("The SHA256Sums did not match. Wanted: '%s' and Got: '%s'" % (want_sha256sum, sha256data))
98 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (want_sha256sum, sha256data))
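# base_chk_file() below falls back to conf/checksums.ini, whose sections are keyed
# by "PN-PV-SRC_URI", "PN-SRC_URI" or the plain SRC_URI. An entry looks roughly
# like this (illustrative values only):
#
#   [http://example.org/foo-1.0.tar.gz]
#   md5=d41d8cd98f00b204e9800998ecf8427e
#   sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855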
103 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
105 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
106 # we rely on the get method to raise errors
107 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
108 pn_src = "%s-%s" % (pn,src_uri)
109 if parser.has_section(pn_pv_src):
110 md5 = parser.get(pn_pv_src, "md5")
111 sha256 = parser.get(pn_pv_src, "sha256")
112 elif parser.has_section(pn_src):
113 md5 = parser.get(pn_src, "md5")
114 sha256 = parser.get(pn_src, "sha256")
115 elif parser.has_section(src_uri):
116 md5 = parser.get(src_uri, "md5")
117 sha256 = parser.get(src_uri, "sha256")
121 # md5 and sha256 should be valid now
122 if not os.path.exists(localpath):
123 localpath = base_path_out(localpath, data)
124 bb.note("The localpath does not exist '%s'" % localpath)
125 raise Exception("The path does not exist '%s'" % localpath)
128 # call md5(sum) and shasum
130 md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
131 md5data = (md5pipe.readline().split() or [ "" ])[0]
134 raise Exception("Executing md5sum failed")
137 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
138 shadata = (shapipe.readline().split() or [ "" ])[0]
141 raise Exception("Executing shasum failed")
143 if no_checksum == True: # we do not have conf/checksums.ini entry
145 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
150 raise Exception("Creating checksums.ini failed")
152 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
155 from string import maketrans
156 trtable = maketrans("", "")
157 uname = src_uri.split("/")[-1].translate(trtable, "-+._")
160 ufile = open("%s/%s.sum" % (bb.data.getVar("TMPDIR", data, 1), uname), "wt")
165 raise Exception("Creating %s.sum failed" % uname)
167 ufile.write("SRC_URI = \"%s;name=%s\"\nSRC_URI[%s.md5sum] = \"%s\"\nSRC_URI[%s.sha256sum] = \"%s\"\n" % (src_uri, uname, uname, md5data, uname, shadata))
170 if not bb.data.getVar("OE_STRICT_CHECKSUMS",data, True):
171 bb.note("This package has no entry in checksums.ini, please add one")
172 bb.note("\n[%s]\nmd5=%s\nsha256=%s" % (src_uri, md5data, shadata))
173 bb.note("This package has no checksums in corresponding recipe, please add")
174 bb.note("SRC_URI = \"%s;name=%s\"\nSRC_URI[%s.md5sum] = \"%s\"\nSRC_URI[%s.sha256sum] = \"%s\"\n" % (src_uri, uname, uname, md5data, uname, shadata))
177 bb.note("Missing checksum")
180 if not md5 == md5data:
181 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
182 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
184 if not sha256 == shadata:
185 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
186 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
191 def base_dep_prepend(d):
193 # Ideally this will check a flag so we will operate properly in
194 # the case where host == build == target; for now we don't work in that case.
197 deps = "shasum-native coreutils-native"
198 if bb.data.getVar('PN', d, True) == "shasum-native" or bb.data.getVar('PN', d, True) == "stagemanager-native":
200 if bb.data.getVar('PN', d, True) == "coreutils-native":
201 deps = "shasum-native"
203 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
204 # we need that built is the responsibility of the patch function / class, not this one.
206 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
207 if (bb.data.getVar('HOST_SYS', d, 1) !=
208 bb.data.getVar('BUILD_SYS', d, 1)):
209 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
212 def base_read_file(filename):
214 f = file( filename, "r" )
215 except IOError, reason:
216 return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
218 return f.read().strip()
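# Illustrative use from a recipe (hypothetical path):
#   CMDLINE = "${@base_read_file('${WORKDIR}/cmdline')}"
# An unreadable file silently yields "" because of the IOError handling above.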
221 def base_ifelse(condition, iftrue = True, iffalse = False):
227 def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
228 if bb.data.getVar(variable,d,1) == checkvalue:
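# Illustrative use (hypothetical variable and values):
#   EXTRA_OECONF += "${@base_conditional('TARGET_FPU', 'soft', '--without-fp', '--with-fp', d)}"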
233 def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
234 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
239 def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
240 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
246 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
247 val = bb.data.getVar(variable,d,1)
251 if type(checkvalues).__name__ == "str":
252 checkvalues = [checkvalues]
253 for value in checkvalues:
254 if val.find(value) != -1:
255 matches = matches + 1
256 if matches == len(checkvalues):
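# Illustrative use (hypothetical values): add a dependency only when a feature
# word appears in a space-separated list variable:
#   DEPENDS += "${@base_contains('DISTRO_FEATURES', 'x11', 'libx11', '', d)}"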
260 def base_both_contain(variable1, variable2, checkvalue, d):
261 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
266 DEPENDS_prepend="${@base_dep_prepend(d)} "
267 DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
268 DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
270 def base_prune_suffix(var, suffixes, d):
271 # See if var ends with any of the suffixes listed and, if so, remove it.
273 for suffix in suffixes:
274 if var.endswith(suffix):
275 return var.replace(suffix, "")
278 def base_set_filespath(path, d):
279 bb.note("base_set_filespath usage is deprecated, %s should be fixed" % d.getVar("P", 1))
281 # The ":" ensures we have an 'empty' override
282 overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
284 for o in overrides.split(":"):
285 filespath.append(os.path.join(p, o))
286 return ":".join(filespath)
288 def oe_filter(f, str, d):
290 return " ".join(filter(lambda x: match(f, x, 0), str.split()))
292 def oe_filter_out(f, str, d):
294 return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
315 echo "Usage: oedebug level \"message\""
319 test ${OEDEBUG:-0} -ge $1 && {
326 if [ x"$MAKE" = x ]; then MAKE=make; fi
327 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
328 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
332 # Purpose: Install shared library file and
333 # create the necessary links
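# Example (illustrative):
#
# oe_soinstall libfoo.so.1.2.3 ${D}${libdir}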
338 #oenote installing shared library $1 to $2
340 libname=`basename $1`
341 install -m 755 $1 $2/$libname
342 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
343 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
344 ln -sf $libname $2/$sonamelink
345 ln -sf $libname $2/$solink
349 # Purpose: Install a library, in all its forms
352 # oe_libinstall libltdl ${STAGING_LIBDIR}/
353 # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
360 while [ "$#" -gt 0 ]; do
376 oefatal "oe_libinstall: unknown option: $1"
388 if [ -z "$destpath" ]; then
389 oefatal "oe_libinstall: no destination path specified"
391 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
397 if [ -z "$silent" ]; then
398 echo >&2 "oe_libinstall: $*"
403 if [ -z "$dir" ]; then
409 # Sanity check that the libname.lai is unique
410 number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
411 if [ $number_of_files -gt 1 ]; then
412 oefatal "oe_libinstall: $dotlai is not unique in $dir"
416 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
422 # If no such file exists, try cutting off the version suffix
423 if [ ! -f "$lafile" ]; then
424 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
426 if [ -f "$lafile1" ]; then
432 if [ -f "$lafile" ]; then
434 eval `cat $lafile|grep "^library_names="`
437 library_names="$libname.so* $libname.dll.a"
440 __runcmd install -d $destpath/
442 if [ -f "$dota" -o -n "$require_static" ]; then
443 __runcmd install -m 0644 $dota $destpath/
445 if [ -f "$dotlai" -a -n "$libtool" ]; then
446 if test -n "$staging_install"
448 # stop libtool from using the final directory name for libraries
450 __runcmd rm -f $destpath/$libname.la
451 __runcmd sed -e 's/^installed=yes$/installed=no/' \
452 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
453 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
454 $dotlai >$destpath/$libname.la
456 __runcmd install -m 0644 $dotlai $destpath/$libname.la
460 for name in $library_names; do
461 files=`eval echo $name`
463 if [ ! -e "$f" ]; then
464 if [ -n "$libtool" ]; then
465 oefatal "oe_libinstall: $dir/$f not found."
467 elif [ -L "$f" ]; then
468 __runcmd cp -P "$f" $destpath/
469 elif [ ! -L "$f" ]; then
471 __runcmd install -m 0755 $libfile $destpath/
476 if [ -z "$libfile" ]; then
477 if [ -n "$require_shared" ]; then
478 oefatal "oe_libinstall: unable to locate shared library"
480 elif [ -z "$libtool" ]; then
481 # special case hack for non-libtool .so.#.#.# links
482 baselibfile=`basename "$libfile"`
483 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
484 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
485 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
486 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
487 __runcmd ln -sf $baselibfile $destpath/$sonamelink
489 __runcmd ln -sf $baselibfile $destpath/$solink
493 __runcmd cd "$olddir"
496 def package_stagefile(file, d):
498 if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
499 destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
500 bb.mkdirhier(os.path.dirname(destfile))
501 #print "%s to %s" % (file, destfile)
502 bb.copyfile(file, destfile)
504 package_stagefile_shell() {
505 if [ "$PSTAGING_ACTIVE" = "1" ]; then
507 destfile=`echo $srcfile | sed s#${TMPDIR}#${PSTAGE_TMPDIR_STAGE}#`
508 destdir=`dirname $destfile`
510 cp -dp $srcfile $destfile
515 # Purpose: Install machine-dependent files, if available
516 # If not available, check if there is a default
517 # If no default, just touch the destination
520 # oe_machinstall -m 0644 fstab ${D}/etc/fstab
522 # TODO: Check argument number?
524 filename=`basename $3`
527 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
528 if [ -e $dirname/$o/$filename ]; then
529 oenote $dirname/$o/$filename present, installing to $4
530 install $1 $2 $dirname/$o/$filename $4
534 # oenote overrides specific file NOT present, trying default=$3...
536 oenote $3 present, installing to $4
539 oenote $3 NOT present, touching empty $4
545 do_listtasks[nostamp] = "1"
546 python do_listtasks() {
548 # emit variables and shell functions
549 #bb.data.emit_env(sys.__stdout__, d)
550 # emit the metadata which isn't valid shell
552 if bb.data.getVarFlag(e, 'task', d):
553 sys.__stdout__.write("%s\n" % e)
557 do_clean[dirs] = "${TOPDIR}"
558 do_clean[nostamp] = "1"
559 python base_do_clean() {
560 """clear the build and temp directories"""
561 dir = bb.data.expand("${WORKDIR}", d)
562 if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
563 bb.note("removing " + base_path_out(dir, d))
564 os.system('rm -rf ' + dir)
566 dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
567 bb.note("removing " + base_path_out(dir, d))
568 os.system('rm -f '+ dir)
571 python do_cleanall() {
574 do_cleanall[recrdeptask] = "do_clean"
575 addtask cleanall after do_clean
577 addtask rebuild after do_${BB_DEFAULT_TASK}
578 do_rebuild[dirs] = "${TOPDIR}"
579 do_rebuild[nostamp] = "1"
580 python base_do_rebuild() {
581 """rebuild a package"""
585 do_mrproper[dirs] = "${TOPDIR}"
586 do_mrproper[nostamp] = "1"
587 python base_do_mrproper() {
588 """clear downloaded sources, build and temp directories"""
589 dir = bb.data.expand("${DL_DIR}", d)
590 if dir == '/': raise bb.build.FuncFailed("wrong DATADIR")
591 bb.debug(2, "removing " + dir)
592 os.system('rm -rf ' + dir)
593 bb.build.exec_func('do_clean', d)
597 do_distclean[dirs] = "${TOPDIR}"
598 do_distclean[nostamp] = "1"
599 python base_do_distclean() {
600 """clear downloaded sources, build and temp directories"""
602 bb.build.exec_func('do_clean', d)
604 src_uri = bb.data.getVar('SRC_URI', d, 1)
608 for uri in src_uri.split():
609 if bb.decodeurl(uri)[0] == "file":
613 local = bb.data.expand(bb.fetch.localpath(uri, d), d)
614 except bb.MalformedUrl, e:
615 bb.debug(1, 'Unable to generate local path for malformed uri: %s' % e)
617 bb.note("removing %s" % base_path_out(local, d))
619 if os.path.exists(local + ".md5"):
620 os.remove(local + ".md5")
621 if os.path.exists(local):
624 bb.note("Error in removal: %s" % e)
627 SCENEFUNCS += "base_scenefunction"
629 python base_do_setscene () {
630 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
631 bb.build.exec_func(f, d)
632 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
633 bb.build.make_stamp("do_setscene", d)
635 do_setscene[selfstamp] = "1"
636 addtask setscene before do_fetch
638 python base_scenefunction () {
639 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
640 if os.path.exists(stamp):
641 bb.build.exec_func("do_clean", d)
646 do_fetch[dirs] = "${DL_DIR}"
647 do_fetch[depends] = "shasum-native:do_populate_staging"
648 python base_do_fetch() {
651 localdata = bb.data.createCopy(d)
652 bb.data.update_data(localdata)
654 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
659 bb.fetch.init(src_uri.split(),d)
660 except bb.fetch.NoMethodError:
661 (type, value, traceback) = sys.exc_info()
662 raise bb.build.FuncFailed("No method: %s" % value)
663 except bb.MalformedUrl:
664 (type, value, traceback) = sys.exc_info()
665 raise bb.build.FuncFailed("Malformed URL: %s" % value)
668 bb.fetch.go(localdata)
669 except bb.fetch.MissingParameterError:
670 (type, value, traceback) = sys.exc_info()
671 raise bb.build.FuncFailed("Missing parameters: %s" % value)
672 except bb.fetch.FetchError:
673 (type, value, traceback) = sys.exc_info()
674 raise bb.build.FuncFailed("Fetch failed: %s" % value)
675 except bb.fetch.MD5SumError:
676 (type, value, traceback) = sys.exc_info()
677 raise bb.build.FuncFailed("MD5 failed: %s" % value)
679 (type, value, traceback) = sys.exc_info()
680 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
683 # Verify the SHA and MD5 sums we have in OE and check what the downloaded files have.
685 checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")
687 # reverse the list to give precedence to directories that
688 # appear first in BBPATH
689 checksum_paths.reverse()
691 checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
693 parser = base_chk_load_parser(checksum_files)
695 bb.note("No conf/checksums.ini found, not checking checksums")
698 bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
701 pv = bb.data.getVar('PV', d, True)
702 pn = bb.data.getVar('PN', d, True)
705 for url in src_uri.split():
706 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
707 (type,host,path,_,_,params) = bb.decodeurl(url)
708 uri = "%s://%s%s" % (type,host,path)
710 if type in [ "http", "https", "ftp", "ftps" ]:
711 if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
712 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
713 bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
715 bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
717 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
720 addtask fetchall after do_fetch
721 do_fetchall[recrdeptask] = "do_fetch"
727 do_checkuri[nostamp] = "1"
728 python do_checkuri() {
731 localdata = bb.data.createCopy(d)
732 bb.data.update_data(localdata)
734 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
737 bb.fetch.init(src_uri.split(),d)
738 except bb.fetch.NoMethodError:
739 (type, value, traceback) = sys.exc_info()
740 raise bb.build.FuncFailed("No method: %s" % value)
743 bb.fetch.checkstatus(localdata)
744 except bb.fetch.MissingParameterError:
745 (type, value, traceback) = sys.exc_info()
746 raise bb.build.FuncFailed("Missing parameters: %s" % value)
747 except bb.fetch.FetchError:
748 (type, value, traceback) = sys.exc_info()
749 raise bb.build.FuncFailed("Fetch failed: %s" % value)
750 except bb.fetch.MD5SumError:
751 (type, value, traceback) = sys.exc_info()
752 raise bb.build.FuncFailed("MD5 failed: %s" % value)
754 (type, value, traceback) = sys.exc_info()
755 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
758 addtask checkuriall after do_checkuri
759 do_checkuriall[recrdeptask] = "do_checkuri"
760 do_checkuriall[nostamp] = "1"
761 base_do_checkuriall() {
765 addtask buildall after do_build
766 do_buildall[recrdeptask] = "do_build"
771 def subprocess_setup():
773 # Python installs a SIGPIPE handler by default. This is usually not what
774 # non-Python subprocesses expect.
775 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
777 def oe_unpack_file(file, data, url = None):
780 url = "file://%s" % file
781 dots = file.split(".")
782 if dots[-1] in ['gz', 'bz2', 'Z', 'xz', 'lz']:
783 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
787 if file.endswith('.tar'):
788 cmd = 'tar x --no-same-owner -f %s' % file
789 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
790 cmd = 'tar xz --no-same-owner -f %s' % file
791 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
792 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
793 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
794 cmd = 'gzip -dc %s > %s' % (file, efile)
795 elif file.endswith('.bz2'):
796 cmd = 'bzip2 -dc %s > %s' % (file, efile)
797 elif file.endswith('.tar.xz'):
798 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
799 elif file.endswith('.xz'):
800 cmd = 'xz -dc %s > %s' % (file, efile)
801 elif file.endswith('.tar.lz'):
802 cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
803 elif file.endswith('.lz'):
804 cmd = 'lzip -dc %s > %s' % (file, efile)
805 elif file.endswith('.zip') or file.endswith('.jar'):
807 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
810 cmd = '%s %s' % (cmd, file)
811 elif os.path.isdir(file):
813 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
815 if file[0:len(fp)] == fp:
816 destdir = file[len(fp):file.rfind('/')]
817 destdir = destdir.strip('/')
820 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
821 os.makedirs("%s/%s" % (os.getcwd(), destdir))
824 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
826 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
827 if not 'patch' in parm:
828 # The "destdir" handling was specifically done for FILESPATH
829 # items. So, only do so for file:// entries.
831 destdir = bb.decodeurl(url)[1] or "."
834 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
835 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
840 dest = os.path.join(os.getcwd(), os.path.basename(file))
841 if os.path.exists(dest):
842 if os.path.samefile(file, dest):
845 # Change to subdir before executing command
846 save_cwd = os.getcwd();
847 parm = bb.decodeurl(url)[5]
849 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
853 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
854 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
855 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
861 addtask unpack after do_fetch
862 do_unpack[dirs] = "${WORKDIR}"
863 python base_do_unpack() {
866 localdata = bb.data.createCopy(d)
867 bb.data.update_data(localdata)
869 src_uri = bb.data.getVar('SRC_URI', localdata)
872 src_uri = bb.data.expand(src_uri, localdata)
873 for url in src_uri.split():
875 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
876 except bb.MalformedUrl, e:
877 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
879 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
880 local = os.path.realpath(local)
881 ret = oe_unpack_file(local, localdata, url)
883 raise bb.build.FuncFailed()
886 METADATA_SCM = "${@base_get_scm(d)}"
887 METADATA_REVISION = "${@base_get_scm_revision(d)}"
888 METADATA_BRANCH = "${@base_get_scm_branch(d)}"
892 baserepo = os.path.dirname(os.path.dirname(which(d.getVar("BBPATH", 1), "classes/base.bbclass")))
893 for (scm, scmpath) in {"svn": ".svn",
895 "monotone": "_MTN"}.iteritems():
896 if os.path.exists(os.path.join(baserepo, scmpath)):
897 return "%s %s" % (scm, baserepo)
898 return "<unknown> %s" % baserepo
900 def base_get_scm_revision(d):
901 (scm, path) = d.getVar("METADATA_SCM", 1).split()
903 if scm != "<unknown>":
904 return globals()["base_get_metadata_%s_revision" % scm](path, d)
910 def base_get_scm_branch(d):
911 (scm, path) = d.getVar("METADATA_SCM", 1).split()
913 if scm != "<unknown>":
914 return globals()["base_get_metadata_%s_branch" % scm](path, d)
920 def base_get_metadata_monotone_branch(path, d):
921 monotone_branch = "<unknown>"
923 monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
924 if monotone_branch.startswith( "database" ):
925 monotone_branch_words = monotone_branch.split()
926 monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
929 return monotone_branch
931 def base_get_metadata_monotone_revision(path, d):
932 monotone_revision = "<unknown>"
934 monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
935 if monotone_revision.startswith( "format_version" ):
936 monotone_revision_words = monotone_revision.split()
937 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
940 return monotone_revision
942 def base_get_metadata_svn_revision(path, d):
943 revision = "<unknown>"
945 revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
950 def base_get_metadata_git_branch(path, d):
951 branch = os.popen('cd %s; PATH=%s git symbolic-ref HEAD 2>/dev/null' % (path, d.getVar("PATH", 1))).read().rstrip()
954 return branch.replace("refs/heads/", "")
957 def base_get_metadata_git_revision(path, d):
958 rev = os.popen("cd %s; PATH=%s git show-ref HEAD 2>/dev/null" % (path, d.getVar("PATH", 1))).read().split(" ")[0].rstrip()
964 addhandler base_eventhandler
965 python base_eventhandler() {
966 from bb import note, error, data
967 from bb.event import Handled, NotHandled, getName
971 if name == "TaskCompleted":
972 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
973 elif name == "UnsatisfiedDep":
974 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
978 # Only need to output when using 1.8 or lower; the UI code handles it otherwise
980 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
984 if name.startswith("BuildStarted"):
985 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
986 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
987 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
988 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
991 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
993 for v in needed_vars:
994 val = bb.data.getVar(v, e.data, 1)
995 if not val or val == 'INVALID':
998 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
1001 # Handle removing stamps for 'rebuild' task
1003 if name.startswith("StampUpdate"):
1004 for (fn, task) in e.targets:
1005 #print "%s %s" % (task, fn)
1006 if task == "do_rebuild":
1007 dir = "%s.*" % e.stampPrefix[fn]
1008 bb.note("Removing stamps: " + dir)
1009 os.system('rm -f '+ dir)
1010 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
1012 if not data in e.__dict__:
1015 log = data.getVar("EVENTLOG", e.data, 1)
1017 logfile = file(log, "a")
1018 logfile.write("%s\n" % msg)
1024 addtask configure after do_unpack do_patch
1025 do_configure[dirs] = "${S} ${B}"
1026 do_configure[deptask] = "do_populate_staging"
1027 base_do_configure() {
1031 addtask compile after do_configure
1032 do_compile[dirs] = "${S} ${B}"
1034 if [ -e Makefile -o -e makefile ]; then
1035 oe_runmake || die "make failed"
1037 oenote "nothing to compile"
1042 sysroot_stage_dir() {
1045 # This will remove empty directories so we can ignore them
1046 rmdir "$src" 2> /dev/null || true
1047 if [ -d "$src" ]; then
1049 cp -fpPR "$src"/* "$dest"
1053 sysroot_stage_libdir() {
1059 las=$(find . -name \*.la -type f)
1061 echo "Found la files: $las"
1064 sed -e 's/^installed=yes$/installed=no/' \
1065 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
1066 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
1069 sysroot_stage_dir $src $dest
1072 sysroot_stage_dirs() {
1076 sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
1077 if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
1078 sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
1079 sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
1080 sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
1081 sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
1082 sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
1083 if [ "${prefix}/lib" != "${libdir}" ]; then
1084 # python puts its files in here, make sure they are staged as well
1085 sysroot_stage_dir $from/${prefix}/lib $to${STAGING_DIR_HOST}${prefix}/lib
1088 if [ -d $from${libdir} ]
1090 sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
1092 if [ -d $from${base_libdir} ]
1094 sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
1096 sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
1099 sysroot_stage_all() {
1100 sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
1103 def is_legacy_staging(d):
1104 stagefunc = bb.data.getVar('do_stage', d, True)
1106 if stagefunc is None:
1108 elif stagefunc.strip() == "autotools_stage_all":
1110 elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
1112 elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
1114 if bb.data.getVar('PSTAGE_BROKEN_DESTDIR', d, 1) == "1":
1116 if bb.data.getVar('FORCE_LEGACY_STAGING', d, 1) == "1":
1120 do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
1121 ${STAGING_DIR_TARGET}/${includedir} \
1122 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
1123 ${STAGING_INCDIR_NATIVE} \
1124 ${STAGING_DATADIR} \
1127 # Could be 'after do_compile', but populate_staging and do_install shouldn't run at the same time
1128 addtask populate_staging after do_install
1130 SYSROOT_PREPROCESS_FUNCS ?= ""
1131 SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
1132 SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
1134 python populate_staging_prehook () {
1138 python populate_staging_posthook () {
1142 packagedstaging_fastpath () {
1146 python do_populate_staging () {
1148 # if do_stage exists, we're legacy. In that case run the do_stage,
1149 # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
1150 # functions against staging directly.
1152 # Otherwise setup a destdir, copy the results from do_install
1153 # and run the staging preprocess against that
1155 pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
1156 lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
1157 stagefunc = bb.data.getVar('do_stage', d, True)
1158 legacy = is_legacy_staging(d)
1160 bb.data.setVar("SYSROOT_DESTDIR", "", d)
1161 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1162 if bb.data.getVarFlags('do_stage', d) is None:
1163 bb.fatal("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
1164 lock = bb.utils.lockfile(lockfile)
1165 bb.build.exec_func('populate_staging_prehook', d)
1166 bb.build.exec_func('do_stage', d)
1167 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1168 bb.build.exec_func(f, d)
1169 bb.build.exec_func('populate_staging_posthook', d)
1170 bb.utils.unlockfile(lock)
1172 dest = bb.data.getVar('D', d, True)
1173 sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
1174 bb.mkdirhier(sysrootdest)
1176 bb.build.exec_func("sysroot_stage_all", d)
1177 #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
1178 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1179 bb.build.exec_func(f, d)
1180 bb.build.exec_func("packagedstaging_fastpath", d)
1182 lock = bb.utils.lockfile(lockfile)
1183 os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
1184 bb.utils.unlockfile(lock)
1187 addtask install after do_compile
1188 do_install[dirs] = "${D} ${S} ${B}"
1189 # Remove and re-create ${D} so that it is guaranteed to be empty
1190 do_install[cleandirs] = "${D}"
1200 addtask build after do_populate_staging
1202 do_build[func] = "1"
1206 # Functions that update metadata based on files output
1207 # during the build process.
1209 def explode_deps(s):
1221 r[-1] += ' ' + ' '.join(j)
1226 # Make sure MACHINE isn't exported
1227 # (breaks binutils at least)
1228 MACHINE[unexport] = "1"
1230 # Make sure TARGET_ARCH isn't exported
1231 # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
1232 # in them, undocumented)
1233 TARGET_ARCH[unexport] = "1"
1235 # Make sure DISTRO isn't exported
1236 # (breaks sysvinit at least)
1237 DISTRO[unexport] = "1"
1240 def base_after_parse(d):
1243 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
1244 if not source_mirror_fetch:
1245 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
1248 this_host = bb.data.getVar('HOST_SYS', d, 1)
1249 if not re.match(need_host, this_host):
1250 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
1252 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
1255 this_machine = bb.data.getVar('MACHINE', d, 1)
1256 if this_machine and not re.match(need_machine, this_machine):
1257 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
1259 pn = bb.data.getVar('PN', d, 1)
1261 # OBSOLETE in bitbake 1.7.4
1262 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
1264 bb.data.setVar('SRCDATE', srcdate, d)
1266 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
1268 bb.data.setVar('USE_NLS', use_nls, d)
1270 # Git packages should DEPEND on git-native
1271 srcuri = bb.data.getVar('SRC_URI', d, 1)
1272 if "git://" in srcuri:
1273 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
1274 depends = depends + " git-native:do_populate_staging"
1275 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
1277 # 'multimachine' handling
1278 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
1279 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
1281 if (pkg_arch == mach_arch):
1282 # Already machine specific - nothing further to do
1286 # We always try to scan SRC_URI for urls with machine overrides
1287 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
1289 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
1292 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
1293 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
1294 if os.path.isdir(path):
1297 for s in srcuri.split():
1298 if not s.startswith("file://"):
1300 local = bb.data.expand(bb.fetch.localpath(s, d), d)
1302 if local.startswith(mp):
1303 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
1304 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
1305 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
1308 multiarch = pkg_arch
1310 packages = bb.data.getVar('PACKAGES', d, 1).split()
1311 for pkg in packages:
1312 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
1314 # We could look for != PACKAGE_ARCH here but how to choose
1315 # if multiple differences are present?
1316 # Look through PACKAGE_ARCHS for the priority order?
1317 if pkgarch and pkgarch == mach_arch:
1318 multiarch = mach_arch
1321 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
1325 if is_legacy_staging(d):
1326 bb.debug(1, "Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1327 if bb.data.getVarFlags('do_stage', d) is None:
1328 bb.error("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
1332 def check_app_exists(app, d):
1333 from bb import which, data
1335 app = data.expand(app, d)
1336 path = data.getVar('PATH', d, 1)
1337 return len(which(path, app)) != 0
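# Illustrative use (hypothetical): check_app_exists('${MAKE}', d) returns True
# only if the named program can be found in PATH after variable expansion.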
1339 def check_gcc3(data):
1340 # Primarily used by qemu to make sure we have a workable gcc-3.4.x.
1341 # Start by checking for the program name as we build it, as not
1342 # all host-provided gcc-3.4's will work.
1344 gcc3_versions = 'gcc-3.4.6 gcc-3.4.4 gcc34 gcc-3.4 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
1346 for gcc3 in gcc3_versions.split():
1347 if check_app_exists(gcc3, data):
1355 # Configuration data from site files
1356 # Move to autotools.bbclass?
1359 EXPORT_FUNCTIONS do_setscene do_clean do_mrproper do_distclean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
1363 ${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
1364 ${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
1365 ${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
1366 ${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
1367 ${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
1368 ${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
1369 ${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
1370 ${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
1371 ${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
1372 ${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
1373 ${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
1374 ${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
1375 ${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
1376 ${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
1377 ${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
1378 ${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
1379 ${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
1380 ${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
1381 ${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
1382 ${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
1383 ${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
1384 ${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
1385 ${KERNELORG_MIRROR} http://www.kernel.org/pub
1386 ${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
1387 ${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
1388 ${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
1389 ${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
1390 ${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
1391 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
1392 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
1393 ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
1394 ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
1395 ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
1396 ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1397 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1398 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1399 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1400 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1401 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1402 http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1403 http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1404 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1405 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
1406 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1407 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1408 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
1409 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1410 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1411 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1412 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1413 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
1414 http://www.apache.org/dist http://archive.apache.org/dist
1415 ftp://.*/.* http://mirrors.openembedded.org/
1416 https?$://.*/.* http://mirrors.openembedded.org/
1417 ftp://.*/.* http://sources.openembedded.org/
1418 https?$://.*/.* http://sources.openembedded.org/