2 # General packaging help functions
# PKGDEST: root directory under WORKDIR where the per-package file trees
# are assembled before the package-writing backends pick them up.
5 PKGDEST = "${WORKDIR}/install"
# Normalise an arbitrary string into a valid package name.
# NOTE(review): this is a sampled listing — lines 8, 10-15, 17 and 20 of the
# original file (including the fixutf() helper definition) are not visible here.
7 def legitimize_package_name(s):
9 Make sure package names are legitimate strings
# Decode a <Uxxxx> codepoint (captured by the regex below) into UTF-8 bytes.
16 return ('\u%s' % cp).decode('unicode_escape').encode('utf-8')
18 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
19 s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
21 # Remaining package name validity fixes
# Lowercase and map characters that package managers dislike: '_'->'-',
# '@'->'+', ','->'+', '/'->'-'.
22 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
# Split files under D+root into dynamically generated subpackages, one per
# file/dir matching file_regex; package names come from output_pattern % name.
# NOTE(review): sampled listing — several original lines (conditionals,
# returns, loop headers) are missing between the numbered lines below.
24 def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None):
26 Used in .bb files to split up dynamically generated subpackages of a
27 given package, usually plugins or modules.
29 import os, os.path, bb
31 dvar = bb.data.getVar('D', d, 1)
33 bb.error("D not defined")
36 packages = bb.data.getVar('PACKAGES', d, 1).split()
# Wrap caller-supplied scriptlets with a shell shebang.
39 postinst = '#!/bin/sh\n' + postinst + '\n'
41 postrm = '#!/bin/sh\n' + postrm + '\n'
43 objs = os.listdir(dvar + root)
46 for walkroot, dirs, files in os.walk(dvar + root):
48 relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
# Default extra_depends to the "main" package name.
52 if extra_depends == None:
53 # This is *really* broken
55 # At least try and patch it up I guess...
# NOTE(review): str.find() returns -1 (truthy) when the substring is absent
# and 0 (falsy) when it is at position 0 — so these tests are effectively
# inverted; the intent was presumably "'-dbg' in mainpkg". The replace()
# happens to be a no-op when the substring is missing, which masks the bug.
56 if mainpkg.find('-dbg'):
57 mainpkg = mainpkg.replace('-dbg', '')
58 if mainpkg.find('-dev'):
59 mainpkg = mainpkg.replace('-dev', '')
60 extra_depends = mainpkg
# Match either the full relative path or just the basename against file_regex.
65 m = re.match(file_regex, o)
67 m = re.match(file_regex, os.path.basename(o))
71 f = os.path.join(dvar + root, o)
72 mode = os.lstat(f).st_mode
# Only regular files (and directories when allow_dirs) become packages.
73 if not (stat.S_ISREG(mode) or (allow_dirs and stat.S_ISDIR(mode))):
75 on = legitimize_package_name(m.group(1))
76 pkg = output_pattern % on
77 if not pkg in packages:
# prepend controls ordering so earlier packages claim files first.
79 packages = [pkg] + packages
82 the_files = [os.path.join(root, o)]
# aux_files_pattern gets the legitimised name; the _verbatim variant gets
# the raw regex capture.
84 if type(aux_files_pattern) is list:
85 for fp in aux_files_pattern:
86 the_files.append(fp % on)
88 the_files.append(aux_files_pattern % on)
89 if aux_files_pattern_verbatim:
90 if type(aux_files_pattern_verbatim) is list:
91 for fp in aux_files_pattern_verbatim:
92 the_files.append(fp % m.group(1))
94 the_files.append(aux_files_pattern_verbatim % m.group(1))
95 bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
96 if extra_depends != '':
97 the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
99 the_depends = '%s %s' % (the_depends, extra_depends)
101 the_depends = extra_depends
102 bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
103 bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
105 bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
107 bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
# Package already declared: append this file to its existing FILES list.
109 oldfiles = bb.data.getVar('FILES_' + pkg, d, 1)
111 bb.fatal("Package '%s' exists but has no files" % pkg)
112 bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
# Optional per-file callback for recipes needing extra metadata tweaks.
114 hook(f, pkg, file_regex, output_pattern, m.group(1))
116 bb.data.setVar('PACKAGES', ' '.join(packages), d)
# file-native is needed at package time (runstrip shells out to 'file').
118 PACKAGE_DEPENDS += "file-native"
# NOTE(review): the lines below appear to be the body of an anonymous
# python function whose header line is not visible in this sampled listing.
# It turns PACKAGE_DEPENDS entries into do_package task dependencies.
122 if bb.data.getVar('PACKAGES', d, True) != '':
123 deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
124 for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
125 deps += " %s:do_populate_staging" % dep
126 bb.data.setVarFlag('do_package', 'depends', deps, d)
128 deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split()
129 # shlibs requires any DEPENDS to have already packaged for the *.list files
130 deps.append("do_package")
131 bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
134 def runstrip(file, d):
# Strip one binary in place, first splitting debug info into a .debug/
# sidecar and linking it back via --add-gnu-debuglink.
# NOTE(review): sampled listing — error-return lines between the numbered
# lines below are not visible.
135 # Function to strip a single file, called from populate_packages below
136 # A working 'file' (one which works on the target architecture)
137 # is necessary for this stuff to work, hence the addition to do_package[depends]
139 import bb, os, commands, stat
141 pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, 1)
# Use file(1) to classify the binary; on failure we strip anyway (forced).
143 ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, file))
146 bb.error("runstrip: 'file %s' failed (forced strip)" % file)
# Already stripped — nothing to do.
148 if "not stripped" not in result:
149 bb.debug(1, "runstrip: skip %s" % file)
152 strip = bb.data.getVar("STRIP", d, 1)
153 objcopy = bb.data.getVar("OBJCOPY", d, 1)
# Temporarily make the file writable; original mode is restored below.
156 if not os.access(file, os.W_OK):
157 origmode = os.stat(file)[stat.ST_MODE]
158 newmode = origmode | stat.S_IWRITE
159 os.chmod(file, newmode)
# Shared libraries can drop unneeded symbols; executables keep them.
162 if ".so" in file and "shared" in result:
163 extraflags = "--remove-section=.comment --remove-section=.note --strip-unneeded"
164 elif "shared" in result or "executable" in result:
165 extraflags = "--remove-section=.comment --remove-section=.note"
167 bb.mkdirhier(os.path.join(os.path.dirname(file), ".debug"))
168 debugfile=os.path.join(os.path.dirname(file), ".debug", os.path.basename(file))
170 stripcmd = "'%s' %s '%s'" % (strip, extraflags, file)
171 bb.debug(1, "runstrip: %s" % stripcmd)
# Split out debug info, strip, then point the stripped file at the sidecar.
# NOTE(review): only the strip command's exit status is checked.
173 os.system("%s'%s' --only-keep-debug '%s' '%s'" % (pathprefix, objcopy, file, debugfile))
174 ret = os.system("%s%s" % (pathprefix, stripcmd))
175 os.system("%s'%s' --add-gnu-debuglink='%s' '%s'" % (pathprefix, objcopy, debugfile, file))
178 os.chmod(file, origmode)
181 bb.error("runstrip: '%s' strip command failed" % stripcmd)
186 # Package data handling routines
# Map a package name through any PKG_* rename recorded in its pkgdata.
# NOTE(review): body largely missing from this sampled listing (lines
# 190-199 not visible); behaviour beyond the subpkgdata read is unverified.
189 def get_package_mapping (pkg, d):
192 data = read_subpkgdata(pkg, d)
# Rewrite every dependency name in varname (e.g. RDEPENDS) through the
# package-rename mapping, preserving any "(version)" constraint suffix.
200 def runtime_mapping_rename (varname, d):
203 #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, 1)))
206 for depend in explode_deps(bb.data.getVar(varname, d, 1) or ""):
207 # Have to be careful with any version component of the depend
# Split "name (>= 1.2)" into name and the version tail once, at ' ('.
208 split_depend = depend.split(' (')
209 new_depend = get_package_mapping(split_depend[0].strip(), d)
210 if len(split_depend) > 1:
# Re-attach the version constraint to the renamed package.
211 new_depends.append("%s (%s" % (new_depend, split_depend[1]))
213 new_depends.append(new_depend)
215 bb.data.setVar(varname, " ".join(new_depends) or None, d)
217 #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, 1)))
220 # Package functions suitable for inclusion in PACKAGEFUNCS
# Split ${datadir}/locale/* into one ${PN}-locale-<lang> package per locale.
# NOTE(review): sampled listing — early-return lines after the error/debug
# messages below are not visible.
223 python package_do_split_locales() {
226 if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'):
227 bb.debug(1, "package requested not splitting locales")
230 packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
232 datadir = bb.data.getVar('datadir', d, 1)
234 bb.note("datadir not defined")
237 dvar = bb.data.getVar('D', d, 1)
239 bb.error("D not defined")
242 pn = bb.data.getVar('PN', d, 1)
244 bb.error("PN not defined")
# The catch-all PN-locale package is replaced by per-language packages.
247 if pn + '-locale' in packages:
248 packages.remove(pn + '-locale')
250 localedir = os.path.join(dvar + datadir, 'locale')
252 if not os.path.isdir(localedir):
253 bb.debug(1, "No locale files in this package")
256 locales = os.listdir(localedir)
258 # This is *really* broken
259 mainpkg = packages[0]
260 # At least try and patch it up I guess...
# NOTE(review): same str.find() truthiness bug as in do_split_packages —
# find() returns -1 (truthy) when absent, 0 (falsy) at position 0; the
# intent was presumably "'-dbg' in mainpkg".
261 if mainpkg.find('-dbg'):
262 mainpkg = mainpkg.replace('-dbg', '')
263 if mainpkg.find('-dev'):
264 mainpkg = mainpkg.replace('-dev', '')
# One package per locale directory, depending on the main package and a
# virtual-locale-<lang> provider.
267 ln = legitimize_package_name(l)
268 pkg = pn + '-locale-' + ln
270 bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
271 bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d)
272 bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
273 bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d)
275 bb.data.setVar('PACKAGES', ' '.join(packages), d)
277 # Disabled by RP 18/06/07
278 # Wildcards aren't supported in debian
279 # They break with ipkg since glibc-locale* will mean that
280 # glibc-localedata-translit* won't install as a dependency
281 # for some other package which breaks meta-toolchain
282 # Probably breaks since virtual-locale- isn't provided anywhere
283 #rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
284 #rdep.append('%s-locale*' % pn)
285 #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
# Copy src to dest preserving permissions, ownership and mtime; handles
# symlinks, regular files, and falls back to /bin/cp for special files.
# Python 2 code (print statements, commands module).
# NOTE(review): sampled listing — try/except scaffolding between the
# numbered lines below is not visible.
288 def copyfile(src,dest,newmtime=None,sstat=None):
290 Copies a file from src to dest, preserving all permissions and
291 attributes; mtime will be preserved even when moving across
292 filesystems. Returns true on success and false on failure.
294 import os, stat, shutil, commands
296 #print "copyfile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
301 print "copyfile: Stating source file failed...", e
308 dstat=os.lstat(os.path.dirname(dest))
312 if stat.S_ISLNK(dstat[stat.ST_MODE]):
# Symlink source: recreate the link at dest rather than copying the target.
319 if stat.S_ISLNK(sstat[stat.ST_MODE]):
321 target=os.readlink(src)
322 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
324 os.symlink(target,dest)
325 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
326 return os.lstat(dest)
328 print "copyfile: failed to properly create symlink:", dest, "->", target, e
# Regular file: copy to a temp name then rename for atomic replacement.
331 if stat.S_ISREG(sstat[stat.ST_MODE]):
332 try: # For safety copy then move it over.
333 shutil.copyfile(src,dest+"#new")
334 os.rename(dest+"#new",dest)
336 print 'copyfile: copy', src, '->', dest, 'failed.', e
339 #we don't yet handle special, so we need to fall back to /bin/mv
340 a=commands.getstatusoutput("/bin/cp -f "+"'"+src+"' '"+dest+"'")
342 print "copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a
343 return False # failure
# Restore ownership then mode (chown resets setuid/sticky bits).
345 os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
346 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
348 print "copyfile: Failed to chown/chmod/unlink", dest, e
# Apply the requested mtime, or carry over the source's atime/mtime.
352 os.utime(dest,(newmtime,newmtime))
354 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
355 newmtime=sstat[stat.ST_MTIME]
# Main packaging pass: sanity-check PACKAGES, strip binaries, copy each
# package's FILES from D into PKGDEST/<pkg>, report unshipped files, run
# the package_name_hook, and resolve dangling symlinks into RDEPENDS.
# NOTE(review): sampled listing — many control-flow lines between the
# numbered lines below are not visible.
358 python populate_packages () {
359 import glob, stat, errno, re
361 workdir = bb.data.getVar('WORKDIR', d, 1)
363 bb.error("WORKDIR not defined, unable to package")
366 import os # path manipulations
367 outdir = bb.data.getVar('DEPLOY_DIR', d, 1)
369 bb.error("DEPLOY_DIR not defined, unable to package")
373 dvar = bb.data.getVar('D', d, 1)
375 bb.error("D not defined, unable to package")
379 packages = bb.data.getVar('PACKAGES', d, 1)
381 pn = bb.data.getVar('PN', d, 1)
383 bb.error("PN not defined")
# isexec helper: lstat-based executable-bit test (definition partly missing).
391 except (os.error, AttributeError):
393 return (s[stat.ST_MODE] & stat.S_IEXEC)
395 # Sanity check PACKAGES for duplicates - should be moved to
396 # sanity.bbclass once we have the infrastucture
398 for pkg in packages.split():
399 if pkg in package_list:
400 bb.error("-------------------")
401 bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
402 bb.error("Please fix the metadata/report this as bug to OE bugtracker.")
403 bb.error("-------------------")
405 package_list.append(pkg)
# Strip every executable regular file under D unless inhibited.
407 if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1'):
408 for root, dirs, files in os.walk(dvar):
410 file = os.path.join(root, f)
411 if not os.path.islink(file) and not os.path.isdir(file) and isexec(file):
# Start from a clean PKGDEST on every run.
414 pkgdest = bb.data.getVar('PKGDEST', d, 1)
415 os.system('rm -rf %s' % pkgdest)
419 for pkg in package_list:
# Per-package datastore copy with OVERRIDES extended by the package name,
# so FILES_<pkg> etc. resolve correctly.
420 localdata = bb.data.createCopy(d)
421 root = os.path.join(pkgdest, pkg)
424 bb.data.setVar('PKG', pkg, localdata)
425 overrides = bb.data.getVar('OVERRIDES', localdata, 1)
427 raise bb.build.FuncFailed('OVERRIDES not defined')
428 bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
429 bb.data.update_data(localdata)
431 filesvar = bb.data.getVar('FILES', localdata, 1) or ""
432 files = filesvar.split()
434 if os.path.isabs(file):
# Directories are expanded to their contents; globs to their matches.
436 if not os.path.islink(file):
437 if os.path.isdir(file):
438 newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
442 globbed = glob.glob(file)
444 if [ file ] != globbed:
447 if (not os.path.islink(file)) and (not os.path.exists(file)):
# Recreate directories with matching modes; copy real files via copyfile().
452 if os.path.isdir(file) and not os.path.islink(file):
453 bb.mkdirhier(os.path.join(root,file))
454 os.chmod(os.path.join(root,file), os.stat(file).st_mode)
456 fpath = os.path.join(root,file)
457 dpath = os.path.dirname(fpath)
459 ret = copyfile(file, fpath)
460 if ret is False or ret == 0:
461 raise bb.build.FuncFailed("File population failed")
# Anything under D not claimed by some package is reported, not shipped.
466 for root, dirs, files in os.walk(dvar):
468 path = os.path.join(root[len(dvar):], f)
469 if ('.' + path) not in seen:
470 unshipped.append(path)
473 bb.note("the following files were installed but not shipped in any package:")
477 bb.build.exec_func("package_name_hook", d)
# Default PKG_<pkg> to the package's own name if the hook didn't set it.
479 for pkg in package_list:
480 pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
482 bb.data.setVar('PKG_%s' % pkg, pkg, d)
# Collect each package's file list and any symlinks whose targets don't
# resolve inside the same package.
486 for pkg in package_list:
487 dangling_links[pkg] = []
489 inst_root = os.path.join(pkgdest, pkg)
490 for root, dirs, files in os.walk(inst_root):
492 path = os.path.join(root, f)
493 rpath = path[len(inst_root):]
494 pkg_files[pkg].append(rpath)
497 except OSError, (err, strerror):
498 if err != errno.ENOENT:
# Relative link target: resolve against the link's directory.
500 target = os.readlink(path)
502 target = os.path.join(root[len(inst_root):], target)
503 dangling_links[pkg].append(os.path.normpath(target))
# A dangling link satisfied by another package implies a runtime dependency.
505 for pkg in package_list:
506 rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
507 for l in dangling_links[pkg]:
509 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
510 for p in package_list:
511 for f in pkg_files[p]:
514 bb.debug(1, "target found in %s" % p)
517 if not p in rdepends:
521 bb.note("%s contains dangling symlink to %s" % (pkg, l))
522 bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
# Run populate_packages with ${D} as the working directory.
524 populate_packages[dirs] = "${D}"
# Write per-recipe and per-package metadata files into PKGDATA_DIR so the
# package-writing classes and other recipes can read them back later.
526 python emit_pkgdata() {
527 from glob import glob
# Emit "VAR_pkg: value" to f when VAR_<pkg> is set, string-escaped so
# multi-line scriptlets survive the line-oriented format.
529 def write_if_exists(f, pkg, var):
532 c = codecs.getencoder("string_escape")
535 val = bb.data.getVar('%s_%s' % (var, pkg), d, 1)
537 f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
539 packages = bb.data.getVar('PACKAGES', d, 1)
541 data_file = bb.data.expand("${PKGDATA_DIR}/${PN}", d)
542 f = open(data_file, 'w')
543 f.write("PACKAGES: %s\n" % packages)
546 workdir = bb.data.getVar('WORKDIR', d, 1)
548 for pkg in packages.split():
549 subdata_file = bb.data.expand("${PKGDATA_DIR}/runtime/%s" % pkg, d)
550 sf = open(subdata_file, 'w')
551 write_if_exists(sf, pkg, 'DESCRIPTION')
552 write_if_exists(sf, pkg, 'RDEPENDS')
553 write_if_exists(sf, pkg, 'RPROVIDES')
554 write_if_exists(sf, pkg, 'RRECOMMENDS')
555 write_if_exists(sf, pkg, 'RSUGGESTS')
# NOTE(review): RPROVIDES is written twice (also at line 553) — the second
# call is redundant or was meant to be a different variable.
556 write_if_exists(sf, pkg, 'RPROVIDES')
557 write_if_exists(sf, pkg, 'RREPLACES')
558 write_if_exists(sf, pkg, 'RCONFLICTS')
559 write_if_exists(sf, pkg, 'PKG')
560 write_if_exists(sf, pkg, 'ALLOW_EMPTY')
561 write_if_exists(sf, pkg, 'FILES')
562 write_if_exists(sf, pkg, 'pkg_postinst')
563 write_if_exists(sf, pkg, 'pkg_postrm')
564 write_if_exists(sf, pkg, 'pkg_preinst')
565 write_if_exists(sf, pkg, 'pkg_prerm')
# Touch a <pkg>.packaged stamp when the package has files or is allowed
# to be empty.
568 allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, 1)
570 allow_empty = bb.data.getVar('ALLOW_EMPTY', d, 1)
571 root = "%s/install/%s" % (workdir, pkg)
574 if g or allow_empty == "1":
575 file(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), 'w').close()
# Ensure the runtime subdirectory exists before the task runs.
577 emit_pkgdata[dirs] = "${PKGDATA_DIR}/runtime"
# Shell fragment appended to postinst scripts: runs ldconfig only on the
# target ($D empty means we are not installing into an offline rootfs).
# NOTE(review): fragment body truncated in this sampled listing.
579 ldconfig_postinst_fragment() {
580 if [ x"$D" = "x" ]; then
# Shared-library dependency pass: scan each package's ELF files with
# objdump for SONAME (provides) and NEEDED (requires), publish per-package
# .list/.ver files in the shlibs dir, add ldconfig postinsts, then map each
# NEEDED entry to a providing package and write <pkg>.shlibdeps.
# NOTE(review): sampled listing — loop/conditional scaffolding between the
# numbered lines below is not visible.
585 python package_do_shlibs() {
586 import os, re, os.path
588 exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
590 bb.note("not generating shlibs")
# Heuristics: files named lib*.so* and directories ending in /lib.
593 lib_re = re.compile("^lib.*\.so")
594 libdir_re = re.compile(".*/lib$")
596 packages = bb.data.getVar('PACKAGES', d, 1)
598 workdir = bb.data.getVar('WORKDIR', d, 1)
600 bb.error("WORKDIR not defined")
603 staging = bb.data.getVar('STAGING_DIR', d, 1)
605 bb.error("STAGING_DIR not defined")
608 ver = bb.data.getVar('PV', d, 1)
610 bb.error("PV not defined")
613 target_sys = bb.data.getVar('TARGET_SYS', d, 1)
615 bb.error("TARGET_SYS not defined")
618 pkgdest = bb.data.getVar('PKGDEST', d, 1)
# New per-target shlibs dir, plus the legacy location for reading.
620 shlibs_dir = os.path.join(staging, target_sys, "shlibs")
621 old_shlibs_dir = os.path.join(staging, "shlibs")
622 bb.mkdirhier(shlibs_dir)
625 private_libs = bb.data.getVar('PRIVATE_LIBS', d, 1)
626 for pkg in packages.split():
627 needs_ldconfig = False
628 bb.debug(2, "calculating shlib provides for %s" % pkg)
632 top = os.path.join(pkgdest, pkg)
633 for root, dirs, files in os.walk(top):
636 path = os.path.join(root, file)
# Candidate ELF files: executable, or matching the lib*.so name pattern.
637 if os.access(path, os.X_OK) or lib_re.match(file):
638 cmd = bb.data.getVar('OBJDUMP', d, 1) + " -p " + path + " 2>/dev/null"
639 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', d, 1), cmd)
641 lines = fd.readlines()
644 m = re.match("\s+NEEDED\s+([^\s]*)", l)
646 needed[pkg].append(m.group(1))
647 m = re.match("\s+SONAME\s+([^\s]*)", l)
648 if m and not m.group(1) in sonames:
649 # if library is private (only used by package) then do not build shlib for it
650 if not private_libs or -1 == private_libs.find(m.group(1)):
651 sonames.append(m.group(1))
# A SONAME in a */lib directory means the package needs ldconfig run.
652 if m and libdir_re.match(root):
653 needs_ldconfig = True
# Refresh this package's .list/.ver outputs from scratch.
654 shlibs_file = os.path.join(shlibs_dir, pkg + ".list")
655 if os.path.exists(shlibs_file):
656 os.remove(shlibs_file)
657 shver_file = os.path.join(shlibs_dir, pkg + ".ver")
658 if os.path.exists(shver_file):
659 os.remove(shver_file)
661 fd = open(shlibs_file, 'w')
665 fd = open(shver_file, 'w')
# Prepend/extend the postinst with the ldconfig fragment when needed.
669 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
670 postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
672 postinst = '#!/bin/sh\n'
673 postinst += bb.data.getVar('ldconfig_postinst_fragment', d, 1)
674 bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
# Build the global soname -> (package, version) provider map from all
# published .list files (new dir takes precedence by overwriting).
677 list_re = re.compile('^(.*)\.list$')
678 for dir in [old_shlibs_dir, shlibs_dir]:
679 if not os.path.exists(dir):
681 for file in os.listdir(dir):
682 m = list_re.match(file)
685 fd = open(os.path.join(dir, file))
686 lines = fd.readlines()
688 ver_file = os.path.join(dir, dep_pkg + '.ver')
690 if os.path.exists(ver_file):
692 lib_ver = fd.readline().rstrip()
695 shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
# ASSUME_SHLIBS lets recipes declare external providers: "lib:pkg[_ver]".
697 assumed_libs = bb.data.getVar('ASSUME_SHLIBS', d, 1)
699 for e in assumed_libs.split():
700 l, dep_pkg = e.split(":")
702 dep_pkg = dep_pkg.rsplit("_", 1)
703 if len(dep_pkg) == 2:
706 shlib_provider[l] = (dep_pkg, lib_ver)
# Second pass: translate each package's NEEDED list into dependencies.
708 for pkg in packages.split():
709 bb.debug(2, "calculating shlib requirements for %s" % pkg)
712 for n in needed[pkg]:
713 if n in shlib_provider.keys():
714 (dep_pkg, ver_needed) = shlib_provider[n]
720 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
726 bb.note("Couldn't find shared library provider for %s" % n)
728 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
729 if os.path.exists(deps_file):
732 fd = open(deps_file, 'w')
# pkg-config dependency pass: parse each package's .pc files for provided
# module names and their Requires lines, publish <pkg>.pclist files, then
# resolve each requirement to a providing package and write <pkg>.pcdeps.
# NOTE(review): sampled listing — scaffolding lines between the numbered
# lines below are not visible.
738 python package_do_pkgconfig () {
741 packages = bb.data.getVar('PACKAGES', d, 1)
743 workdir = bb.data.getVar('WORKDIR', d, 1)
745 bb.error("WORKDIR not defined")
748 staging = bb.data.getVar('STAGING_DIR', d, 1)
750 bb.error("STAGING_DIR not defined")
753 target_sys = bb.data.getVar('TARGET_SYS', d, 1)
755 bb.error("TARGET_SYS not defined")
758 pkgdest = bb.data.getVar('PKGDEST', d, 1)
760 shlibs_dir = os.path.join(staging, target_sys, "shlibs")
761 old_shlibs_dir = os.path.join(staging, "shlibs")
762 bb.mkdirhier(shlibs_dir)
# .pc parsing: "name=value" variable lines vs "Field: value" field lines.
764 pc_re = re.compile('(.*)\.pc$')
765 var_re = re.compile('(.*)=(.*)')
766 field_re = re.compile('(.*): (.*)')
768 pkgconfig_provided = {}
769 pkgconfig_needed = {}
770 for pkg in packages.split():
771 pkgconfig_provided[pkg] = []
772 pkgconfig_needed[pkg] = []
773 top = os.path.join(pkgdest, pkg)
774 for root, dirs, files in os.walk(top):
776 m = pc_re.match(file)
# The module name is the .pc filename without extension.
780 pkgconfig_provided[pkg].append(name)
781 path = os.path.join(root, file)
782 if not os.access(path, os.R_OK):
785 lines = f.readlines()
# Variables are loaded into a datastore so ${var} references expand.
792 bb.data.setVar(name, bb.data.expand(val, pd), pd)
794 m = field_re.match(l)
797 exp = bb.data.expand(m.group(2), pd)
798 if hdr == 'Requires':
799 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
# Publish each package's provided modules as a .pclist file.
801 for pkg in packages.split():
802 pkgs_file = os.path.join(shlibs_dir, pkg + ".pclist")
803 if os.path.exists(pkgs_file):
805 if pkgconfig_provided[pkg] != []:
806 f = open(pkgs_file, 'w')
807 for p in pkgconfig_provided[pkg]:
# Merge provider lists from the legacy and current shlibs dirs.
811 for dir in [old_shlibs_dir, shlibs_dir]:
812 if not os.path.exists(dir):
814 for file in os.listdir(dir):
815 m = re.match('^(.*)\.pclist$', file)
818 fd = open(os.path.join(dir, file))
819 lines = fd.readlines()
821 pkgconfig_provided[pkg] = []
823 pkgconfig_provided[pkg].append(l.rstrip())
# Resolve each needed module to the package that provides it.
825 for pkg in packages.split():
827 for n in pkgconfig_needed[pkg]:
829 for k in pkgconfig_provided.keys():
830 if n in pkgconfig_provided[k]:
831 if k != pkg and not (k in deps):
835 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
836 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
837 if os.path.exists(deps_file):
840 fd = open(deps_file, 'w')
# Fold the .shlibdeps/.pcdeps/.clilibdeps files produced by the passes
# above back into each package's RDEPENDS.
846 python read_shlibdeps () {
847 packages = bb.data.getVar('PACKAGES', d, 1).split()
849 rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
850 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
851 depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
852 if os.access(depsfile, os.R_OK):
854 lines = fd.readlines()
# One dependency per line, appended to the exploded RDEPENDS list.
857 rdepends.append(l.rstrip())
858 bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
861 python package_depchains() {
# For each prefix/postfix-modified package (e.g. foo-dev, foo-dbg), add
# RRECOMMENDS on the same-suffix variants of its base package's RDEPENDS.
863 For a given set of prefix and postfix modifiers, make those packages
864 RRECOMMENDS on the corresponding packages for its RDEPENDS.
866 Example: If package A depends upon package B, and A's .bb emits an
867 A-dev package, this would make A-dev Recommends: B-dev.
869 If only one of a given suffix is specified, it will take the RRECOMMENDS
870 based on the RDEPENDS of *all* other packages. If more than one of a given
871 suffix is specified, its will only use the RDEPENDS of the single parent
875 packages = bb.data.getVar('PACKAGES', d, 1)
876 postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split()
877 prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split()
# Append suffix-mapped names of rdepends to pkg's RRECOMMENDS, but only
# for names that were actually packaged (per the .packaged stamps).
879 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
880 #bb.note('rdepends for %s is %s' % (base, rdepends))
882 rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "")
884 for depend in rdepends:
885 pkgname = getname(depend, suffix)
886 if not pkgname in rreclist and packaged(pkgname, d):
887 rreclist.append(pkgname)
889 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
890 bb.data.setVar('RRECOMMENDS_%s' % pkg, ' '.join(rreclist), d)
# Strip any version constraint before recording the dependency name.
892 def add_dep(list, dep):
893 dep = dep.split(' (')[0].strip()
# Aggregate RDEPENDS across the recipe and all its packages.
898 for dep in explode_deps(bb.data.getVar('RDEPENDS', d, 1) or ""):
899 add_dep(rdepends, dep)
901 for pkg in packages.split():
902 for dep in explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or ""):
903 add_dep(rdepends, dep)
905 #bb.note('rdepends is %s' % rdepends)
907 def post_getname(name, suffix):
908 return '%s%s' % (name, suffix)
909 def pre_getname(name, suffix):
910 return '%s%s' % (suffix, name)
# Bucket packages by which suffix/prefix modifier they carry.
913 for pkg in packages.split():
914 for postfix in postfixes:
915 if pkg.endswith(postfix):
916 if not postfix in pkgs:
918 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
920 for prefix in prefixes:
921 if pkg.startswith(prefix):
922 if not prefix in pkgs:
# NOTE(review): pkg[:-len(prefix)] strips from the END of the name; for a
# prefix the base should presumably be pkg[len(prefix):] — likely a bug.
924 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
# Single package per modifier: use the aggregated rdepends; multiple:
# use only the base package's own RDEPENDS.
927 for pkg in pkgs[suffix]:
928 (base, func) = pkgs[suffix][pkg]
929 if len(pkgs[suffix]) == 1:
930 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
933 for dep in explode_deps(bb.data.getVar('RDEPENDS_' + base, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or ""):
935 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
# Ordered list of packaging steps run by do_package (continuation lines
# of this assignment are not all visible in this sampled listing).
939 PACKAGEFUNCS ?= "package_do_split_locales \
942 package_do_pkgconfig \
# Driver task: run every function listed in PACKAGEFUNCS in order.
947 python package_do_package () {
948 packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
949 if len(packages) < 1:
950 bb.debug(1, "No packages to build, skipping do_package")
953 for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
954 bb.build.exec_func(f, d)
956 do_package[dirs] = "${D}"
957 addtask package before do_build after do_install
959 # Dummy task to mark when all packaging is complete
960 do_package_write () {
963 addtask package_write before do_build after do_package
965 EXPORT_FUNCTIONS do_package do_package_write
968 # Helper functions for the package writing classes
# Apply package-rename mapping to all runtime dependency variables; called
# by the backend classes (ipk/deb/...) before emitting control data.
971 python package_mapping_rename_hook () {
973 Rewrite variables to account for package renaming in things
974 like debian.bbclass or manual PKG variable name changes
976 runtime_mapping_rename("RDEPENDS", d)
977 runtime_mapping_rename("RRECOMMENDS", d)
978 runtime_mapping_rename("RSUGGESTS", d)
979 runtime_mapping_rename("RPROVIDES", d)
980 runtime_mapping_rename("RREPLACES", d)
981 runtime_mapping_rename("RCONFLICTS", d)
984 EXPORT_FUNCTIONS mapping_rename_hook