code.ossystems Code Review - openembedded-core.git/commitdiff
getVar/setVar cleanups
author    Richard Purdie <richard.purdie@linuxfoundation.org>
          Fri, 25 Nov 2011 14:25:16 +0000 (14:25 +0000)
committer Richard Purdie <richard.purdie@linuxfoundation.org>
          Sat, 26 Nov 2011 22:42:00 +0000 (22:42 +0000)
Complete the bb.data.getVar/setVar replacements with direct accesses to the
data store object.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
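
The pattern applied throughout the diff below, shown as a minimal illustrative
sketch (the variable names are examples, not taken from the patch): the
module-level bb.data helpers that took the datastore as a trailing argument
become method calls on the datastore object d itself.

    # old style (removed by this commit)
    bb.data.setVar('PACKAGES', ' '.join(packages), d)
    value = bb.data.getVar('WORKDIR', d, True)
    bb.data.setVarFlag('do_install', 'depends', " ".join(deps), d)

    # new style (added by this commit)
    d.setVar('PACKAGES', ' '.join(packages))
    value = d.getVar('WORKDIR', True)
    d.setVarFlag('do_install', 'depends', " ".join(deps))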
27 files changed:
meta/classes/base.bbclass
meta/classes/cross-canadian.bbclass
meta/classes/distrodata.bbclass
meta/classes/gconf.bbclass
meta/classes/image-swab.bbclass
meta/classes/kernel.bbclass
meta/classes/libc-package.bbclass
meta/classes/native.bbclass
meta/classes/nativesdk.bbclass
meta/classes/package.bbclass
meta/classes/package_deb.bbclass
meta/classes/package_ipk.bbclass
meta/classes/package_rpm.bbclass
meta/classes/package_tar.bbclass
meta/classes/src_distribute.bbclass
meta/classes/sstate.bbclass
meta/classes/task.bbclass
meta/classes/update-rc.d.bbclass
meta/conf/bitbake.conf
meta/lib/oe/distro_check.py
meta/recipes-core/busybox/busybox.inc
meta/recipes-core/tasks/task-core-sdk.bb
meta/recipes-core/uclibc/uclibc.inc
meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb
meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
meta/recipes-qt/qt4/qt4.inc
meta/recipes-support/boost/boost.inc

meta/classes/base.bbclass
index 72196d60a7b1a0b3e2e4ace03b4d54993bdc98bb..a95dfd9a08ee140df984fa4817ae24b8c61ca869 100644 (file)
@@ -360,12 +360,12 @@ python () {
         d.setVarFlag('do_compile', 'umask', 022)
         deps = (d.getVarFlag('do_install', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d)
+        d.setVarFlag('do_install', 'depends', " ".join(deps))
         d.setVarFlag('do_install', 'fakeroot', 1)
         d.setVarFlag('do_install', 'umask', 022)
         deps = (d.getVarFlag('do_package', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d)
+        d.setVarFlag('do_package', 'depends', " ".join(deps))
         d.setVarFlag('do_package', 'fakeroot', 1)
         d.setVarFlag('do_package', 'umask', 022)
         d.setVarFlag('do_package_setscene', 'fakeroot', 1)
meta/classes/cross-canadian.bbclass
index 6f5bcd0ad40bfdcfa72e2f5ce26a841e6b9e8a5e..499a1fb171eade237769072d880bcb606f763660 100644 (file)
@@ -20,7 +20,7 @@ python () {
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-nativesdk')
-    bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d)
+    d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
 }
 MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}"
 
meta/classes/distrodata.bbclass
index 687247a6491f1a892c59f441518948c889cbdb93..0c0b549fefa534db75bb80bb836c1c2b977327df 100644 (file)
@@ -31,31 +31,31 @@ python do_distrodata_np() {
        if pn.find("-native") != -1:
            pnstripped = pn.split("-native")
            bb.note("Native Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-nativesdk") != -1:
            pnstripped = pn.split("-nativesdk")
            bb.note("Native Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-cross") != -1:
            pnstripped = pn.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-crosssdk") != -1:
            pnstripped = pn.split("-crosssdk")
            bb.note("cross Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-initial") != -1:
            pnstripped = pn.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        """generate package information from .bb file"""
@@ -130,19 +130,19 @@ python do_distrodata() {
        if pn.find("-native") != -1:
            pnstripped = pn.split("-native")
            bb.note("Native Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-cross") != -1:
            pnstripped = pn.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pn.find("-initial") != -1:
            pnstripped = pn.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        """generate package information from .bb file"""
@@ -308,8 +308,8 @@ python do_checkpkg() {
                which is designed for check purpose but we override check command for our own purpose
                """
                ld = bb.data.createCopy(d)
-               bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
-                                       % tmpf.name, d)
+               d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
+                                       % tmpf.name)
                bb.data.update_data(ld)
 
                try:
@@ -452,19 +452,19 @@ python do_checkpkg() {
        if pname.find("-native") != -1:
            pnstripped = pname.split("-native")
            bb.note("Native Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pname.find("-cross") != -1:
            pnstripped = pname.split("-cross")
            bb.note("cross Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        if pname.find("-initial") != -1:
            pnstripped = pname.split("-initial")
            bb.note("initial Split: %s" % pnstripped)
-           bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+           localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
            bb.data.update_data(localdata)
 
        pdesc = localdata.getVar('DESCRIPTION', True)
meta/classes/gconf.bbclass
index f1645477825a19b27ced2480d4fa3df80b24be46..c1dbbe30dce86e62e86970a1218fb190dcbf8003 100644 (file)
@@ -45,7 +45,7 @@ python populate_packages_append () {
                                        schemas.append(f)
                if schemas != []:
                        bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
-                       bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
+                       d.setVar('SCHEMA_FILES', " ".join(schemas))
                        postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
                        if not postinst:
                                postinst = '#!/bin/sh\n'
meta/classes/image-swab.bbclass
index 23183b3ec3b6a640e7d987048ef31ce6c8f63b4a..5aace0f5c84b1014f41b0e2a9ca38ba03a152d17 100644 (file)
@@ -53,7 +53,7 @@ python() {
     if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
        deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
        deps.append('strace-native:do_populate_sysroot')
-       bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d)
+       d.setVarFlag('do_setscene', 'depends', " ".join(deps))
        logdir = bb.data.expand("${TRACE_LOGDIR}", d)
        bb.utils.mkdirhier(logdir)
     else:
meta/classes/kernel.bbclass
index 12e0b83b947a18ac813aec009a64f75943c25330..d0cc279a669822f5eb1ee3ac97d9e2c189fbc34c 100644 (file)
@@ -437,7 +437,7 @@ python populate_packages_prepend () {
                else:
                        rdepends = []
                rdepends.extend(get_dependencies(file, pattern, format))
-               bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d)
+               d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))
 
        module_deps = parse_depmod()
        module_regex = '^(.*)\.k?o$'
@@ -464,10 +464,10 @@ python populate_packages_prepend () {
        for pkg in packages[1:]:
                if not pkg in blacklist and not pkg in metapkg_rdepends:
                        metapkg_rdepends.append(pkg)
-       bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
+       d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
        d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
        packages.append(metapkg)
-       bb.data.setVar('PACKAGES', ' '.join(packages), d)
+       d.setVar('PACKAGES', ' '.join(packages))
 }
 
 # Support checking the kernel size since some kernels need to reside in partitions
meta/classes/libc-package.bbclass
index fc1a5794d7b2e0148140fdc0de65e268395a1a29..bbe06fd54b35ef5f04dd36efcf79a38c216ec9d9 100644 (file)
@@ -10,7 +10,7 @@
 GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
 
 python __anonymous () {
-    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1)
+    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)
 
     pn = d.getVar("PN", True)
     if pn.endswith("-initial"):
@@ -19,15 +19,15 @@ python __anonymous () {
     if enabled and int(enabled):
         import re
 
-        target_arch = d.getVar("TARGET_ARCH", 1)
-        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or ""
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or ""
+        target_arch = d.getVar("TARGET_ARCH", True)
+        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""
 
         for regexp in binary_arches.split(" "):
             r = re.compile(regexp)
 
             if r.match(target_arch):
-                depends = d.getVar("DEPENDS", 1)
+                depends = d.getVar("DEPENDS", True)
                if use_cross_localedef == "1" :
                        depends = "%s cross-localedef-native" % depends
                else:
@@ -109,19 +109,19 @@ inherit qemu
 
 python package_do_split_gconvs () {
        import os, re
-       if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'):
+       if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
                bb.note("package requested not splitting gconvs")
                return
 
-       if not d.getVar('PACKAGES', 1):
+       if not d.getVar('PACKAGES', True):
                return
 
-       bpn = d.getVar('BPN', 1)
-       libdir = d.getVar('libdir', 1)
+       bpn = d.getVar('BPN', True)
+       libdir = d.getVar('libdir', True)
        if not libdir:
                bb.error("libdir not defined")
                return
-       datadir = d.getVar('datadir', 1)
+       datadir = d.getVar('datadir', True)
        if not datadir:
                bb.error("datadir not defined")
                return
@@ -144,9 +144,9 @@ python package_do_split_gconvs () {
                                        deps.append(dp)
                f.close()
                if deps != []:
-                       bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
                if bpn != 'glibc':
-                       bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
        do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
                description='gconv module for character set %s', hook=calc_gconv_deps, \
@@ -165,9 +165,9 @@ python package_do_split_gconvs () {
                                        deps.append(dp)
                f.close()
                if deps != []:
-                       bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
                if bpn != 'glibc':
-                       bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
        do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
                description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
@@ -185,23 +185,23 @@ python package_do_split_gconvs () {
                                        deps.append(dp)
                f.close()
                if deps != []:
-                       bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
                if bpn != 'glibc':
-                       bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
        do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
                description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
-       bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d)
+       d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
 
-       use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1)
+       use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
 
        dot_re = re.compile("(.*)\.(.*)")
 
 #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales 
        if use_bin != "precompiled":
-               supported = d.getVar('GLIBC_GENERATE_LOCALES', 1)
+               supported = d.getVar('GLIBC_GENERATE_LOCALES', True)
                if not supported or supported == "all":
-                       f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r")
+                       f = open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED"), "r")
                        supported = f.readlines()
                        f.close()
                else:
@@ -218,7 +218,7 @@ python package_do_split_gconvs () {
                        supported.append(dbase[0] + d2)
 
        # Collate the locales by base and encoding
-       utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0)
+       utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
        encodings = {}
        for l in supported:
                l = l[:-1]
@@ -233,12 +233,12 @@ python package_do_split_gconvs () {
                encodings[locale].append(charset)
 
        def output_locale_source(name, pkgname, locale, encoding):
-               bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
-               (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d)
-               bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \
-               % (locale, encoding, locale), d)
-               bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \
-               (locale, encoding, locale), d)
+               d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
+               (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)))
+               d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
+               % (locale, encoding, locale))
+               d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
+               (locale, encoding, locale))
 
        def output_locale_binary_rdepends(name, pkgname, locale, encoding):
                m = re.match("(.*)\.(.*)", name)
@@ -246,23 +246,23 @@ python package_do_split_gconvs () {
                        libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
                else:
                        libc_name = name
-               bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
-                       % (bpn, libc_name)), d)
+               d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
+                       % (bpn, libc_name)))
                rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split()
                rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
-               bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
+               d.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides))
 
        commands = {}
 
        def output_locale_binary(name, pkgname, locale, encoding):
-               treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree")
-               ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1))
-               path = d.getVar("PATH", 1)
+               treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
+               ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
+               path = d.getVar("PATH", True)
                i18npath = base_path_join(treedir, datadir, "i18n")
                gconvpath = base_path_join(treedir, "iconvdata")
                outputpath = base_path_join(treedir, libdir, "locale")
 
-               use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0"
+               use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
                if use_cross_localedef == "1":
                        target_arch = d.getVar('TARGET_ARCH', True)
                        locale_arch_options = { \
@@ -292,9 +292,9 @@ python package_do_split_gconvs () {
                                --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                                % (treedir, datadir, locale, encoding, name)
 
-                       qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1)
+                       qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
                        if not qemu_options:
-                               qemu_options = d.getVar('QEMU_OPTIONS', 1)
+                               qemu_options = d.getVar('QEMU_OPTIONS', True)
 
                        cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                                -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -307,7 +307,7 @@ python package_do_split_gconvs () {
        def output_locale(name, locale, encoding):
                pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
                d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
-               bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d)
+               d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
                rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
                m = re.match("(.*)_(.*)", name)
                if m:
@@ -347,7 +347,7 @@ python package_do_split_gconvs () {
                bb.note("  " + " ".join(non_utf8))
 
        if use_bin == "compile":
-               makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile")
+               makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
                m = open(makefile, "w")
                m.write("all: %s\n\n" % " ".join(commands.keys()))
                for cmd in commands:
meta/classes/native.bbclass
index 5f25bdd2c275739914a563a97718a1136ca47fdb..8f7cc1f2d3d400c8812327c7a73e7cb5442e663b 100644 (file)
@@ -121,7 +121,7 @@ python native_virtclass_handler () {
                 newdeps.append(dep + "-native")
             else:
                 newdeps.append(dep)
-        bb.data.setVar(varname, " ".join(newdeps), d)
+        d.setVar(varname, " ".join(newdeps))
 
     map_dependencies("DEPENDS", e.data)
     for pkg in (e.data.getVar("PACKAGES", True).split() + [""]):
@@ -139,7 +139,7 @@ python native_virtclass_handler () {
             provides = provides.replace(prov, prov + "-native")
     e.data.setVar("PROVIDES", provides)
 
-    bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data)
+    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
 }
 
 addhandler native_virtclass_handler
meta/classes/nativesdk.bbclass
index ca24efaa7c067121d56034aca3042801f8ab0449..e6204c02ba113cbe8c87686d1444a0750d7f4805 100644 (file)
@@ -15,7 +15,7 @@ python () {
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-nativesdk')
-    bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d)
+    d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
 }
 
 STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}"
@@ -66,7 +66,7 @@ python nativesdk_virtclass_handler () {
     if not pn.endswith("-nativesdk"):
         return
 
-    bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data)
+    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
 }
 
 python () {
@@ -91,7 +91,7 @@ python () {
                 newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk")
             else:
                 newdeps.append(dep)
-        bb.data.setVar(varname, " ".join(newdeps), d)
+        d.setVar(varname, " ".join(newdeps))
 
     map_dependencies("DEPENDS", d)
     #for pkg in (d.getVar("PACKAGES", True).split() + [""]):
meta/classes/package.bbclass
index 2a78a8f54bd097b4735033eb2bfda35926808657..256cdc15dea0d787df250b26a31a41f29790667a 100644 (file)
@@ -151,7 +151,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                                                the_files.append(fp % m.group(1))       
                                else:
                                        the_files.append(aux_files_pattern_verbatim % m.group(1))
-                       bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
+                       d.setVar('FILES_' + pkg, " ".join(the_files))
                        if extra_depends != '':
                                the_depends = d.getVar('RDEPENDS_' + pkg, True)
                                if the_depends:
@@ -165,11 +165,11 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                        if postrm:
                                d.setVar('pkg_postrm_' + pkg, postrm)
                else:
-                       bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
+                       d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
                if callable(hook):
                        hook(f, pkg, file_regex, output_pattern, m.group(1))
 
-       bb.data.setVar('PACKAGES', ' '.join(packages), d)
+       d.setVar('PACKAGES', ' '.join(packages))
 
 PACKAGE_DEPENDS += "file-native"
 
@@ -183,7 +183,7 @@ python () {
         deps = (d.getVarFlag('do_package', 'deptask') or "").split()
         # shlibs requires any DEPENDS to have already packaged for the *.list files
         deps.append("do_package")
-        bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
+        d.setVarFlag('do_package', 'deptask', " ".join(deps))
     elif not bb.data.inherits_class('image', d):
         d.setVar("PACKAGERDEPTASK", "")
 }
@@ -202,7 +202,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
     pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
     objcopy = d.getVar("OBJCOPY", True)
     debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
-    workdir = bb.data.expand("${WORKDIR}", d)
+    workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
     sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
 
@@ -245,7 +245,7 @@ def splitfile2(debugsrcdir, d):
     strip = d.getVar("STRIP", True)
     objcopy = d.getVar("OBJCOPY", True)
     debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
-    workdir = bb.data.expand("${WORKDIR}", d)
+    workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
     workbasedir = os.path.basename(workdir)
     sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
@@ -341,7 +341,7 @@ def runtime_mapping_rename (varname, d):
                else:
                        new_depends.append(new_depend)
 
-       bb.data.setVar(varname, " ".join(new_depends) or None, d)
+       d.setVar(varname, " ".join(new_depends) or None)
 
        #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
 
@@ -399,15 +399,15 @@ python package_do_split_locales() {
                ln = legitimize_package_name(l)
                pkg = pn + '-locale-' + ln
                packages.append(pkg)
-               bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
-               bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d)
-               bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
-               bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d)
-               bb.data.setVar('DESCRIPTION_' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l), d)
+               d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
+               d.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln))
+               d.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln))
+               d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
+               d.setVar('DESCRIPTION_' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
                if locale_section:
                        d.setVar('SECTION_' + pkg, locale_section)
 
-       bb.data.setVar('PACKAGES', ' '.join(packages), d)
+       d.setVar('PACKAGES', ' '.join(packages))
 
        # Disabled by RP 18/06/07
        # Wildcards aren't supported in debian
@@ -417,7 +417,7 @@ python package_do_split_locales() {
        # Probably breaks since virtual-locale- isn't provided anywhere
        #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split()
        #rdep.append('%s-locale*' % pn)
-       #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
+       #d.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep))
 }
 
 python perform_packagecopy () {
@@ -1018,7 +1018,7 @@ python populate_packages () {
                                                break
                        if found == False:
                                bb.note("%s contains dangling symlink to %s" % (pkg, l))
-               bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d)
+               d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
 populate_packages[dirs] = "${D}"
 
@@ -1033,11 +1033,11 @@ python emit_pkgdata() {
                        c = codecs.getencoder("string_escape")
                        return c(str)[0]
 
-               val = bb.data.getVar('%s_%s' % (var, pkg), d, True)
+               val = d.getVar('%s_%s' % (var, pkg), True)
                if val:
                        f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
                        return
-               val = bb.data.getVar('%s' % (var), d, True)
+               val = d.getVar('%s' % (var), True)
                if val:
                        f.write('%s: %s\n' % (var, encode(val)))
                return
@@ -1159,12 +1159,12 @@ python package_do_filedeps() {
                if len(provides) > 0:
                        provides_files.append(file)
                        key = "FILERPROVIDES_" + file + "_" + pkg
-                       bb.data.setVar(key, " ".join(provides), d)
+                       d.setVar(key, " ".join(provides))
 
                if len(requires) > 0:
                        requires_files.append(file)
                        key = "FILERDEPENDS_" + file + "_" + pkg
-                       bb.data.setVar(key, " ".join(requires), d)
+                       d.setVar(key, " ".join(requires))
 
        # Determine dependencies
        for pkg in packages.split():
@@ -1181,8 +1181,8 @@ python package_do_filedeps() {
 
                                process_deps(dep_pipe, pkg, f, provides_files, requires_files)
 
-               bb.data.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files), d)
-               bb.data.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files), d)
+               d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
+               d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
 }
 
 SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
@@ -1461,7 +1461,7 @@ python package_do_pkgconfig () {
                                                if m:
                                                        name = m.group(1)
                                                        val = m.group(2)
-                                                       bb.data.setVar(name, bb.data.expand(val, pd), pd)
+                                                       pd.setVar(name, bb.data.expand(val, pd))
                                                        continue
                                                m = field_re.match(l)
                                                if m:
@@ -1519,7 +1519,7 @@ python package_do_pkgconfig () {
 python read_shlibdeps () {
        packages = d.getVar('PACKAGES', True).split()
        for pkg in packages:
-               rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "")
+               rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
 
                for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
                        depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
@@ -1529,7 +1529,7 @@ python read_shlibdeps () {
                                fd.close()
                                for l in lines:
                                        rdepends[l.rstrip()] = ""
-               bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d)
+               d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
 
 python package_depchains() {
@@ -1569,7 +1569,7 @@ python package_depchains() {
                                rreclist[pkgname] = ""
 
                #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-               bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d)
+               d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
 
        def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
 
@@ -1590,7 +1590,7 @@ python package_depchains() {
                                rreclist[pkgname] = ""
 
                #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-               bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d)
+               d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
 
        def add_dep(list, dep):
                dep = dep.split(' (')[0].strip()
meta/classes/package_deb.bbclass
index 71e46a8c8e0bf78742b43a60b21c73d595919f79..fc28ee1e2d9839d9a0235dc014ad469a37a7d3a3 100644 (file)
@@ -11,7 +11,7 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
 PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
 
 python package_deb_fn () {
-    bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+    d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 addtask package_deb_install
@@ -409,7 +409,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
         deps.append('dpkg-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_deb', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
         d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1")
 
meta/classes/package_ipk.bbclass
index df608fc0e3af89b8f4174946103543a9e3036235..1633affb08c416eb1999adbda4217de4a7c92d23 100644 (file)
@@ -11,7 +11,7 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
 OPKGBUILDCMD ??= "opkg-build"
 
 python package_ipk_fn () {
-       bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+       d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 python package_ipk_install () {
@@ -441,7 +441,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split()
         deps.append('opkg-utils-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
         d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1")
 }
meta/classes/package_rpm.bbclass
index 75e4f2d196535486f356a52ebae46a73e0a5f13f..93a4c3123c593ba9c62d35a4536fd51be4ebfff4 100644 (file)
@@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
 PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
 
 python package_rpm_fn () {
-       bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+       d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 python package_rpm_install () {
@@ -467,7 +467,7 @@ python write_specfile () {
                                                        ver = ver.replace(pv, reppv)
                                newdeps_dict[dep] = ver
                        depends = bb.utils.join_deps(newdeps_dict)
-                       bb.data.setVar(varname, depends.strip(), d)
+                       d.setVar(varname, depends.strip())
 
        # We need to change the style the dependency from BB to RPM
        # This needs to happen AFTER the mapping_rename_hook
@@ -969,7 +969,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
         deps.append('rpm-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
         d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
 }
meta/classes/package_tar.bbclass
index f26a2c00085db0395108cc061518738e983da564..201bd916572b75f75ab494dd49305f63dcd4c06b 100644 (file)
@@ -69,7 +69,7 @@ python do_package_tar () {
                if not overrides:
                        raise bb.build.FuncFailed('OVERRIDES not defined')
                overrides = bb.data.expand(overrides, localdata)
-               bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
+               localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg))
 
                bb.data.update_data(localdata)
 
@@ -95,7 +95,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
         deps.append('tar-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
 }
 
meta/classes/src_distribute.bbclass
index 2069d652a395d9af589c9c18c0e2d18797c9bb68..651e49259863cbba70e55737ddf4aadde26aa0f0 100644 (file)
@@ -29,13 +29,13 @@ python do_distribute_sources () {
                                if url.basename == '*':
                                        import os.path
                                        dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
-                                       bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d)
+                                       d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir))
                                else:
-                                       bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d)
+                                       d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename))
                        else:
                                d.setVar('DEST', '')
 
-                       bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d)
+                       d.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license))
                        bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
 }
 
meta/classes/sstate.bbclass
index 951caa360fae8b76732d03bc24e3a6fb7960073c..504b09975d69a0adbe4057d8dac760711f9873d8 100644 (file)
@@ -20,18 +20,18 @@ SSTATEPOSTINSTFUNCS ?= ""
 
 python () {
     if bb.data.inherits_class('native', d):
-        bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d)
+        d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
     elif bb.data.inherits_class('cross', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d)
-        bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d))
+        d.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d))
     elif bb.data.inherits_class('crosssdk', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d))
     elif bb.data.inherits_class('nativesdk', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d))
     elif bb.data.inherits_class('cross-canadian', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d))
     else:
-        bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d), d)
+        d.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d))
 
     # These classes encode staging paths into their scripts data so can only be
     # reused if we manipulate the paths
index 516d1a1c209477d87c4a219cf019e03b4ef58806..7891207a64bcd4cc7b7b9e6e6bcaedb5136b6c72 100644 (file)
@@ -22,6 +22,6 @@ python () {
     for pkg in packages:
         for postfix in ['-dbg', '-dev']:
             genpackages.append(pkg+postfix)
-    bb.data.setVar('PACKAGES', ' '.join(packages+genpackages), d)
+    d.setVar('PACKAGES', ' '.join(packages+genpackages))
 }
 
meta/classes/update-rc.d.bbclass
index 492c5fba2d4c90439c43ccd0cce2aaf6a35e150c..cba44d688f7bb934ab02cea5982c7844468587d6 100644 (file)
@@ -45,7 +45,7 @@ python populate_packages_prepend () {
                bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
                localdata = bb.data.createCopy(d)
                overrides = localdata.getVar("OVERRIDES", 1)
-               bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
+               localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
                bb.data.update_data(localdata)
 
                """
meta/conf/bitbake.conf
index 60f3200e89f8764ca3d8c6fc0d751ca86225f822..acba388226c130ef72a4185f66bbcdd956eb7580 100644 (file)
@@ -285,7 +285,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir
 
 DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug"
 
-FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], d, 1)}"
+FILES_${PN}-dbg = "${@d.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory'], True)}"
 
 SECTION_${PN}-dbg = "devel"
 ALLOW_EMPTY_${PN}-dbg = "1"
@@ -502,7 +502,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types"
 # Disabled until the option works properly -feliminate-dwarf2-dups
 FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
 DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
-SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], d, 1)}"
+SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', True) == '1'], True)}"
 SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION"
 BUILD_OPTIMIZATION = "-O2 -pipe"
 
meta/lib/oe/distro_check.py
index ad391e3b013f07fe948f78df5fb0769bb3aef976..630e88d2f23d2e1fbf4fe519b06b9b7efdfddf64 100644 (file)
@@ -284,19 +284,19 @@ def compare_in_distro_packages_list(distro_check_dir, d):
 
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
-        bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
meta/recipes-core/busybox/busybox.inc
index 0b74ea6780d83e705a3751d5178520e38f0aba81..b948e2ce6b6bdf7bd4b98a4f6f4a583d12785964 100644 (file)
@@ -81,17 +81,15 @@ python () {
   if "${OE_DEL}":
     d.setVar('configmangle_append', "${OE_DEL}" + "\n")
   if "${OE_FEATURES}":
-    bb.data.setVar('configmangle_append',
+    d.setVar('configmangle_append',
                    "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
-                   ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))),
-                   d)
-  bb.data.setVar('configmangle_append',
+                   ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
+  d.setVar('configmangle_append',
                  "/^### CROSS$/a\\\n%s\n" %
                   ("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
                               "CONFIG_EXTRA_CFLAGS=\"${CFLAGS}\""
                         ])
-                  ),
-                 d)
+                  ))
 }
 
 do_prepare_config () {
meta/recipes-core/tasks/task-core-sdk.bb
index a74de01b07cba5dd03d1199d4de4235a8210d2d6..d940e39318df9ed3e8c988a52ec031924586951b 100644 (file)
@@ -80,11 +80,11 @@ RDEPENDS_task-core-sdk = "\
 #                        rreclist.append('%s-dev' % name)
 #
 #            oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or ''
-#            bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d)
+#            d.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist))
 #            # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg)))
 #
 #    # bb.note('pkgs is %s' % pkgs)
-#    bb.data.setVar('PACKAGES', ' '.join(pkgs), d)
+#    d.setVar('PACKAGES', ' '.join(pkgs))
 #}
 #
 #PACKAGES_DYNAMIC = "task-core-sdk-*"
meta/recipes-core/uclibc/uclibc.inc
index 8438f254505170b7246dfeff60950b81d632be87..92157bdb2a490f876d907a5836ee9ec1bc238199 100644 (file)
@@ -141,11 +141,10 @@ python () {
   if "${OE_DEL}":
     d.setVar('configmangle_append', "${OE_DEL}" + "\n")
   if "${OE_FEATURES}":
-    bb.data.setVar('configmangle_append',
+    d.setVar('configmangle_append',
                    "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
-                   ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))),
-                   d)
-  bb.data.setVar('configmangle_append',
+                   ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
+  d.setVar('configmangle_append',
                  "/^### CROSS$/a\\\n%s\n" %
                   ("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
                         "UCLIBC_EXTRA_CFLAGS=\"${UCLIBC_EXTRA_CFLAGS}\"",
@@ -154,22 +153,18 @@ python () {
                          "DEVEL_PREFIX=\"/${prefix}\"",
                          "SHARED_LIB_LOADER_PREFIX=\"/lib\"",
                         ])
-                  ),
-                 d)
-  bb.data.setVar('configmangle_append',
+                  ))
+  d.setVar('configmangle_append',
                  "/^### TGT$/a\\\nTARGET_ARCH=\"%s\"\\nTARGET_%s=y\n" %
-                        ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"),
-                 d)
-  bb.data.setVar('configmangle_append',
-    "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d)
+                        ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"))
+  d.setVar('configmangle_append',
+    "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', True) in [ 'soft' ]]))
   if "${UCLIBC_ENDIAN}":
-    bb.data.setVar('configmangle_append',
-                   "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"),
-                   d)
+    d.setVar('configmangle_append',
+                   "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"))
   if "${UCLIBC_ABI}":
-    bb.data.setVar('configmangle_append',
-                   "/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}"),
-                   d)
+    d.setVar('configmangle_append',
+                   "/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}"))
 }
 
 do_patch_append() {
meta/recipes-graphics/xorg-lib/libx11-diet_1.4.4.bb
index 1762829755ea2196d31d72931173789e237a8794..6106986bb08d0a78484790f0b86d52fc579fcd22 100644 (file)
@@ -21,7 +21,7 @@ SRC_URI[sha256sum] = "7fe62180f08ef5f0a0062fb444591e349cae2ab5af6ad834599f5c654e
 DEPENDS += "bigreqsproto xproto xextproto xtrans libxau xcmiscproto \
             libxdmcp xf86bigfontproto kbproto inputproto xproto-native"
 
-FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/libx11"
+FILESDIR = "${@os.path.dirname(d.getVar('FILE', True))}/libx11"
 
 EXTRA_OECONF += "--without-xcb --disable-udc --disable-xcms --disable-xlocale --with-keysymdefdir=${STAGING_INCDIR}/X11"
 CFLAGS += "-D_GNU_SOURCE"
meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
index 7949058b136a6532278fefbc9fa00e9a1232260f..8ff9f82fb496bfd8b09f81522e04a0d96ac4ed98 100644 (file)
@@ -2,8 +2,8 @@ LIBV = "0.10"
 
 python populate_packages_prepend () {
        gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d)
-       postinst = d.getVar('plugin_postinst', 1)
-       glibdir = bb.data.expand('${libdir}', d)
+       postinst = d.getVar('plugin_postinst', True)
+       glibdir = d.getVar('libdir', True)
 
        do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
        do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d))
@@ -19,7 +19,7 @@ python populate_packages_prepend () {
        for pkg in packages[1:]:
                if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'):
                        metapkg_rdepends.append(pkg)
-       bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
+       d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
        d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
 }
 
meta/recipes-qt/qt4/qt4.inc
index 82ba6377a0f891bdf404296e52aa9baa06d2aa79..4dc9a75e5796482ba2ebd023f31980d3bab48a7b 100644 (file)
@@ -43,14 +43,14 @@ python __anonymous () {
         pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
         # NOTE: the headers for QtAssistantClient are different
         incname = name.replace("QtAssistantClient", "QtAssistant")
-        bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d)
-        bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl
+        d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals())
+        d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl
                   ${libdir}/lib%(name)s${QT_LIBINFIX}.a
                   ${libdir}/lib%(name)s${QT_LIBINFIX}.la
                   ${libdir}/lib%(name)s${QT_LIBINFIX}.so
                   ${includedir}/${QT_DIR_NAME}/%(incname)s
-                  ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d)
-        bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d)
+                  ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals())
+        d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals())
         d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
         lib_packages.append(pkg)
         dev_packages.append("%s-dev" % pkg)
@@ -60,22 +60,22 @@ python __anonymous () {
 
     for name in d.getVar("QT_EXTRA_LIBS", 1).split():
         pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
-        bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d)
-        bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl
+        d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals())
+        d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl
                   ${libdir}/lib%(name)s.a
                   ${libdir}/lib%(name)s.la
                   ${libdir}/lib%(name)s.so
                   ${includedir}/${QT_DIR_NAME}/%(incname)s
-                  ${libdir}/pkgconfig/%(name)s.pc""" % locals(), d)
-        bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d)
+                  ${libdir}/pkgconfig/%(name)s.pc""" % locals())
+        d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals())
         d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
         lib_packages.append(pkg)
         dev_packages.append("%s-dev" % pkg)
         dbg_packages.append("%s-dbg" % pkg)
 
-    bb.data.setVar("LIB_PACKAGES", " ".join(lib_packages), d)
-    bb.data.setVar("DEV_PACKAGES", " ".join(dev_packages), d)
-    bb.data.setVar("DBG_PACKAGES", " ".join(dbg_packages), d)
+    d.setVar("LIB_PACKAGES", " ".join(lib_packages))
+    d.setVar("DEV_PACKAGES", " ".join(dev_packages))
+    d.setVar("DBG_PACKAGES", " ".join(dbg_packages))
 }
 
 OTHER_PACKAGES = "\
@@ -261,7 +261,7 @@ python populate_packages_prepend() {
                         packages = "%s %s-dbg" % (packages, package)
                         file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
                         d.setVar("FILES_%s-dbg" % package, file_name)
-                        bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d)
+                        d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))
 
                 d.setVar('PACKAGES', packages)
 
meta/recipes-support/boost/boost.inc
index ddb65b754d6aa5f2179483189311b2f0ecd2c3b1..fecdb9760221686d330eea635de091edfaacd38f 100644 (file)
@@ -57,8 +57,8 @@ python __anonymous () {
             packages.append(pkg)
             if not d.getVar("FILES_%s" % pkg, 1):
                     d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib)
-    bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d)
-    bb.data.setVar("BJAM_EXTRA", " ".join(extras), d)
+    d.setVar("BOOST_PACKAGES", " ".join(packages))
+    d.setVar("BJAM_EXTRA", " ".join(extras))
 }
 
 # Override the contents of specific packages