Convert tab indentation in python functions into four-space
author    Richard Purdie <richard.purdie@linuxfoundation.org>
          Wed, 11 Jul 2012 17:33:43 +0000 (17:33 +0000)
committer Richard Purdie <richard.purdie@linuxfoundation.org>
          Thu, 19 Jul 2012 09:17:30 +0000 (10:17 +0100)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
71 files changed:
meta/classes/autotools.bbclass
meta/classes/base.bbclass
meta/classes/boot-directdisk.bbclass
meta/classes/bootimg.bbclass
meta/classes/cpan-base.bbclass
meta/classes/debian.bbclass
meta/classes/gconf.bbclass
meta/classes/gnomebase.bbclass
meta/classes/gtk-icon-cache.bbclass
meta/classes/image.bbclass
meta/classes/image_types.bbclass
meta/classes/kernel-arch.bbclass
meta/classes/kernel-yocto.bbclass
meta/classes/kernel.bbclass
meta/classes/libc-common.bbclass
meta/classes/libc-package.bbclass
meta/classes/license.bbclass
meta/classes/metadata_scm.bbclass
meta/classes/mime.bbclass
meta/classes/package.bbclass
meta/classes/package_deb.bbclass
meta/classes/package_rpm.bbclass
meta/classes/packagedata.bbclass
meta/classes/patch.bbclass
meta/classes/pkg_metainfo.bbclass
meta/classes/populate_sdk_base.bbclass
meta/classes/qemu.bbclass
meta/classes/sstate.bbclass
meta/classes/staging.bbclass
meta/classes/syslinux.bbclass
meta/classes/update-alternatives.bbclass
meta/classes/update-rc.d.bbclass
meta/classes/useradd.bbclass
meta/classes/utility-tasks.bbclass
meta/classes/utils.bbclass
meta/recipes-connectivity/connman/connman.inc
meta/recipes-core/base-passwd/base-passwd_3.5.24.bb
meta/recipes-core/busybox/busybox.inc
meta/recipes-core/eglibc/cross-localedef-native_2.15.bb
meta/recipes-core/eglibc/cross-localedef-native_2.16.bb
meta/recipes-core/eglibc/eglibc-ld.inc
meta/recipes-core/eglibc/eglibc-options.inc
meta/recipes-core/eglibc/eglibc_2.15.bb
meta/recipes-core/eglibc/eglibc_2.16.bb
meta/recipes-core/libxml/libxml2.inc
meta/recipes-core/ncurses/ncurses.inc
meta/recipes-devtools/apt/apt-native.inc
meta/recipes-devtools/apt/apt-package.inc
meta/recipes-devtools/perl/perl_5.14.2.bb
meta/recipes-devtools/qemu/qemu-targets.inc
meta/recipes-extended/cups/cups14.inc
meta/recipes-extended/lighttpd/lighttpd_1.4.31.bb
meta/recipes-extended/ltp/ltp_20120401.bb
meta/recipes-extended/net-tools/net-tools_1.60-23.bb
meta/recipes-extended/pam/libpam_1.1.5.bb
meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.24.1.bb
meta/recipes-gnome/gtk+/gtk+_2.12.7.bb
meta/recipes-gnome/gtk+/gtk+_2.16.6.bb
meta/recipes-gnome/gtk+/gtk+_2.24.8.bb
meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
meta/recipes-graphics/cairo/cairo-fpu.inc
meta/recipes-graphics/clutter/clutter-fpu.inc
meta/recipes-graphics/mesa/mesa-dri.inc
meta/recipes-graphics/pango/pango.inc
meta/recipes-kernel/perf/perf.inc
meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
meta/recipes-multimedia/pulseaudio/pulseaudio.inc
meta/recipes-qt/qt-apps/qmmp_0.5.2.bb
meta/recipes-qt/qt4/qt4.inc
meta/recipes-sato/web/web_git.bb
meta/recipes-support/libpcre/libpcre_8.30.bb

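The conversion is largely mechanical: every tab in the leading whitespace of a python function becomes four spaces, though a few hunks also re-wrap continuation lines by hand, and shell functions keep tab indentation (the image.bbclass hunk below even converts shell code back to tabs). As a minimal sketch, assuming a one-off filter script, the per-line rule could look like the following; this is illustrative only, not the tool used to produce this commit, and it omits the scoping a real pass would need in order to touch only python functions:

    #!/usr/bin/env python
    # Illustrative sketch only; not part of this commit.
    # Expands tabs in each line's leading whitespace to four spaces,
    # leaving tabs elsewhere (e.g. inside string literals) untouched.
    import re
    import sys

    def retab(text, spaces_per_tab=4):
        out = []
        for line in text.splitlines(True):
            m = re.match(r'[ \t]*', line)   # leading whitespace run only
            indent = m.group(0).replace('\t', ' ' * spaces_per_tab)
            out.append(indent + line[m.end():])
        return ''.join(out)

    if __name__ == '__main__':
        sys.stdout.write(retab(sys.stdin.read()))

Used as a filter (python retab.py < old.bbclass > new.bbclass), it rewrites only leading tabs, so tabs appearing later in a line pass through unchanged.
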
meta/classes/autotools.bbclass
index 02b984db639b48d12c6af49d364257b10196d537..4c4bf8775d096720cf237c3309dab37fa8925fc8 100644
@@ -1,23 +1,23 @@
 def autotools_dep_prepend(d):
-       if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
-               return ''
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
+        return ''
 
-       pn = d.getVar('PN', True)
-       deps = ''
+    pn = d.getVar('PN', True)
+    deps = ''
 
-       if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
-               return deps
-       deps += 'autoconf-native automake-native '
+    if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
+        return deps
+    deps += 'autoconf-native automake-native '
 
-       if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"):
-               deps += 'libtool-native '
-               if not bb.data.inherits_class('native', d) \
+    if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"):
+        deps += 'libtool-native '
+        if not bb.data.inherits_class('native', d) \
                         and not bb.data.inherits_class('nativesdk', d) \
                         and not bb.data.inherits_class('cross', d) \
                         and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
-                    deps += 'libtool-cross '
+            deps += 'libtool-cross '
 
-       return deps + 'gnu-config-native '
+    return deps + 'gnu-config-native '
 
 EXTRA_OEMAKE = ""
 
@@ -35,15 +35,15 @@ EXTRA_AUTORECONF = "--exclude=autopoint"
 export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}"
 
 def autotools_set_crosscompiling(d):
-       if not bb.data.inherits_class('native', d):
-               return " cross_compiling=yes"
-       return ""
+    if not bb.data.inherits_class('native', d):
+        return " cross_compiling=yes"
+    return ""
 
 def append_libtool_sysroot(d):
-       # Only supply libtool sysroot option for non-native packages
-       if not bb.data.inherits_class('native', d):
-               return '--with-libtool-sysroot=${STAGING_DIR_HOST}'
-       return ""
+    # Only supply libtool sysroot option for non-native packages
+    if not bb.data.inherits_class('native', d):
+        return '--with-libtool-sysroot=${STAGING_DIR_HOST}'
+    return ""
 
 # EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"
 
meta/classes/base.bbclass
index f69179943b308f5001cae222ebd82dca65cb6eff..f3587bcbefc92f30cefd7a0e49060cf902315306 100644
@@ -33,7 +33,7 @@ def oe_import(d):
 
 python oe_import_eh () {
     if isinstance(e, bb.event.ConfigParsed):
-       oe_import(e.data)
+        oe_import(e.data)
 }
 
 addhandler oe_import_eh
@@ -50,21 +50,20 @@ oe_runmake() {
 
 
 def base_dep_prepend(d):
-       #
-       # Ideally this will check a flag so we will operate properly in
-       # the case where host == build == target, for now we don't work in
-       # that case though.
-       #
-
-       deps = ""
-       # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or  not
-       # we need that built is the responsibility of the patch function / class, not
-       # the application.
-       if not d.getVar('INHIBIT_DEFAULT_DEPS'):
-               if (d.getVar('HOST_SYS', True) !=
-                   d.getVar('BUILD_SYS', True)):
-                       deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
-       return deps
+    #
+    # Ideally this will check a flag so we will operate properly in
+    # the case where host == build == target, for now we don't work in
+    # that case though.
+    #
+
+    deps = ""
+    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or  not
+    # we need that built is the responsibility of the patch function / class, not
+    # the application.
+    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
+        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
+    return deps
 
 BASEDEPENDS = "${@base_dep_prepend(d)}"
 
@@ -80,61 +79,61 @@ do_fetch[dirs] = "${DL_DIR}"
 do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
 python base_do_fetch() {
 
-       src_uri = (d.getVar('SRC_URI', True) or "").split()
-       if len(src_uri) == 0:
-               return
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    if len(src_uri) == 0:
+        return
 
-       localdata = bb.data.createCopy(d)
-       bb.data.update_data(localdata)
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
 
-        try:
-            fetcher = bb.fetch2.Fetch(src_uri, localdata)
-            fetcher.download()
-        except bb.fetch2.BBFetchException, e:
-            raise bb.build.FuncFailed(e)
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.download()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 do_unpack[cleandirs] = "${S}/patches"
 python base_do_unpack() {
-       src_uri = (d.getVar('SRC_URI', True) or "").split()
-       if len(src_uri) == 0:
-               return
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    if len(src_uri) == 0:
+        return
 
-       localdata = bb.data.createCopy(d)
-       bb.data.update_data(localdata)
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
 
-       rootdir = localdata.getVar('WORKDIR', True)
+    rootdir = localdata.getVar('WORKDIR', True)
 
-        try:
-            fetcher = bb.fetch2.Fetch(src_uri, localdata)
-            fetcher.unpack(rootdir)
-        except bb.fetch2.BBFetchException, e:
-            raise bb.build.FuncFailed(e)
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.unpack(rootdir)
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
 GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc"
 GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig"
 
 def generate_git_config(e):
-        from bb import data
+    from bb import data
 
-        if data.getVar('GIT_CORE_CONFIG', e.data, True):
-                gitconfig_path = e.data.getVar('GIT_CONFIG', True)
-                proxy_command = "    gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)
+    if data.getVar('GIT_CORE_CONFIG', e.data, True):
+        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
+        proxy_command = "    gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)
 
-                bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
-                if (os.path.exists(gitconfig_path)):
-                        os.remove(gitconfig_path)
+        bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
+        if (os.path.exists(gitconfig_path)):
+            os.remove(gitconfig_path)
 
-                f = open(gitconfig_path, 'w')
-                f.write("[core]\n")
-                ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
-                for ignore_host in ignore_hosts:
-                        f.write("    gitProxy = none for %s\n" % ignore_host)
-                f.write(proxy_command)
-                f.close
+        f = open(gitconfig_path, 'w')
+        f.write("[core]\n")
+        ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
+        for ignore_host in ignore_hosts:
+            f.write("    gitProxy = none for %s\n" % ignore_host)
+        f.write(proxy_command)
+        f.close
 
 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
@@ -205,69 +204,69 @@ def preferred_ml_updates(d):
 
 
 def get_layers_branch_rev(d):
-       layers = (d.getVar("BBLAYERS", True) or "").split()
-       layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
-               base_get_metadata_git_branch(i, None).strip(), \
-               base_get_metadata_git_revision(i, None)) \
-                       for i in layers]
-       i = len(layers_branch_rev)-1
-       p1 = layers_branch_rev[i].find("=")
-       s1 = layers_branch_rev[i][p1:]
-       while i > 0:
-               p2 = layers_branch_rev[i-1].find("=")
-               s2= layers_branch_rev[i-1][p2:]
-               if s1 == s2:
-                       layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
-                       i -= 1
-               else:
-                       i -= 1
-                       p1 = layers_branch_rev[i].find("=")
-                       s1= layers_branch_rev[i][p1:]
-       return layers_branch_rev
+    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
+        base_get_metadata_git_branch(i, None).strip(), \
+        base_get_metadata_git_revision(i, None)) \
+            for i in layers]
+    i = len(layers_branch_rev)-1
+    p1 = layers_branch_rev[i].find("=")
+    s1 = layers_branch_rev[i][p1:]
+    while i > 0:
+        p2 = layers_branch_rev[i-1].find("=")
+        s2= layers_branch_rev[i-1][p2:]
+        if s1 == s2:
+            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
+            i -= 1
+        else:
+            i -= 1
+            p1 = layers_branch_rev[i].find("=")
+            s1= layers_branch_rev[i][p1:]
+    return layers_branch_rev
 
 
 BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
 BUILDCFG_FUNCS[type] = "list"
 
 def buildcfg_vars(d):
-       statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
-       for var in statusvars:
-               value = d.getVar(var, True)
-               if value is not None:
-                       yield '%-17s = "%s"' % (var, value)
+    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
+    for var in statusvars:
+        value = d.getVar(var, True)
+        if value is not None:
+            yield '%-17s = "%s"' % (var, value)
 
 def buildcfg_neededvars(d):
-       needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
-       pesteruser = []
-       for v in needed_vars:
-               val = d.getVar(v, True)
-               if not val or val == 'INVALID':
-                       pesteruser.append(v)
+    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
+    pesteruser = []
+    for v in needed_vars:
+        val = d.getVar(v, True)
+        if not val or val == 'INVALID':
+            pesteruser.append(v)
 
-       if pesteruser:
-               bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
+    if pesteruser:
+        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
 
 addhandler base_eventhandler
 python base_eventhandler() {
-        if isinstance(e, bb.event.ConfigParsed):
-               e.data.setVar('BB_VERSION', bb.__version__)
-                generate_git_config(e)
-                pkgarch_mapping(e.data)
-                preferred_ml_updates(e.data)
-
-       if isinstance(e, bb.event.BuildStarted):
-               statuslines = []
-               for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data):
-                       g = globals()
-                       if func not in g:
-                               bb.warn("Build configuration function '%s' does not exist" % func)
-                       else:
-                               flines = g[func](e.data)
-                               if flines:
-                                       statuslines.extend(flines)
-
-               statusheader = e.data.getVar('BUILDCFG_HEADER', True)
-               bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
+    if isinstance(e, bb.event.ConfigParsed):
+        e.data.setVar('BB_VERSION', bb.__version__)
+        generate_git_config(e)
+        pkgarch_mapping(e.data)
+        preferred_ml_updates(e.data)
+
+    if isinstance(e, bb.event.BuildStarted):
+        statuslines = []
+        for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data):
+            g = globals()
+            if func not in g:
+                bb.warn("Build configuration function '%s' does not exist" % func)
+            else:
+                flines = g[func](e.data)
+                if flines:
+                    statuslines.extend(flines)
+
+        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 }
 
 addtask configure after do_patch
@@ -546,18 +545,18 @@ python do_cleansstate() {
 
 addtask cleanall after do_cleansstate
 python do_cleanall() {
-        src_uri = (d.getVar('SRC_URI', True) or "").split()
-        if len(src_uri) == 0:
-            return
-
-       localdata = bb.data.createCopy(d)
-       bb.data.update_data(localdata)
-
-        try:
-            fetcher = bb.fetch2.Fetch(src_uri, localdata)
-            fetcher.clean()
-        except bb.fetch2.BBFetchException, e:
-            raise bb.build.FuncFailed(e)
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    if len(src_uri) == 0:
+        return
+
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
+
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.clean()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 do_cleanall[nostamp] = "1"
 
meta/classes/boot-directdisk.bbclass
index 1c601c60c308198cc5b384c289afef79a691ff0c..d265485bb7e8f2d5b32c66ba3f976b6b0a74c515 100644
@@ -92,8 +92,8 @@ build_boot_dd() {
 } 
 
 python do_bootdirectdisk() {
-       bb.build.exec_func('build_syslinux_cfg', d)
-       bb.build.exec_func('build_boot_dd', d)
+    bb.build.exec_func('build_syslinux_cfg', d)
+    bb.build.exec_func('build_boot_dd', d)
 }
 
 addtask bootdirectdisk before do_build
meta/classes/bootimg.bbclass
index a4c0e8d93108700820e80c9769847f1f11940c72..11a29cdf2bc42d1cb7b5c0cb567fe4908f124a38 100644
@@ -42,15 +42,15 @@ EFI_CLASS = "${@base_contains("MACHINE_FEATURES", "efi", "grub-efi", "dummy", d)
 # contain "efi". This way legacy is supported by default if neither is
 # specified, maintaining the original behavior.
 def pcbios(d):
-       pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d)
-       if pcbios == "0":
-               pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d)
-       return pcbios
+    pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d)
+    if pcbios == "0":
+        pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d)
+    return pcbios
 
 def pcbios_class(d):
-       if d.getVar("PCBIOS", True) == "1":
-               return "syslinux"
-       return "dummy"
+    if d.getVar("PCBIOS", True) == "1":
+        return "syslinux"
+    return "dummy"
 
 PCBIOS = "${@pcbios(d)}"
 PCBIOS_CLASS = "${@pcbios_class(d)}"
@@ -181,12 +181,12 @@ build_hddimg() {
 }
 
 python do_bootimg() {
-       if d.getVar("PCBIOS", True) == "1":
-               bb.build.exec_func('build_syslinux_cfg', d)
-       if d.getVar("EFI", True) == "1":
-               bb.build.exec_func('build_grub_cfg', d)
-       bb.build.exec_func('build_hddimg', d)
-       bb.build.exec_func('build_iso', d)
+    if d.getVar("PCBIOS", True) == "1":
+        bb.build.exec_func('build_syslinux_cfg', d)
+    if d.getVar("EFI", True) == "1":
+        bb.build.exec_func('build_grub_cfg', d)
+    bb.build.exec_func('build_hddimg', d)
+    bb.build.exec_func('build_iso', d)
 }
 
 addtask bootimg before do_build
meta/classes/cpan-base.bbclass
index b4b7b81d8d58308d96c5191822d7923d28086a66..660c15f549cde84cae9519091de021547ec2c5cd 100644
@@ -11,28 +11,28 @@ PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}"
 
 # Determine the staged version of perl from the perl configuration file
 def get_perl_version(d):
-       import re
-       cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
-       try:
-               f = open(cfg, 'r')
-       except IOError:
-               return None
-       l = f.readlines();
-       f.close();
-       r = re.compile("^version='(\d*\.\d*\.\d*)'")
-       for s in l:
-               m = r.match(s)
-               if m:
-                       return m.group(1)
-       return None
+    import re
+    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
+    try:
+        f = open(cfg, 'r')
+    except IOError:
+        return None
+    l = f.readlines();
+    f.close();
+    r = re.compile("^version='(\d*\.\d*\.\d*)'")
+    for s in l:
+        m = r.match(s)
+        if m:
+            return m.group(1)
+    return None
 
 # Determine where the library directories are
 def perl_get_libdirs(d):
-       libdir = d.getVar('libdir', True)
-       if is_target(d) == "no":
-               libdir += '/perl-native'
-       libdir += '/perl'
-       return libdir
+    libdir = d.getVar('libdir', True)
+    if is_target(d) == "no":
+        libdir += '/perl-native'
+    libdir += '/perl'
+    return libdir
 
 def is_target(d):
     if not bb.data.inherits_class('native', d):
meta/classes/debian.bbclass
index bb4ae11eca70ed8ea7b8617cc2360559bfb3fc13..2484003e37d89b7c77ffe36faef1942e319f3393 100644
@@ -20,105 +20,105 @@ python () {
 }
 
 python debian_package_name_hook () {
-       import glob, copy, stat, errno, re
+    import glob, copy, stat, errno, re
 
-       pkgdest = d.getVar('PKGDEST', True)
-       packages = d.getVar('PACKAGES', True)
-       bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
-       lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
-       so_re = re.compile("lib.*\.so")
+    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES', True)
+    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
+    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
+    so_re = re.compile("lib.*\.so")
 
-       def socrunch(s):
-               s = s.lower().replace('_', '-')
-               m = re.match("^(.*)(.)\.so\.(.*)$", s)
-               if m is None:
-                       return None
-               if m.group(2) in '0123456789':
-                       bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
-               else:
-                       bin = m.group(1) + m.group(2) + m.group(3)
-               dev = m.group(1) + m.group(2)
-               return (bin, dev)
+    def socrunch(s):
+        s = s.lower().replace('_', '-')
+        m = re.match("^(.*)(.)\.so\.(.*)$", s)
+        if m is None:
+            return None
+        if m.group(2) in '0123456789':
+            bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
+        else:
+            bin = m.group(1) + m.group(2) + m.group(3)
+        dev = m.group(1) + m.group(2)
+        return (bin, dev)
 
-       def isexec(path):
-               try:
-                       s = os.stat(path)
-               except (os.error, AttributeError):
-                       return 0
-               return (s[stat.ST_MODE] & stat.S_IEXEC)
+    def isexec(path):
+        try:
+            s = os.stat(path)
+        except (os.error, AttributeError):
+            return 0
+        return (s[stat.ST_MODE] & stat.S_IEXEC)
 
-       def auto_libname(packages, orig_pkg):
-               sonames = []
-               has_bins = 0
-               has_libs = 0
-               pkg_dir = os.path.join(pkgdest, orig_pkg)
-               for root, dirs, files in os.walk(pkg_dir):
-                       if bin_re.match(root) and files:
-                               has_bins = 1
-                       if lib_re.match(root) and files:
-                               has_libs = 1
-                               for f in files:
-                                       if so_re.match(f):
-                                               fp = os.path.join(root, f)
-                                               cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
-                                               fd = os.popen(cmd)
-                                               lines = fd.readlines()
-                                               fd.close()
-                                               for l in lines:
-                                                       m = re.match("\s+SONAME\s+([^\s]*)", l)
-                                                       if m and not m.group(1) in sonames:
-                                                               sonames.append(m.group(1))
+    def auto_libname(packages, orig_pkg):
+        sonames = []
+        has_bins = 0
+        has_libs = 0
+        pkg_dir = os.path.join(pkgdest, orig_pkg)
+        for root, dirs, files in os.walk(pkg_dir):
+            if bin_re.match(root) and files:
+                has_bins = 1
+            if lib_re.match(root) and files:
+                has_libs = 1
+                for f in files:
+                    if so_re.match(f):
+                        fp = os.path.join(root, f)
+                        cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
+                        fd = os.popen(cmd)
+                        lines = fd.readlines()
+                        fd.close()
+                        for l in lines:
+                            m = re.match("\s+SONAME\s+([^\s]*)", l)
+                            if m and not m.group(1) in sonames:
+                                sonames.append(m.group(1))
 
-               bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
-               soname = None
-               if len(sonames) == 1:
-                       soname = sonames[0]
-               elif len(sonames) > 1:
-                       lead = d.getVar('LEAD_SONAME', True)
-                       if lead:
-                               r = re.compile(lead)
-                               filtered = []
-                               for s in sonames:
-                                       if r.match(s):
-                                               filtered.append(s)
-                               if len(filtered) == 1:
-                                       soname = filtered[0]
-                               elif len(filtered) > 1:
-                                       bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
-                               else:
-                                       bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
-                       else:
-                               bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))
+        bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
+        soname = None
+        if len(sonames) == 1:
+            soname = sonames[0]
+        elif len(sonames) > 1:
+            lead = d.getVar('LEAD_SONAME', True)
+            if lead:
+                r = re.compile(lead)
+                filtered = []
+                for s in sonames:
+                    if r.match(s):
+                        filtered.append(s)
+                if len(filtered) == 1:
+                    soname = filtered[0]
+                elif len(filtered) > 1:
+                    bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
+                else:
+                    bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
+            else:
+                bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))
 
-               if has_libs and not has_bins and soname:
-                       soname_result = socrunch(soname)
-                       if soname_result:
-                               (pkgname, devname) = soname_result
-                               for pkg in packages.split():
-                                       if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
-                                               continue
-                                       debian_pn = d.getVar('DEBIANNAME_' + pkg)
-                                       if debian_pn:
-                                               newpkg = debian_pn
-                                       elif pkg == orig_pkg:
-                                               newpkg = pkgname
-                                       else:
-                                               newpkg = pkg.replace(orig_pkg, devname, 1)
-                                       mlpre=d.getVar('MLPREFIX', True)
-                                       if mlpre:
-                                               if not newpkg.find(mlpre) == 0:
-                                                       newpkg = mlpre + newpkg
-                                       if newpkg != pkg:
-                                               d.setVar('PKG_' + pkg, newpkg)
+        if has_libs and not has_bins and soname:
+            soname_result = socrunch(soname)
+            if soname_result:
+                (pkgname, devname) = soname_result
+                for pkg in packages.split():
+                    if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
+                        continue
+                    debian_pn = d.getVar('DEBIANNAME_' + pkg)
+                    if debian_pn:
+                        newpkg = debian_pn
+                    elif pkg == orig_pkg:
+                        newpkg = pkgname
+                    else:
+                        newpkg = pkg.replace(orig_pkg, devname, 1)
+                    mlpre=d.getVar('MLPREFIX', True)
+                    if mlpre:
+                        if not newpkg.find(mlpre) == 0:
+                            newpkg = mlpre + newpkg
+                    if newpkg != pkg:
+                        d.setVar('PKG_' + pkg, newpkg)
 
-       # reversed sort is needed when some package is substring of another
-       # ie in ncurses we get without reverse sort: 
-       # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
-       # and later
-       # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
-       # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
-       for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
-               auto_libname(packages, pkg)
+    # reversed sort is needed when some package is substring of another
+    # ie in ncurses we get without reverse sort: 
+    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
+    # and later
+    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
+    # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
+    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
+        auto_libname(packages, pkg)
 }
 
 EXPORT_FUNCTIONS package_name_hook
meta/classes/gconf.bbclass
index fb9f701b37d4e9af7bfbab2a27a5a21cb3861960..7a3ee3c28c4aada58e26eb3a89de21198a720d39 100644
@@ -39,33 +39,33 @@ done
 }
 
 python populate_packages_append () {
-       import re
-       packages = d.getVar('PACKAGES', True).split()
-       pkgdest =  d.getVar('PKGDEST', True)
-       
-       for pkg in packages:
-               schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
-               schemas = []
-               schema_re = re.compile(".*\.schemas$")
-               if os.path.exists(schema_dir):
-                       for f in os.listdir(schema_dir):
-                               if schema_re.match(f):
-                                       schemas.append(f)
-               if schemas != []:
-                       bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
-                       d.setVar('SCHEMA_FILES', " ".join(schemas))
-                       postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
-                       if not postinst:
-                               postinst = '#!/bin/sh\n'
-                       postinst += d.getVar('gconf_postinst', True)
-                       d.setVar('pkg_postinst_%s' % pkg, postinst)
-                       prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
-                       if not prerm:
-                               prerm = '#!/bin/sh\n'
-                       prerm += d.getVar('gconf_prerm', True)
-                       d.setVar('pkg_prerm_%s' % pkg, prerm)
-                       rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
-                       rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf'
-                       d.setVar("RDEPENDS_%s" % pkg, rdepends)
+    import re
+    packages = d.getVar('PACKAGES', True).split()
+    pkgdest =  d.getVar('PKGDEST', True)
+    
+    for pkg in packages:
+        schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
+        schemas = []
+        schema_re = re.compile(".*\.schemas$")
+        if os.path.exists(schema_dir):
+            for f in os.listdir(schema_dir):
+                if schema_re.match(f):
+                    schemas.append(f)
+        if schemas != []:
+            bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
+            d.setVar('SCHEMA_FILES', " ".join(schemas))
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+            if not postinst:
+                postinst = '#!/bin/sh\n'
+            postinst += d.getVar('gconf_postinst', True)
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
+            if not prerm:
+                prerm = '#!/bin/sh\n'
+            prerm += d.getVar('gconf_prerm', True)
+            d.setVar('pkg_prerm_%s' % pkg, prerm)
+            rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
+            rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf'
+            d.setVar("RDEPENDS_%s" % pkg, rdepends)
 
 }
meta/classes/gnomebase.bbclass
index 80b78be48de0c3ce8bf60bae94d2bc21c349efb7..19c7f7143d2a605638c21cb3c70dcc53f4417324 100644
@@ -1,7 +1,7 @@
 def gnome_verdir(v):
-       import re
-       m = re.match("^([0-9]+)\.([0-9]+)", v)
-       return "%s.%s" % (m.group(1), m.group(2))
+    import re
+    m = re.match("^([0-9]+)\.([0-9]+)", v)
+    return "%s.%s" % (m.group(1), m.group(2))
 
 GNOME_COMPRESS_TYPE ?= "bz2"
 SECTION ?= "x11/gnome"
meta/classes/gtk-icon-cache.bbclass
index 60e3401f4bfbac5edd9d435dde0e3bbe56a9c747..01fb2f39464a6b2c2abf0fd895daaa6ec74bad01 100644
@@ -28,31 +28,31 @@ done
 }
 
 python populate_packages_append () {
-       packages = d.getVar('PACKAGES', True).split()
-       pkgdest =  d.getVar('PKGDEST', True)
-       
-       for pkg in packages:
-               icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
-               if not os.path.exists(icon_dir):
-                       continue
-
-               bb.note("adding hicolor-icon-theme dependency to %s" % pkg)     
-               rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
-               rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
-               d.setVar('RDEPENDS_%s' % pkg, rdepends)
-       
-               bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
-               
-               postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
-               if not postinst:
-                       postinst = '#!/bin/sh\n'
-               postinst += d.getVar('gtk_icon_cache_postinst', True)
-               d.setVar('pkg_postinst_%s' % pkg, postinst)
-
-               postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
-               if not postrm:
-                       postrm = '#!/bin/sh\n'
-               postrm += d.getVar('gtk_icon_cache_postrm', True)
-               d.setVar('pkg_postrm_%s' % pkg, postrm)
+    packages = d.getVar('PACKAGES', True).split()
+    pkgdest =  d.getVar('PKGDEST', True)
+    
+    for pkg in packages:
+        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
+        if not os.path.exists(icon_dir):
+            continue
+
+        bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
+        rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
+        rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
+        d.setVar('RDEPENDS_%s' % pkg, rdepends)
+    
+        bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
+        
+        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        if not postinst:
+            postinst = '#!/bin/sh\n'
+        postinst += d.getVar('gtk_icon_cache_postinst', True)
+        d.setVar('pkg_postinst_%s' % pkg, postinst)
+
+        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        if not postrm:
+            postrm = '#!/bin/sh\n'
+        postrm += d.getVar('gtk_icon_cache_postrm', True)
+        d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
 
meta/classes/image.bbclass
index f1b829fe1854a446999ee0f1a5231406854feba2..1799bf18652371a634728b8705d3dbba65d715a9 100644
@@ -164,28 +164,28 @@ do_rootfs[umask] = "022"
 
 fakeroot do_rootfs () {
        #set -x
-    # When use the rpm incremental image generation, don't remove the rootfs
-    if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then
-        rm -rf ${IMAGE_ROOTFS}
-    elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then
-        # Move the rpmlib back
-        if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then
-                mkdir -p ${IMAGE_ROOTFS}/var/lib/
-                mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/
-        fi
-    fi
+       # When use the rpm incremental image generation, don't remove the rootfs
+       if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then
+               rm -rf ${IMAGE_ROOTFS}
+       elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then
+               # Move the rpmlib back
+               if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then
+                       mkdir -p ${IMAGE_ROOTFS}/var/lib/
+                       mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/
+               fi
+       fi
        rm -rf ${MULTILIB_TEMP_ROOTFS}
        mkdir -p ${IMAGE_ROOTFS}
        mkdir -p ${DEPLOY_DIR_IMAGE}
 
        cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOY_DIR_IMAGE}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt || true
 
-    # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by
-    # the previous build
+       # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by
+       # the previous build
        if [ "${USE_DEVFS}" != "1" -a ! -r "${IMAGE_ROOTFS}/dev" ]; then
                for devtable in ${@get_devtable_list(d)}; do
-            # Always return ture since there maybe already one when use the
-            # incremental image generation
+                       # Always return ture since there maybe already one when use the
+                       # incremental image generation
                        makedevs -r ${IMAGE_ROOTFS} -D $devtable
                done
        fi
@@ -398,7 +398,7 @@ rootfs_trim_schemas () {
                # Need this in case no files exist
                if [ -e $schema ]; then
                        oe-trim-schemas $schema > $schema.new
-                       mv $schema.new $schema
+                       mv $schema.new $schema
                fi
        done
 }
meta/classes/image_types.bbclass
index 727d8d6f8f0d52b580e1946d8ff0b6d5ef298ebc..d286eeaea932839ef170fbc06eb3828b834544a6 100644
@@ -48,7 +48,7 @@ def get_imagecmds(d):
         types.remove("live")
 
     if d.getVar('IMAGE_LINK_NAME', True):
-        cmds += "      rm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*"
+        cmds += "\trm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*"
 
     for type in types:
         ccmd = []
meta/classes/kernel-arch.bbclass
index 4a30192c15655a0c42ab1ee55382b8113604e27b..6446504845c92a5fde354b9f8a9e53af0986690c 100644
@@ -7,38 +7,38 @@
 valid_archs = "alpha cris ia64 \
                i386 x86 \
                m68knommu m68k ppc powerpc powerpc64 ppc64  \
-              sparc sparc64 \
+               sparc sparc64 \
                arm \
                m32r mips \
-              sh sh64 um h8300   \
-              parisc s390  v850 \
-              avr32 blackfin \
-              microblaze"
+               sh sh64 um h8300   \
+               parisc s390  v850 \
+               avr32 blackfin \
+               microblaze"
 
 def map_kernel_arch(a, d):
-       import re
+    import re
 
-       valid_archs = d.getVar('valid_archs', True).split()
+    valid_archs = d.getVar('valid_archs', True).split()
 
-       if   re.match('(i.86|athlon|x86.64)$', a):      return 'x86'
-       elif re.match('armeb$', a):                     return 'arm'
-       elif re.match('mips(el|64|64el)$', a):          return 'mips'
-       elif re.match('p(pc|owerpc)(|64)', a):          return 'powerpc'
-       elif re.match('sh(3|4)$', a):                   return 'sh'
-       elif re.match('bfin', a):                       return 'blackfin'
-       elif re.match('microblazeel', a):               return 'microblaze'
-        elif a in valid_archs:                         return a
-       else:
-               bb.error("cannot map '%s' to a linux kernel architecture" % a)
+    if   re.match('(i.86|athlon|x86.64)$', a):  return 'x86'
+    elif re.match('armeb$', a):                 return 'arm'
+    elif re.match('mips(el|64|64el)$', a):      return 'mips'
+    elif re.match('p(pc|owerpc)(|64)', a):      return 'powerpc'
+    elif re.match('sh(3|4)$', a):               return 'sh'
+    elif re.match('bfin', a):                   return 'blackfin'
+    elif re.match('microblazeel', a):           return 'microblaze'
+    elif a in valid_archs:                      return a
+    else:
+        bb.error("cannot map '%s' to a linux kernel architecture" % a)
 
 export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"
 
 def map_uboot_arch(a, d):
-       import re
+    import re
 
-       if   re.match('p(pc|owerpc)(|64)', a): return 'ppc'
-       elif re.match('i.86$', a): return 'x86'
-       return a
+    if   re.match('p(pc|owerpc)(|64)', a): return 'ppc'
+    elif re.match('i.86$', a): return 'x86'
+    return a
 
 export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"
 
meta/classes/kernel-yocto.bbclass
index 48c89742572a6a732649d395b792e24a61aa3172..ab59fc8f13b29ea0aa0b686266f69affbaa90879 100644
@@ -6,41 +6,41 @@ SRCTREECOVEREDTASKS += "do_kernel_link_vmlinux do_kernel_configme do_validate_br
 # returns local (absolute) path names for all valid patches in the
 # src_uri
 def find_patches(d):
-       patches=src_patches(d)
-       patch_list=[]
-       for p in patches:
-           _, _, local, _, _, _ = bb.decodeurl(p)
-           patch_list.append(local)
+    patches = src_patches(d)
+    patch_list=[]
+    for p in patches:
+        _, _, local, _, _, _ = bb.decodeurl(p)
+        patch_list.append(local)
 
-       return patch_list
+    return patch_list
 
 # returns all the elements from the src uri that are .scc files
 def find_sccs(d):
-       sources=src_patches(d, True)
-       sources_list=[]
-       for s in sources:
-               base, ext = os.path.splitext(os.path.basename(s))
-               if ext and ext in ('.scc' '.cfg'):
-                       sources_list.append(s)
-               elif base and base in 'defconfig':
-                       sources_list.append(s)
+    sources=src_patches(d, True)
+    sources_list=[]
+    for s in sources:
+        base, ext = os.path.splitext(os.path.basename(s))
+        if ext and ext in ('.scc' '.cfg'):
+            sources_list.append(s)
+        elif base and base in 'defconfig':
+            sources_list.append(s)
 
-       return sources_list
+    return sources_list
 
 # this is different from find_patches, in that it returns a colon separated
 # list of <patches>:<subdir> instead of just a list of patches
 def find_urls(d):
-       patches=src_patches(d)
-       fetch = bb.fetch2.Fetch([], d)
-       patch_list=[]
-       for p in patches:
-               _, _, local, _, _, _ = bb.decodeurl(p)
-               for url in fetch.urls:
-                       urldata = fetch.ud[url]
-                       if urldata.localpath == local:
-                               patch_list.append(local+':'+urldata.path)
-
-        return patch_list
+    patches=src_patches(d)
+    fetch = bb.fetch2.Fetch([], d)
+    patch_list=[]
+    for p in patches:
+        _, _, local, _, _, _ = bb.decodeurl(p)
+        for url in fetch.urls:
+            urldata = fetch.ud[url]
+            if urldata.localpath == local:
+                patch_list.append(local+':'+urldata.path)
+
+    return patch_list
 
 
 do_patch() {
meta/classes/kernel.bbclass
index fd744e7ea3dfe80701051bc8402610c74fcedaff..08b5e61fbe54d9a8dd34144459acb966cf0672e5 100644
@@ -310,177 +310,177 @@ module_conf_sco = "alias bt-proto-2 sco"
 module_conf_rfcomm = "alias bt-proto-3 rfcomm"
 
 python populate_packages_prepend () {
-       def extract_modinfo(file):
-               import tempfile, re, subprocess
-               tempfile.tempdir = d.getVar("WORKDIR", True)
-               tf = tempfile.mkstemp()
-               tmpfile = tf[1]
-               cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
-               subprocess.call(cmd, shell=True)
-               f = open(tmpfile)
-               l = f.read().split("\000")
-               f.close()
-               os.close(tf[0])
-               os.unlink(tmpfile)
-               exp = re.compile("([^=]+)=(.*)")
-               vals = {}
-               for i in l:
-                       m = exp.match(i)
-                       if not m:
-                               continue
-                       vals[m.group(1)] = m.group(2)
-               return vals
-       
-       def parse_depmod():
-               import re
-
-               dvar = d.getVar('PKGD', True)
-               if not dvar:
-                       bb.error("PKGD not defined")
-                       return
-
-               kernelver = d.getVar('KERNEL_VERSION', True)
-               kernelver_stripped = kernelver
-               m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
-               if m:
-                       kernelver_stripped = m.group(1)
-               path = d.getVar("PATH", True)
-
-               cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped)
-               f = os.popen(cmd, 'r')
-
-               deps = {}
-               pattern0 = "^(.*\.k?o):..*$"
-               pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
-               pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
-               pattern3 = "^\t(.*\.k?o)\s*\\\$"
-               pattern4 = "^\t(.*\.k?o)\s*$"
-
-               line = f.readline()
-               while line:
-                       if not re.match(pattern0, line):
-                               line = f.readline()
-                               continue
-                       m1 = re.match(pattern1, line)
-                       if m1:
-                               deps[m1.group(1)] = m1.group(2).split()
-                       else:
-                               m2 = re.match(pattern2, line)
-                               if m2:
-                                       deps[m2.group(1)] = m2.group(2).split()
-                                       line = f.readline()
-                                       m3 = re.match(pattern3, line)
-                                       while m3:
-                                               deps[m2.group(1)].extend(m3.group(1).split())
-                                               line = f.readline()
-                                               m3 = re.match(pattern3, line)
-                                       m4 = re.match(pattern4, line)
-                                       deps[m2.group(1)].extend(m4.group(1).split())
-                       line = f.readline()
-               f.close()
-               return deps
-       
-       def get_dependencies(file, pattern, format):
-               # file no longer includes PKGD
-               file = file.replace(d.getVar('PKGD', True) or '', '', 1)
-               # instead is prefixed with /lib/modules/${KERNEL_VERSION}
-               file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)
-
-               if module_deps.has_key(file):
-                       import re
-                       dependencies = []
-                       for i in module_deps[file]:
-                               m = re.match(pattern, os.path.basename(i))
-                               if not m:
-                                       continue
-                               on = legitimize_package_name(m.group(1))
-                               dependency_pkg = format % on
-                               dependencies.append(dependency_pkg)
-                       return dependencies
-               return []
-
-       def frob_metadata(file, pkg, pattern, format, basename):
-               import re
-               vals = extract_modinfo(file)
-
-               dvar = d.getVar('PKGD', True)
-
-               # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
-               # appropriate modprobe commands to the postinst
-               autoload = d.getVar('module_autoload_%s' % basename, True)
-               if autoload:
-                       name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename)
-                       f = open(name, 'w')
-                       for m in autoload.split():
-                               f.write('%s\n' % m)
-                       f.close()
-                       postinst = d.getVar('pkg_postinst_%s' % pkg, True)
-                       if not postinst:
-                               bb.fatal("pkg_postinst_%s not defined" % pkg)
-                       postinst += d.getVar('autoload_postinst_fragment', True) % autoload
-                       d.setVar('pkg_postinst_%s' % pkg, postinst)
-
-               # Write out any modconf fragment
-               modconf = d.getVar('module_conf_%s' % basename, True)
-               if modconf:
-                       name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
-                       f = open(name, 'w')
-                       f.write("%s\n" % modconf)
-                       f.close()
-
-               files = d.getVar('FILES_%s' % pkg, True)
-               files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
-               d.setVar('FILES_%s' % pkg, files)
-
-               if vals.has_key("description"):
-                       old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
-                       d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
-
-               rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
-               if rdepends_str:
-                       rdepends = rdepends_str.split()
-               else:
-                       rdepends = []
-               rdepends.extend(get_dependencies(file, pattern, format))
-               d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))
-
-       module_deps = parse_depmod()
-       module_regex = '^(.*)\.k?o$'
-       module_pattern = 'kernel-module-%s'
-
-       postinst = d.getVar('pkg_postinst_modules', True)
-       postrm = d.getVar('pkg_postrm_modules', True)
-       do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
-       do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
-       do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
-       do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))
-
-       # If modules-load.d and modprobe.d are empty at this point, remove them to
-       # avoid warnings. removedirs only raises an OSError if an empty
-       # directory cannot be removed.
-       dvar = d.getVar('PKGD', True)
-       for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
-               if len(os.listdir(dir)) == 0:
-                       os.rmdir(dir)
-
-       import re
-       metapkg = "kernel-modules"
-       d.setVar('ALLOW_EMPTY_' + metapkg, "1")
-       d.setVar('FILES_' + metapkg, "")
-       blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ]
-       for l in module_deps.values():
-               for i in l:
-                       pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
-                       blacklist.append(pkg)
-       metapkg_rdepends = []
-       packages = d.getVar('PACKAGES', True).split()
-       for pkg in packages[1:]:
-               if not pkg in blacklist and not pkg in metapkg_rdepends:
-                       metapkg_rdepends.append(pkg)
-       d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
-       d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
-       packages.append(metapkg)
-       d.setVar('PACKAGES', ' '.join(packages))
+    def extract_modinfo(file):
+        import tempfile, re, subprocess
+        tempfile.tempdir = d.getVar("WORKDIR", True)
+        tf = tempfile.mkstemp()
+        tmpfile = tf[1]
+        cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
+        subprocess.call(cmd, shell=True)
+        f = open(tmpfile)
+        l = f.read().split("\000")
+        f.close()
+        os.close(tf[0])
+        os.unlink(tmpfile)
+        exp = re.compile("([^=]+)=(.*)")
+        vals = {}
+        for i in l:
+            m = exp.match(i)
+            if not m:
+                continue
+            vals[m.group(1)] = m.group(2)
+        return vals
+    
+    def parse_depmod():
+        import re
+
+        dvar = d.getVar('PKGD', True)
+        if not dvar:
+            bb.error("PKGD not defined")
+            return
+
+        kernelver = d.getVar('KERNEL_VERSION', True)
+        kernelver_stripped = kernelver
+        m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
+        if m:
+            kernelver_stripped = m.group(1)
+        path = d.getVar("PATH", True)
+
+        cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped)
+        f = os.popen(cmd, 'r')
+
+        deps = {}
+        pattern0 = "^(.*\.k?o):..*$"
+        pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
+        pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
+        pattern3 = "^\t(.*\.k?o)\s*\\\$"
+        pattern4 = "^\t(.*\.k?o)\s*$"
+
+        line = f.readline()
+        while line:
+            if not re.match(pattern0, line):
+                line = f.readline()
+                continue
+            m1 = re.match(pattern1, line)
+            if m1:
+                deps[m1.group(1)] = m1.group(2).split()
+            else:
+                m2 = re.match(pattern2, line)
+                if m2:
+                    deps[m2.group(1)] = m2.group(2).split()
+                    line = f.readline()
+                    m3 = re.match(pattern3, line)
+                    while m3:
+                        deps[m2.group(1)].extend(m3.group(1).split())
+                        line = f.readline()
+                        m3 = re.match(pattern3, line)
+                    m4 = re.match(pattern4, line)
+                    deps[m2.group(1)].extend(m4.group(1).split())
+            line = f.readline()
+        f.close()
+        return deps
+    
+    def get_dependencies(file, pattern, format):
+        # file no longer includes PKGD
+        file = file.replace(d.getVar('PKGD', True) or '', '', 1)
+        # instead is prefixed with /lib/modules/${KERNEL_VERSION}
+        file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)
+
+        if module_deps.has_key(file):
+            import re
+            dependencies = []
+            for i in module_deps[file]:
+                m = re.match(pattern, os.path.basename(i))
+                if not m:
+                    continue
+                on = legitimize_package_name(m.group(1))
+                dependency_pkg = format % on
+                dependencies.append(dependency_pkg)
+            return dependencies
+        return []
+
+    def frob_metadata(file, pkg, pattern, format, basename):
+        import re
+        vals = extract_modinfo(file)
+
+        dvar = d.getVar('PKGD', True)
+
+        # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
+        # appropriate modprobe commands to the postinst
+        autoload = d.getVar('module_autoload_%s' % basename, True)
+        if autoload:
+            name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename)
+            f = open(name, 'w')
+            for m in autoload.split():
+                f.write('%s\n' % m)
+            f.close()
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            if not postinst:
+                bb.fatal("pkg_postinst_%s not defined" % pkg)
+            postinst += d.getVar('autoload_postinst_fragment', True) % autoload
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+
+        # Write out any modconf fragment
+        modconf = d.getVar('module_conf_%s' % basename, True)
+        if modconf:
+            name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
+            f = open(name, 'w')
+            f.write("%s\n" % modconf)
+            f.close()
+
+        files = d.getVar('FILES_%s' % pkg, True)
+        files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
+        d.setVar('FILES_%s' % pkg, files)
+
+        if vals.has_key("description"):
+            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
+            d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
+
+        rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
+        if rdepends_str:
+            rdepends = rdepends_str.split()
+        else:
+            rdepends = []
+        rdepends.extend(get_dependencies(file, pattern, format))
+        d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))
+
+    module_deps = parse_depmod()
+    module_regex = '^(.*)\.k?o$'
+    module_pattern = 'kernel-module-%s'
+
+    postinst = d.getVar('pkg_postinst_modules', True)
+    postrm = d.getVar('pkg_postrm_modules', True)
+    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
+    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
+    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
+    do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))
+
+    # If modules-load.d and modprobe.d are empty at this point, remove them to
+    # avoid warnings. os.rmdir only raises an OSError here if an empty
+    # directory cannot be removed.
+    dvar = d.getVar('PKGD', True)
+    for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
+        if len(os.listdir(dir)) == 0:
+            os.rmdir(dir)
+
+    import re
+    metapkg = "kernel-modules"
+    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
+    d.setVar('FILES_' + metapkg, "")
+    blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ]
+    for l in module_deps.values():
+        for i in l:
+            pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
+            blacklist.append(pkg)
+    metapkg_rdepends = []
+    packages = d.getVar('PACKAGES', True).split()
+    for pkg in packages[1:]:
+        if not pkg in blacklist and not pkg in metapkg_rdepends:
+            metapkg_rdepends.append(pkg)
+    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
+    d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
+    packages.append(metapkg)
+    d.setVar('PACKAGES', ' '.join(packages))
 }
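
The conversion above covers the kernel module packaging helpers in one sweep. For orientation while reviewing: extract_modinfo() dumps a module's .modinfo ELF section with objcopy and splits the result on NUL bytes, while parse_depmod() derives inter-module dependencies by scanning `depmod -n` output. A minimal standalone sketch of the modinfo half, assuming binutils objcopy on PATH and a hypothetical module path (written for Python 3 for brevity; the class code itself is Python 2 era):

    import os, subprocess, tempfile

    def read_modinfo(module_path):
        # Dump the .modinfo ELF section to a scratch file, then split on NUL bytes.
        fd, tmpname = tempfile.mkstemp()
        os.close(fd)
        try:
            subprocess.check_call(["objcopy", "-j", ".modinfo", "-O", "binary",
                                   module_path, tmpname])
            with open(tmpname, "rb") as f:
                fields = f.read().decode("utf-8", "replace").split("\0")
        finally:
            os.unlink(tmpname)
        # Each field looks like "license=GPL", "depends=mac80211", "vermagic=...".
        return dict(field.split("=", 1) for field in fields if "=" in field)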
 
 # Support checking the kernel size since some kernels need to reside in partitions
index 8145d64e29df7ab93f1affaf1466622acf4413f8..06d520164e9c6fc65034ea6b8e3aa94db1c5ac7c 100644 (file)
@@ -23,13 +23,13 @@ def get_libc_fpu_setting(bb, d):
     return ""
 
 python populate_packages_prepend () {
-       if d.getVar('DEBIAN_NAMES', True):
-               bpn = d.getVar('BPN', True)
-               d.setVar('PKG_'+bpn, 'libc6')
-               d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
-               d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg')
-               # For backward compatibility with old -dbg package
-               d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg')
-               d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg')
-               d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg')
+    if d.getVar('DEBIAN_NAMES', True):
+        bpn = d.getVar('BPN', True)
+        d.setVar('PKG_'+bpn, 'libc6')
+        d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
+        d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg')
+        # For backward compatibility with old -dbg package
+        d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg')
+        d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg')
+        d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg')
 }
index 9df3c17116beeafd3610eb258038e27576f76e70..e3214a68a235bd048d0263965583c9754c44c9fd 100644 (file)
@@ -28,10 +28,10 @@ python __anonymous () {
 
             if r.match(target_arch):
                 depends = d.getVar("DEPENDS", True)
-               if use_cross_localedef == "1" :
-                       depends = "%s cross-localedef-native" % depends
-               else:
-                       depends = "%s qemu-native" % depends
+                if use_cross_localedef == "1" :
+                    depends = "%s cross-localedef-native" % depends
+                else:
+                    depends = "%s qemu-native" % depends
                 d.setVar("DEPENDS", depends)
                 d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
                 break
@@ -118,270 +118,270 @@ do_collect_bins_from_locale_tree() {
 inherit qemu
 
 python package_do_split_gconvs () {
-       import os, re
-       if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
-               bb.note("package requested not splitting gconvs")
-               return
-
-       if not d.getVar('PACKAGES', True):
-               return
-
-       mlprefix = d.getVar("MLPREFIX", True) or ""
-
-       bpn = d.getVar('BPN', True)
-       libdir = d.getVar('libdir', True)
-       if not libdir:
-               bb.error("libdir not defined")
-               return
-       datadir = d.getVar('datadir', True)
-       if not datadir:
-               bb.error("datadir not defined")
-               return
-
-       gconv_libdir = base_path_join(libdir, "gconv")
-       charmap_dir = base_path_join(datadir, "i18n", "charmaps")
-       locales_dir = base_path_join(datadir, "i18n", "locales")
-       binary_locales_dir = base_path_join(libdir, "locale")
-
-       def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
-               deps = []
-               f = open(fn, "r")
-               c_re = re.compile('^copy "(.*)"')
-               i_re = re.compile('^include "(\w+)".*')
-               for l in f.readlines():
-                       m = c_re.match(l) or i_re.match(l)
-                       if m:
-                               dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
-                               if not dp in deps:
-                                       deps.append(dp)
-               f.close()
-               if deps != []:
-                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
-               if bpn != 'glibc':
-                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
-
-       do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
-               description='gconv module for character set %s', hook=calc_gconv_deps, \
-               extra_depends=bpn+'-gconv')
-
-       def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
-               deps = []
-               f = open(fn, "r")
-               c_re = re.compile('^copy "(.*)"')
-               i_re = re.compile('^include "(\w+)".*')
-               for l in f.readlines():
-                       m = c_re.match(l) or i_re.match(l)
-                       if m:
-                               dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
-                               if not dp in deps:
-                                       deps.append(dp)
-               f.close()
-               if deps != []:
-                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
-               if bpn != 'glibc':
-                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
-
-       do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
-               description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
-
-       def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
-               deps = []
-               f = open(fn, "r")
-               c_re = re.compile('^copy "(.*)"')
-               i_re = re.compile('^include "(\w+)".*')
-               for l in f.readlines():
-                       m = c_re.match(l) or i_re.match(l)
-                       if m:
-                               dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
-                               if not dp in deps:
-                                       deps.append(dp)
-               f.close()
-               if deps != []:
-                       d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
-               if bpn != 'glibc':
-                       d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
-
-       do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
-               description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
-       d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
-
-       use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
-
-       dot_re = re.compile("(.*)\.(.*)")
-
-       # Read in supported locales and associated encodings
-       supported = {}
-       with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
-               for line in f.readlines():
-                       try:
-                               locale, charset = line.rstrip().split()
-                       except ValueError:
-                               continue
-                       supported[locale] = charset
-
-       # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
-       to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
-       if not to_generate or to_generate == 'all':
-               to_generate = supported.keys()
-       else:
-               to_generate = to_generate.split()
-               for locale in to_generate:
-                       if locale not in supported:
-                               if '.' in locale:
-                                       charset = locale.split('.')[1]
-                               else:
-                                       charset = 'UTF-8'
-                                       bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
-                               supported[locale] = charset
-
-       def output_locale_source(name, pkgname, locale, encoding):
-               d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
-               (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
-               d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
-               % (locale, encoding, locale))
-               d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
-               (locale, encoding, locale))
-
-       def output_locale_binary_rdepends(name, pkgname, locale, encoding):
-               m = re.match("(.*)\.(.*)", name)
-               if m:
-                       libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
-               else:
-                       libc_name = name
-               d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
-                       % (mlprefix+bpn, libc_name)))
-
-       commands = {}
-
-       def output_locale_binary(name, pkgname, locale, encoding):
-               treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
-               ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
-               path = d.getVar("PATH", True)
-               i18npath = base_path_join(treedir, datadir, "i18n")
-               gconvpath = base_path_join(treedir, "iconvdata")
-               outputpath = base_path_join(treedir, libdir, "locale")
-
-               use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
-               if use_cross_localedef == "1":
-                       target_arch = d.getVar('TARGET_ARCH', True)
-                       locale_arch_options = { \
-                               "arm":     " --uint32-align=4 --little-endian ", \
-                               "sh4":     " --uint32-align=4 --big-endian ",    \
-                               "powerpc": " --uint32-align=4 --big-endian ",    \
-                               "powerpc64": " --uint32-align=4 --big-endian ",  \
-                               "mips":    " --uint32-align=4 --big-endian ",    \
-                               "mips64":  " --uint32-align=4 --big-endian ",    \
-                               "mipsel":  " --uint32-align=4 --little-endian ", \
-                               "mips64el":" --uint32-align=4 --little-endian ", \
-                               "i586":    " --uint32-align=4 --little-endian ", \
-                               "i686":    " --uint32-align=4 --little-endian ", \
-                               "x86_64":  " --uint32-align=4 --little-endian "  }
-
-                       if target_arch in locale_arch_options:
-                               localedef_opts = locale_arch_options[target_arch]
-                       else:
-                               bb.error("locale_arch_options not found for target_arch=" + target_arch)
-                               raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")
-
-                       localedef_opts += " --force --old-style --no-archive --prefix=%s \
-                               --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
-                               % (treedir, treedir, datadir, locale, encoding, outputpath, name)
-
-                       cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
-                               (path, i18npath, gconvpath, localedef_opts)
-               else: # earlier slower qemu way 
-                       qemu = qemu_target_binary(d) 
-                       localedef_opts = "--force --old-style --no-archive --prefix=%s \
-                               --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
-                               % (treedir, datadir, locale, encoding, name)
-
-                       qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
-                       if not qemu_options:
-                               qemu_options = d.getVar('QEMU_OPTIONS', True)
-
-                       cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-                               -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
-                               (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
-
-               commands["%s/%s" % (outputpath, name)] = cmd
-
-               bb.note("generating locale %s (%s)" % (locale, encoding))
-
-       def output_locale(name, locale, encoding):
-               pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
-               d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
-               d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
-               rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
-               m = re.match("(.*)_(.*)", name)
-               if m:
-                       rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
-               d.setVar('RPROVIDES_%s' % pkgname, rprovides)
-
-               if use_bin == "compile":
-                       output_locale_binary_rdepends(name, pkgname, locale, encoding)
-                       output_locale_binary(name, pkgname, locale, encoding)
-               elif use_bin == "precompiled":
-                       output_locale_binary_rdepends(name, pkgname, locale, encoding)
-               else:
-                       output_locale_source(name, pkgname, locale, encoding)
-
-       if use_bin == "compile":
-               bb.note("preparing tree for binary locale generation")
-               bb.build.exec_func("do_prep_locale_tree", d)
-
-       utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
-       encodings = {}
-       for locale in to_generate:
-               charset = supported[locale]
-               if utf8_only and charset != 'UTF-8':
-                       continue
-
-               m = dot_re.match(locale)
-               if m:
-                       base = m.group(1)
-               else:
-                       base = locale
-
-               # Precompiled locales are kept as is, obeying SUPPORTED, while
-               # others are adjusted, ensuring that the non-suffixed locales
-               # are utf-8, while the suffixed are not.
-               if use_bin == "precompiled":
-                       output_locale(locale, base, charset)
-               else:
-                       if charset == 'UTF-8':
-                               output_locale(base, base, charset)
-                       else:
-                               output_locale('%s.%s' % (base, charset), base, charset)
-
-       if use_bin == "compile":
-               makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
-               m = open(makefile, "w")
-               m.write("all: %s\n\n" % " ".join(commands.keys()))
-               for cmd in commands:
-                       m.write(cmd + ":\n")
-                       m.write("       " + commands[cmd] + "\n\n")
-               m.close()
-               d.setVar("B", os.path.dirname(makefile))
-               d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
-               bb.note("Executing binary locale generation makefile")
-               bb.build.exec_func("oe_runmake", d)
-               bb.note("collecting binary locales from locale tree")
-               bb.build.exec_func("do_collect_bins_from_locale_tree", d)
-               do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
-                       output_pattern=bpn+'-binary-localedata-%s', \
-                       description='binary locale definition for %s', extra_depends='', allow_dirs=True)
-       elif use_bin == "precompiled":
-               do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
-                       output_pattern=bpn+'-binary-localedata-%s', \
-                       description='binary locale definition for %s', extra_depends='', allow_dirs=True)
-       else:
-               bb.note("generation of binary locales disabled. this may break i18n!")
+    import os, re
+    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
+        bb.note("package requested not splitting gconvs")
+        return
+
+    if not d.getVar('PACKAGES', True):
+        return
+
+    mlprefix = d.getVar("MLPREFIX", True) or ""
+
+    bpn = d.getVar('BPN', True)
+    libdir = d.getVar('libdir', True)
+    if not libdir:
+        bb.error("libdir not defined")
+        return
+    datadir = d.getVar('datadir', True)
+    if not datadir:
+        bb.error("datadir not defined")
+        return
+
+    gconv_libdir = base_path_join(libdir, "gconv")
+    charmap_dir = base_path_join(datadir, "i18n", "charmaps")
+    locales_dir = base_path_join(datadir, "i18n", "locales")
+    binary_locales_dir = base_path_join(libdir, "locale")
+
+    def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
+        deps = []
+        f = open(fn, "r")
+        c_re = re.compile('^copy "(.*)"')
+        i_re = re.compile('^include "(\w+)".*')
+        for l in f.readlines():
+            m = c_re.match(l) or i_re.match(l)
+            if m:
+                dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
+                if not dp in deps:
+                    deps.append(dp)
+        f.close()
+        if deps != []:
+            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
+        if bpn != 'glibc':
+            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
+
+    do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
+        description='gconv module for character set %s', hook=calc_gconv_deps, \
+        extra_depends=bpn+'-gconv')
+
+    def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
+        deps = []
+        f = open(fn, "r")
+        c_re = re.compile('^copy "(.*)"')
+        i_re = re.compile('^include "(\w+)".*')
+        for l in f.readlines():
+            m = c_re.match(l) or i_re.match(l)
+            if m:
+                dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
+                if not dp in deps:
+                    deps.append(dp)
+        f.close()
+        if deps != []:
+            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
+        if bpn != 'glibc':
+            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
+
+    do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
+        description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
+
+    def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
+        deps = []
+        f = open(fn, "r")
+        c_re = re.compile('^copy "(.*)"')
+        i_re = re.compile('^include "(\w+)".*')
+        for l in f.readlines():
+            m = c_re.match(l) or i_re.match(l)
+            if m:
+                dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
+                if not dp in deps:
+                    deps.append(dp)
+        f.close()
+        if deps != []:
+            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
+        if bpn != 'glibc':
+            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
+
+    do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
+        description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
+    d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
+
+    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
+
+    dot_re = re.compile("(.*)\.(.*)")
+
+    # Read in supported locales and associated encodings
+    supported = {}
+    with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
+        for line in f.readlines():
+            try:
+                locale, charset = line.rstrip().split()
+            except ValueError:
+                continue
+            supported[locale] = charset
+
+    # The GLIBC_GENERATE_LOCALES variable specifies which locales are generated; empty or "all" means all locales
+    to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
+    if not to_generate or to_generate == 'all':
+        to_generate = supported.keys()
+    else:
+        to_generate = to_generate.split()
+        for locale in to_generate:
+            if locale not in supported:
+                if '.' in locale:
+                    charset = locale.split('.')[1]
+                else:
+                    charset = 'UTF-8'
+                    bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
+                supported[locale] = charset
+
+    def output_locale_source(name, pkgname, locale, encoding):
+        d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
+        (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
+        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
+        % (locale, encoding, locale))
+        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
+        (locale, encoding, locale))
+
+    def output_locale_binary_rdepends(name, pkgname, locale, encoding):
+        m = re.match("(.*)\.(.*)", name)
+        if m:
+            libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
+        else:
+            libc_name = name
+        d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
+            % (mlprefix+bpn, libc_name)))
+
+    commands = {}
+
+    def output_locale_binary(name, pkgname, locale, encoding):
+        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
+        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
+        path = d.getVar("PATH", True)
+        i18npath = base_path_join(treedir, datadir, "i18n")
+        gconvpath = base_path_join(treedir, "iconvdata")
+        outputpath = base_path_join(treedir, libdir, "locale")
+
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
+        if use_cross_localedef == "1":
+            target_arch = d.getVar('TARGET_ARCH', True)
+            locale_arch_options = { \
+                "arm":     " --uint32-align=4 --little-endian ", \
+                "sh4":     " --uint32-align=4 --big-endian ",    \
+                "powerpc": " --uint32-align=4 --big-endian ",    \
+                "powerpc64": " --uint32-align=4 --big-endian ",  \
+                "mips":    " --uint32-align=4 --big-endian ",    \
+                "mips64":  " --uint32-align=4 --big-endian ",    \
+                "mipsel":  " --uint32-align=4 --little-endian ", \
+                "mips64el":" --uint32-align=4 --little-endian ", \
+                "i586":    " --uint32-align=4 --little-endian ", \
+                "i686":    " --uint32-align=4 --little-endian ", \
+                "x86_64":  " --uint32-align=4 --little-endian "  }
+
+            if target_arch in locale_arch_options:
+                localedef_opts = locale_arch_options[target_arch]
+            else:
+                bb.error("locale_arch_options not found for target_arch=" + target_arch)
+                raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")
+
+            localedef_opts += " --force --old-style --no-archive --prefix=%s \
+                --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
+                % (treedir, treedir, datadir, locale, encoding, outputpath, name)
+
+            cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
+                (path, i18npath, gconvpath, localedef_opts)
+        else: # earlier, slower qemu way
+            qemu = qemu_target_binary(d)
+            localedef_opts = "--force --old-style --no-archive --prefix=%s \
+                --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
+                % (treedir, datadir, locale, encoding, name)
+
+            qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
+            if not qemu_options:
+                qemu_options = d.getVar('QEMU_OPTIONS', True)
+
+            cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
+                -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
+                (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
+
+        commands["%s/%s" % (outputpath, name)] = cmd
+
+        bb.note("generating locale %s (%s)" % (locale, encoding))
+
+    def output_locale(name, locale, encoding):
+        pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
+        d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
+        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
+        rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
+        m = re.match("(.*)_(.*)", name)
+        if m:
+            rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
+        d.setVar('RPROVIDES_%s' % pkgname, rprovides)
+
+        if use_bin == "compile":
+            output_locale_binary_rdepends(name, pkgname, locale, encoding)
+            output_locale_binary(name, pkgname, locale, encoding)
+        elif use_bin == "precompiled":
+            output_locale_binary_rdepends(name, pkgname, locale, encoding)
+        else:
+            output_locale_source(name, pkgname, locale, encoding)
+
+    if use_bin == "compile":
+        bb.note("preparing tree for binary locale generation")
+        bb.build.exec_func("do_prep_locale_tree", d)
+
+    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
+    encodings = {}
+    for locale in to_generate:
+        charset = supported[locale]
+        if utf8_only and charset != 'UTF-8':
+            continue
+
+        m = dot_re.match(locale)
+        if m:
+            base = m.group(1)
+        else:
+            base = locale
+
+        # Precompiled locales are kept as is, obeying SUPPORTED, while
+        # others are adjusted, ensuring that the non-suffixed locales
+        # are utf-8, while the suffixed are not.
+        if use_bin == "precompiled":
+            output_locale(locale, base, charset)
+        else:
+            if charset == 'UTF-8':
+                output_locale(base, base, charset)
+            else:
+                output_locale('%s.%s' % (base, charset), base, charset)
+
+    if use_bin == "compile":
+        makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
+        m = open(makefile, "w")
+        m.write("all: %s\n\n" % " ".join(commands.keys()))
+        for cmd in commands:
+            m.write(cmd + ":\n")
+            m.write("\t" + commands[cmd] + "\n\n")
+        m.close()
+        d.setVar("B", os.path.dirname(makefile))
+        d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
+        bb.note("Executing binary locale generation makefile")
+        bb.build.exec_func("oe_runmake", d)
+        bb.note("collecting binary locales from locale tree")
+        bb.build.exec_func("do_collect_bins_from_locale_tree", d)
+        do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
+            output_pattern=bpn+'-binary-localedata-%s', \
+            description='binary locale definition for %s', extra_depends='', allow_dirs=True)
+    elif use_bin == "precompiled":
+        do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
+            output_pattern=bpn+'-binary-localedata-%s', \
+            description='binary locale definition for %s', extra_depends='', allow_dirs=True)
+    else:
+        bb.note("generation of binary locales disabled. this may break i18n!")
 
 }
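
The three calc_*_deps hooks reindented above all share one shape: scan a gconv, charmap or locale source file for copy/include directives, map each referenced name to a package, and record the result as RDEPENDS. Factored out, the core loop looks roughly like the following sketch, where the prefix argument and the simplified name mangling stand in for the mlprefix/bpn handling and legitimize_package_name used by the real hooks:

    import re

    def referenced_packages(path, prefix):
        # Collect 'copy "X"' and 'include "X"' targets as package names.
        c_re = re.compile(r'^copy "(.*)"')
        i_re = re.compile(r'^include "(\w+)".*')
        deps = []
        with open(path) as f:
            for line in f:
                m = c_re.match(line) or i_re.match(line)
                if m:
                    dep = prefix + m.group(1).lower().replace("_", "-")
                    if dep not in deps:
                        deps.append(dep)
        return deps

    # e.g. referenced_packages("EBCDIC-AT-DE", "glibc-charmap-")  (hypothetical input)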
 
 # We want to do this indirection so that we can safely 'return'
 # from the called function even though we're prepending
 python populate_packages_prepend () {
-       bb.build.exec_func('package_do_split_gconvs', d)
+    bb.build.exec_func('package_do_split_gconvs', d)
 }
 
index 4e25cf82b5381becc1713340d2435c6d0f421be2..03e413b3b8d7f03fcaf1919181d1289b0660d30f 100644 (file)
@@ -385,6 +385,6 @@ do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
 ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; "
 
 python do_populate_lic_setscene () {
-       sstate_setscene(d)
+    sstate_setscene(d)
 }
 addtask do_populate_lic_setscene
index 62650be675bea586a39e39f98f61a6d63b6a4f31..502d400cbb6a475247b4efd13df7c86f096fcd21 100644 (file)
@@ -2,76 +2,76 @@ METADATA_BRANCH ?= "${@base_detect_branch(d)}"
 METADATA_REVISION ?= "${@base_detect_revision(d)}"
 
 def base_detect_revision(d):
-       path = base_get_scmbasepath(d)
+    path = base_get_scmbasepath(d)
 
-       scms = [base_get_metadata_git_revision, \
-                       base_get_metadata_svn_revision]
+    scms = [base_get_metadata_git_revision, \
+            base_get_metadata_svn_revision]
 
-       for scm in scms:
-               rev = scm(path, d)
-               if rev <> "<unknown>":
-                       return rev
+    for scm in scms:
+        rev = scm(path, d)
+        if rev <> "<unknown>":
+            return rev
 
-       return "<unknown>"      
+    return "<unknown>"
 
 def base_detect_branch(d):
-       path = base_get_scmbasepath(d)
+    path = base_get_scmbasepath(d)
 
-       scms = [base_get_metadata_git_branch]
+    scms = [base_get_metadata_git_branch]
 
-       for scm in scms:
-               rev = scm(path, d)
-               if rev <> "<unknown>":
-                       return rev.strip()
+    for scm in scms:
+        rev = scm(path, d)
+        if rev <> "<unknown>":
+            return rev.strip()
 
-       return "<unknown>"      
+    return "<unknown>"
 
 def base_get_scmbasepath(d):
-       return d.getVar( 'COREBASE', True)
+    return d.getVar( 'COREBASE', True)
 
 def base_get_metadata_monotone_branch(path, d):
-       monotone_branch = "<unknown>"
-       try:
-               monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
-               if monotone_branch.startswith( "database" ):
-                       monotone_branch_words = monotone_branch.split()
-                       monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
-       except:
-               pass
-       return monotone_branch
+    monotone_branch = "<unknown>"
+    try:
+        monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
+        if monotone_branch.startswith( "database" ):
+            monotone_branch_words = monotone_branch.split()
+            monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
+    except:
+        pass
+    return monotone_branch
 
 def base_get_metadata_monotone_revision(path, d):
-       monotone_revision = "<unknown>"
-       try:
-               monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
-               if monotone_revision.startswith( "format_version" ):
-                       monotone_revision_words = monotone_revision.split()
-                       monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
-       except IOError:
-               pass
-       return monotone_revision
+    monotone_revision = "<unknown>"
+    try:
+        monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
+        if monotone_revision.startswith( "format_version" ):
+            monotone_revision_words = monotone_revision.split()
+            monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
+    except IOError:
+        pass
+    return monotone_revision
 
 def base_get_metadata_svn_revision(path, d):
-       revision = "<unknown>"
-       try:
-               revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
-       except IOError:
-               pass
-       return revision
+    revision = "<unknown>"
+    try:
+        revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
+    except IOError:
+        pass
+    return revision
 
 def base_get_metadata_git_branch(path, d):
-       branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()
+    branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()
 
-       if len(branch) != 0:
-               return branch
-       return "<unknown>"
+    if len(branch) != 0:
+        return branch
+    return "<unknown>"
 
 def base_get_metadata_git_revision(path, d):
-       f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
-       data = f.read()
-       if f.close() is None:        
-               rev = data.split(" ")[0]
-               if len(rev) != 0:
-                       return rev
-       return "<unknown>"
+    f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
+    data = f.read()
+    if f.close() is None:
+        rev = data.split(" ")[0]
+        if len(rev) != 0:
+            return rev
+    return "<unknown>"
 
index 6302747dc10cf86937f9d2cc5053befe9388c993..b669418286c40c3b63964d6f3609ae0ffd305539 100644 (file)
@@ -29,32 +29,32 @@ fi
 }
 
 python populate_packages_append () {
-       import re
-       packages = d.getVar('PACKAGES', True).split()
-       pkgdest =  d.getVar('PKGDEST', True)
+    import re
+    packages = d.getVar('PACKAGES', True).split()
+    pkgdest =  d.getVar('PKGDEST', True)
 
-       for pkg in packages:
-               mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
-               mimes = []
-               mime_re = re.compile(".*\.xml$")
-               if os.path.exists(mime_dir):
-                       for f in os.listdir(mime_dir):
-                               if mime_re.match(f):
-                                       mimes.append(f)
-               if mimes:
-                       bb.note("adding mime postinst and postrm scripts to %s" % pkg)
-                       postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
-                       if not postinst:
-                               postinst = '#!/bin/sh\n'
-                       postinst += d.getVar('mime_postinst', True)
-                       d.setVar('pkg_postinst_%s' % pkg, postinst)
-                       postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
-                       if not postrm:
-                               postrm = '#!/bin/sh\n'
-                       postrm += d.getVar('mime_postrm', True)
-                       d.setVar('pkg_postrm_%s' % pkg, postrm)
-                       bb.note("adding shared-mime-info-data dependency to %s" % pkg)
-                       rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" ) 
-                       rdepends.append("shared-mime-info-data")
-                       d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends))
+    for pkg in packages:
+        mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
+        mimes = []
+        mime_re = re.compile(".*\.xml$")
+        if os.path.exists(mime_dir):
+            for f in os.listdir(mime_dir):
+                if mime_re.match(f):
+                    mimes.append(f)
+        if mimes:
+            bb.note("adding mime postinst and postrm scripts to %s" % pkg)
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+            if not postinst:
+                postinst = '#!/bin/sh\n'
+            postinst += d.getVar('mime_postinst', True)
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+            postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+            if not postrm:
+                postrm = '#!/bin/sh\n'
+            postrm += d.getVar('mime_postrm', True)
+            d.setVar('pkg_postrm_%s' % pkg, postrm)
+            bb.note("adding shared-mime-info-data dependency to %s" % pkg)
+            rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
+            rdepends.append("shared-mime-info-data")
+            d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends))
 }
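
The reindented block above also shows a pattern that recurs throughout these classes: fetch the per-package scriptlet, fall back to a fresh '#!/bin/sh' header if unset, append a fragment, and write it back. Isolated into a helper it would look roughly like this (the helper name is hypothetical; d is the usual BitBake datastore):

    def append_scriptlet(d, pkg, var, fragment):
        # Append a shell fragment to a per-package scriptlet such as
        # pkg_postinst_<pkg>, creating the scriptlet if it does not exist yet.
        script = d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var, True)
        if not script:
            script = '#!/bin/sh\n'
        d.setVar('%s_%s' % (var, pkg), script + fragment)

    # e.g. append_scriptlet(d, pkg, 'pkg_postinst', d.getVar('mime_postinst', True))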
index dfd42117c5bcc4de634c4e427319847dfb4e97ea..a51e955325eaad7b47a93365a49f80c561c54ed7 100644 (file)
@@ -25,8 +25,8 @@
 #    The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
 #    a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
 #
-# h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any 
-#    depenedencies found. Also stores the package name so anyone else using this library 
+# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
+#    dependencies found. Also stores the package name so anyone else using this library
 #    knows which package to depend on.
 #
 # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
@@ -35,7 +35,7 @@
 #
 # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
 #
-# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later 
+# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
 #    packaging steps
 
 inherit packagedata
@@ -52,112 +52,112 @@ ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
 PACKAGE_DEPENDS += "rpm-native"
 
 def legitimize_package_name(s):
-       """
-       Make sure package names are legitimate strings
-       """
-       import re
+    """
+    Make sure package names are legitimate strings
+    """
+    import re
 
-       def fixutf(m):
-               cp = m.group(1)
-               if cp:
-                       return ('\u%s' % cp).decode('unicode_escape').encode('utf-8')
+    def fixutf(m):
+        cp = m.group(1)
+        if cp:
+            return ('\u%s' % cp).decode('unicode_escape').encode('utf-8')
 
-       # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
-       s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
+    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
+    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
 
-       # Remaining package name validity fixes
-       return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
+    # Remaining package name validity fixes
+    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
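
A few illustrative inputs and outputs for the function as converted, under the Python 2 semantics it was written for:

    legitimize_package_name("ISO-8859-1")    # -> "iso-8859-1"
    legitimize_package_name("sr_RS@latin")   # -> "sr-rs+latin"
    legitimize_package_name("foo<U0041>bar") # -> "fooabar"  (<U0041> decodes to "A")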
 
 def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False):
-       """
-       Used in .bb files to split up dynamically generated subpackages of a 
-       given package, usually plugins or modules.
-       """
-
-       ml = d.getVar("MLPREFIX", True)
-       if ml:
-               if not output_pattern.startswith(ml):
-                       output_pattern = ml + output_pattern
-
-               newdeps = []
-               for dep in (extra_depends or "").split():
-                       if dep.startswith(ml):
-                               newdeps.append(dep)
-                       else:
-                               newdeps.append(ml + dep)
-               if newdeps:
-                       extra_depends = " ".join(newdeps)
-
-       dvar = d.getVar('PKGD', True)
-
-       packages = d.getVar('PACKAGES', True).split()
-
-       if postinst:
-               postinst = '#!/bin/sh\n' + postinst + '\n'
-       if postrm:
-               postrm = '#!/bin/sh\n' + postrm + '\n'
-       if not recursive:
-               objs = os.listdir(dvar + root)
-       else:
-               objs = []
-               for walkroot, dirs, files in os.walk(dvar + root):
-                       for file in files:
-                               relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
-                               if relpath:
-                                       objs.append(relpath)
-
-       if extra_depends == None:
-               extra_depends = d.getVar("PN", True)
-
-       for o in sorted(objs):
-               import re, stat
-               if match_path:
-                       m = re.match(file_regex, o)
-               else:
-                       m = re.match(file_regex, os.path.basename(o))
-               
-               if not m:
-                       continue
-               f = os.path.join(dvar + root, o)
-               mode = os.lstat(f).st_mode
-               if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
-                       continue
-               on = legitimize_package_name(m.group(1))
-               pkg = output_pattern % on
-               if not pkg in packages:
-                       if prepend:
-                               packages = [pkg] + packages
-                       else:
-                               packages.append(pkg)
-               oldfiles = d.getVar('FILES_' + pkg, True)
-               if not oldfiles:
-                       the_files = [os.path.join(root, o)]
-                       if aux_files_pattern:
-                               if type(aux_files_pattern) is list:
-                                       for fp in aux_files_pattern:
-                                               the_files.append(fp % on)       
-                               else:
-                                       the_files.append(aux_files_pattern % on)
-                       if aux_files_pattern_verbatim:
-                               if type(aux_files_pattern_verbatim) is list:
-                                       for fp in aux_files_pattern_verbatim:
-                                               the_files.append(fp % m.group(1))       
-                               else:
-                                       the_files.append(aux_files_pattern_verbatim % m.group(1))
-                       d.setVar('FILES_' + pkg, " ".join(the_files))
-                       if extra_depends != '':
-                               d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
-                       d.setVar('DESCRIPTION_' + pkg, description % on)
-                       if postinst:
-                               d.setVar('pkg_postinst_' + pkg, postinst)
-                       if postrm:
-                               d.setVar('pkg_postrm_' + pkg, postrm)
-               else:
-                       d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
-               if callable(hook):
-                       hook(f, pkg, file_regex, output_pattern, m.group(1))
-
-       d.setVar('PACKAGES', ' '.join(packages))
+    """
+    Used in .bb files to split up dynamically generated subpackages of a
+    given package, usually plugins or modules.
+    """
+
+    ml = d.getVar("MLPREFIX", True)
+    if ml:
+        if not output_pattern.startswith(ml):
+            output_pattern = ml + output_pattern
+
+        newdeps = []
+        for dep in (extra_depends or "").split():
+            if dep.startswith(ml):
+                newdeps.append(dep)
+            else:
+                newdeps.append(ml + dep)
+        if newdeps:
+            extra_depends = " ".join(newdeps)
+
+    dvar = d.getVar('PKGD', True)
+
+    packages = d.getVar('PACKAGES', True).split()
+
+    if postinst:
+        postinst = '#!/bin/sh\n' + postinst + '\n'
+    if postrm:
+        postrm = '#!/bin/sh\n' + postrm + '\n'
+    if not recursive:
+        objs = os.listdir(dvar + root)
+    else:
+        objs = []
+        for walkroot, dirs, files in os.walk(dvar + root):
+            for file in files:
+                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
+                if relpath:
+                    objs.append(relpath)
+
+    if extra_depends == None:
+        extra_depends = d.getVar("PN", True)
+
+    for o in sorted(objs):
+        import re, stat
+        if match_path:
+            m = re.match(file_regex, o)
+        else:
+            m = re.match(file_regex, os.path.basename(o))
+
+        if not m:
+            continue
+        f = os.path.join(dvar + root, o)
+        mode = os.lstat(f).st_mode
+        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
+            continue
+        on = legitimize_package_name(m.group(1))
+        pkg = output_pattern % on
+        if not pkg in packages:
+            if prepend:
+                packages = [pkg] + packages
+            else:
+                packages.append(pkg)
+        oldfiles = d.getVar('FILES_' + pkg, True)
+        if not oldfiles:
+            the_files = [os.path.join(root, o)]
+            if aux_files_pattern:
+                if type(aux_files_pattern) is list:
+                    for fp in aux_files_pattern:
+                        the_files.append(fp % on)
+                else:
+                    the_files.append(aux_files_pattern % on)
+            if aux_files_pattern_verbatim:
+                if type(aux_files_pattern_verbatim) is list:
+                    for fp in aux_files_pattern_verbatim:
+                        the_files.append(fp % m.group(1))
+                else:
+                    the_files.append(aux_files_pattern_verbatim % m.group(1))
+            d.setVar('FILES_' + pkg, " ".join(the_files))
+            if extra_depends != '':
+                d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
+            d.setVar('DESCRIPTION_' + pkg, description % on)
+            if postinst:
+                d.setVar('pkg_postinst_' + pkg, postinst)
+            if postrm:
+                d.setVar('pkg_postrm_' + pkg, postrm)
+        else:
+            d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
+        if callable(hook):
+            hook(f, pkg, file_regex, output_pattern, m.group(1))
+
+    d.setVar('PACKAGES', ' '.join(packages))
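
As a usage reminder while reviewing the reindentation: recipes and classes invoke do_split_packages() from a populate_packages_prepend along the following illustrative lines; the plugin directory, regex and package pattern below are hypothetical:

    python populate_packages_prepend () {
        do_split_packages(d, root='/usr/lib/myapp/plugins',
                          file_regex='^lib(.*)\.so$',
                          output_pattern='myapp-plugin-%s',
                          description='MyApp plugin for %s',
                          extra_depends='')
    }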
 
 PACKAGE_DEPENDS += "file-native"
 
@@ -195,7 +195,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
 
     # We ignore kernel modules, we don't generate debug info files.
     if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
-       return 1
+        return 1
 
     newmode = None
     if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
@@ -205,7 +205,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
 
     # We need to extract the debug src information here...
     if debugsrcdir:
-       subprocess.call("%s'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (pathprefix, debugedit, workparentdir, debugsrcdir, sourcefile, file), shell=True)
+        subprocess.call("%s'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (pathprefix, debugedit, workparentdir, debugsrcdir, sourcefile, file), shell=True)
 
     bb.mkdirhier(os.path.dirname(debugfile))
 
@@ -316,826 +316,826 @@ def runstrip(file, elftype, d):
 #
 
 def get_package_mapping (pkg, d):
-       import oe.packagedata
+    import oe.packagedata
 
-       data = oe.packagedata.read_subpkgdata(pkg, d)
-       key = "PKG_%s" % pkg
+    data = oe.packagedata.read_subpkgdata(pkg, d)
+    key = "PKG_%s" % pkg
 
-       if key in data:
-               return data[key]
+    if key in data:
+        return data[key]
 
-       return pkg
+    return pkg
 
 def runtime_mapping_rename (varname, d):
-       #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))  
+    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
 
-       new_depends = []
-       deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "")
-       for depend in deps:
-               # Have to be careful with any version component of the depend
-               new_depend = get_package_mapping(depend, d)
-               if deps[depend]:
-                       new_depends.append("%s (%s)" % (new_depend, deps[depend]))
-               else:
-                       new_depends.append(new_depend)
+    new_depends = []
+    deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "")
+    for depend in deps:
+        # Have to be careful with any version component of the depend
+        new_depend = get_package_mapping(depend, d)
+        if deps[depend]:
+            new_depends.append("%s (%s)" % (new_depend, deps[depend]))
+        else:
+            new_depends.append(new_depend)
 
-       d.setVar(varname, " ".join(new_depends) or None)
+    d.setVar(varname, " ".join(new_depends) or None)
 
-       #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
 
 #
 # Package functions suitable for inclusion in PACKAGEFUNCS
 #
 
 python package_get_auto_pr() {
-       # per recipe PRSERV_HOST PRSERV_PORT
-       pn = d.getVar('PN', True)
-       host = d.getVar("PRSERV_HOST_" + pn, True)
-       port = d.getVar("PRSERV_PORT_" + pn, True)
-       if not (host is None):
-               d.setVar("PRSERV_HOST", host)
-       if not (port is None):
-               d.setVar("PRSERV_PORT", port)
-       if d.getVar('USE_PR_SERV', True) != "0":
-               try:
-                       auto_pr=prserv_get_pr_auto(d)
-               except Exception as e:
-                       bb.fatal("Can NOT get PRAUTO, exception %s" %  str(e))
-                       return
-               if auto_pr is None:
-                       if d.getVar('PRSERV_LOCKDOWN', True):
-                               bb.fatal("Can NOT get PRAUTO from lockdown exported file")
-                       else:
-                               bb.fatal("Can NOT get PRAUTO from remote PR service")
-                       return 
-               d.setVar('PRAUTO',str(auto_pr))
+    # per recipe PRSERV_HOST PRSERV_PORT
+    pn = d.getVar('PN', True)
+    host = d.getVar("PRSERV_HOST_" + pn, True)
+    port = d.getVar("PRSERV_PORT_" + pn, True)
+    if not (host is None):
+        d.setVar("PRSERV_HOST", host)
+    if not (port is None):
+        d.setVar("PRSERV_PORT", port)
+    if d.getVar('USE_PR_SERV', True) != "0":
+        try:
+            auto_pr=prserv_get_pr_auto(d)
+        except Exception as e:
+            bb.fatal("Can NOT get PRAUTO, exception %s" %  str(e))
+            return
+        if auto_pr is None:
+            if d.getVar('PRSERV_LOCKDOWN', True):
+                bb.fatal("Can NOT get PRAUTO from lockdown exported file")
+            else:
+                bb.fatal("Can NOT get PRAUTO from remote PR service")
+            return
+        d.setVar('PRAUTO',str(auto_pr))
 }
 
 python package_do_split_locales() {
-       if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
-               bb.debug(1, "package requested not splitting locales")
-               return
-
-       packages = (d.getVar('PACKAGES', True) or "").split()
-
-       datadir = d.getVar('datadir', True)
-       if not datadir:
-               bb.note("datadir not defined")
-               return
-
-       dvar = d.getVar('PKGD', True)
-       pn = d.getVar('PN', True)
-
-       if pn + '-locale' in packages:
-               packages.remove(pn + '-locale')
-
-       localedir = os.path.join(dvar + datadir, 'locale')
-
-       if not os.path.isdir(localedir):
-               bb.debug(1, "No locale files in this package")
-               return
-
-       locales = os.listdir(localedir)
-
-       summary = d.getVar('SUMMARY', True) or pn
-       description = d.getVar('DESCRIPTION', True) or "" 
-        locale_section = d.getVar('LOCALE_SECTION', True)
-       mlprefix = d.getVar('MLPREFIX', True) or ""
-       for l in sorted(locales):
-               ln = legitimize_package_name(l)
-               pkg = pn + '-locale-' + ln
-               packages.append(pkg)
-               d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
-               d.setVar('RDEPENDS_' + pkg, '%s %svirtual-locale-%s' % (pn, mlprefix, ln))
-               d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
-               d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
-               d.setVar('DESCRIPTION_' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
-               if locale_section:
-                       d.setVar('SECTION_' + pkg, locale_section)
-
-       d.setVar('PACKAGES', ' '.join(packages))
-
-       # Disabled by RP 18/06/07
-       # Wildcards aren't supported in debian
-       # They break with ipkg since glibc-locale* will mean that 
-       # glibc-localedata-translit* won't install as a dependency
-       # for some other package which breaks meta-toolchain
-       # Probably breaks since virtual-locale- isn't provided anywhere
-       #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or d.getVar('RDEPENDS', True) or "").split()
-       #rdep.append('%s-locale*' % pn)
-       #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
+    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
+        bb.debug(1, "package requested not splitting locales")
+        return
+
+    packages = (d.getVar('PACKAGES', True) or "").split()
+
+    datadir = d.getVar('datadir', True)
+    if not datadir:
+        bb.note("datadir not defined")
+        return
+
+    dvar = d.getVar('PKGD', True)
+    pn = d.getVar('PN', True)
+
+    if pn + '-locale' in packages:
+        packages.remove(pn + '-locale')
+
+    localedir = os.path.join(dvar + datadir, 'locale')
+
+    if not os.path.isdir(localedir):
+        bb.debug(1, "No locale files in this package")
+        return
+
+    locales = os.listdir(localedir)
+
+    summary = d.getVar('SUMMARY', True) or pn
+    description = d.getVar('DESCRIPTION', True) or ""
+    locale_section = d.getVar('LOCALE_SECTION', True)
+    mlprefix = d.getVar('MLPREFIX', True) or ""
+    for l in sorted(locales):
+        ln = legitimize_package_name(l)
+        pkg = pn + '-locale-' + ln
+        packages.append(pkg)
+        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
+        d.setVar('RDEPENDS_' + pkg, '%s %svirtual-locale-%s' % (pn, mlprefix, ln))
+        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
+        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
+        d.setVar('DESCRIPTION_' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
+        if locale_section:
+            d.setVar('SECTION_' + pkg, locale_section)
+
+    d.setVar('PACKAGES', ' '.join(packages))
+
+    # Disabled by RP 18/06/07
+    # Wildcards aren't supported in debian
+    # They break with ipkg since glibc-locale* will mean that
+    # glibc-localedata-translit* won't install as a dependency
+    # for some other package which breaks meta-toolchain
+    # Probably breaks since virtual-locale- isn't provided anywhere
+    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or d.getVar('RDEPENDS', True) or "").split()
+    #rdep.append('%s-locale*' % pn)
+    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
 }
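
package_do_split_locales turns each directory under ${datadir}/locale into its own ${PN}-locale-<name> package. A standalone sketch of that name mapping, using a simplified stand-in for OE-core's legitimize_package_name (the real helper also decodes <Uxxxx> escapes); the locale names are hypothetical:

    import re

    def legitimize_package_name(s):
        # Simplified stand-in: lower-case and replace disallowed characters
        return re.sub(r'[^a-z0-9.+-]', '-', s.lower())

    pn = "gtk+"
    for l in sorted(["de", "en_GB", "pt_BR"]):
        print(pn + "-locale-" + legitimize_package_name(l))
    # gtk+-locale-de
    # gtk+-locale-en-gb
    # gtk+-locale-pt-br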
 
 python perform_packagecopy () {
-       import subprocess
-       dest = d.getVar('D', True)
-       dvar = d.getVar('PKGD', True)
+    import subprocess
+    dest = d.getVar('D', True)
+    dvar = d.getVar('PKGD', True)
 
-       bb.mkdirhier(dvar)
+    bb.mkdirhier(dvar)
 
-       # Start by package population by taking a copy of the installed 
-       # files to operate on
-       subprocess.call('rm -rf %s/*' % (dvar), shell=True)
-       # Preserve sparse files and hard links
-       subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (dest, dvar), shell=True)
+    # Start package population by taking a copy of the installed
+    # files to operate on
+    subprocess.call('rm -rf %s/*' % (dvar), shell=True)
+    # Preserve sparse files and hard links
+    subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (dest, dvar), shell=True)
 }
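
The tar pipe in perform_packagecopy copies the installed tree while keeping hard links, permissions and sparse files intact, which a naive recursive copy may not preserve. A standalone sketch of the same pattern, with hypothetical source and destination paths:

    import subprocess

    src, dst = "/tmp/example-D", "/tmp/example-PKGD"  # hypothetical paths
    subprocess.call('mkdir -p %s' % dst, shell=True)
    # Stream the tree through a tar archive so link structure survives the copy
    subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (src, dst), shell=True)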
 
 # We generate a master list of directories to process, we start by
 # seeding this list with reasonable defaults, then load from
 # the fs-perms.txt files
 python fixup_perms () {
-       import os, pwd, grp
-
-       # init using a string with the same format as a line as documented in
-       # the fs-perms.txt file
-       # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
-       # <path> link <link target>
-       #
-       # __str__ can be used to print out an entry in the input format
-       #
-       # if fs_perms_entry.path is None:
-       #       an error occured
-       # if fs_perms_entry.link, you can retrieve:
-       #       fs_perms_entry.path = path
-       #       fs_perms_entry.link = target of link
-       # if not fs_perms_entry.link, you can retrieve:
-       #       fs_perms_entry.path = path
-       #       fs_perms_entry.mode = expected dir mode or None
-       #       fs_perms_entry.uid = expected uid or -1
-       #       fs_perms_entry.gid = expected gid or -1
-       #       fs_perms_entry.walk = 'true' or something else
-       #       fs_perms_entry.fmode = expected file mode or None
-       #       fs_perms_entry.fuid = expected file uid or -1
-       #       fs_perms_entry_fgid = expected file gid or -1
-       class fs_perms_entry():
-               def __init__(self, line):
-                       lsplit = line.split()
-                       if len(lsplit) == 3 and lsplit[1].lower() == "link":
-                               self._setlink(lsplit[0], lsplit[2])
-                       elif len(lsplit) == 8:
-                               self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
-                       else:
-                               bb.error("Fixup Perms: invalid config line %s" % line)
-                               self.path = None
-                               self.link = None
-
-               def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
-                       self.path = os.path.normpath(path)
-                       self.link = None
-                       self.mode = self._procmode(mode)
-                       self.uid  = self._procuid(uid)
-                       self.gid  = self._procgid(gid)
-                       self.walk = walk.lower()
-                       self.fmode = self._procmode(fmode)
-                       self.fuid = self._procuid(fuid)
-                       self.fgid = self._procgid(fgid)
-
-               def _setlink(self, path, link):
-                       self.path = os.path.normpath(path)
-                       self.link = link
-
-               def _procmode(self, mode):
-                       if not mode or (mode and mode == "-"):
-                               return None
-                       else:
-                               return int(mode,8)
-
-               # Note uid/gid -1 has special significance in os.lchown
-               def _procuid(self, uid):
-                       if uid is None or uid == "-":
-                               return -1
-                       elif uid.isdigit():
-                               return int(uid)
-                       else:
-                               return pwd.getpwnam(uid).pw_uid
-
-               def _procgid(self, gid):
-                       if gid is None or gid == "-":
-                               return -1
-                       elif gid.isdigit():
-                               return int(gid)
-                       else:
-                               return grp.getgrnam(gid).gr_gid
-
-               # Use for debugging the entries
-               def __str__(self):
-                       if self.link:
-                               return "%s link %s" % (self.path, self.link)
-                       else:
-                               mode = "-"
-                               if self.mode:
-                                       mode = "0%o" % self.mode
-                               fmode = "-"
-                               if self.fmode:
-                                       fmode = "0%o" % self.fmode
-                               uid = self._mapugid(self.uid)
-                               gid = self._mapugid(self.gid)
-                               fuid = self._mapugid(self.fuid)
-                               fgid = self._mapugid(self.fgid)
-                               return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
-
-               def _mapugid(self, id):
-                       if id is None or id == -1:
-                               return "-"
-                       else:
-                               return "%d" % id
-
-       # Fix the permission, owner and group of path
-       def fix_perms(path, mode, uid, gid, dir):
-               if mode and not os.path.islink(path):
-                       #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
-                       os.chmod(path, mode)
-               # -1 is a special value that means don't change the uid/gid
-               # if they are BOTH -1, don't bother to lchown
-               if not (uid == -1 and gid == -1):
-                       #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
-                       os.lchown(path, uid, gid)
-
-       # Return a list of configuration files based on either the default
-       # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
-       # paths are resolved via BBPATH
-       def get_fs_perms_list(d):
-               str = ""
-               fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
-               if not fs_perms_tables:
-                       fs_perms_tables = 'files/fs-perms.txt'
-               for conf_file in fs_perms_tables.split():
-                       str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file)
-               return str
-
-
-
-       dvar = d.getVar('PKGD', True)
-
-       fs_perms_table = {}
-
-       # By default all of the standard directories specified in
-       # bitbake.conf will get 0755 root:root.
-       target_path_vars = [    'base_prefix',
-                               'prefix',
-                               'exec_prefix',
-                               'base_bindir',
-                               'base_sbindir',
-                               'base_libdir',
-                               'datadir',
-                               'sysconfdir',
-                               'servicedir',
-                               'sharedstatedir',
-                               'localstatedir',
-                               'infodir',
-                               'mandir',
-                               'docdir',
-                               'bindir',
-                               'sbindir',
-                               'libexecdir',
-                               'libdir',
-                               'includedir',
-                               'oldincludedir' ]
-
-       for path in target_path_vars:
-               dir = d.getVar(path, True) or ""
-               if dir == "":
-                       continue
-               fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
-
-       # Now we actually load from the configuration files
-       for conf in get_fs_perms_list(d).split():
-               if os.path.exists(conf):
-                       f = open(conf)
-                       for line in f:
-                               if line.startswith('#'):
-                                       continue
-                               lsplit = line.split()
-                               if len(lsplit) == 0:
-                                       continue
-                               if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
-                                       bb.error("Fixup perms: %s invalid line: %s" % (conf, line))
-                                       continue
-                               entry = fs_perms_entry(d.expand(line))
-                               if entry and entry.path:
-                                       fs_perms_table[entry.path] = entry
-                       f.close()
-
-       # Debug -- list out in-memory table
-       #for dir in fs_perms_table:
-       #       bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
-
-       # We process links first, so we can go back and fixup directory ownership
-       # for any newly created directories
-       for dir in fs_perms_table:
-               if not fs_perms_table[dir].link:
-                       continue
-
-               origin = dvar + dir
-               if not (os.path.exists(origin) and os.path.isdir(origin) and not os.path.islink(origin)):
-                       continue
-
-               link = fs_perms_table[dir].link
-               if link[0] == "/":
-                       target = dvar + link
-                       ptarget = link
-               else:
-                       target = os.path.join(os.path.dirname(origin), link)
-                       ptarget = os.path.join(os.path.dirname(dir), link)
-               if os.path.exists(target):
-                       bb.error("Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget))
-                       continue
-
-               # Create path to move directory to, move it, and then setup the symlink
-               bb.mkdirhier(os.path.dirname(target))
-               #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
-               os.rename(origin, target)
-               #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
-               os.symlink(link, origin)
-
-       for dir in fs_perms_table:
-               if fs_perms_table[dir].link:
-                       continue
-
-               origin = dvar + dir
-               if not (os.path.exists(origin) and os.path.isdir(origin)):
-                       continue
-
-               fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
-
-               if fs_perms_table[dir].walk == 'true':
-                       for root, dirs, files in os.walk(origin):
-                               for dr in dirs:
-                                       each_dir = os.path.join(root, dr)
-                                       fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
-                               for f in files:
-                                       each_file = os.path.join(root, f)
-                                       fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
+    import os, pwd, grp
+
+    # init using a string in the same format as a line documented in
+    # the fs-perms.txt file
+    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
+    # <path> link <link target>
+    #
+    # __str__ can be used to print out an entry in the input format
+    #
+    # if fs_perms_entry.path is None:
+    #    an error occurred
+    # if fs_perms_entry.link, you can retrieve:
+    #    fs_perms_entry.path = path
+    #    fs_perms_entry.link = target of link
+    # if not fs_perms_entry.link, you can retrieve:
+    #    fs_perms_entry.path = path
+    #    fs_perms_entry.mode = expected dir mode or None
+    #    fs_perms_entry.uid = expected uid or -1
+    #    fs_perms_entry.gid = expected gid or -1
+    #    fs_perms_entry.walk = 'true' or something else
+    #    fs_perms_entry.fmode = expected file mode or None
+    #    fs_perms_entry.fuid = expected file uid or -1
+    #    fs_perms_entry.fgid = expected file gid or -1
+    class fs_perms_entry():
+        def __init__(self, line):
+            lsplit = line.split()
+            if len(lsplit) == 3 and lsplit[1].lower() == "link":
+                self._setlink(lsplit[0], lsplit[2])
+            elif len(lsplit) == 8:
+                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
+            else:
+                bb.error("Fixup Perms: invalid config line %s" % line)
+                self.path = None
+                self.link = None
+
+        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
+            self.path = os.path.normpath(path)
+            self.link = None
+            self.mode = self._procmode(mode)
+            self.uid  = self._procuid(uid)
+            self.gid  = self._procgid(gid)
+            self.walk = walk.lower()
+            self.fmode = self._procmode(fmode)
+            self.fuid = self._procuid(fuid)
+            self.fgid = self._procgid(fgid)
+
+        def _setlink(self, path, link):
+            self.path = os.path.normpath(path)
+            self.link = link
+
+        def _procmode(self, mode):
+            if not mode or (mode and mode == "-"):
+                return None
+            else:
+                return int(mode,8)
+
+        # Note uid/gid -1 has special significance in os.lchown
+        def _procuid(self, uid):
+            if uid is None or uid == "-":
+                return -1
+            elif uid.isdigit():
+                return int(uid)
+            else:
+                return pwd.getpwnam(uid).pw_uid
+
+        def _procgid(self, gid):
+            if gid is None or gid == "-":
+                return -1
+            elif gid.isdigit():
+                return int(gid)
+            else:
+                return grp.getgrnam(gid).gr_gid
+
+        # Use for debugging the entries
+        def __str__(self):
+            if self.link:
+                return "%s link %s" % (self.path, self.link)
+            else:
+                mode = "-"
+                if self.mode:
+                    mode = "0%o" % self.mode
+                fmode = "-"
+                if self.fmode:
+                    fmode = "0%o" % self.fmode
+                uid = self._mapugid(self.uid)
+                gid = self._mapugid(self.gid)
+                fuid = self._mapugid(self.fuid)
+                fgid = self._mapugid(self.fgid)
+                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
+
+        def _mapugid(self, id):
+            if id is None or id == -1:
+                return "-"
+            else:
+                return "%d" % id
+
+    # Fix the permission, owner and group of path
+    def fix_perms(path, mode, uid, gid, dir):
+        if mode and not os.path.islink(path):
+            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
+            os.chmod(path, mode)
+        # -1 is a special value that means don't change the uid/gid
+        # if they are BOTH -1, don't bother to lchown
+        if not (uid == -1 and gid == -1):
+            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
+            os.lchown(path, uid, gid)
+
+    # Return a list of configuration files based on either the default
+    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
+    # paths are resolved via BBPATH
+    def get_fs_perms_list(d):
+        str = ""
+        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
+        if not fs_perms_tables:
+            fs_perms_tables = 'files/fs-perms.txt'
+        for conf_file in fs_perms_tables.split():
+            str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file)
+        return str
+
+
+
+    dvar = d.getVar('PKGD', True)
+
+    fs_perms_table = {}
+
+    # By default all of the standard directories specified in
+    # bitbake.conf will get 0755 root:root.
+    target_path_vars = [ 'base_prefix',
+                         'prefix',
+                         'exec_prefix',
+                         'base_bindir',
+                         'base_sbindir',
+                         'base_libdir',
+                         'datadir',
+                         'sysconfdir',
+                         'servicedir',
+                         'sharedstatedir',
+                         'localstatedir',
+                         'infodir',
+                         'mandir',
+                         'docdir',
+                         'bindir',
+                         'sbindir',
+                         'libexecdir',
+                         'libdir',
+                         'includedir',
+                         'oldincludedir' ]
+
+    for path in target_path_vars:
+        dir = d.getVar(path, True) or ""
+        if dir == "":
+            continue
+        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
+
+    # Now we actually load from the configuration files
+    for conf in get_fs_perms_list(d).split():
+        if os.path.exists(conf):
+            f = open(conf)
+            for line in f:
+                if line.startswith('#'):
+                    continue
+                lsplit = line.split()
+                if len(lsplit) == 0:
+                    continue
+                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
+                    bb.error("Fixup perms: %s invalid line: %s" % (conf, line))
+                    continue
+                entry = fs_perms_entry(d.expand(line))
+                if entry and entry.path:
+                    fs_perms_table[entry.path] = entry
+            f.close()
+
+    # Debug -- list out in-memory table
+    #for dir in fs_perms_table:
+    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
+
+    # We process links first, so we can go back and fixup directory ownership
+    # for any newly created directories
+    for dir in fs_perms_table:
+        if not fs_perms_table[dir].link:
+            continue
+
+        origin = dvar + dir
+        if not (os.path.exists(origin) and os.path.isdir(origin) and not os.path.islink(origin)):
+            continue
+
+        link = fs_perms_table[dir].link
+        if link[0] == "/":
+            target = dvar + link
+            ptarget = link
+        else:
+            target = os.path.join(os.path.dirname(origin), link)
+            ptarget = os.path.join(os.path.dirname(dir), link)
+        if os.path.exists(target):
+            bb.error("Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget))
+            continue
+
+        # Create the path to move the directory to, move it, and then set up the symlink
+        bb.mkdirhier(os.path.dirname(target))
+        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
+        os.rename(origin, target)
+        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
+        os.symlink(link, origin)
+
+    for dir in fs_perms_table:
+        if fs_perms_table[dir].link:
+            continue
+
+        origin = dvar + dir
+        if not (os.path.exists(origin) and os.path.isdir(origin)):
+            continue
+
+        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+
+        if fs_perms_table[dir].walk == 'true':
+            for root, dirs, files in os.walk(origin):
+                for dr in dirs:
+                    each_dir = os.path.join(root, dr)
+                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+                for f in files:
+                    each_file = os.path.join(root, f)
+                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
 }
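
As a quick illustration of the two fs-perms.txt line forms documented at the top of fixup_perms, here is a minimal standalone parse of two hypothetical entries:

    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    for line in ["/usr/share/doc 0755 root root false - - -",
                 "/var/run link /run"]:
        fields = line.split()
        if len(fields) == 3 and fields[1].lower() == "link":
            print("link entry: %s -> %s" % (fields[0], fields[2]))
        else:
            path, mode, uid, gid, walk, fmode, fuid, fgid = fields
            print("dir entry: %s mode=%s walk=%s" % (path, mode, walk))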
 
 python split_and_strip_files () {
-       import commands, stat, errno, subprocess
-
-       dvar = d.getVar('PKGD', True)
-       pn = d.getVar('PN', True)
-
-       # We default to '.debug' style
-       if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
-               # Single debug-file-directory style debug info
-               debugappend = ".debug"
-               debugdir = ""
-               debuglibdir = "/usr/lib/debug"
-               debugsrcdir = "/usr/src/debug"
-       else:
-               # Original OE-core, a.k.a. ".debug", style debug info
-               debugappend = ""
-               debugdir = "/.debug"
-               debuglibdir = ""
-               debugsrcdir = "/usr/src/debug"
-
-       os.chdir(dvar)
-
-       # Return type (bits):
-       # 0 - not elf
-       # 1 - ELF
-       # 2 - stripped
-       # 4 - executable
-       # 8 - shared library
-       def isELF(path):
-               type = 0
-               pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
-               ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))
-
-               if ret:
-                       bb.error("split_and_strip_files: 'file %s' failed" % path)
-                       return type
-
-               # Not stripped
-               if "ELF" in result:
-                       type |= 1
-                       if "not stripped" not in result:
-                               type |= 2
-                       if "executable" in result:
-                               type |= 4
-                       if "shared" in result:
-                               type |= 8
-               return type
-
-
-       #
-       # First lets figure out all of the files we may have to process ... do this only once!
-       #
-       file_list = {}
-       file_links = {}
-       if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
-          (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
-               for root, dirs, files in os.walk(dvar):
-                       for f in files:
-                               file = os.path.join(root, f)
-                               # Only process files (and symlinks)... Skip files that are obviously debug files
-                               if not (debugappend != "" and file.endswith(debugappend)) and \
-                                  not (debugdir != "" and debugdir in os.path.dirname(file[len(dvar):])) and \
-                                  os.path.isfile(file):
-                                       try:
-                                               s = os.stat(file)
-                                       except OSError, (err, strerror):
-                                               if err != errno.ENOENT:
-                                                       raise
-                                               # Skip broken symlinks
-                                               continue
-                                       # Is the item excutable?  Then we need to process it.
-                                       if (s[stat.ST_MODE] & stat.S_IXUSR) or \
-                                          (s[stat.ST_MODE] & stat.S_IXGRP) or \
-                                          (s[stat.ST_MODE] & stat.S_IXOTH):
-                                               # If it's a symlink, and points to an ELF file, we capture the readlink target
-                                               if os.path.islink(file):
-                                                       target = os.readlink(file)
-                                                       if not os.path.isabs(target):
-                                                               ltarget = os.path.join(os.path.dirname(file), target)
-                                                       else:
-                                                               ltarget = target
-
-                                                       if isELF(ltarget):
-                                                               #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
-                                                               file_list[file] = "sym: " + target
-                                                       continue
-                                               # It's a file (or hardlink), not a link
-                                               # ...but is it ELF, and is it already stripped?
-                                               elf_file = isELF(file)
-                                               if elf_file & 1:
-                                                       # Check if it's a hard link to something else
-                                                       if s.st_nlink > 1:
-                                                               file_reference = "%d_%d" % (s.st_dev, s.st_ino)
-                                                               # Hard link to something else
-                                                               file_list[file] = "hard: " + file_reference
-                                                               continue
-
-                                                       file_list[file] = "ELF: %d" % elf_file
-
-
-       #
-       # First lets process debug splitting
-       #
-       if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
-               for file in file_list:
-                       src = file[len(dvar):]
-                       dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
-                       fpath = dvar + dest
-                       # Preserve symlinks in debug area...
-                       if file_list[file].startswith("sym: "):
-                               ltarget = file_list[file][5:]
-                               lpath = os.path.dirname(ltarget)
-                               lbase = os.path.basename(ltarget)
-                               ftarget = ""
-                               if lpath and lpath != ".":
-                                       ftarget += lpath + debugdir + "/"
-                               ftarget += lbase + debugappend
-                               if lpath.startswith(".."):
-                                       ftarget = os.path.join("..", ftarget)
-                               bb.mkdirhier(os.path.dirname(fpath))
-                               #bb.note("Symlink %s -> %s" % (fpath, ftarget))
-                               os.symlink(ftarget, fpath)
-                               continue
-
-                       # Preserve hard links in debug area...
-                       file_reference = ""
-                       if file_list[file].startswith("hard: "):
-                               file_reference = file_list[file][6:]
-                               if file_reference not in file_links:
-                                       # If this is a new file, add it as a reference, and
-                                       # update it's type, so we can fall through and split
-                                       file_list[file] = "ELF: %d" % (isELF(file))
-                               else:
-                                       target = file_links[file_reference][len(dvar):]
-                                       ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
-                                       bb.mkdirhier(os.path.dirname(fpath))
-                                       #bb.note("Link %s -> %s" % (fpath, ftarget))
-                                       os.link(ftarget, fpath)
-                                       continue
-
-                       # It's ELF...
-                       if file_list[file].startswith("ELF: "):
-                               elf_file = int(file_list[file][5:])
-                               if elf_file & 2:
-                                       bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (src, pn))
-                                       continue
-
-                               # Split the file...
-                               bb.mkdirhier(os.path.dirname(fpath))
-                               #bb.note("Split %s -> %s" % (file, fpath))
-                               # Only store off the hard link reference if we successfully split!
-                               if splitfile(file, fpath, debugsrcdir, d) == 0 and file_reference != "":
-                                       file_links[file_reference] = file
-
-               # The above may have generated dangling symlinks, remove them!
-               # Dangling symlinks are a result of something NOT being split, such as a stripped binary.
-               # This should be a rare occurance, but we want to clean up anyway.
-               for file in file_list:
-                       if file_list[file].startswith("sym: "):
-                               src = file[len(dvar):]
-                               dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
-                               fpath = dvar + dest
-                               try:
-                                       s = os.stat(fpath)
-                               except OSError, (err, strerror):
-                                       if err != errno.ENOENT:
-                                               raise
-                                       #bb.note("Remove dangling link %s -> %s" % (fpath, os.readlink(fpath)))
-                                       os.unlink(fpath)
-                                       # This could leave an empty debug directory laying around
-                                       # take care of the obvious case...
-                                       subprocess.call("rmdir %s 2>/dev/null" % os.path.dirname(fpath), shell=True)
-
-               # Process the debugsrcdir if requested...
-               # This copies and places the referenced sources for later debugging...
-               splitfile2(debugsrcdir, d)
-       #
-       # End of debug splitting
-       #
-
-       #
-       # Now lets go back over things and strip them
-       #
-       if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):    
-               for file in file_list:
-                       if file_list[file].startswith("ELF: "):
-                               elf_file = int(file_list[file][5:])
-                               #bb.note("Strip %s" % file)
-                               runstrip(file, elf_file, d)
-
-
-       if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):    
-               for root, dirs, files in os.walk(dvar):
-                       for f in files:
-                               if not f.endswith(".ko"):
-                                       continue
-                               runstrip(os.path.join(root, f), None, d)
-       #
-       # End of strip
-       #
+    import commands, stat, errno, subprocess
+
+    dvar = d.getVar('PKGD', True)
+    pn = d.getVar('PN', True)
+
+    # We default to '.debug' style
+    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
+        # Single debug-file-directory style debug info
+        debugappend = ".debug"
+        debugdir = ""
+        debuglibdir = "/usr/lib/debug"
+        debugsrcdir = "/usr/src/debug"
+    else:
+        # Original OE-core, a.k.a. ".debug", style debug info
+        debugappend = ""
+        debugdir = "/.debug"
+        debuglibdir = ""
+        debugsrcdir = "/usr/src/debug"
+
+    os.chdir(dvar)
+
+    # Return type (bits):
+    # 0 - not elf
+    # 1 - ELF
+    # 2 - stripped
+    # 4 - executable
+    # 8 - shared library
+    def isELF(path):
+        type = 0
+        pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
+        ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))
+
+        if ret:
+            bb.error("split_and_strip_files: 'file %s' failed" % path)
+            return type
+
+        # Not stripped
+        if "ELF" in result:
+            type |= 1
+            if "not stripped" not in result:
+                type |= 2
+            if "executable" in result:
+                type |= 4
+            if "shared" in result:
+                type |= 8
+        return type
+
+
+    #
+    # First let's figure out all of the files we may have to process ... do this only once!
+    #
+    file_list = {}
+    file_links = {}
+    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
+       (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
+        for root, dirs, files in os.walk(dvar):
+            for f in files:
+                file = os.path.join(root, f)
+                # Only process files (and symlinks)... Skip files that are obviously debug files
+                if not (debugappend != "" and file.endswith(debugappend)) and \
+                   not (debugdir != "" and debugdir in os.path.dirname(file[len(dvar):])) and \
+                   os.path.isfile(file):
+                    try:
+                        s = os.stat(file)
+                    except OSError, (err, strerror):
+                        if err != errno.ENOENT:
+                            raise
+                        # Skip broken symlinks
+                        continue
+                    # Is the item executable?  Then we need to process it.
+                    if (s[stat.ST_MODE] & stat.S_IXUSR) or \
+                       (s[stat.ST_MODE] & stat.S_IXGRP) or \
+                       (s[stat.ST_MODE] & stat.S_IXOTH):
+                        # If it's a symlink, and points to an ELF file, we capture the readlink target
+                        if os.path.islink(file):
+                            target = os.readlink(file)
+                            if not os.path.isabs(target):
+                                ltarget = os.path.join(os.path.dirname(file), target)
+                            else:
+                                ltarget = target
+
+                            if isELF(ltarget):
+                                #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
+                                file_list[file] = "sym: " + target
+                            continue
+                        # It's a file (or hardlink), not a link
+                        # ...but is it ELF, and is it already stripped?
+                        elf_file = isELF(file)
+                        if elf_file & 1:
+                            # Check if it's a hard link to something else
+                            if s.st_nlink > 1:
+                                file_reference = "%d_%d" % (s.st_dev, s.st_ino)
+                                # Hard link to something else
+                                file_list[file] = "hard: " + file_reference
+                                continue
+
+                            file_list[file] = "ELF: %d" % elf_file
+
+
+    #
+    # Next, let's process debug splitting
+    #
+    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+        for file in file_list:
+            src = file[len(dvar):]
+            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
+            fpath = dvar + dest
+            # Preserve symlinks in debug area...
+            if file_list[file].startswith("sym: "):
+                ltarget = file_list[file][5:]
+                lpath = os.path.dirname(ltarget)
+                lbase = os.path.basename(ltarget)
+                ftarget = ""
+                if lpath and lpath != ".":
+                    ftarget += lpath + debugdir + "/"
+                ftarget += lbase + debugappend
+                if lpath.startswith(".."):
+                    ftarget = os.path.join("..", ftarget)
+                bb.mkdirhier(os.path.dirname(fpath))
+                #bb.note("Symlink %s -> %s" % (fpath, ftarget))
+                os.symlink(ftarget, fpath)
+                continue
+
+            # Preserve hard links in debug area...
+            file_reference = ""
+            if file_list[file].startswith("hard: "):
+                file_reference = file_list[file][6:]
+                if file_reference not in file_links:
+                    # If this is a new file, add it as a reference, and
+                    # update its type, so we can fall through and split
+                    file_list[file] = "ELF: %d" % (isELF(file))
+                else:
+                    target = file_links[file_reference][len(dvar):]
+                    ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
+                    bb.mkdirhier(os.path.dirname(fpath))
+                    #bb.note("Link %s -> %s" % (fpath, ftarget))
+                    os.link(ftarget, fpath)
+                    continue
+
+            # It's ELF...
+            if file_list[file].startswith("ELF: "):
+                elf_file = int(file_list[file][5:])
+                if elf_file & 2:
+                    bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (src, pn))
+                    continue
+
+                # Split the file...
+                bb.mkdirhier(os.path.dirname(fpath))
+                #bb.note("Split %s -> %s" % (file, fpath))
+                # Only store off the hard link reference if we successfully split!
+                if splitfile(file, fpath, debugsrcdir, d) == 0 and file_reference != "":
+                    file_links[file_reference] = file
+
+        # The above may have generated dangling symlinks, remove them!
+        # Dangling symlinks are a result of something NOT being split, such as a stripped binary.
+        # This should be a rare occurrence, but we want to clean up anyway.
+        for file in file_list:
+            if file_list[file].startswith("sym: "):
+                src = file[len(dvar):]
+                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
+                fpath = dvar + dest
+                try:
+                    s = os.stat(fpath)
+                except OSError, (err, strerror):
+                    if err != errno.ENOENT:
+                        raise
+                    #bb.note("Remove dangling link %s -> %s" % (fpath, os.readlink(fpath)))
+                    os.unlink(fpath)
+                    # This could leave an empty debug directory lying around
+                    # take care of the obvious case...
+                    subprocess.call("rmdir %s 2>/dev/null" % os.path.dirname(fpath), shell=True)
+
+        # Process the debugsrcdir if requested...
+        # This copies and places the referenced sources for later debugging...
+        splitfile2(debugsrcdir, d)
+    #
+    # End of debug splitting
+    #
+
+    #
+    # Now let's go back over things and strip them
+    #
+    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
+        for file in file_list:
+            if file_list[file].startswith("ELF: "):
+                elf_file = int(file_list[file][5:])
+                #bb.note("Strip %s" % file)
+                runstrip(file, elf_file, d)
+
+
+    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
+        for root, dirs, files in os.walk(dvar):
+            for f in files:
+                if not f.endswith(".ko"):
+                    continue
+                runstrip(os.path.join(root, f), None, d)
+    #
+    # End of strip
+    #
 }
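
The two PACKAGE_DEBUG_SPLIT_STYLE settings chosen at the top of split_and_strip_files differ only in where the split-off debug file lands. A standalone sketch of the dest computation used above, applied to a hypothetical binary:

    import os

    def debug_path(src, style):
        if style == "debug-file-directory":
            debugappend, debugdir, debuglibdir = ".debug", "", "/usr/lib/debug"
        else:  # the default '.debug' style
            debugappend, debugdir, debuglibdir = "", "/.debug", ""
        return debuglibdir + os.path.dirname(src) + debugdir + "/" \
               + os.path.basename(src) + debugappend

    print(debug_path("/usr/bin/foo", "debug-file-directory"))  # /usr/lib/debug/usr/bin/foo.debug
    print(debug_path("/usr/bin/foo", ".debug"))                # /usr/bin/.debug/foo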
 
 python populate_packages () {
-       import glob, stat, errno, re, subprocess
-
-       workdir = d.getVar('WORKDIR', True)
-       outdir = d.getVar('DEPLOY_DIR', True)
-       dvar = d.getVar('PKGD', True)
-       packages = d.getVar('PACKAGES', True)
-       pn = d.getVar('PN', True)
-
-       bb.mkdirhier(outdir)
-       os.chdir(dvar)
-
-       # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION 
-       # Sanity should be moved to sanity.bbclass once we have the infrastucture
-       package_list = []
-
-       for pkg in packages.split():
-               if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
-                       bb.warn("%s has an incompatible license. Excluding from packaging." % pkg)
-                       packages.remove(pkg)
-               else:
-                       if pkg in package_list:
-                               bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
-                       else:
-                               package_list.append(pkg)
-       d.setVar('PACKAGES', ' '.join(package_list))
-       pkgdest = d.getVar('PKGDEST', True)
-       subprocess.call('rm -rf %s' % pkgdest, shell=True)
-
-       seen = []
-
-       for pkg in package_list:
-               localdata = bb.data.createCopy(d)
-               root = os.path.join(pkgdest, pkg)
-               bb.mkdirhier(root)
-
-               localdata.setVar('PKG', pkg)
-               overrides = localdata.getVar('OVERRIDES', True)
-               if not overrides:
-                       raise bb.build.FuncFailed('OVERRIDES not defined')
-               localdata.setVar('OVERRIDES', overrides + ':' + pkg)
-               bb.data.update_data(localdata)
-
-               filesvar = localdata.getVar('FILES', True) or ""
-               files = filesvar.split()
-               file_links = {}
-               for file in files:
-                       if file.find("//") != -1:
-                               bb.warn("FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg)
-                               file.replace("//", "/")
-                       if os.path.isabs(file):
-                               file = '.' + file
-                       if not os.path.islink(file):
-                               if os.path.isdir(file):
-                                       newfiles =  [ os.path.join(file,x) for x in os.listdir(file) ]
-                                       if newfiles:
-                                               files += newfiles
-                                               continue
-                       globbed = glob.glob(file)
-                       if globbed:
-                               if [ file ] != globbed:
-                                       files += globbed
-                                       continue
-                       if (not os.path.islink(file)) and (not os.path.exists(file)):
-                               continue
-                       if file in seen:
-                               continue
-                       seen.append(file)
-
-                       def mkdir(src, dest, p):
-                               src = os.path.join(src, p)
-                               dest = os.path.join(dest, p)
-                               bb.mkdirhier(dest)
-                               fstat = os.stat(src)
-                               os.chmod(dest, fstat.st_mode)
-                               os.chown(dest, fstat.st_uid, fstat.st_gid)
-                               if p not in seen:
-                                       seen.append(p)
-
-                       def mkdir_recurse(src, dest, paths):
-                               while paths.startswith("./"):
-                                       paths = paths[2:]
-                               p = "."
-                               for c in paths.split("/"):
-                                       p = os.path.join(p, c)
-                                       if not os.path.exists(os.path.join(dest, p)):
-                                               mkdir(src, dest, p)
-
-                       if os.path.isdir(file) and not os.path.islink(file):
-                               mkdir_recurse(dvar, root, file)
-                               continue
-
-                       mkdir_recurse(dvar, root, os.path.dirname(file))
-                       fpath = os.path.join(root,file)
-                       if not os.path.islink(file):
-                               os.link(file, fpath)
-                               fstat = os.stat(file)
-                               os.chmod(fpath, fstat.st_mode)
-                               os.chown(fpath, fstat.st_uid, fstat.st_gid)
-                               continue
-                       ret = bb.copyfile(file, fpath)
-                       if ret is False or ret == 0:
-                               raise bb.build.FuncFailed("File population failed")
-
-               del localdata
-       os.chdir(workdir)
-
-       unshipped = []
-       for root, dirs, files in os.walk(dvar):
-               dir = root[len(dvar):]
-               if not dir:
-                       dir = os.sep
-               for f in (files + dirs):
-                       path = os.path.join(dir, f)
-                       if ('.' + path) not in seen:
-                               unshipped.append(path)
-
-       if unshipped != []:
-               msg = pn + ": Files/directories were installed but not shipped"
-               if "installed_vs_shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
-                       bb.note("Package %s skipping QA tests: installed_vs_shipped" % pn)
-               else:
-                       for f in unshipped:
-                               msg = msg + "\n  " + f  
-                       package_qa_handle_error("installed_vs_shipped", msg, d)
-
-       bb.build.exec_func("package_name_hook", d)
-
-       for pkg in package_list:
-               pkgname = d.getVar('PKG_%s' % pkg, True)
-               if pkgname is None:
-                       d.setVar('PKG_%s' % pkg, pkg)
-
-       dangling_links = {}
-       pkg_files = {}
-       for pkg in package_list:
-               dangling_links[pkg] = []
-               pkg_files[pkg] = []
-               inst_root = os.path.join(pkgdest, pkg)
-               for root, dirs, files in os.walk(inst_root):
-                       for f in files:
-                               path = os.path.join(root, f)
-                               rpath = path[len(inst_root):]
-                               pkg_files[pkg].append(rpath)
-                               try:
-                                       s = os.stat(path)
-                               except OSError, (err, strerror):
-                                       if err != errno.ENOENT:
-                                               raise
-                                       target = os.readlink(path)
-                                       if target[0] != '/':
-                                               target = os.path.join(root[len(inst_root):], target)
-                                       dangling_links[pkg].append(os.path.normpath(target))
-
-       for pkg in package_list:
-               rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")
-
-               for l in dangling_links[pkg]:
-                       found = False
-                       bb.debug(1, "%s contains dangling link %s" % (pkg, l))
-                       for p in package_list:
-                               for f in pkg_files[p]:
-                                       if f == l:
-                                               found = True
-                                               bb.debug(1, "target found in %s" % p)
-                                               if p == pkg:
-                                                       break
-                                               if p not in rdepends:
-                                                       rdepends[p] = ""
-                                               break
-                       if found == False:
-                               bb.note("%s contains dangling symlink to %s" % (pkg, l))
-               d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+    import glob, stat, errno, re, subprocess
+
+    workdir = d.getVar('WORKDIR', True)
+    outdir = d.getVar('DEPLOY_DIR', True)
+    dvar = d.getVar('PKGD', True)
+    packages = d.getVar('PACKAGES', True)
+    pn = d.getVar('PN', True)
+
+    bb.mkdirhier(outdir)
+    os.chdir(dvar)
+
+    # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
+    # Sanity checking should be moved to sanity.bbclass once we have the infrastructure
+    package_list = []
+
+    for pkg in packages.split():
+        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
+            bb.warn("%s has an incompatible license. Excluding from packaging." % pkg)
+            packages.remove(pkg)
+        else:
+            if pkg in package_list:
+                bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
+            else:
+                package_list.append(pkg)
+    d.setVar('PACKAGES', ' '.join(package_list))
+    pkgdest = d.getVar('PKGDEST', True)
+    subprocess.call('rm -rf %s' % pkgdest, shell=True)
+
+    seen = []
+
+    for pkg in package_list:
+        localdata = bb.data.createCopy(d)
+        root = os.path.join(pkgdest, pkg)
+        bb.mkdirhier(root)
+
+        localdata.setVar('PKG', pkg)
+        overrides = localdata.getVar('OVERRIDES', True)
+        if not overrides:
+            raise bb.build.FuncFailed('OVERRIDES not defined')
+        localdata.setVar('OVERRIDES', overrides + ':' + pkg)
+        bb.data.update_data(localdata)
+
+        filesvar = localdata.getVar('FILES', True) or ""
+        files = filesvar.split()
+        file_links = {}
+        for file in files:
+            if file.find("//") != -1:
+                bb.warn("FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg)
+                file = file.replace("//", "/")
+            if os.path.isabs(file):
+                file = '.' + file
+            if not os.path.islink(file):
+                if os.path.isdir(file):
+                    newfiles = [ os.path.join(file, x) for x in os.listdir(file) ]
+                    if newfiles:
+                        files += newfiles
+                        continue
+            globbed = glob.glob(file)
+            if globbed:
+                if [ file ] != globbed:
+                    files += globbed
+                    continue
+            if (not os.path.islink(file)) and (not os.path.exists(file)):
+                continue
+            if file in seen:
+                continue
+            seen.append(file)
+
+            def mkdir(src, dest, p):
+                src = os.path.join(src, p)
+                dest = os.path.join(dest, p)
+                bb.mkdirhier(dest)
+                fstat = os.stat(src)
+                os.chmod(dest, fstat.st_mode)
+                os.chown(dest, fstat.st_uid, fstat.st_gid)
+                if p not in seen:
+                    seen.append(p)
+
+            def mkdir_recurse(src, dest, paths):
+                while paths.startswith("./"):
+                    paths = paths[2:]
+                p = "."
+                for c in paths.split("/"):
+                    p = os.path.join(p, c)
+                    if not os.path.exists(os.path.join(dest, p)):
+                        mkdir(src, dest, p)
+
+            if os.path.isdir(file) and not os.path.islink(file):
+                mkdir_recurse(dvar, root, file)
+                continue
+
+            mkdir_recurse(dvar, root, os.path.dirname(file))
+            fpath = os.path.join(root,file)
+            if not os.path.islink(file):
+                os.link(file, fpath)
+                fstat = os.stat(file)
+                os.chmod(fpath, fstat.st_mode)
+                os.chown(fpath, fstat.st_uid, fstat.st_gid)
+                continue
+            ret = bb.copyfile(file, fpath)
+            if ret is False or ret == 0:
+                raise bb.build.FuncFailed("File population failed")
+
+        del localdata
+    os.chdir(workdir)
+
+    unshipped = []
+    for root, dirs, files in os.walk(dvar):
+        dir = root[len(dvar):]
+        if not dir:
+            dir = os.sep
+        for f in (files + dirs):
+            path = os.path.join(dir, f)
+            if ('.' + path) not in seen:
+                unshipped.append(path)
+
+    if unshipped != []:
+        msg = pn + ": Files/directories were installed but not shipped"
+        if "installed_vs_shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+            bb.note("Package %s skipping QA tests: installed_vs_shipped" % pn)
+        else:
+            for f in unshipped:
+                msg = msg + "\n  " + f
+            package_qa_handle_error("installed_vs_shipped", msg, d)
+
+    bb.build.exec_func("package_name_hook", d)
+
+    for pkg in package_list:
+        pkgname = d.getVar('PKG_%s' % pkg, True)
+        if pkgname is None:
+            d.setVar('PKG_%s' % pkg, pkg)
+
+    dangling_links = {}
+    pkg_files = {}
+    for pkg in package_list:
+        dangling_links[pkg] = []
+        pkg_files[pkg] = []
+        inst_root = os.path.join(pkgdest, pkg)
+        for root, dirs, files in os.walk(inst_root):
+            for f in files:
+                path = os.path.join(root, f)
+                rpath = path[len(inst_root):]
+                pkg_files[pkg].append(rpath)
+                try:
+                    s = os.stat(path)
+                except OSError, (err, strerror):
+                    if err != errno.ENOENT:
+                        raise
+                    target = os.readlink(path)
+                    if target[0] != '/':
+                        target = os.path.join(root[len(inst_root):], target)
+                    dangling_links[pkg].append(os.path.normpath(target))
+
+    for pkg in package_list:
+        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")
+
+        for l in dangling_links[pkg]:
+            found = False
+            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
+            for p in package_list:
+                for f in pkg_files[p]:
+                    if f == l:
+                        found = True
+                        bb.debug(1, "target found in %s" % p)
+                        if p == pkg:
+                            break
+                        if p not in rdepends:
+                            rdepends[p] = ""
+                        break
+            if found == False:
+                bb.note("%s contains dangling symlink to %s" % (pkg, l))
+        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
 populate_packages[dirs] = "${D}"
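
The dangling-symlink pass at the end of populate_packages above is worth reading in isolation: any symlink whose target is shipped by a *different* subpackage turns that subpackage into a runtime dependency. A minimal sketch of the same resolution with plain dicts (names here are illustrative, not part of the class):

    def resolve_dangling_links(dangling_links, pkg_files, rdepends_by_pkg):
        # dangling_links:  {pkg: [normalized link targets]}
        # pkg_files:       {pkg: [paths relative to the package root]}
        # rdepends_by_pkg: {pkg: {dep: version-constraint}}
        for pkg in dangling_links:
            for target in dangling_links[pkg]:
                for p in pkg_files:
                    if target in pkg_files[p]:
                        if p != pkg and p not in rdepends_by_pkg[pkg]:
                            rdepends_by_pkg[pkg][p] = ""
                        break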
 
 PKGDESTWORK = "${WORKDIR}/pkgdata"
 
 python emit_pkgdata() {
-       from glob import glob
-
-       def write_if_exists(f, pkg, var):
-               def encode(str):
-                       import codecs
-                       c = codecs.getencoder("string_escape")
-                       return c(str)[0]
-
-               val = d.getVar('%s_%s' % (var, pkg), True)
-               if val:
-                       f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
-                       return
-               val = d.getVar('%s' % (var), True)
-               if val:
-                       f.write('%s: %s\n' % (var, encode(val)))
-               return
-
-       def get_directory_size(dir):
-               if os.listdir(dir):
-                       size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0])
-               else:
-                       size = 0
-               return size
-
-       packages = d.getVar('PACKAGES', True)
-       pkgdest = d.getVar('PKGDEST', True)
-       pkgdatadir = d.getVar('PKGDESTWORK', True)
-
-       # Take shared lock since we're only reading, not writing
-       lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
-
-       data_file = pkgdatadir + d.expand("/${PN}" )
-       f = open(data_file, 'w')
-       f.write("PACKAGES: %s\n" % packages)
-       f.close()
-
-       workdir = d.getVar('WORKDIR', True)
-
-       for pkg in packages.split():
-               subdata_file = pkgdatadir + "/runtime/%s" % pkg
-
-               sf = open(subdata_file, 'w')
-               write_if_exists(sf, pkg, 'PN')
-               write_if_exists(sf, pkg, 'PV')
-               write_if_exists(sf, pkg, 'PR')
-               write_if_exists(sf, pkg, 'PKGV')
-               write_if_exists(sf, pkg, 'PKGR')
-               write_if_exists(sf, pkg, 'LICENSE')
-               write_if_exists(sf, pkg, 'DESCRIPTION')
-               write_if_exists(sf, pkg, 'SUMMARY')
-               write_if_exists(sf, pkg, 'RDEPENDS')
-               write_if_exists(sf, pkg, 'RPROVIDES')
-               write_if_exists(sf, pkg, 'RRECOMMENDS')
-               write_if_exists(sf, pkg, 'RSUGGESTS')
-               write_if_exists(sf, pkg, 'RREPLACES')
-               write_if_exists(sf, pkg, 'RCONFLICTS')
-               write_if_exists(sf, pkg, 'SECTION')
-               write_if_exists(sf, pkg, 'PKG')
-               write_if_exists(sf, pkg, 'ALLOW_EMPTY')
-               write_if_exists(sf, pkg, 'FILES')
-               write_if_exists(sf, pkg, 'pkg_postinst')
-               write_if_exists(sf, pkg, 'pkg_postrm')
-               write_if_exists(sf, pkg, 'pkg_preinst')
-               write_if_exists(sf, pkg, 'pkg_prerm')
-               write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
-               for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
-                       write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
-
-               write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-               for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
-                       write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
-
-               sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg)))
-               sf.close()
-
-
-               allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
-               if not allow_empty:
-                       allow_empty = d.getVar('ALLOW_EMPTY', True)
-               root = "%s/%s" % (pkgdest, pkg)
-               os.chdir(root)
-               g = glob('*')
-               if g or allow_empty == "1":
-                       packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
-                       file(packagedfile, 'w').close()
-
-       bb.utils.unlockfile(lf)
+    from glob import glob
+
+    def write_if_exists(f, pkg, var):
+        def encode(str):
+            import codecs
+            c = codecs.getencoder("string_escape")
+            return c(str)[0]
+
+        val = d.getVar('%s_%s' % (var, pkg), True)
+        if val:
+            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
+            return
+        val = d.getVar('%s' % (var), True)
+        if val:
+            f.write('%s: %s\n' % (var, encode(val)))
+        return
+
+    def get_directory_size(dir):
+        if os.listdir(dir):
+            size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0])
+        else:
+            size = 0
+        return size
+
+    packages = d.getVar('PACKAGES', True)
+    pkgdest = d.getVar('PKGDEST', True)
+    pkgdatadir = d.getVar('PKGDESTWORK', True)
+
+    # Take shared lock since we're only reading, not writing
+    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
+
+    data_file = pkgdatadir + d.expand("/${PN}" )
+    f = open(data_file, 'w')
+    f.write("PACKAGES: %s\n" % packages)
+    f.close()
+
+    workdir = d.getVar('WORKDIR', True)
+
+    for pkg in packages.split():
+        subdata_file = pkgdatadir + "/runtime/%s" % pkg
+
+        sf = open(subdata_file, 'w')
+        write_if_exists(sf, pkg, 'PN')
+        write_if_exists(sf, pkg, 'PV')
+        write_if_exists(sf, pkg, 'PR')
+        write_if_exists(sf, pkg, 'PKGV')
+        write_if_exists(sf, pkg, 'PKGR')
+        write_if_exists(sf, pkg, 'LICENSE')
+        write_if_exists(sf, pkg, 'DESCRIPTION')
+        write_if_exists(sf, pkg, 'SUMMARY')
+        write_if_exists(sf, pkg, 'RDEPENDS')
+        write_if_exists(sf, pkg, 'RPROVIDES')
+        write_if_exists(sf, pkg, 'RRECOMMENDS')
+        write_if_exists(sf, pkg, 'RSUGGESTS')
+        write_if_exists(sf, pkg, 'RREPLACES')
+        write_if_exists(sf, pkg, 'RCONFLICTS')
+        write_if_exists(sf, pkg, 'SECTION')
+        write_if_exists(sf, pkg, 'PKG')
+        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
+        write_if_exists(sf, pkg, 'FILES')
+        write_if_exists(sf, pkg, 'pkg_postinst')
+        write_if_exists(sf, pkg, 'pkg_postrm')
+        write_if_exists(sf, pkg, 'pkg_preinst')
+        write_if_exists(sf, pkg, 'pkg_prerm')
+        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
+        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
+            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
+
+        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
+        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
+            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
+
+        sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg)))
+        sf.close()
+
+
+        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
+        if not allow_empty:
+            allow_empty = d.getVar('ALLOW_EMPTY', True)
+        root = "%s/%s" % (pkgdest, pkg)
+        os.chdir(root)
+        g = glob('*')
+        if g or allow_empty == "1":
+            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
+            file(packagedfile, 'w').close()
+
+    bb.utils.unlockfile(lf)
 }
 emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime"
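
Each runtime pkgdata file written by emit_pkgdata holds one "VAR_pkg: value" pair per line, with the value passed through Python 2's string_escape codec so embedded newlines survive; the do_package_write_* tasks later read it back via read_subpackage_metadata. A rough sketch of a matching reader (hypothetical helper, not part of this class):

    import codecs

    def read_pkgdata_file(path):
        # Inverse of write_if_exists/encode above: split on the first
        # colon, then undo the string_escape encoding (Python 2 codec).
        decode = codecs.getdecoder("string_escape")
        data = {}
        for line in open(path):
            key, val = line.split(":", 1)
            data[key] = decode(val.strip())[0]
        return data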
 
@@ -1156,557 +1156,557 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
 #  FILERDEPENDS_filepath_pkg - per file dep
 
 python package_do_filedeps() {
-       import re
-
-       if d.getVar('SKIP_FILEDEPS', True) == '1':
-               return
-
-       pkgdest = d.getVar('PKGDEST', True)
-       packages = d.getVar('PACKAGES', True)
-
-       rpmdeps = d.expand("${RPMDEPS}")
-       r = re.compile(r'[<>=]+ +[^ ]*')
-
-       def file_translate(file):
-               ft = file.replace("@", "@at@")
-               ft = ft.replace(" ", "@space@")
-               ft = ft.replace("\t", "@tab@")
-               ft = ft.replace("[", "@openbrace@")
-               ft = ft.replace("]", "@closebrace@")
-               ft = ft.replace("_", "@underscore@")
-               return ft
-
-       # Quick routine to process the results of the rpmdeps call...
-       def process_deps(pipe, pkg, provides_files, requires_files):
-               provides = {}
-               requires = {}
-
-               for line in pipe:
-                       f = line.split(" ", 1)[0].strip()
-                       line = line.split(" ", 1)[1].strip()
-
-                       if line.startswith("Requires:"):
-                               i = requires
-                       elif line.startswith("Provides:"):
-                               i = provides
-                       else:
-                               continue
-
-                       file = f.replace(pkgdest + "/" + pkg, "")
-                       file = file_translate(file)
-                       value = line.split(":", 1)[1].strip()
-                       value = r.sub(r'(\g<0>)', value)
-
-                       if value.startswith("rpmlib("):
-                               continue
-                       if value == "python":
-                               continue
-                       if file not in i:
-                               i[file] = []
-                       i[file].append(value)
-
-               for file in provides:
-                       provides_files.append(file)
-                       key = "FILERPROVIDES_" + file + "_" + pkg
-                       d.setVar(key, " ".join(provides[file]))
-
-               for file in requires:
-                       requires_files.append(file)
-                       key = "FILERDEPENDS_" + file + "_" + pkg
-                       d.setVar(key, " ".join(requires[file]))
-
-       def chunks(files, n):
-               return [files[i:i+n] for i in range(0, len(files), n)]
-
-       # Determine dependencies
-       for pkg in packages.split():
-               if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
-                       continue
-
-               provides_files = []
-               requires_files = []
-               rpfiles = []
-               for root, dirs, files in os.walk(pkgdest + "/" + pkg):
-                       for file in files:
-                               rpfiles.append(os.path.join(root, file))
-
-               for files in chunks(rpfiles, 100):
-                       dep_pipe = os.popen(rpmdeps + " " + " ".join(files))
-
-                       process_deps(dep_pipe, pkg, provides_files, requires_files)
-
-               d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
-               d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
+    import re
+
+    if d.getVar('SKIP_FILEDEPS', True) == '1':
+        return
+
+    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES', True)
+
+    rpmdeps = d.expand("${RPMDEPS}")
+    r = re.compile(r'[<>=]+ +[^ ]*')
+
+    def file_translate(file):
+        ft = file.replace("@", "@at@")
+        ft = ft.replace(" ", "@space@")
+        ft = ft.replace("\t", "@tab@")
+        ft = ft.replace("[", "@openbrace@")
+        ft = ft.replace("]", "@closebrace@")
+        ft = ft.replace("_", "@underscore@")
+        return ft
+
+    # Quick routine to process the results of the rpmdeps call...
+    def process_deps(pipe, pkg, provides_files, requires_files):
+        provides = {}
+        requires = {}
+
+        for line in pipe:
+            f = line.split(" ", 1)[0].strip()
+            line = line.split(" ", 1)[1].strip()
+
+            if line.startswith("Requires:"):
+                i = requires
+            elif line.startswith("Provides:"):
+                i = provides
+            else:
+                continue
+
+            file = f.replace(pkgdest + "/" + pkg, "")
+            file = file_translate(file)
+            value = line.split(":", 1)[1].strip()
+            value = r.sub(r'(\g<0>)', value)
+
+            if value.startswith("rpmlib("):
+                continue
+            if value == "python":
+                continue
+            if file not in i:
+                i[file] = []
+            i[file].append(value)
+
+        for file in provides:
+            provides_files.append(file)
+            key = "FILERPROVIDES_" + file + "_" + pkg
+            d.setVar(key, " ".join(provides[file]))
+
+        for file in requires:
+            requires_files.append(file)
+            key = "FILERDEPENDS_" + file + "_" + pkg
+            d.setVar(key, " ".join(requires[file]))
+
+    def chunks(files, n):
+        return [files[i:i+n] for i in range(0, len(files), n)]
+
+    # Determine dependencies
+    for pkg in packages.split():
+        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
+            continue
+
+        provides_files = []
+        requires_files = []
+        rpfiles = []
+        for root, dirs, files in os.walk(pkgdest + "/" + pkg):
+            for file in files:
+                rpfiles.append(os.path.join(root, file))
+
+        for files in chunks(rpfiles, 100):
+            dep_pipe = os.popen(rpmdeps + " " + " ".join(files))
+
+            process_deps(dep_pipe, pkg, provides_files, requires_files)
+
+        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
+        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
 }
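
file_translate above exists because the per-file results land in BitBake variable names (FILERDEPENDS_<file>_<pkg>), where spaces, tabs, brackets and underscores would be ambiguous. A sketch of the reverse mapping for typical paths (hypothetical helper; note the forward mapping is not strictly reversible for paths that already contain an @token@ sequence):

    def file_reverse_translate(ft):
        # Undo the escapes in the reverse order of file_translate,
        # leaving "@at@" for last so literal "@" comes back cleanly.
        for token, char in (("@underscore@", "_"),
                            ("@closebrace@", "]"),
                            ("@openbrace@", "["),
                            ("@tab@", "\t"),
                            ("@space@", " "),
                            ("@at@", "@")):
            ft = ft.replace(token, char)
        return ft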
 
 SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
 SHLIBSWORKDIR = "${WORKDIR}/shlibs"
 
 python package_do_shlibs() {
-       import re, pipes
-
-       exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
-       if exclude_shlibs:
-               bb.note("not generating shlibs")
-               return
-               
-       lib_re = re.compile("^.*\.so")
-       libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
-
-       packages = d.getVar('PACKAGES', True)
-       targetos = d.getVar('TARGET_OS', True)
-
-       workdir = d.getVar('WORKDIR', True)
-
-       ver = d.getVar('PKGV', True)
-       if not ver:
-               bb.error("PKGV not defined")
-               return
-
-       pkgdest = d.getVar('PKGDEST', True)
-
-       shlibs_dir = d.getVar('SHLIBSDIR', True)
-       shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
-
-       # Take shared lock since we're only reading, not writing
-       lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
-
-       def linux_so(root, path, file):
-               needs_ldconfig = False
-               cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
-               cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
-               fd = os.popen(cmd)
-               lines = fd.readlines()
-               fd.close()
-               for l in lines:
-                       m = re.match("\s+NEEDED\s+([^\s]*)", l)
-                       if m:
-                               if m.group(1) not in needed[pkg]:
-                                       needed[pkg].append(m.group(1))
-                       m = re.match("\s+SONAME\s+([^\s]*)", l)
-                       if m:
-                               this_soname = m.group(1)
-                               if not this_soname in sonames:
-                                       # if library is private (only used by package) then do not build shlib for it
-                                       if not private_libs or -1 == private_libs.find(this_soname):
-                                               sonames.append(this_soname)
-                               if libdir_re.match(root):
-                                       needs_ldconfig = True
-                               if snap_symlinks and (file != this_soname):
-                                       renames.append((os.path.join(root, file), os.path.join(root, this_soname)))
-               return needs_ldconfig
-
-       def darwin_so(root, path, file):
-               fullpath = os.path.join(root, file)
-               if not os.path.exists(fullpath):
-                       return
-
-               def get_combinations(base):
-                       #
-                       # Given a base library name, find all combinations of this split by "." and "-"
-                       #
-                       combos = []
-                       options = base.split(".")
-                       for i in range(1, len(options) + 1):
-                               combos.append(".".join(options[0:i]))
-                       options = base.split("-")
-                       for i in range(1, len(options) + 1):
-                               combos.append("-".join(options[0:i]))
-                       return combos           
-
-               if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
-                       # Drop suffix
-                       name = file.rsplit(".",1)[0]
-                       # Find all combinations
-                       combos = get_combinations(name)
-                       for combo in combos:
-                               if not combo in sonames:
-                                       sonames.append(combo)
-               if file.endswith('.dylib') or file.endswith('.so'):
-                       lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
-                       # Drop suffix
-                       lafile = lafile.rsplit(".",1)[0]
-                       lapath = os.path.dirname(lafile)
-                       lafile = os.path.basename(lafile)
-                       # Find all combinations
-                       combos = get_combinations(lafile)
-                       for combo in combos:
-                               if os.path.exists(lapath + '/' + combo + '.la'):
-                                       break
-                       lafile = lapath + '/' + combo + '.la'
-
-                       #bb.note("Foo2: %s" % lafile)
-                       #bb.note("Foo %s %s" % (file, fullpath))
-                       if os.path.exists(lafile):
-                               fd = open(lafile, 'r')
-                               lines = fd.readlines()
-                               fd.close()
-                               for l in lines:
-                                       m = re.match("\s*dependency_libs=\s*'(.*)'", l)
-                                       if m:
-                                               deps = m.group(1).split(" ")
-                                               for dep in deps:
-                                                       #bb.note("Trying %s for %s" % (dep, pkg))
-                                                       name = None
-                                                       if dep.endswith(".la"):
-                                                               name = os.path.basename(dep).replace(".la", "")
-                                                       elif dep.startswith("-l"):
-                                                               name = dep.replace("-l", "lib")
-                                                       if pkg not in needed:
-                                                               needed[pkg] = []
-                                                       if name and name not in needed[pkg]:
-                                                               needed[pkg].append(name)
-                                                               #bb.note("Adding %s for %s" % (name, pkg))
-
-       if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
-               snap_symlinks = True
-       else:
-               snap_symlinks = False
-
-       if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
-               use_ldconfig = True
-       else:
-               use_ldconfig = False
-
-       needed = {}
-       shlib_provider = {}
-       for pkg in packages.split():
-               private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True)
-               needs_ldconfig = False
-               bb.debug(2, "calculating shlib provides for %s" % pkg)
-
-               pkgver = d.getVar('PKGV_' + pkg, True)
-               if not pkgver:
-                       pkgver = d.getVar('PV_' + pkg, True)
-               if not pkgver:
-                       pkgver = ver
-
-               needed[pkg] = []
-               sonames = list()
-               renames = list()
-               top = os.path.join(pkgdest, pkg)
-               for root, dirs, files in os.walk(top):
-                       for file in files:
-                               soname = None
-                               path = os.path.join(root, file)
-                               if os.path.islink(path):
-                                       continue
-                               if targetos == "darwin" or targetos == "darwin8":
-                                       darwin_so(root, dirs, file)
-                               elif os.access(path, os.X_OK) or lib_re.match(file):
-                                       ldconfig = linux_so(root, dirs, file)
-                                       needs_ldconfig = needs_ldconfig or ldconfig
-               for (old, new) in renames:
-                       bb.note("Renaming %s to %s" % (old, new))
-                       os.rename(old, new)
-               shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
-               shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
-               if len(sonames):
-                       fd = open(shlibs_file, 'w')
-                       for s in sonames:
-                               fd.write(s + '\n')
-                               shlib_provider[s] = (pkg, pkgver)
-                       fd.close()
-                       fd = open(shver_file, 'w')
-                       fd.write(pkgver + '\n')
-                       fd.close()
-               if needs_ldconfig and use_ldconfig:
-                       bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-                       postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
-                       if not postinst:
-                               postinst = '#!/bin/sh\n'
-                       postinst += d.getVar('ldconfig_postinst_fragment', True)
-                       d.setVar('pkg_postinst_%s' % pkg, postinst)
-
-       list_re = re.compile('^(.*)\.list$')
-       for dir in [shlibs_dir]: 
-               if not os.path.exists(dir):
-                       continue
-               for file in os.listdir(dir):
-                       m = list_re.match(file)
-                       if m:
-                               dep_pkg = m.group(1)
-                               fd = open(os.path.join(dir, file))
-                               lines = fd.readlines()
-                               fd.close()
-                               ver_file = os.path.join(dir, dep_pkg + '.ver')
-                               lib_ver = None
-                               if os.path.exists(ver_file):
-                                       fd = open(ver_file)
-                                       lib_ver = fd.readline().rstrip()
-                                       fd.close()
-                               for l in lines:
-                                       shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
-
-       bb.utils.unlockfile(lf)
-
-       assumed_libs = d.getVar('ASSUME_SHLIBS', True)
-       if assumed_libs:
-           for e in assumed_libs.split():
-               l, dep_pkg = e.split(":")
-               lib_ver = None
-               dep_pkg = dep_pkg.rsplit("_", 1)
-               if len(dep_pkg) == 2:
-                   lib_ver = dep_pkg[1]
-               dep_pkg = dep_pkg[0]
-               shlib_provider[l] = (dep_pkg, lib_ver)
-
-       for pkg in packages.split():
-               bb.debug(2, "calculating shlib requirements for %s" % pkg)
-
-               deps = list()
-               for n in needed[pkg]:
-                       if n in shlib_provider.keys():
-                               (dep_pkg, ver_needed) = shlib_provider[n]
-
-                               bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg))
-
-                               if dep_pkg == pkg:
-                                       continue
-
-                               if ver_needed:
-                                       dep = "%s (>= %s)" % (dep_pkg, ver_needed)
-                               else:
-                                       dep = dep_pkg
-                               if not dep in deps:
-                                       deps.append(dep)
-                       else:
-                               bb.note("Couldn't find shared library provider for %s" % n)
-
-               deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
-               if os.path.exists(deps_file):
-                       os.remove(deps_file)
-               if len(deps):
-                       fd = open(deps_file, 'w')
-                       for dep in deps:
-                               fd.write(dep + '\n')
-                       fd.close()
+    import re, pipes
+
+    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
+    if exclude_shlibs:
+        bb.note("not generating shlibs")
+        return
+
+    lib_re = re.compile("^.*\.so")
+    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
+
+    packages = d.getVar('PACKAGES', True)
+    targetos = d.getVar('TARGET_OS', True)
+
+    workdir = d.getVar('WORKDIR', True)
+
+    ver = d.getVar('PKGV', True)
+    if not ver:
+        bb.error("PKGV not defined")
+        return
+
+    pkgdest = d.getVar('PKGDEST', True)
+
+    shlibs_dir = d.getVar('SHLIBSDIR', True)
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+
+    # Take shared lock since we're only reading, not writing
+    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
+
+    def linux_so(root, path, file):
+        needs_ldconfig = False
+        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
+        cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
+        fd = os.popen(cmd)
+        lines = fd.readlines()
+        fd.close()
+        for l in lines:
+            m = re.match("\s+NEEDED\s+([^\s]*)", l)
+            if m:
+                if m.group(1) not in needed[pkg]:
+                    needed[pkg].append(m.group(1))
+            m = re.match("\s+SONAME\s+([^\s]*)", l)
+            if m:
+                this_soname = m.group(1)
+                if not this_soname in sonames:
+                    # if library is private (only used by package) then do not build shlib for it
+                    if not private_libs or -1 == private_libs.find(this_soname):
+                        sonames.append(this_soname)
+                if libdir_re.match(root):
+                    needs_ldconfig = True
+                if snap_symlinks and (file != this_soname):
+                    renames.append((os.path.join(root, file), os.path.join(root, this_soname)))
+        return needs_ldconfig
+
+    def darwin_so(root, path, file):
+        fullpath = os.path.join(root, file)
+        if not os.path.exists(fullpath):
+            return
+
+        def get_combinations(base):
+            #
+            # Given a base library name, find all combinations of this split by "." and "-"
+            #
+            combos = []
+            options = base.split(".")
+            for i in range(1, len(options) + 1):
+                combos.append(".".join(options[0:i]))
+            options = base.split("-")
+            for i in range(1, len(options) + 1):
+                combos.append("-".join(options[0:i]))
+            return combos
+
+        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
+            # Drop suffix
+            name = file.rsplit(".",1)[0]
+            # Find all combinations
+            combos = get_combinations(name)
+            for combo in combos:
+                if not combo in sonames:
+                    sonames.append(combo)
+        if file.endswith('.dylib') or file.endswith('.so'):
+            lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
+            # Drop suffix
+            lafile = lafile.rsplit(".",1)[0]
+            lapath = os.path.dirname(lafile)
+            lafile = os.path.basename(lafile)
+            # Find all combinations
+            combos = get_combinations(lafile)
+            for combo in combos:
+                if os.path.exists(lapath + '/' + combo + '.la'):
+                    break
+            lafile = lapath + '/' + combo + '.la'
+
+            #bb.note("Foo2: %s" % lafile)
+            #bb.note("Foo %s %s" % (file, fullpath))
+            if os.path.exists(lafile):
+                fd = open(lafile, 'r')
+                lines = fd.readlines()
+                fd.close()
+                for l in lines:
+                    m = re.match("\s*dependency_libs=\s*'(.*)'", l)
+                    if m:
+                        deps = m.group(1).split(" ")
+                        for dep in deps:
+                            #bb.note("Trying %s for %s" % (dep, pkg))
+                            name = None
+                            if dep.endswith(".la"):
+                                name = os.path.basename(dep).replace(".la", "")
+                            elif dep.startswith("-l"):
+                                name = dep.replace("-l", "lib")
+                            if pkg not in needed:
+                                needed[pkg] = []
+                            if name and name not in needed[pkg]:
+                                needed[pkg].append(name)
+                                #bb.note("Adding %s for %s" % (name, pkg))
+
+    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
+        snap_symlinks = True
+    else:
+        snap_symlinks = False
+
+    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
+        use_ldconfig = True
+    else:
+        use_ldconfig = False
+
+    needed = {}
+    shlib_provider = {}
+    for pkg in packages.split():
+        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True)
+        needs_ldconfig = False
+        bb.debug(2, "calculating shlib provides for %s" % pkg)
+
+        pkgver = d.getVar('PKGV_' + pkg, True)
+        if not pkgver:
+            pkgver = d.getVar('PV_' + pkg, True)
+        if not pkgver:
+            pkgver = ver
+
+        needed[pkg] = []
+        sonames = list()
+        renames = list()
+        top = os.path.join(pkgdest, pkg)
+        for root, dirs, files in os.walk(top):
+            for file in files:
+                soname = None
+                path = os.path.join(root, file)
+                if os.path.islink(path):
+                    continue
+                if targetos == "darwin" or targetos == "darwin8":
+                    darwin_so(root, dirs, file)
+                elif os.access(path, os.X_OK) or lib_re.match(file):
+                    ldconfig = linux_so(root, dirs, file)
+                    needs_ldconfig = needs_ldconfig or ldconfig
+        for (old, new) in renames:
+            bb.note("Renaming %s to %s" % (old, new))
+            os.rename(old, new)
+        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
+        shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
+        if len(sonames):
+            fd = open(shlibs_file, 'w')
+            for s in sonames:
+                fd.write(s + '\n')
+                shlib_provider[s] = (pkg, pkgver)
+            fd.close()
+            fd = open(shver_file, 'w')
+            fd.write(pkgver + '\n')
+            fd.close()
+        if needs_ldconfig and use_ldconfig:
+            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
+            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+            if not postinst:
+                postinst = '#!/bin/sh\n'
+            postinst += d.getVar('ldconfig_postinst_fragment', True)
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+
+    list_re = re.compile('^(.*)\.list$')
+    for dir in [shlibs_dir]:
+        if not os.path.exists(dir):
+            continue
+        for file in os.listdir(dir):
+            m = list_re.match(file)
+            if m:
+                dep_pkg = m.group(1)
+                fd = open(os.path.join(dir, file))
+                lines = fd.readlines()
+                fd.close()
+                ver_file = os.path.join(dir, dep_pkg + '.ver')
+                lib_ver = None
+                if os.path.exists(ver_file):
+                    fd = open(ver_file)
+                    lib_ver = fd.readline().rstrip()
+                    fd.close()
+                for l in lines:
+                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
+
+    bb.utils.unlockfile(lf)
+
+    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
+    if assumed_libs:
+        for e in assumed_libs.split():
+            l, dep_pkg = e.split(":")
+            lib_ver = None
+            dep_pkg = dep_pkg.rsplit("_", 1)
+            if len(dep_pkg) == 2:
+                lib_ver = dep_pkg[1]
+            dep_pkg = dep_pkg[0]
+            shlib_provider[l] = (dep_pkg, lib_ver)
+
+    for pkg in packages.split():
+        bb.debug(2, "calculating shlib requirements for %s" % pkg)
+
+        deps = list()
+        for n in needed[pkg]:
+            if n in shlib_provider.keys():
+                (dep_pkg, ver_needed) = shlib_provider[n]
+
+                bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg))
+
+                if dep_pkg == pkg:
+                    continue
+
+                if ver_needed:
+                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
+                else:
+                    dep = dep_pkg
+                if not dep in deps:
+                    deps.append(dep)
+            else:
+                bb.note("Couldn't find shared library provider for %s" % n)
+
+        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
+        if os.path.exists(deps_file):
+            os.remove(deps_file)
+        if len(deps):
+            fd = open(deps_file, 'w')
+            for dep in deps:
+                fd.write(dep + '\n')
+            fd.close()
 }
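
linux_so above scrapes the dynamic section that "objdump -p" prints, where the relevant entries look like "  NEEDED   libc.so.6" and "  SONAME   libfoo.so.1". Pulled out of the class, the scrape is roughly:

    import os, re, pipes

    def scan_elf_dynamic(path, objdump="objdump"):
        # Returns (needed, soname) for one ELF file, using the same
        # regexes and os.popen/pipes.quote idiom as linux_so above.
        needed, soname = [], None
        fd = os.popen("%s -p %s 2>/dev/null" % (objdump, pipes.quote(path)))
        for l in fd.readlines():
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                needed.append(m.group(1))
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                soname = m.group(1)
        fd.close()
        return needed, soname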
 
 python package_do_pkgconfig () {
-       import re
-
-       packages = d.getVar('PACKAGES', True)
-       workdir = d.getVar('WORKDIR', True)
-       pkgdest = d.getVar('PKGDEST', True)
-
-       shlibs_dir = d.getVar('SHLIBSDIR', True)
-       shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
-
-       pc_re = re.compile('(.*)\.pc$')
-       var_re = re.compile('(.*)=(.*)')
-       field_re = re.compile('(.*): (.*)')
-
-       pkgconfig_provided = {}
-       pkgconfig_needed = {}
-       for pkg in packages.split():
-               pkgconfig_provided[pkg] = []
-               pkgconfig_needed[pkg] = []
-               top = os.path.join(pkgdest, pkg)
-               for root, dirs, files in os.walk(top):
-                       for file in files:
-                               m = pc_re.match(file)
-                               if m:
-                                       pd = bb.data.init()
-                                       name = m.group(1)
-                                       pkgconfig_provided[pkg].append(name)
-                                       path = os.path.join(root, file)
-                                       if not os.access(path, os.R_OK):
-                                               continue
-                                       f = open(path, 'r')
-                                       lines = f.readlines()
-                                       f.close()
-                                       for l in lines:
-                                               m = var_re.match(l)
-                                               if m:
-                                                       name = m.group(1)
-                                                       val = m.group(2)
-                                                       pd.setVar(name, pd.expand(val))
-                                                       continue
-                                               m = field_re.match(l)
-                                               if m:
-                                                       hdr = m.group(1)
-                                                       exp = bb.data.expand(m.group(2), pd)
-                                                       if hdr == 'Requires':
-                                                               pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
-
-       # Take shared lock since we're only reading, not writing
-       lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
-
-       for pkg in packages.split():
-               pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
-               if pkgconfig_provided[pkg] != []:
-                       f = open(pkgs_file, 'w')
-                       for p in pkgconfig_provided[pkg]:
-                               f.write('%s\n' % p)
-                       f.close()
-
-       for dir in [shlibs_dir]:
-               if not os.path.exists(dir):
-                       continue
-               for file in os.listdir(dir):
-                       m = re.match('^(.*)\.pclist$', file)
-                       if m:
-                               pkg = m.group(1)
-                               fd = open(os.path.join(dir, file))
-                               lines = fd.readlines()
-                               fd.close()
-                               pkgconfig_provided[pkg] = []
-                               for l in lines:
-                                       pkgconfig_provided[pkg].append(l.rstrip())
-
-       for pkg in packages.split():
-               deps = []
-               for n in pkgconfig_needed[pkg]:
-                       found = False
-                       for k in pkgconfig_provided.keys():
-                               if n in pkgconfig_provided[k]:
-                                       if k != pkg and not (k in deps):
-                                               deps.append(k)
-                                       found = True
-                       if found == False:
-                               bb.note("couldn't find pkgconfig module '%s' in any package" % n)
-               deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
-               if len(deps):
-                       fd = open(deps_file, 'w')
-                       for dep in deps:
-                               fd.write(dep + '\n')
-                       fd.close()
-
-       bb.utils.unlockfile(lf)
+    import re
+
+    packages = d.getVar('PACKAGES', True)
+    workdir = d.getVar('WORKDIR', True)
+    pkgdest = d.getVar('PKGDEST', True)
+
+    shlibs_dir = d.getVar('SHLIBSDIR', True)
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+
+    pc_re = re.compile('(.*)\.pc$')
+    var_re = re.compile('(.*)=(.*)')
+    field_re = re.compile('(.*): (.*)')
+
+    pkgconfig_provided = {}
+    pkgconfig_needed = {}
+    for pkg in packages.split():
+        pkgconfig_provided[pkg] = []
+        pkgconfig_needed[pkg] = []
+        top = os.path.join(pkgdest, pkg)
+        for root, dirs, files in os.walk(top):
+            for file in files:
+                m = pc_re.match(file)
+                if m:
+                    pd = bb.data.init()
+                    name = m.group(1)
+                    pkgconfig_provided[pkg].append(name)
+                    path = os.path.join(root, file)
+                    if not os.access(path, os.R_OK):
+                        continue
+                    f = open(path, 'r')
+                    lines = f.readlines()
+                    f.close()
+                    for l in lines:
+                        m = var_re.match(l)
+                        if m:
+                            name = m.group(1)
+                            val = m.group(2)
+                            pd.setVar(name, pd.expand(val))
+                            continue
+                        m = field_re.match(l)
+                        if m:
+                            hdr = m.group(1)
+                            exp = bb.data.expand(m.group(2), pd)
+                            if hdr == 'Requires':
+                                pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
+
+    # Take shared lock since we're only reading, not writing
+    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
+
+    for pkg in packages.split():
+        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
+        if pkgconfig_provided[pkg] != []:
+            f = open(pkgs_file, 'w')
+            for p in pkgconfig_provided[pkg]:
+                f.write('%s\n' % p)
+            f.close()
+
+    for dir in [shlibs_dir]:
+        if not os.path.exists(dir):
+            continue
+        for file in os.listdir(dir):
+            m = re.match('^(.*)\.pclist$', file)
+            if m:
+                pkg = m.group(1)
+                fd = open(os.path.join(dir, file))
+                lines = fd.readlines()
+                fd.close()
+                pkgconfig_provided[pkg] = []
+                for l in lines:
+                    pkgconfig_provided[pkg].append(l.rstrip())
+
+    for pkg in packages.split():
+        deps = []
+        for n in pkgconfig_needed[pkg]:
+            found = False
+            for k in pkgconfig_provided.keys():
+                if n in pkgconfig_provided[k]:
+                    if k != pkg and not (k in deps):
+                        deps.append(k)
+                    found = True
+            if found == False:
+                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
+        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
+        if len(deps):
+            fd = open(deps_file, 'w')
+            for dep in deps:
+                fd.write(dep + '\n')
+            fd.close()
+
+    bb.utils.unlockfile(lf)
 }
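
Stripped of bb.data, the .pc scan in package_do_pkgconfig reduces to: record name=value variables, expand ${name} references, and collect the Requires: field. A self-contained sketch with the same first-match ordering as the class code (a line containing "=" is treated as a variable):

    import re

    def pc_requires(path):
        variables = {}
        requires = []
        def expand(s):
            # stand-in for bb.data expansion: substitute known ${name}s
            return re.sub(r'\$\{([^}]+)\}',
                          lambda m: variables.get(m.group(1), ''), s)
        for l in open(path):
            m = re.match('(.*)=(.*)', l)
            if m:
                variables[m.group(1)] = expand(m.group(2).strip())
                continue
            m = re.match('(.*): (.*)', l)
            if m and m.group(1) == 'Requires':
                requires += expand(m.group(2)).replace(',', ' ').split()
        return requires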
 
 python read_shlibdeps () {
-       packages = d.getVar('PACKAGES', True).split()
-       for pkg in packages:
-               rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
-
-               for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
-                       depsfile = d.expand("${PKGDEST}/" + pkg + extension)
-                       if os.access(depsfile, os.R_OK):
-                               fd = file(depsfile)
-                               lines = fd.readlines()
-                               fd.close()
-                               for l in lines:
-                                       rdepends[l.rstrip()] = ""
-               d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+    packages = d.getVar('PACKAGES', True).split()
+    for pkg in packages:
+        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
+
+        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
+            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
+            if os.access(depsfile, os.R_OK):
+                fd = file(depsfile)
+                lines = fd.readlines()
+                fd.close()
+                for l in lines:
+                    rdepends[l.rstrip()] = ""
+        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
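
The .shlibdeps/.pcdeps/.clilibdeps files each hold one dependency per line, and read_shlibdeps folds them into RDEPENDS_<pkg> as unversioned entries while keeping any constraints already present. With plain dicts in place of bb.utils.explode_dep_versions/join_deps, the merge amounts to:

    rdepends = {"libfoo": ">= 1.2"}        # existing, versioned entry
    for l in ["libbar\n", "libbaz\n"]:     # lines read from pkg.shlibdeps
        rdepends[l.rstrip()] = ""          # new entries are unversioned
    # join_deps(..., commasep=False) would render this roughly as
    # "libfoo (>= 1.2) libbar libbaz"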
 
 python package_depchains() {
-       """
-       For a given set of prefix and postfix modifiers, make those packages
-       RRECOMMENDS on the corresponding packages for their RDEPENDS.
-
-       Example:  If package A depends upon package B, and A's .bb emits an
-       A-dev package, this would make A-dev Recommends: B-dev.
-
-       If only one of a given suffix is specified, it will take the RRECOMMENDS
-       based on the RDEPENDS of *all* other packages. If more than one of a given 
-       suffix is specified, it will only use the RDEPENDS of the single parent 
-       package.
-       """
-
-       packages  = d.getVar('PACKAGES', True)
-       postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
-       prefixes  = (d.getVar('DEPCHAIN_PRE', True) or '').split()
-
-       def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
-
-               #bb.note('depends for %s is %s' % (base, depends))
-               rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
-
-               for depend in depends:
-                       if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
-                               #bb.note("Skipping %s" % depend)
-                               continue
-                       if depend.endswith('-dev'):
-                               depend = depend.replace('-dev', '')
-                       if depend.endswith('-dbg'):
-                               depend = depend.replace('-dbg', '')
-                       pkgname = getname(depend, suffix)
-                       #bb.note("Adding %s for %s" % (pkgname, depend))
-                       if pkgname not in rreclist:
-                               rreclist[pkgname] = ""
-
-               #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-               d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
-       def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
-
-               #bb.note('rdepends for %s is %s' % (base, rdepends))
-               rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
-
-               for depend in rdepends:
-                       if depend.find('virtual-locale-') != -1:
-                               #bb.note("Skipping %s" % depend)
-                               continue
-                       if depend.endswith('-dev'):
-                               depend = depend.replace('-dev', '')
-                       if depend.endswith('-dbg'):
-                               depend = depend.replace('-dbg', '')
-                       pkgname = getname(depend, suffix)
-                       #bb.note("Adding %s for %s" % (pkgname, depend))
-                       if pkgname not in rreclist:
-                               rreclist[pkgname] = ""
-
-               #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-               d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
-
-       def add_dep(list, dep):
-               dep = dep.split(' (')[0].strip()
-               if dep not in list:
-                       list.append(dep)
-
-       depends = []
-       for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
-               add_dep(depends, dep)
-
-       rdepends = []
-       for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
-               add_dep(rdepends, dep)
-
-       for pkg in packages.split():
-               for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
-                       add_dep(rdepends, dep)
-
-       #bb.note('rdepends is %s' % rdepends)
-
-       def post_getname(name, suffix):
-               return '%s%s' % (name, suffix)
-       def pre_getname(name, suffix):
-               return '%s%s' % (suffix, name)
-
-       pkgs = {}
-       for pkg in packages.split():
-               for postfix in postfixes:
-                       if pkg.endswith(postfix):
-                               if not postfix in pkgs:
-                                       pkgs[postfix] = {}
-                               pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
-
-               for prefix in prefixes:
-                       if pkg.startswith(prefix):
-                               if not prefix in pkgs:
-                                       pkgs[prefix] = {}
-                               pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
-
-       for suffix in pkgs:
-               for pkg in pkgs[suffix]:
-                       if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
-                               continue
-                       (base, func) = pkgs[suffix][pkg]
-                       if suffix == "-dev":
-                               pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
-                       if len(pkgs[suffix]) == 1:
-                               pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
-                       else:
-                               rdeps = []
-                               for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
-                                       add_dep(rdeps, dep)
-                               pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
+    """
+    For a given set of prefix and postfix modifiers, make those packages
+    RRECOMMENDS on the corresponding packages for their RDEPENDS.
+
+    Example:  If package A depends upon package B, and A's .bb emits an
+    A-dev package, this would make A-dev Recommends: B-dev.
+
+    If only one of a given suffix is specified, it will take the RRECOMMENDS
+    based on the RDEPENDS of *all* other packages. If more than one of a given
+    suffix is specified, it will only use the RDEPENDS of the single parent
+    package.
+    """
+
+    packages  = d.getVar('PACKAGES', True)
+    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
+    prefixes  = (d.getVar('DEPCHAIN_PRE', True) or '').split()
+
+    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
+
+        #bb.note('depends for %s is %s' % (base, depends))
+        rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
+
+        for depend in depends:
+            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
+                #bb.note("Skipping %s" % depend)
+                continue
+            if depend.endswith('-dev'):
+                depend = depend.replace('-dev', '')
+            if depend.endswith('-dbg'):
+                depend = depend.replace('-dbg', '')
+            pkgname = getname(depend, suffix)
+            #bb.note("Adding %s for %s" % (pkgname, depend))
+            if pkgname not in rreclist:
+                rreclist[pkgname] = ""
+
+        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
+        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
+
+        #bb.note('rdepends for %s is %s' % (base, rdepends))
+        rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
+
+        for depend in rdepends:
+            if depend.find('virtual-locale-') != -1:
+                #bb.note("Skipping %s" % depend)
+                continue
+            if depend.endswith('-dev'):
+                depend = depend.replace('-dev', '')
+            if depend.endswith('-dbg'):
+                depend = depend.replace('-dbg', '')
+            pkgname = getname(depend, suffix)
+            #bb.note("Adding %s for %s" % (pkgname, depend))
+            if pkgname not in rreclist:
+                rreclist[pkgname] = ""
+
+        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
+        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+    def add_dep(list, dep):
+        dep = dep.split(' (')[0].strip()
+        if dep not in list:
+            list.append(dep)
+
+    depends = []
+    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
+        add_dep(depends, dep)
+
+    rdepends = []
+    for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
+        add_dep(rdepends, dep)
+
+    for pkg in packages.split():
+        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
+            add_dep(rdepends, dep)
+
+    #bb.note('rdepends is %s' % rdepends)
+
+    def post_getname(name, suffix):
+        return '%s%s' % (name, suffix)
+    def pre_getname(name, suffix):
+        return '%s%s' % (suffix, name)
+
+    pkgs = {}
+    for pkg in packages.split():
+        for postfix in postfixes:
+            if pkg.endswith(postfix):
+                if not postfix in pkgs:
+                    pkgs[postfix] = {}
+                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
+
+        for prefix in prefixes:
+            if pkg.startswith(prefix):
+                if not prefix in pkgs:
+                    pkgs[prefix] = {}
+                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
+
+    for suffix in pkgs:
+        for pkg in pkgs[suffix]:
+            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
+                continue
+            (base, func) = pkgs[suffix][pkg]
+            if suffix == "-dev":
+                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
+            if len(pkgs[suffix]) == 1:
+                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
+            else:
+                rdeps = []
+                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
+                    add_dep(rdeps, dep)
+                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
 }
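
A worked example of the depchain mapping: suppose ${PN} is foo, PACKAGES yields foo and foo-dev, DEPCHAIN_POST contains -dev, and DEPENDS contains glib-2.0 (all values illustrative). foo-dev matches the -dev postfix, so pkg_adddeprrecs pushes each build dependency through post_getname:

    # condensed from pkg_adddeprrecs + post_getname above
    depends = ['glib-2.0']                  # from DEPENDS in this example
    rreclist = {}
    for depend in depends:
        rreclist['%s%s' % (depend, '-dev')] = ""
    # => RRECOMMENDS_foo-dev gains "glib-2.0-dev"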
 
-# Since bitbake can't determine which variables are accessed during package 
+# Since bitbake can't determine which variables are accessed during package
 # iteration, we need to list them here:
 PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR"
 
@@ -1720,44 +1720,44 @@ def gen_packagevar(d):
     return " ".join(ret)
 
 PACKAGE_PREPROCESS_FUNCS ?= ""
-PACKAGEFUNCS ?= "package_get_auto_pr \ 
+PACKAGEFUNCS ?= "package_get_auto_pr \
                 perform_packagecopy \
                 ${PACKAGE_PREPROCESS_FUNCS} \
-               package_do_split_locales \
-               split_and_strip_files \
-               fixup_perms \
-               populate_packages \
-               package_do_filedeps \
-               package_do_shlibs \
-               package_do_pkgconfig \
-               read_shlibdeps \
-               package_depchains \
-               emit_pkgdata"
+                package_do_split_locales \
+                split_and_strip_files \
+                fixup_perms \
+                populate_packages \
+                package_do_filedeps \
+                package_do_shlibs \
+                package_do_pkgconfig \
+                read_shlibdeps \
+                package_depchains \
+                emit_pkgdata"
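
Because PACKAGEFUNCS is only a weak default (?=) and do_package below simply runs bb.build.exec_func over each listed name in order, extra steps can be spliced in without rewriting the list; PACKAGE_PREPROCESS_FUNCS, expanded right after perform_packagecopy, is the hook for that. A hypothetical example (function name and path are made up):

    PACKAGE_PREPROCESS_FUNCS += "example_prune_docs"

    python example_prune_docs () {
        # illustrative only: drop a directory from ${PKGD} before the
        # split/strip/populate steps see it
        import os, shutil
        target = os.path.join(d.getVar('PKGD', True), 'usr/share/doc/examples')
        if os.path.isdir(target):
            shutil.rmtree(target)
    }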
 
 python do_package () {
-        # Change the following version to cause sstate to invalidate the package
-        # cache.  This is useful if an item this class depends on changes in a
-        # way that the output of this class changes.  rpmdeps is a good example
-        # as any change to rpmdeps requires this to be rerun.
-        # PACKAGE_BBCLASS_VERSION = "1"
-
-       packages = (d.getVar('PACKAGES', True) or "").split()
-       if len(packages) < 1:
-               bb.debug(1, "No packages to build, skipping do_package")
-               return
-
-       workdir = d.getVar('WORKDIR', True)
-       outdir = d.getVar('DEPLOY_DIR', True)
-       dest = d.getVar('D', True)
-       dvar = d.getVar('PKGD', True)
-       pn = d.getVar('PN', True)
-
-       if not workdir or not outdir or not dest or not dvar or not pn or not packages:
-               bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
-               return
-
-       for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
-               bb.build.exec_func(f, d)
+    # Change the following version to cause sstate to invalidate the package
+    # cache.  This is useful if an item this class depends on changes in a
+    # way that the output of this class changes.  rpmdeps is a good example
+    # as any change to rpmdeps requires this to be rerun.
+    # PACKAGE_BBCLASS_VERSION = "1"
+
+    packages = (d.getVar('PACKAGES', True) or "").split()
+    if len(packages) < 1:
+        bb.debug(1, "No packages to build, skipping do_package")
+        return
+
+    workdir = d.getVar('WORKDIR', True)
+    outdir = d.getVar('DEPLOY_DIR', True)
+    dest = d.getVar('D', True)
+    dvar = d.getVar('PKGD', True)
+    pn = d.getVar('PN', True)
+
+    if not workdir or not outdir or not dest or not dvar or not pn or not packages:
+        bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
+        return
+
+    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
+        bb.build.exec_func(f, d)
 }
 
 do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
@@ -1775,7 +1775,7 @@ do_package[stamp-extra-info] = "${MACHINE}"
 do_package_setscene[dirs] = "${STAGING_DIR}"
 
 python do_package_setscene () {
-       sstate_setscene(d)
+    sstate_setscene(d)
 }
 addtask do_package_setscene
 
@@ -1793,14 +1793,14 @@ addtask package_write before do_build after do_package
 #
 
 def mapping_rename_hook(d):
-       """
-       Rewrite variables to account for package renaming in things
-       like debian.bbclass or manual PKG variable name changes
-       """
-       runtime_mapping_rename("RDEPENDS", d)
-       runtime_mapping_rename("RRECOMMENDS", d)
-       runtime_mapping_rename("RSUGGESTS", d)
-       runtime_mapping_rename("RPROVIDES", d)
-       runtime_mapping_rename("RREPLACES", d)
-       runtime_mapping_rename("RCONFLICTS", d)
+    """
+    Rewrite variables to account for package renaming in things
+    like debian.bbclass or manual PKG variable name changes
+    """
+    runtime_mapping_rename("RDEPENDS", d)
+    runtime_mapping_rename("RRECOMMENDS", d)
+    runtime_mapping_rename("RSUGGESTS", d)
+    runtime_mapping_rename("RPROVIDES", d)
+    runtime_mapping_rename("RREPLACES", d)
+    runtime_mapping_rename("RCONFLICTS", d)
 
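A note for readers tracing the hunk above: do_package drives packaging by walking PACKAGEFUNCS, an ordered, space-separated list of function names, and running each one against the datastore in turn. Below is a minimal plain-Python sketch of that dispatch pattern; the registry dict and the two stub steps are hypothetical stand-ins, not BitBake internals.

# Minimal sketch of the do_package dispatch loop. 'registry' and the
# stub steps are hypothetical stand-ins for bb.build.exec_func and the
# real PACKAGEFUNCS implementations.
def run_packagefuncs(funcs, registry, d):
    for name in (funcs or '').split():
        registry[name](d)          # cf. bb.build.exec_func(f, d)

registry = {
    'perform_packagecopy': lambda d: d.update(copied=True),
    'emit_pkgdata': lambda d: d.update(pkgdata_written=True),
}
d = {}
run_packagefuncs('perform_packagecopy emit_pkgdata', registry, d)
assert d == {'copied': True, 'pkgdata_written': True}

Keeping the list in a plain weak-default variable is what lets ${PACKAGE_PREPROCESS_FUNCS} be spliced into the middle of it.
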
index f58fd2be02791c1ff47507c5d7a739ea310d96b9..d09baeaa283a1d793e6fb8fbe3e49fb6a42d0865 100644 (file)
@@ -418,8 +418,8 @@ python () {
 }
 
 python do_package_write_deb () {
-       bb.build.exec_func("read_subpackage_metadata", d)
-       bb.build.exec_func("do_package_deb", d)
+    bb.build.exec_func("read_subpackage_metadata", d)
+    bb.build.exec_func("do_package_deb", d)
 }
 do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
 do_package_write_deb[umask] = "022"
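
Both files above, like the rest of the commit, change nothing but leading whitespace. A simplified sketch of the conversion rule follows, assuming whole-file retabbing; the actual change was restricted to Python function bodies, so treat this as an illustration of the rule rather than the tool that was used.

# Simplified, hypothetical retab helper: expand each leading tab into
# four spaces and leave everything after the indent untouched.
import sys

def retab(line, width=4):
    body = line.lstrip('\t')
    tabs = len(line) - len(body)
    return ' ' * (tabs * width) + body

for path in sys.argv[1:]:
    with open(path) as f:
        lines = f.readlines()
    with open(path, 'w') as f:
        f.writelines(retab(l) for l in lines)
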
index 2a2991768b43e299173fcd03927d9f52a415ba70..b20df0f24313daabee43eebc3802b11cc62071cb 100644 (file)
@@ -9,11 +9,11 @@ PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
 PKGWRITEDIRSRPM = "${DEPLOY_DIR}/sources/deploy-srpm"
 
 python package_rpm_fn () {
-       d.setVar('PKGFN', d.getVar('PKG'))
+    d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 python package_rpm_install () {
-       bb.fatal("package_rpm_install not implemented!")
+    bb.fatal("package_rpm_install not implemented!")
 }
 
 RPMCONF_TARGET_BASE = "${DEPLOY_DIR_RPM}/solvedb"
@@ -547,601 +547,601 @@ EOF
 }
 
 python write_specfile () {
-       import textwrap
-       import oe.packagedata
-
-       # append information for logs and patches to %prep
-       def add_prep(d,spec_files_bottom):
-               if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-                       spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) )
-                       spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
-                       spec_files_bottom.append('')
-
-       # get the name of the tarball for sources, patches and logs
-       def get_tarballs(d):
-               if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-                       return get_package(d)
+    import textwrap
+    import oe.packagedata
+
+    # append information for logs and patches to %prep
+    def add_prep(d,spec_files_bottom):
+        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+            spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) )
+            spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
+            spec_files_bottom.append('')
+
+    # get the name of the tarball for sources, patches and logs
+    def get_tarballs(d):
+        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+            return get_package(d)
     
-       # append the name of the tarball to the keyword 'SOURCE' in xxx.spec.
-       def tail_source(d,source_list=[],patch_list=None):
-               if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-                       source_number = 0
-                       patch_number = 0
-                       for source in source_list:
-                               spec_preamble_top.append('Source' + str(source_number) + ': %s' % source)
-                               source_number += 1
-                       if patch_list:
-                               for patch in patch_list:
-                                       print_deps(patch, "Patch" + str(patch_number), spec_preamble_top, d)
-                                       patch_number += 1
-       # We need a simple way to remove the MLPREFIX from the package name,
-       # and dependency information...
-       def strip_multilib(name, d):
-               multilibs = d.getVar('MULTILIBS', True) or ""
-               for ext in multilibs.split():
-                       eext = ext.split(':')
-                       if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0:
-                               name = "".join(name.split(eext[1] + '-'))
-               return name
-
-#              ml = d.getVar("MLPREFIX", True)
-#              if ml and name and len(ml) != 0 and name.find(ml) == 0:
-#                      return ml.join(name.split(ml, 1)[1:])
-#              return name
-
-       # In RPM, dependencies are of the format: pkg <>= Epoch:Version-Release
-       # This format is similar to OE; however, there are restrictions on the
-       # characters that can be in a field.  In the Version field, "-"
-       # characters are not allowed.  "-" is allowed in the Release field.
-       #
-       # We translate the "-" in the version to a "+", by loading the PKGV
-       # from the dependent recipe, replacing the - with a +, and then using
-       # that value to do a replace inside of this recipe's dependencies.
-       # This preserves the "-" separator between the version and release, as
-       # well as any "-" characters inside of the release field.
-       #
-       # All of this has to happen BEFORE the mapping_rename_hook as
-       # after renaming we cannot look up the dependencies in the packagedata
-       # store.
-       def translate_vers(varname, d):
-               depends = d.getVar(varname, True)
-               if depends:
-                       depends_dict = bb.utils.explode_dep_versions(depends)
-                       newdeps_dict = {}
-                       for dep in depends_dict:
-                               ver = depends_dict[dep]
-                               if dep and ver:
-                                       if '-' in ver:
-                                               subd = oe.packagedata.read_subpkgdata_dict(dep, d)
-                                               if 'PKGV' in subd:
-                                                       pv = subd['PKGV']
-                                                       reppv = pv.replace('-', '+')
-                                                       ver = ver.replace(pv, reppv)
-                               newdeps_dict[dep] = ver
-                       depends = bb.utils.join_deps(newdeps_dict)
-                       d.setVar(varname, depends.strip())
-
-       # We need to change the style of the dependency from BB to RPM
-       # This needs to happen AFTER the mapping_rename_hook
-       def print_deps(variable, tag, array, d):
-               depends = variable
-               if depends:
-                       depends_dict = bb.utils.explode_dep_versions(depends)
-                       for dep in depends_dict:
-                               ver = depends_dict[dep]
-                               if dep and ver:
-                                       ver = ver.replace('(', '')
-                                       ver = ver.replace(')', '')
-                                       array.append("%s: %s %s" % (tag, dep, ver))
-                               else:
-                                       array.append("%s: %s" % (tag, dep))
-
-       def walk_files(walkpath, target, conffiles):
-               import os
-               for rootpath, dirs, files in os.walk(walkpath):
-                       path = rootpath.replace(walkpath, "")
-                       for dir in dirs:
-                               # All packages own the directories their files are in...
-                               target.append('%dir "' + path + '/' + dir + '"')
-                       for file in files:
-                               if conffiles.count(path + '/' + file):
-                                       target.append('%config "' + path + '/' + file + '"')
-                               else:
-                                       target.append('"' + path + '/' + file + '"')
-
-       # Prevent the prerm/postrm scripts from being run during an upgrade
-       def wrap_uninstall(scriptvar):
-               scr = scriptvar.strip()
-               if scr.startswith("#!"):
-                       pos = scr.find("\n") + 1
-               else:
-                       pos = 0
-               scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi'
-               return scr
-
-       packages = d.getVar('PACKAGES', True)
-       if not packages or packages == '':
-               bb.debug(1, "No packages; nothing to do")
-               return
-
-       pkgdest = d.getVar('PKGDEST', True)
-       if not pkgdest:
-               bb.fatal("No PKGDEST")
-               return
-
-       outspecfile = d.getVar('OUTSPECFILE', True)
-       if not outspecfile:
-               bb.fatal("No OUTSPECFILE")
-               return
-
-       # Construct the SPEC file...
-       srcname    = strip_multilib(d.getVar('PN', True), d)
-       srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".")
-       srcversion = d.getVar('PKGV', True).replace('-', '+')
-       srcrelease = d.getVar('PKGR', True)
-       srcepoch   = (d.getVar('PKGE', True) or "")
-       srclicense = d.getVar('LICENSE', True)
-       srcsection = d.getVar('SECTION', True)
-       srcmaintainer  = d.getVar('MAINTAINER', True)
-       srchomepage    = d.getVar('HOMEPAGE', True)
-       srcdescription = d.getVar('DESCRIPTION', True) or "."
-
-       srcdepends     = strip_multilib(d.getVar('DEPENDS', True), d)
-       srcrdepends    = []
-       srcrrecommends = []
-       srcrsuggests   = []
-       srcrprovides   = []
-       srcrreplaces   = []
-       srcrconflicts  = []
-       srcrobsoletes  = []
-
-       srcpreinst  = []
-       srcpostinst = []
-       srcprerm    = []
-       srcpostrm   = []
-
-       spec_preamble_top = []
-       spec_preamble_bottom = []
-
-       spec_scriptlets_top = []
-       spec_scriptlets_bottom = []
-
-       spec_files_top = []
-       spec_files_bottom = []
-
-       for pkg in packages.split():
-               localdata = bb.data.createCopy(d)
-
-               root = "%s/%s" % (pkgdest, pkg)
-
-               lf = bb.utils.lockfile(root + ".lock")
-
-               localdata.setVar('ROOT', '')
-               localdata.setVar('ROOT_%s' % pkg, root)
-               pkgname = localdata.getVar('PKG_%s' % pkg, True)
-               if not pkgname:
-                       pkgname = pkg
-               localdata.setVar('PKG', pkgname)
-
-               localdata.setVar('OVERRIDES', pkg)
-
-               bb.data.update_data(localdata)
-
-               conffiles = (localdata.getVar('CONFFILES', True) or "").split()
-
-               splitname    = strip_multilib(pkgname, d)
-
-               splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".")
-               splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+')
-               splitrelease = (localdata.getVar('PKGR', True) or "")
-               splitepoch   = (localdata.getVar('PKGE', True) or "")
-               splitlicense = (localdata.getVar('LICENSE', True) or "")
-               splitsection = (localdata.getVar('SECTION', True) or "")
-               splitdescription = (localdata.getVar('DESCRIPTION', True) or ".")
-
-               translate_vers('RDEPENDS', localdata)
-               translate_vers('RRECOMMENDS', localdata)
-               translate_vers('RSUGGESTS', localdata)
-               translate_vers('RPROVIDES', localdata)
-               translate_vers('RREPLACES', localdata)
-               translate_vers('RCONFLICTS', localdata)
-
-               # Map the dependencies into their final form
-               mapping_rename_hook(localdata)
-
-               splitrdepends    = strip_multilib(localdata.getVar('RDEPENDS', True), d) or ""
-               splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or ""
-               splitrsuggests   = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or ""
-               splitrprovides   = strip_multilib(localdata.getVar('RPROVIDES', True), d) or ""
-               splitrreplaces   = strip_multilib(localdata.getVar('RREPLACES', True), d) or ""
-               splitrconflicts  = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or ""
-               splitrobsoletes  = []
-
-               # Gather special src/first package data
-               if srcname == splitname:
-                       srcrdepends    = splitrdepends
-                       srcrrecommends = splitrrecommends
-                       srcrsuggests   = splitrsuggests
-                       srcrprovides   = splitrprovides
-                       srcrreplaces   = splitrreplaces
-                       srcrconflicts  = splitrconflicts
-
-                       srcpreinst  = localdata.getVar('pkg_preinst', True)
-                       srcpostinst = localdata.getVar('pkg_postinst', True)
-                       srcprerm    = localdata.getVar('pkg_prerm', True)
-                       srcpostrm   = localdata.getVar('pkg_postrm', True)
-
-                       file_list = []
-                       walk_files(root, file_list, conffiles)
-                       if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
-                               bb.note("Not creating empty RPM package for %s" % splitname)
-                       else:
-                               bb.note("Creating RPM package for %s" % splitname)
-                               spec_files_top.append('%files')
-                               spec_files_top.append('%defattr(-,-,-,-)')
-                               if file_list:
-                                       bb.note("Creating RPM package for %s" % splitname)
-                                       spec_files_top.extend(file_list)
-                               else:
-                                       bb.note("Creating EMPTY RPM Package for %s" % splitname)
-                               spec_files_top.append('')
-
-                       bb.utils.unlockfile(lf)
-                       continue
-
-               # Process subpackage data
-               spec_preamble_bottom.append('%%package -n %s' % splitname)
-               spec_preamble_bottom.append('Summary: %s' % splitsummary)
-               if srcversion != splitversion:
-                       spec_preamble_bottom.append('Version: %s' % splitversion)
-               if srcrelease != splitrelease:
-                       spec_preamble_bottom.append('Release: %s' % splitrelease)
-               if srcepoch != splitepoch:
-                       spec_preamble_bottom.append('Epoch: %s' % splitepoch)
-               if srclicense != splitlicense:
-                       spec_preamble_bottom.append('License: %s' % splitlicense)
-               spec_preamble_bottom.append('Group: %s' % splitsection)
-
-               # Replaces == Obsoletes && Provides
-               if splitrreplaces and splitrreplaces.strip() != "":
-                       for dep in splitrreplaces.split(','):
-                               if splitrprovides:
-                                       splitrprovides = splitrprovides + ", " + dep
-                               else:
-                                       splitrprovides = dep
-                               if splitrobsoletes:
-                                       splitrobsoletes = splitrobsoletes + ", " + dep
-                               else:
-                                       splitrobsoletes = dep
-
-               print_deps(splitrdepends,       "Requires", spec_preamble_bottom, d)
-               # Suggests in RPM are like recommends in OE-core!
-               print_deps(splitrrecommends,    "Suggests", spec_preamble_bottom, d)
-               # RPM has no analog for OE suggests, so emit them as Recommends for now
-               print_deps(splitrsuggests,      "Recommends", spec_preamble_bottom, d)
-               print_deps(splitrprovides,      "Provides", spec_preamble_bottom, d)
-               print_deps(splitrobsoletes,     "Obsoletes", spec_preamble_bottom, d)
-
-               # conflicts cannot be in a provide!  We will need to filter them.
-               if splitrconflicts:
-                       depends_dict = bb.utils.explode_dep_versions(splitrconflicts)
-                       newdeps_dict = {}
-                       for dep in depends_dict:
-                               if dep not in splitrprovides:
-                                       newdeps_dict[dep] = depends_dict[dep]
-                       if newdeps_dict:
-                               splitrconflicts = bb.utils.join_deps(newdeps_dict)
-                       else:
-                               splitrconflicts = ""
-
-               print_deps(splitrconflicts,     "Conflicts", spec_preamble_bottom, d)
-
-               spec_preamble_bottom.append('')
-
-               spec_preamble_bottom.append('%%description -n %s' % splitname)
-               dedent_text = textwrap.dedent(splitdescription).strip()
-               spec_preamble_bottom.append('%s' % textwrap.fill(dedent_text, width=75))
-
-               spec_preamble_bottom.append('')
-
-               # Now process scriptlets
-               for script in ["preinst", "postinst", "prerm", "postrm"]:
-                       scriptvar = localdata.getVar('pkg_%s' % script, True)
-                       if not scriptvar:
-                               continue
-                       if script == 'preinst':
-                               spec_scriptlets_bottom.append('%%pre -n %s' % splitname)
-                       elif script == 'postinst':
-                               spec_scriptlets_bottom.append('%%post -n %s' % splitname)
-                       elif script == 'prerm':
-                               spec_scriptlets_bottom.append('%%preun -n %s' % splitname)
-                               scriptvar = wrap_uninstall(scriptvar)
-                       elif script == 'postrm':
-                               spec_scriptlets_bottom.append('%%postun -n %s' % splitname)
-                               scriptvar = wrap_uninstall(scriptvar)
-                       spec_scriptlets_bottom.append('# %s - %s' % (splitname, script))
-                       spec_scriptlets_bottom.append(scriptvar)
-                       spec_scriptlets_bottom.append('')
-
-               # Now process files
-               file_list = []
-               walk_files(root, file_list, conffiles)
-               if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
-                       bb.note("Not creating empty RPM package for %s" % splitname)
-               else:
-                       spec_files_bottom.append('%%files -n %s' % splitname)
-                       spec_files_bottom.append('%defattr(-,-,-,-)')
-                       if file_list:
-                               bb.note("Creating RPM package for %s" % splitname)
-                               spec_files_bottom.extend(file_list)
-                       else:
-                               bb.note("Creating EMPTY RPM Package for %s" % splitname)
-                       spec_files_bottom.append('')
-
-               del localdata
-               bb.utils.unlockfile(lf)
-       
-       add_prep(d,spec_files_bottom)
-       spec_preamble_top.append('Summary: %s' % srcsummary)
-       spec_preamble_top.append('Name: %s' % srcname)
-       spec_preamble_top.append('Version: %s' % srcversion)
-       spec_preamble_top.append('Release: %s' % srcrelease)
-       if srcepoch and srcepoch.strip() != "":
-               spec_preamble_top.append('Epoch: %s' % srcepoch)
-       spec_preamble_top.append('License: %s' % srclicense)
-       spec_preamble_top.append('Group: %s' % srcsection)
-       spec_preamble_top.append('Packager: %s' % srcmaintainer)
-       spec_preamble_top.append('URL: %s' % srchomepage)
-       source_list = get_tarballs(d)
-       tail_source(d,source_list,None)
-
-       # Replaces == Obsoletes && Provides
-       if srcrreplaces and srcrreplaces.strip() != "":
-               for dep in srcrreplaces.split(','):
-                       if srcrprovides:
-                               srcrprovides = srcrprovides + ", " + dep
-                       else:
-                               srcrprovides = dep
-                       if srcrobsoletes:
-                               srcrobsoletes = srcrobsoletes + ", " + dep
-                       else:
-                               srcrobsoletes = dep
-
-       print_deps(srcdepends,          "BuildRequires", spec_preamble_top, d)
-       print_deps(srcrdepends,         "Requires", spec_preamble_top, d)
-       # Suggests in RPM are like recommends in OE-core!
-       print_deps(srcrrecommends,      "Suggests", spec_preamble_top, d)
-       # RPM has no analog for OE suggests, so emit them as Recommends for now
-       print_deps(srcrsuggests,        "Recommends", spec_preamble_top, d)
-       print_deps(srcrprovides,        "Provides", spec_preamble_top, d)
-       print_deps(srcrobsoletes,       "Obsoletes", spec_preamble_top, d)
+    # append the name of the tarball to the keyword 'SOURCE' in xxx.spec.
+    def tail_source(d,source_list=[],patch_list=None):
+        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+            source_number = 0
+            patch_number = 0
+            for source in source_list:
+                spec_preamble_top.append('Source' + str(source_number) + ': %s' % source)
+                source_number += 1
+            if patch_list:
+                for patch in patch_list:
+                    print_deps(patch, "Patch" + str(patch_number), spec_preamble_top, d)
+                    patch_number += 1
+    # We need a simple way to remove the MLPREFIX from the package name,
+    # and dependency information...
+    def strip_multilib(name, d):
+        multilibs = d.getVar('MULTILIBS', True) or ""
+        for ext in multilibs.split():
+            eext = ext.split(':')
+            if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0:
+                name = "".join(name.split(eext[1] + '-'))
+        return name
+
+#        ml = d.getVar("MLPREFIX", True)
+#        if ml and name and len(ml) != 0 and name.find(ml) == 0:
+#            return ml.join(name.split(ml, 1)[1:])
+#        return name
+
+    # In RPM, dependencies are of the format: pkg <>= Epoch:Version-Release
+    # This format is similar to OE; however, there are restrictions on the
+    # characters that can be in a field.  In the Version field, "-"
+    # characters are not allowed.  "-" is allowed in the Release field.
+    #
+    # We translate the "-" in the version to a "+", by loading the PKGV
+    # from the dependent recipe, replacing the - with a +, and then using
+    # that value to do a replace inside of this recipe's dependencies.
+    # This preserves the "-" separator between the version and release, as
+    # well as any "-" characters inside of the release field.
+    #
+    # All of this has to happen BEFORE the mapping_rename_hook as
+    # after renaming we cannot look up the dependencies in the packagedata
+    # store.
+    def translate_vers(varname, d):
+        depends = d.getVar(varname, True)
+        if depends:
+            depends_dict = bb.utils.explode_dep_versions(depends)
+            newdeps_dict = {}
+            for dep in depends_dict:
+                ver = depends_dict[dep]
+                if dep and ver:
+                    if '-' in ver:
+                        subd = oe.packagedata.read_subpkgdata_dict(dep, d)
+                        if 'PKGV' in subd:
+                            pv = subd['PKGV']
+                            reppv = pv.replace('-', '+')
+                            ver = ver.replace(pv, reppv)
+                newdeps_dict[dep] = ver
+            depends = bb.utils.join_deps(newdeps_dict)
+            d.setVar(varname, depends.strip())
+
+    # We need to change the style of the dependency from BB to RPM
+    # This needs to happen AFTER the mapping_rename_hook
+    def print_deps(variable, tag, array, d):
+        depends = variable
+        if depends:
+            depends_dict = bb.utils.explode_dep_versions(depends)
+            for dep in depends_dict:
+                ver = depends_dict[dep]
+                if dep and ver:
+                    ver = ver.replace('(', '')
+                    ver = ver.replace(')', '')
+                    array.append("%s: %s %s" % (tag, dep, ver))
+                else:
+                    array.append("%s: %s" % (tag, dep))
+
+    def walk_files(walkpath, target, conffiles):
+        import os
+        for rootpath, dirs, files in os.walk(walkpath):
+            path = rootpath.replace(walkpath, "")
+            for dir in dirs:
+                # All packages own the directories their files are in...
+                target.append('%dir "' + path + '/' + dir + '"')
+            for file in files:
+                if conffiles.count(path + '/' + file):
+                    target.append('%config "' + path + '/' + file + '"')
+                else:
+                    target.append('"' + path + '/' + file + '"')
+
+    # Prevent the prerm/postrm scripts from being run during an upgrade
+    def wrap_uninstall(scriptvar):
+        scr = scriptvar.strip()
+        if scr.startswith("#!"):
+            pos = scr.find("\n") + 1
+        else:
+            pos = 0
+        scr = scr[:pos] + 'if [ "$1" = "0" ] ; then\n' + scr[pos:] + '\nfi'
+        return scr
+
+    packages = d.getVar('PACKAGES', True)
+    if not packages or packages == '':
+        bb.debug(1, "No packages; nothing to do")
+        return
+
+    pkgdest = d.getVar('PKGDEST', True)
+    if not pkgdest:
+        bb.fatal("No PKGDEST")
+        return
+
+    outspecfile = d.getVar('OUTSPECFILE', True)
+    if not outspecfile:
+        bb.fatal("No OUTSPECFILE")
+        return
+
+    # Construct the SPEC file...
+    srcname    = strip_multilib(d.getVar('PN', True), d)
+    srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".")
+    srcversion = d.getVar('PKGV', True).replace('-', '+')
+    srcrelease = d.getVar('PKGR', True)
+    srcepoch   = (d.getVar('PKGE', True) or "")
+    srclicense = d.getVar('LICENSE', True)
+    srcsection = d.getVar('SECTION', True)
+    srcmaintainer  = d.getVar('MAINTAINER', True)
+    srchomepage    = d.getVar('HOMEPAGE', True)
+    srcdescription = d.getVar('DESCRIPTION', True) or "."
+
+    srcdepends     = strip_multilib(d.getVar('DEPENDS', True), d)
+    srcrdepends    = []
+    srcrrecommends = []
+    srcrsuggests   = []
+    srcrprovides   = []
+    srcrreplaces   = []
+    srcrconflicts  = []
+    srcrobsoletes  = []
+
+    srcpreinst  = []
+    srcpostinst = []
+    srcprerm    = []
+    srcpostrm   = []
+
+    spec_preamble_top = []
+    spec_preamble_bottom = []
+
+    spec_scriptlets_top = []
+    spec_scriptlets_bottom = []
+
+    spec_files_top = []
+    spec_files_bottom = []
+
+    for pkg in packages.split():
+        localdata = bb.data.createCopy(d)
+
+        root = "%s/%s" % (pkgdest, pkg)
+
+        lf = bb.utils.lockfile(root + ".lock")
+
+        localdata.setVar('ROOT', '')
+        localdata.setVar('ROOT_%s' % pkg, root)
+        pkgname = localdata.getVar('PKG_%s' % pkg, True)
+        if not pkgname:
+            pkgname = pkg
+        localdata.setVar('PKG', pkgname)
+
+        localdata.setVar('OVERRIDES', pkg)
+
+        bb.data.update_data(localdata)
+
+        conffiles = (localdata.getVar('CONFFILES', True) or "").split()
+
+        splitname    = strip_multilib(pkgname, d)
+
+        splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".")
+        splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+')
+        splitrelease = (localdata.getVar('PKGR', True) or "")
+        splitepoch   = (localdata.getVar('PKGE', True) or "")
+        splitlicense = (localdata.getVar('LICENSE', True) or "")
+        splitsection = (localdata.getVar('SECTION', True) or "")
+        splitdescription = (localdata.getVar('DESCRIPTION', True) or ".")
+
+        translate_vers('RDEPENDS', localdata)
+        translate_vers('RRECOMMENDS', localdata)
+        translate_vers('RSUGGESTS', localdata)
+        translate_vers('RPROVIDES', localdata)
+        translate_vers('RREPLACES', localdata)
+        translate_vers('RCONFLICTS', localdata)
+
+        # Map the dependencies into their final form
+        mapping_rename_hook(localdata)
+
+        splitrdepends    = strip_multilib(localdata.getVar('RDEPENDS', True), d) or ""
+        splitrrecommends = strip_multilib(localdata.getVar('RRECOMMENDS', True), d) or ""
+        splitrsuggests   = strip_multilib(localdata.getVar('RSUGGESTS', True), d) or ""
+        splitrprovides   = strip_multilib(localdata.getVar('RPROVIDES', True), d) or ""
+        splitrreplaces   = strip_multilib(localdata.getVar('RREPLACES', True), d) or ""
+        splitrconflicts  = strip_multilib(localdata.getVar('RCONFLICTS', True), d) or ""
+        splitrobsoletes  = []
+
+        # Gather special src/first package data
+        if srcname == splitname:
+            srcrdepends    = splitrdepends
+            srcrrecommends = splitrrecommends
+            srcrsuggests   = splitrsuggests
+            srcrprovides   = splitrprovides
+            srcrreplaces   = splitrreplaces
+            srcrconflicts  = splitrconflicts
+
+            srcpreinst  = localdata.getVar('pkg_preinst', True)
+            srcpostinst = localdata.getVar('pkg_postinst', True)
+            srcprerm    = localdata.getVar('pkg_prerm', True)
+            srcpostrm   = localdata.getVar('pkg_postrm', True)
+
+            file_list = []
+            walk_files(root, file_list, conffiles)
+            if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
+                bb.note("Not creating empty RPM package for %s" % splitname)
+            else:
+                bb.note("Creating RPM package for %s" % splitname)
+                spec_files_top.append('%files')
+                spec_files_top.append('%defattr(-,-,-,-)')
+                if file_list:
+                    bb.note("Creating RPM package for %s" % splitname)
+                    spec_files_top.extend(file_list)
+                else:
+                    bb.note("Creating EMPTY RPM Package for %s" % splitname)
+                spec_files_top.append('')
+
+            bb.utils.unlockfile(lf)
+            continue
+
+        # Process subpackage data
+        spec_preamble_bottom.append('%%package -n %s' % splitname)
+        spec_preamble_bottom.append('Summary: %s' % splitsummary)
+        if srcversion != splitversion:
+            spec_preamble_bottom.append('Version: %s' % splitversion)
+        if srcrelease != splitrelease:
+            spec_preamble_bottom.append('Release: %s' % splitrelease)
+        if srcepoch != splitepoch:
+            spec_preamble_bottom.append('Epoch: %s' % splitepoch)
+        if srclicense != splitlicense:
+            spec_preamble_bottom.append('License: %s' % splitlicense)
+        spec_preamble_bottom.append('Group: %s' % splitsection)
+
+        # Replaces == Obsoletes && Provides
+        if splitrreplaces and splitrreplaces.strip() != "":
+            for dep in splitrreplaces.split(','):
+                if splitrprovides:
+                    splitrprovides = splitrprovides + ", " + dep
+                else:
+                    splitrprovides = dep
+                if splitrobsoletes:
+                    splitrobsoletes = splitrobsoletes + ", " + dep
+                else:
+                    splitrobsoletes = dep
+
+        print_deps(splitrdepends, "Requires", spec_preamble_bottom, d)
+        # Suggests in RPM are like recommends in OE-core!
+        print_deps(splitrrecommends, "Suggests", spec_preamble_bottom, d)
+        # RPM has no analog for OE suggests, so emit them as Recommends for now
+        print_deps(splitrsuggests,  "Recommends", spec_preamble_bottom, d)
+        print_deps(splitrprovides,  "Provides", spec_preamble_bottom, d)
+        print_deps(splitrobsoletes, "Obsoletes", spec_preamble_bottom, d)
+
+        # conflicts cannot be in a provide!  We will need to filter them.
+        if splitrconflicts:
+            depends_dict = bb.utils.explode_dep_versions(splitrconflicts)
+            newdeps_dict = {}
+            for dep in depends_dict:
+                if dep not in splitrprovides:
+                    newdeps_dict[dep] = depends_dict[dep]
+            if newdeps_dict:
+                splitrconflicts = bb.utils.join_deps(newdeps_dict)
+            else:
+                splitrconflicts = ""
+
+        print_deps(splitrconflicts,  "Conflicts", spec_preamble_bottom, d)
+
+        spec_preamble_bottom.append('')
+
+        spec_preamble_bottom.append('%%description -n %s' % splitname)
+        dedent_text = textwrap.dedent(splitdescription).strip()
+        spec_preamble_bottom.append('%s' % textwrap.fill(dedent_text, width=75))
+
+        spec_preamble_bottom.append('')
+
+        # Now process scriptlets
+        for script in ["preinst", "postinst", "prerm", "postrm"]:
+            scriptvar = localdata.getVar('pkg_%s' % script, True)
+            if not scriptvar:
+                continue
+            if script == 'preinst':
+                spec_scriptlets_bottom.append('%%pre -n %s' % splitname)
+            elif script == 'postinst':
+                spec_scriptlets_bottom.append('%%post -n %s' % splitname)
+            elif script == 'prerm':
+                spec_scriptlets_bottom.append('%%preun -n %s' % splitname)
+                scriptvar = wrap_uninstall(scriptvar)
+            elif script == 'postrm':
+                spec_scriptlets_bottom.append('%%postun -n %s' % splitname)
+                scriptvar = wrap_uninstall(scriptvar)
+            spec_scriptlets_bottom.append('# %s - %s' % (splitname, script))
+            spec_scriptlets_bottom.append(scriptvar)
+            spec_scriptlets_bottom.append('')
+
+        # Now process files
+        file_list = []
+        walk_files(root, file_list, conffiles)
+        if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
+            bb.note("Not creating empty RPM package for %s" % splitname)
+        else:
+            spec_files_bottom.append('%%files -n %s' % splitname)
+            spec_files_bottom.append('%defattr(-,-,-,-)')
+            if file_list:
+                bb.note("Creating RPM package for %s" % splitname)
+                spec_files_bottom.extend(file_list)
+            else:
+                bb.note("Creating EMPTY RPM Package for %s" % splitname)
+            spec_files_bottom.append('')
+
+        del localdata
+        bb.utils.unlockfile(lf)
     
-       # conflicts cannot be in a provide!  We will need to filter them.
-       if srcrconflicts:
-               depends_dict = bb.utils.explode_dep_versions(srcrconflicts)
-               newdeps_dict = {}
-               for dep in depends_dict:
-                       if dep not in srcrprovides:
-                               newdeps_dict[dep] = depends_dict[dep]
-               if newdeps_dict:
-                       srcrconflicts = bb.utils.join_deps(newdeps_dict)
-               else:
-                       srcrconflicts = ""
-
-       print_deps(srcrconflicts,       "Conflicts", spec_preamble_top, d)
-
-       spec_preamble_top.append('')
-
-       spec_preamble_top.append('%description')
-       dedent_text = textwrap.dedent(srcdescription).strip()
-       spec_preamble_top.append('%s' % textwrap.fill(dedent_text, width=75))
-
-       spec_preamble_top.append('')
-
-       if srcpreinst:
-               spec_scriptlets_top.append('%pre')
-               spec_scriptlets_top.append('# %s - preinst' % srcname)
-               spec_scriptlets_top.append(srcpreinst)
-               spec_scriptlets_top.append('')
-       if srcpostinst:
-               spec_scriptlets_top.append('%post')
-               spec_scriptlets_top.append('# %s - postinst' % srcname)
-               spec_scriptlets_top.append(srcpostinst)
-               spec_scriptlets_top.append('')
-       if srcprerm:
-               spec_scriptlets_top.append('%preun')
-               spec_scriptlets_top.append('# %s - prerm' % srcname)
-               scriptvar = wrap_uninstall(srcprerm)
-               spec_scriptlets_top.append(scriptvar)
-               spec_scriptlets_top.append('')
-       if srcpostrm:
-               spec_scriptlets_top.append('%postun')
-               spec_scriptlets_top.append('# %s - postrm' % srcname)
-               scriptvar = wrap_uninstall(srcpostrm)
-               spec_scriptlets_top.append(scriptvar)
-               spec_scriptlets_top.append('')
-
-       # Write the SPEC file
-       try:
-               from __builtin__ import file
-               specfile = file(outspecfile, 'w')
-       except OSError:
-               raise bb.build.FuncFailed("unable to open spec file for writing.")
-
-       # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top
-       # of the generated spec file
-       external_preamble = d.getVar("RPMSPEC_PREAMBLE", True)
-       if external_preamble:
-               specfile.write(external_preamble + "\n")
-
-       for line in spec_preamble_top:
-               specfile.write(line + "\n")
-
-       for line in spec_preamble_bottom:
-               specfile.write(line + "\n")
-
-       for line in spec_scriptlets_top:
-               specfile.write(line + "\n")
-
-       for line in spec_scriptlets_bottom:
-               specfile.write(line + "\n")
-
-       for line in spec_files_top:
-               specfile.write(line + "\n")
-
-       for line in spec_files_bottom:
-               specfile.write(line + "\n")
-
-       specfile.close()
+    add_prep(d,spec_files_bottom)
+    spec_preamble_top.append('Summary: %s' % srcsummary)
+    spec_preamble_top.append('Name: %s' % srcname)
+    spec_preamble_top.append('Version: %s' % srcversion)
+    spec_preamble_top.append('Release: %s' % srcrelease)
+    if srcepoch and srcepoch.strip() != "":
+        spec_preamble_top.append('Epoch: %s' % srcepoch)
+    spec_preamble_top.append('License: %s' % srclicense)
+    spec_preamble_top.append('Group: %s' % srcsection)
+    spec_preamble_top.append('Packager: %s' % srcmaintainer)
+    spec_preamble_top.append('URL: %s' % srchomepage)
+    source_list = get_tarballs(d)
+    tail_source(d,source_list,None)
+
+    # Replaces == Obsoletes && Provides
+    if srcrreplaces and srcrreplaces.strip() != "":
+        for dep in srcrreplaces.split(','):
+            if srcrprovides:
+                srcrprovides = srcrprovides + ", " + dep
+            else:
+                srcrprovides = dep
+            if srcrobsoletes:
+                srcrobsoletes = srcrobsoletes + ", " + dep
+            else:
+                srcrobsoletes = dep
+
+    print_deps(srcdepends, "BuildRequires", spec_preamble_top, d)
+    print_deps(srcrdepends, "Requires", spec_preamble_top, d)
+    # Suggests in RPM are like recommends in OE-core!
+    print_deps(srcrrecommends, "Suggests", spec_preamble_top, d)
+    # RPM has no analog for OE suggests, so emit them as Recommends for now
+    print_deps(srcrsuggests, "Recommends", spec_preamble_top, d)
+    print_deps(srcrprovides, "Provides", spec_preamble_top, d)
+    print_deps(srcrobsoletes, "Obsoletes", spec_preamble_top, d)
+    
+    # conflicts cannot be in a provide!  We will need to filter them.
+    if srcrconflicts:
+        depends_dict = bb.utils.explode_dep_versions(srcrconflicts)
+        newdeps_dict = {}
+        for dep in depends_dict:
+            if dep not in srcrprovides:
+                newdeps_dict[dep] = depends_dict[dep]
+        if newdeps_dict:
+            srcrconflicts = bb.utils.join_deps(newdeps_dict)
+        else:
+            srcrconflicts = ""
+
+    print_deps(srcrconflicts, "Conflicts", spec_preamble_top, d)
+
+    spec_preamble_top.append('')
+
+    spec_preamble_top.append('%description')
+    dedent_text = textwrap.dedent(srcdescription).strip()
+    spec_preamble_top.append('%s' % textwrap.fill(dedent_text, width=75))
+
+    spec_preamble_top.append('')
+
+    if srcpreinst:
+        spec_scriptlets_top.append('%pre')
+        spec_scriptlets_top.append('# %s - preinst' % srcname)
+        spec_scriptlets_top.append(srcpreinst)
+        spec_scriptlets_top.append('')
+    if srcpostinst:
+        spec_scriptlets_top.append('%post')
+        spec_scriptlets_top.append('# %s - postinst' % srcname)
+        spec_scriptlets_top.append(srcpostinst)
+        spec_scriptlets_top.append('')
+    if srcprerm:
+        spec_scriptlets_top.append('%preun')
+        spec_scriptlets_top.append('# %s - prerm' % srcname)
+        scriptvar = wrap_uninstall(srcprerm)
+        spec_scriptlets_top.append(scriptvar)
+        spec_scriptlets_top.append('')
+    if srcpostrm:
+        spec_scriptlets_top.append('%postun')
+        spec_scriptlets_top.append('# %s - postrm' % srcname)
+        scriptvar = wrap_uninstall(srcpostrm)
+        spec_scriptlets_top.append(scriptvar)
+        spec_scriptlets_top.append('')
+
+    # Write the SPEC file
+    try:
+        from __builtin__ import file
+        specfile = file(outspecfile, 'w')
+    except OSError:
+        raise bb.build.FuncFailed("unable to open spec file for writing.")
+
+    # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top
+    # of the generated spec file
+    external_preamble = d.getVar("RPMSPEC_PREAMBLE", True)
+    if external_preamble:
+        specfile.write(external_preamble + "\n")
+
+    for line in spec_preamble_top:
+        specfile.write(line + "\n")
+
+    for line in spec_preamble_bottom:
+        specfile.write(line + "\n")
+
+    for line in spec_scriptlets_top:
+        specfile.write(line + "\n")
+
+    for line in spec_scriptlets_bottom:
+        specfile.write(line + "\n")
+
+    for line in spec_files_top:
+        specfile.write(line + "\n")
+
+    for line in spec_files_bottom:
+        specfile.write(line + "\n")
+
+    specfile.close()
 }
 
 python do_package_rpm () {
-       import os
-       
-       def creat_srpm_dir(d):
-               if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-                       clean_licenses = get_licenses(d)
-                       pkgwritesrpmdir = bb.data.expand('${PKGWRITEDIRSRPM}/${PACKAGE_ARCH_EXTEND}', d)
-                       pkgwritesrpmdir = pkgwritesrpmdir + '/' + clean_licenses
-                       bb.mkdirhier(pkgwritesrpmdir)
-                       os.chmod(pkgwritesrpmdir, 0755)
-                       return pkgwritesrpmdir
+    import os
+    
+    def creat_srpm_dir(d):
+        if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+            clean_licenses = get_licenses(d)
+            pkgwritesrpmdir = bb.data.expand('${PKGWRITEDIRSRPM}/${PACKAGE_ARCH_EXTEND}', d)
+            pkgwritesrpmdir = pkgwritesrpmdir + '/' + clean_licenses
+            bb.mkdirhier(pkgwritesrpmdir)
+            os.chmod(pkgwritesrpmdir, 0755)
+            return pkgwritesrpmdir
             
-       # We need a simple way to remove the MLPREFIX from the package name,
-       # and dependency information...
-       def strip_multilib(name, d):
-               ml = d.getVar("MLPREFIX", True)
-               if ml and name and len(ml) != 0 and name.find(ml) >= 0:
-                       return "".join(name.split(ml))
-               return name
-
-       workdir = d.getVar('WORKDIR', True)
-       outdir = d.getVar('DEPLOY_DIR_IPK', True)
-       tmpdir = d.getVar('TMPDIR', True)
-       pkgd = d.getVar('PKGD', True)
-       pkgdest = d.getVar('PKGDEST', True)
-       if not workdir or not outdir or not pkgd or not tmpdir:
-               bb.error("Variables incorrectly set, unable to package")
-               return
-
-       packages = d.getVar('PACKAGES', True)
-       if not packages or packages == '':
-               bb.debug(1, "No packages; nothing to do")
-               return
-
-       # Construct the spec file...
-       srcname    = strip_multilib(d.getVar('PN', True), d)
-       outspecfile = workdir + "/" + srcname + ".spec"
-       d.setVar('OUTSPECFILE', outspecfile)
-       bb.build.exec_func('write_specfile', d)
-
-       # Construct per-file dependencies file
-       def dump_filerdeps(varname, outfile, d):
-               outfile.write("#!/usr/bin/env python\n\n")
-               outfile.write("# Dependency table\n")
-               outfile.write('deps = {\n')
-               for pkg in packages.split():
-                       dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
-                       dependsflist = (d.getVar(dependsflist_key, True) or "")
-                       for dfile in dependsflist.split():
-                               key = "FILE" + varname + "_" + dfile + "_" + pkg
-                               depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "")
-                               file = dfile.replace("@underscore@", "_")
-                               file = file.replace("@closebrace@", "]")
-                               file = file.replace("@openbrace@", "[")
-                               file = file.replace("@tab@", "\t")
-                               file = file.replace("@space@", " ")
-                               file = file.replace("@at@", "@")
-                               outfile.write('"' + pkgd + file + '" : "')
-                               for dep in depends_dict:
-                                       ver = depends_dict[dep]
-                                       if dep and ver:
-                                               ver = ver.replace("(","")
-                                               ver = ver.replace(")","")
-                                               outfile.write(dep + " " + ver + " ")
-                                       else:
-                                               outfile.write(dep + " ")
-                               outfile.write('",\n')
-               outfile.write('}\n\n')
-               outfile.write("import sys\n")
-               outfile.write("while 1:\n")
-               outfile.write("\tline = sys.stdin.readline().strip()\n")
-               outfile.write("\tif not line:\n")
-               outfile.write("\t\tsys.exit(0)\n")
-               outfile.write("\tif line in deps:\n")
-               outfile.write("\t\tprint(deps[line] + '\\n')\n")
-
-       # OE-core dependencies a.k.a. RPM requires
-       outdepends = workdir + "/" + srcname + ".requires"
-
-       try:
-               from __builtin__ import file
-               dependsfile = file(outdepends, 'w')
-       except OSError:
-               raise bb.build.FuncFailed("unable to open requires file for writing.")
-
-       dump_filerdeps('RDEPENDS', dependsfile, d)
-
-       dependsfile.close()
-       os.chmod(outdepends, 0755)
-
-       # OE-core / RPM Provides
-       outprovides = workdir + "/" + srcname + ".provides"
-
-       try:
-               from __builtin__ import file
-               providesfile = file(outprovides, 'w')
-       except OSError:
-               raise bb.build.FuncFailed("unable to open provides file for writing.")
-
-       dump_filerdeps('RPROVIDES', providesfile, d)
-
-       providesfile.close()
-       os.chmod(outprovides, 0755)
-
-       # Setup the rpmbuild arguments...
-       rpmbuild = d.getVar('RPMBUILD', True)
-       targetsys = d.getVar('TARGET_SYS', True)
-       targetvendor = d.getVar('TARGET_VENDOR', True)
-       package_arch = d.getVar('PACKAGE_ARCH', True) or ""
-       if package_arch not in "all any noarch".split():
-               ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_")
-               d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
-       else:
-               d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
-       pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
-       pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}')
-       magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
-       bb.mkdirhier(pkgwritedir)
-       os.chmod(pkgwritedir, 0755)
-
-       cmd = rpmbuild
-       cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
-       cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
-       cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
-       cmd = cmd + " --define '_use_internal_dependency_generator 0'"
-       cmd = cmd + " --define '__find_requires " + outdepends + "'"
-       cmd = cmd + " --define '__find_provides " + outprovides + "'"
-       cmd = cmd + " --define '_unpackaged_files_terminate_build 0'"
-       cmd = cmd + " --define 'debug_package %{nil}'"
-       cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
-       cmd = cmd + " --define '_tmppath " + workdir + "'"
-       if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-               cmdsrpm = cmd + " --define '_sourcedir " + workdir + "' --define '_srcrpmdir " + creat_srpm_dir(d) + "'"
-               cmdsrpm = 'fakeroot ' + cmdsrpm + " -bs " + outspecfile
-       cmd = cmd + " -bb " + outspecfile
+    # We need a simple way to remove the MLPREFIX from the package name,
+    # and dependency information...
+    def strip_multilib(name, d):
+        ml = d.getVar("MLPREFIX", True)
+        if ml and name and len(ml) != 0 and name.find(ml) >= 0:
+            return "".join(name.split(ml))
+        return name
+
+    workdir = d.getVar('WORKDIR', True)
+    outdir = d.getVar('DEPLOY_DIR_IPK', True)
+    tmpdir = d.getVar('TMPDIR', True)
+    pkgd = d.getVar('PKGD', True)
+    pkgdest = d.getVar('PKGDEST', True)
+    if not workdir or not outdir or not pkgd or not tmpdir:
+        bb.error("Variables incorrectly set, unable to package")
+        return
+
+    packages = d.getVar('PACKAGES', True)
+    if not packages or packages == '':
+        bb.debug(1, "No packages; nothing to do")
+        return
+
+    # Construct the spec file...
+    srcname    = strip_multilib(d.getVar('PN', True), d)
+    outspecfile = workdir + "/" + srcname + ".spec"
+    d.setVar('OUTSPECFILE', outspecfile)
+    bb.build.exec_func('write_specfile', d)
+
+    # Construct per-file dependencies file
+    def dump_filerdeps(varname, outfile, d):
+        outfile.write("#!/usr/bin/env python\n\n")
+        outfile.write("# Dependency table\n")
+        outfile.write('deps = {\n')
+        for pkg in packages.split():
+            dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
+            dependsflist = (d.getVar(dependsflist_key, True) or "")
+            for dfile in dependsflist.split():
+                key = "FILE" + varname + "_" + dfile + "_" + pkg
+                depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "")
+                file = dfile.replace("@underscore@", "_")
+                file = file.replace("@closebrace@", "]")
+                file = file.replace("@openbrace@", "[")
+                file = file.replace("@tab@", "\t")
+                file = file.replace("@space@", " ")
+                file = file.replace("@at@", "@")
+                outfile.write('"' + pkgd + file + '" : "')
+                for dep in depends_dict:
+                    ver = depends_dict[dep]
+                    if dep and ver:
+                        ver = ver.replace("(","")
+                        ver = ver.replace(")","")
+                        outfile.write(dep + " " + ver + " ")
+                    else:
+                        outfile.write(dep + " ")
+                outfile.write('",\n')
+        outfile.write('}\n\n')
+        outfile.write("import sys\n")
+        outfile.write("while 1:\n")
+        outfile.write("\tline = sys.stdin.readline().strip()\n")
+        outfile.write("\tif not line:\n")
+        outfile.write("\t\tsys.exit(0)\n")
+        outfile.write("\tif line in deps:\n")
+        outfile.write("\t\tprint(deps[line] + '\\n')\n")
+
+    # OE-core dependencies a.k.a. RPM requires
+    outdepends = workdir + "/" + srcname + ".requires"
+
+    try:
+        from __builtin__ import file
+        dependsfile = file(outdepends, 'w')
+    except OSError:
+        raise bb.build.FuncFailed("unable to open requires file for writing.")
+
+    dump_filerdeps('RDEPENDS', dependsfile, d)
+
+    dependsfile.close()
+    os.chmod(outdepends, 0755)
+
+    # OE-core / RPM Provides
+    outprovides = workdir + "/" + srcname + ".provides"
+
+    try:
+        from __builtin__ import file
+        providesfile = file(outprovides, 'w')
+    except OSError:
+        raise bb.build.FuncFailed("unable to open provides file for writing.")
+
+    dump_filerdeps('RPROVIDES', providesfile, d)
+
+    providesfile.close()
+    os.chmod(outprovides, 0755)
+
+    # Setup the rpmbuild arguments...
+    rpmbuild = d.getVar('RPMBUILD', True)
+    targetsys = d.getVar('TARGET_SYS', True)
+    targetvendor = d.getVar('TARGET_VENDOR', True)
+    package_arch = d.getVar('PACKAGE_ARCH', True) or ""
+    if package_arch not in "all any noarch".split():
+        ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_")
+        d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
+    else:
+        d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
+    pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
+    pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}')
+    magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
+    bb.mkdirhier(pkgwritedir)
+    os.chmod(pkgwritedir, 0755)
+
+    cmd = rpmbuild
+    cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
+    cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
+    cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
+    cmd = cmd + " --define '_use_internal_dependency_generator 0'"
+    cmd = cmd + " --define '__find_requires " + outdepends + "'"
+    cmd = cmd + " --define '__find_provides " + outprovides + "'"
+    cmd = cmd + " --define '_unpackaged_files_terminate_build 0'"
+    cmd = cmd + " --define 'debug_package %{nil}'"
+    cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
+    cmd = cmd + " --define '_tmppath " + workdir + "'"
+    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+        cmdsrpm = cmd + " --define '_sourcedir " + workdir + "' --define '_srcrpmdir " + creat_srpm_dir(d) + "'"
+        cmdsrpm = 'fakeroot ' + cmdsrpm + " -bs " + outspecfile
+    cmd = cmd + " -bb " + outspecfile
 
     # Build the source rpm package !
-       if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
-               d.setVar('SBUILDSPEC', cmdsrpm + "\n")
-               d.setVarFlag('SBUILDSPEC', 'func', '1')
-               bb.build.exec_func('SBUILDSPEC', d)
+    if d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True) and d.getVar('SOURCE_ARCHIVE_PACKAGE_TYPE', True).upper() == 'SRPM':
+        d.setVar('SBUILDSPEC', cmdsrpm + "\n")
+        d.setVarFlag('SBUILDSPEC', 'func', '1')
+        bb.build.exec_func('SBUILDSPEC', d)
 
 
-       # Build the rpm package!
-       d.setVar('BUILDSPEC', cmd + "\n")
-       d.setVarFlag('BUILDSPEC', 'func', '1')
-       bb.build.exec_func('BUILDSPEC', d)
+    # Build the rpm package!
+    d.setVar('BUILDSPEC', cmd + "\n")
+    d.setVarFlag('BUILDSPEC', 'func', '1')
+    bb.build.exec_func('BUILDSPEC', d)
 }
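Note: the BUILDSPEC/SBUILDSPEC handling above stages the assembled rpmbuild
command line in a datastore variable, flags it as a function, and hands it to
the normal task executor. A minimal sketch of the pattern (RUNCMD is a
hypothetical variable name):

    def run_cmd_as_func(d, cmd):
        # stage the shell command as the body of a generated function
        d.setVar('RUNCMD', cmd + "\n")
        # mark it executable so exec_func treats it as a shell function
        d.setVarFlag('RUNCMD', 'func', '1')
        # run it with the usual task logging
        bb.build.exec_func('RUNCMD', d)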
 
 python () {
@@ -1161,13 +1161,13 @@ do_package_write_rpm[sstate-outputdirs] = "${DEPLOY_DIR_RPM}"
 do_package_write_rpm[sstate-lockfile-shared] += "${DEPLOY_DIR_RPM}/rpm.lock"
 
 python do_package_write_rpm_setscene () {
-       sstate_setscene(d)
+    sstate_setscene(d)
 }
 addtask do_package_write_rpm_setscene
 
 python do_package_write_rpm () {
-       bb.build.exec_func("read_subpackage_metadata", d)
-       bb.build.exec_func("do_package_rpm", d)
+    bb.build.exec_func("read_subpackage_metadata", d)
+    bb.build.exec_func("do_package_rpm", d)
 }
 
 do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}"
index 60f1aded0d19977163113a763e7dae7c1752ead1..790d874c1cb105ba8bf46e7e609318c01a879c13 100644 (file)
@@ -1,13 +1,13 @@
 python read_subpackage_metadata () {
-       import oe.packagedata
+    import oe.packagedata
 
-       data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)
+    data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)
 
-       for key in data.keys():
-               d.setVar(key, data[key])
+    for key in data.keys():
+        d.setVar(key, data[key])
 
-       for pkg in d.getVar('PACKAGES', True).split():
-               sdata = oe.packagedata.read_subpkgdata(pkg, d)
-               for key in sdata.keys():
-                       d.setVar(key, sdata[key])
+    for pkg in d.getVar('PACKAGES', True).split():
+        sdata = oe.packagedata.read_subpkgdata(pkg, d)
+        for key in sdata.keys():
+            d.setVar(key, sdata[key])
 }
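Note: read_subpackage_metadata() rehydrates the pkgdata written at packaging
time straight back into the datastore. The same helpers can be used for
ad-hoc inspection; a sketch, with the package name 'foo' purely hypothetical:

    import oe.packagedata
    sdata = oe.packagedata.read_subpkgdata('foo', d)
    for key in sdata.keys():
        bb.note("%s=%s" % (key, sdata[key]))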
index 3c4d99783389f2be300fba4cc4616330cc837c75..a724972821a3ca6a804ecbee933041b4ca7b7932 100644 (file)
@@ -8,164 +8,164 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot"
 inherit terminal
 
 def src_patches(d, all = False ):
-       workdir = d.getVar('WORKDIR', True)
-       fetch = bb.fetch2.Fetch([], d)
-       patches = []
-       sources = []
-       for url in fetch.urls:
-               local = patch_path(url, fetch, workdir)
-               if not local:
-                       if all:
-                               local = fetch.localpath(url)
-                               sources.append(local)
-                       continue
-
-               urldata = fetch.ud[url]
-               parm = urldata.parm
-               patchname = parm.get('pname') or os.path.basename(local)
-
-               apply, reason = should_apply(parm, d)
-               if not apply:
-                       if reason:
-                               bb.note("Patch %s %s" % (patchname, reason))
-                       continue
-
-               patchparm = {'patchname': patchname}
-               if "striplevel" in parm:
-                       striplevel = parm["striplevel"]
-               elif "pnum" in parm:
-                       #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url)
-                       striplevel = parm["pnum"]
-               else:
-                       striplevel = '1'
-               patchparm['striplevel'] = striplevel
-
-               patchdir = parm.get('patchdir')
-               if patchdir:
-                       patchparm['patchdir'] = patchdir
-
-               localurl = bb.encodeurl(('file', '', local, '', '', patchparm))
-               patches.append(localurl)
-
-       if all:
-               return sources
-
-       return patches
+    workdir = d.getVar('WORKDIR', True)
+    fetch = bb.fetch2.Fetch([], d)
+    patches = []
+    sources = []
+    for url in fetch.urls:
+        local = patch_path(url, fetch, workdir)
+        if not local:
+            if all:
+                local = fetch.localpath(url)
+                sources.append(local)
+            continue
+
+        urldata = fetch.ud[url]
+        parm = urldata.parm
+        patchname = parm.get('pname') or os.path.basename(local)
+
+        apply, reason = should_apply(parm, d)
+        if not apply:
+            if reason:
+                bb.note("Patch %s %s" % (patchname, reason))
+            continue
+
+        patchparm = {'patchname': patchname}
+        if "striplevel" in parm:
+            striplevel = parm["striplevel"]
+        elif "pnum" in parm:
+            #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url)
+            striplevel = parm["pnum"]
+        else:
+            striplevel = '1'
+        patchparm['striplevel'] = striplevel
+
+        patchdir = parm.get('patchdir')
+        if patchdir:
+            patchparm['patchdir'] = patchdir
+
+        localurl = bb.encodeurl(('file', '', local, '', '', patchparm))
+        patches.append(localurl)
+
+    if all:
+        return sources
+
+    return patches
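Note: src_patches() is driven entirely by SRC_URI parameters -- pname,
striplevel (or the deprecated pnum), patchdir, apply, plus the date/revision
gates handled by should_apply() below. A hypothetical recipe fragment
exercising them (file names invented):

    SRC_URI += "file://fix-build.patch;striplevel=2;patchdir=src \
                file://old-fix.patch;apply=no \
                file://vendor.patch;pname=vendor-tweaks.patch"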
 
 def patch_path(url, fetch, workdir):
-       """Return the local path of a patch, or None if this isn't a patch"""
+    """Return the local path of a patch, or None if this isn't a patch"""
 
-       local = fetch.localpath(url)
-       base, ext = os.path.splitext(os.path.basename(local))
-       if ext in ('.gz', '.bz2', '.Z'):
-               local = os.path.join(workdir, base)
-               ext = os.path.splitext(base)[1]
+    local = fetch.localpath(url)
+    base, ext = os.path.splitext(os.path.basename(local))
+    if ext in ('.gz', '.bz2', '.Z'):
+        local = os.path.join(workdir, base)
+        ext = os.path.splitext(base)[1]
 
-       urldata = fetch.ud[url]
-       if "apply" in urldata.parm:
-               apply = oe.types.boolean(urldata.parm["apply"])
-               if not apply:
-                       return
-       elif ext not in (".diff", ".patch"):
-               return
+    urldata = fetch.ud[url]
+    if "apply" in urldata.parm:
+        apply = oe.types.boolean(urldata.parm["apply"])
+        if not apply:
+            return
+    elif ext not in (".diff", ".patch"):
+        return
 
-       return local
+    return local
 
 def should_apply(parm, d):
-       """Determine if we should apply the given patch"""
+    """Determine if we should apply the given patch"""
 
-       if "mindate" in parm or "maxdate" in parm:
-               pn = d.getVar('PN', True)
-               srcdate = d.getVar('SRCDATE_%s' % pn, True)
-               if not srcdate:
-                       srcdate = d.getVar('SRCDATE', True)
+    if "mindate" in parm or "maxdate" in parm:
+        pn = d.getVar('PN', True)
+        srcdate = d.getVar('SRCDATE_%s' % pn, True)
+        if not srcdate:
+            srcdate = d.getVar('SRCDATE', True)
 
-               if srcdate == "now":
-                       srcdate = d.getVar('DATE', True)
+        if srcdate == "now":
+            srcdate = d.getVar('DATE', True)
 
-               if "maxdate" in parm and parm["maxdate"] < srcdate:
-                       return False, 'is outdated'
+        if "maxdate" in parm and parm["maxdate"] < srcdate:
+            return False, 'is outdated'
 
-               if "mindate" in parm and parm["mindate"] > srcdate:
-                       return False, 'is predated'
+        if "mindate" in parm and parm["mindate"] > srcdate:
+            return False, 'is predated'
 
 
-       if "minrev" in parm:
-               srcrev = d.getVar('SRCREV', True)
-               if srcrev and srcrev < parm["minrev"]:
-                       return False, 'applies to later revisions'
+    if "minrev" in parm:
+        srcrev = d.getVar('SRCREV', True)
+        if srcrev and srcrev < parm["minrev"]:
+            return False, 'applies to later revisions'
 
-       if "maxrev" in parm:
-               srcrev = d.getVar('SRCREV', True)
-               if srcrev and srcrev > parm["maxrev"]:
-                       return False, 'applies to earlier revisions'
+    if "maxrev" in parm:
+        srcrev = d.getVar('SRCREV', True)
+        if srcrev and srcrev > parm["maxrev"]:
+            return False, 'applies to earlier revisions'
 
-       if "rev" in parm:
-               srcrev = d.getVar('SRCREV', True)
-               if srcrev and parm["rev"] not in srcrev:
-                       return False, "doesn't apply to revision"
+    if "rev" in parm:
+        srcrev = d.getVar('SRCREV', True)
+        if srcrev and parm["rev"] not in srcrev:
+            return False, "doesn't apply to revision"
 
-       if "notrev" in parm:
-               srcrev = d.getVar('SRCREV', True)
-               if srcrev and parm["notrev"] in srcrev:
-                       return False, "doesn't apply to revision"
+    if "notrev" in parm:
+        srcrev = d.getVar('SRCREV', True)
+        if srcrev and parm["notrev"] in srcrev:
+            return False, "doesn't apply to revision"
 
-       return True, None
+    return True, None
 
 should_apply[vardepsexclude] = "DATE SRCDATE"
 
 python patch_do_patch() {
-       import oe.patch
-
-       patchsetmap = {
-               "patch": oe.patch.PatchTree,
-               "quilt": oe.patch.QuiltTree,
-               "git": oe.patch.GitApplyTree,
-       }
-
-       cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
-
-       resolvermap = {
-               "noop": oe.patch.NOOPResolver,
-               "user": oe.patch.UserResolver,
-       }
-
-       rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
-
-       classes = {}
-
-       s = d.getVar('S', True)
-
-       path = os.getenv('PATH')
-       os.putenv('PATH', d.getVar('PATH', True))
-
-       for patch in src_patches(d):
-               _, _, local, _, _, parm = bb.decodeurl(patch)
-
-               if "patchdir" in parm:
-                       patchdir = parm["patchdir"]
-                       if not os.path.isabs(patchdir):
-                               patchdir = os.path.join(s, patchdir)
-               else:
-                       patchdir = s
-
-               if not patchdir in classes:
-                       patchset = cls(patchdir, d)
-                       resolver = rcls(patchset, oe_terminal)
-                       classes[patchdir] = (patchset, resolver)
-                       patchset.Clean()
-               else:
-                       patchset, resolver = classes[patchdir]
-
-               bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d)))
-               try:
-                       patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
-               except Exception as exc:
-                       bb.fatal(str(exc))
-               try:
-                       resolver.Resolve()
-               except bb.BBHandledException as e:
-                       bb.fatal(str(e))
+    import oe.patch
+
+    patchsetmap = {
+        "patch": oe.patch.PatchTree,
+        "quilt": oe.patch.QuiltTree,
+        "git": oe.patch.GitApplyTree,
+    }
+
+    cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
+
+    resolvermap = {
+        "noop": oe.patch.NOOPResolver,
+        "user": oe.patch.UserResolver,
+    }
+
+    rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
+
+    classes = {}
+
+    s = d.getVar('S', True)
+
+    path = os.getenv('PATH')
+    os.putenv('PATH', d.getVar('PATH', True))
+
+    for patch in src_patches(d):
+        _, _, local, _, _, parm = bb.decodeurl(patch)
+
+        if "patchdir" in parm:
+            patchdir = parm["patchdir"]
+            if not os.path.isabs(patchdir):
+                patchdir = os.path.join(s, patchdir)
+        else:
+            patchdir = s
+
+        if not patchdir in classes:
+            patchset = cls(patchdir, d)
+            resolver = rcls(patchset, oe_terminal)
+            classes[patchdir] = (patchset, resolver)
+            patchset.Clean()
+        else:
+            patchset, resolver = classes[patchdir]
+
+        bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d)))
+        try:
+            patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
+        except Exception as exc:
+            bb.fatal(str(exc))
+        try:
+            resolver.Resolve()
+        except bb.BBHandledException as e:
+            bb.fatal(str(e))
 }
 patch_do_patch[vardepsexclude] = "PATCHRESOLVE"
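Note: patch_do_patch() picks its backend and conflict handler from the two
maps above, so only those keys are valid. For example, in local.conf or a
recipe:

    PATCHTOOL = "git"        # apply with oe.patch.GitApplyTree
    PATCHRESOLVE = "noop"    # fail on conflicts instead of opening a terminal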
 
index 4b182690f28b755b1069fc2423c7e34984c9a84f..80f6244fcabd246f961347d9226eee6d3224559b 100644 (file)
@@ -1,22 +1,22 @@
 python do_pkg_write_metainfo () {
-       deploydir = d.getVar('DEPLOY_DIR', True)
-       if not deploydir:
-               bb.error("DEPLOY_DIR not defined, unable to write package info")
-               return
+    deploydir = d.getVar('DEPLOY_DIR', True)
+    if not deploydir:
+        bb.error("DEPLOY_DIR not defined, unable to write package info")
+        return
 
-       try:
-               infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
-       except OSError:
-               raise bb.build.FuncFailed("unable to open package-info file for writing.")
-       
-       name = d.getVar('PN', True)
-       version = d.getVar('PV', True)
-       desc = d.getVar('DESCRIPTION', True)
-       page = d.getVar('HOMEPAGE', True)
-       lic = d.getVar('LICENSE', True)
-       
-       infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) 
-       infofile.close()
+    try:
+        infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
+    except (IOError, OSError):
+        raise bb.build.FuncFailed("unable to open package-info file for writing.")
+
+    name = d.getVar('PN', True)
+    version = d.getVar('PV', True)
+    desc = d.getVar('DESCRIPTION', True)
+    page = d.getVar('HOMEPAGE', True)
+    lic = d.getVar('LICENSE', True)
+
+    infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) 
+    infofile.close()
 }
 
-addtask pkg_write_metainfo after do_package before do_build
\ No newline at end of file
+addtask pkg_write_metainfo after do_package before do_build
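Note: each run appends one wiki-style table row to package-metainfo; with
hypothetical values PN=foo, PV=1.0, LICENSE=MIT the appended line looks like:

    || foo || 1.0 || An example package || http://example.com/foo || MIT ||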
index ed2dca07809fb5e571d5e3b0d80251c54dab714b..9483e93f3b0ab9adf4741f953b8840e0195a529b 100644 (file)
@@ -32,29 +32,29 @@ python () {
 }
 
 fakeroot python do_populate_sdk() {
-       bb.build.exec_func("populate_sdk_image", d)
+    bb.build.exec_func("populate_sdk_image", d)
 
-       # Handle multilibs in the SDK environment, siteconfig, etc files...
-       localdata = bb.data.createCopy(d)
+    # Handle multilibs in the SDK environment, siteconfig, etc files...
+    localdata = bb.data.createCopy(d)
 
-       # make sure we only use the WORKDIR value from 'd', or it can change
-       localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
+    # make sure we only use the WORKDIR value from 'd', or it can change
+    localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
 
-       # make sure we only use the SDKTARGETSYSROOT value from 'd'
-       localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
+    # make sure we only use the SDKTARGETSYSROOT value from 'd'
+    localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
 
-       # Process DEFAULTTUNE
-       bb.build.exec_func("create_sdk_files", localdata)
+    # Process DEFAULTTUNE
+    bb.build.exec_func("create_sdk_files", localdata)
 
-       variants = d.getVar("MULTILIB_VARIANTS", True) or ""
-       for item in variants.split():
-               # Load overrides from 'd' to avoid having to reset the value...
-               overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
-               localdata.setVar("OVERRIDES", overrides)
-               bb.data.update_data(localdata)
-               bb.build.exec_func("create_sdk_files", localdata)
+    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    for item in variants.split():
+        # Load overrides from 'd' to avoid having to reset the value...
+        overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
+        localdata.setVar("OVERRIDES", overrides)
+        bb.data.update_data(localdata)
+        bb.build.exec_func("create_sdk_files", localdata)
 
-       bb.build.exec_func("tar_sdk", d)
+    bb.build.exec_func("tar_sdk", d)
 }
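Note: the multilib loop above is the standard pattern for re-running a
function once per variant: copy the datastore, extend OVERRIDES with the
virtclass override, update, execute. Condensed (SOME_FUNC is hypothetical):

    localdata = bb.data.createCopy(d)
    for item in (d.getVar("MULTILIB_VARIANTS", True) or "").split():
        localdata.setVar("OVERRIDES",
            d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item)
        bb.data.update_data(localdata)
        bb.build.exec_func("SOME_FUNC", localdata)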
 
 fakeroot populate_sdk_image() {
index c0a538036f324dd9763f4d5f664db5a19567a14e..0e75ac4e3860730cb790c7ffd7caecf42ec6aed1 100644 (file)
@@ -4,12 +4,12 @@
 #
 
 def qemu_target_binary(data):
-       import bb
+    import bb
 
-       target_arch = data.getVar("TARGET_ARCH", True)
-       if target_arch in ("i486", "i586", "i686"):
-               target_arch = "i386"
-       elif target_arch == "powerpc":
-               target_arch = "ppc"
+    target_arch = data.getVar("TARGET_ARCH", True)
+    if target_arch in ("i486", "i586", "i686"):
+        target_arch = "i386"
+    elif target_arch == "powerpc":
+        target_arch = "ppc"
 
-       return "qemu-" + target_arch
+    return "qemu-" + target_arch
index c2c5bd5c256098453eeaf98ce705ef3cced11d55..d572f0edcfd473b8118b36121aba32f9f9245634 100644 (file)
@@ -185,18 +185,18 @@ def sstate_installpkg(ss, d):
         staging_target = d.getVar('STAGING_DIR_TARGET', True)
         staging_host = d.getVar('STAGING_DIR_HOST', True)
 
-       if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
-               sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
-       elif bb.data.inherits_class('cross', d):
-               sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging)
-       else:
-               sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
+        if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
+            sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
+        elif bb.data.inherits_class('cross', d):
+            sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging)
+        else:
+            sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
 
-       # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
-       sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd)
+        # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
+        sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd)
 
-       print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)
-       subprocess.call(sstate_hardcode_cmd, shell=True)
+        print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)
+        subprocess.call(sstate_hardcode_cmd, shell=True)
 
         # Need to remove this or we'd copy it into the target directory and may 
         # conflict with another writer
@@ -310,50 +310,50 @@ python sstate_cleanall() {
 }
 
 def sstate_hardcode_path(d):
-       import subprocess
-
-       # Need to remove hardcoded paths and fix these when we install the
-       # staging packages.
-       #
-       # Note: the logic in this function needs to match the reverse logic
-       # in sstate_installpkg(ss, d)
-
-       staging = d.getVar('STAGING_DIR', True)
-       staging_target = d.getVar('STAGING_DIR_TARGET', True)
-       staging_host = d.getVar('STAGING_DIR_HOST', True)
-       sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
-
-       if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
-               sstate_grep_cmd = "grep -l -e '%s'" % (staging)
-               sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
-       elif bb.data.inherits_class('cross', d):
-               sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging)
-               sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
-       else:
-               sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
-               sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
-       
-       fixmefn =  sstate_builddir + "fixmepath"
-
-       sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
-       sstate_filelist_cmd = "tee %s" % (fixmefn)
-
-       # fixmepath file needs relative paths, drop sstate_builddir prefix
-       sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn)
-
-       # Limit the fixpaths and sed operations based on the initial grep search
-       # This has the side effect of making sure the vfs cache is hot
-       sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd)
-
-       print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)
-       subprocess.call(sstate_hardcode_cmd, shell=True)
+    import subprocess
+
+    # Need to remove hardcoded paths and fix these when we install the
+    # staging packages.
+    #
+    # Note: the logic in this function needs to match the reverse logic
+    # in sstate_installpkg(ss, d)
+
+    staging = d.getVar('STAGING_DIR', True)
+    staging_target = d.getVar('STAGING_DIR_TARGET', True)
+    staging_host = d.getVar('STAGING_DIR_HOST', True)
+    sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
+
+    if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
+        sstate_grep_cmd = "grep -l -e '%s'" % (staging)
+        sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
+    elif bb.data.inherits_class('cross', d):
+        sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging)
+        sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
+    else:
+        sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
+        sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
+    
+    fixmefn =  sstate_builddir + "fixmepath"
+
+    sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
+    sstate_filelist_cmd = "tee %s" % (fixmefn)
+
+    # fixmepath file needs relative paths, drop sstate_builddir prefix
+    sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn)
+
+    # Limit the fixpaths and sed operations based on the initial grep search
+    # This has the side effect of making sure the vfs cache is hot
+    sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd)
+
+    print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)
+    subprocess.call(sstate_hardcode_cmd, shell=True)
 
         # If the fixmefn is empty, remove it..
-       if os.stat(fixmefn).st_size == 0:
-               os.remove(fixmefn)
-       else:
-               print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)
-               subprocess.call(sstate_filelist_relative_cmd, shell=True)
+    if os.stat(fixmefn).st_size == 0:
+        os.remove(fixmefn)
+    else:
+        print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)
+        subprocess.call(sstate_filelist_relative_cmd, shell=True)
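Note: for a native recipe the assembled command is a single pipeline; with a
hypothetical STAGING_DIR of /bld/sysroots it expands to roughly:

    ${SSTATE_SCAN_CMD} | xargs grep -l -e '/bld/sysroots' \
        | tee ${SSTATE_BUILDDIR}fixmepath \
        | xargs --no-run-if-empty sed -i -e 's:/bld/sysroots:FIXMESTAGINGDIR:g'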
 
 def sstate_package(ss, d):
     import oe.path
index a98f51deb556ef2b13224f7ce1dfbb07719b15d5..ee5a025a3b1f1ab570dec67a53994d4c12ca8227 100644 (file)
@@ -109,7 +109,7 @@ do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/"
 do_populate_sysroot[stamp-extra-info] = "${MACHINE}"
 
 python do_populate_sysroot_setscene () {
-       sstate_setscene(d)
+    sstate_setscene(d)
 }
 addtask do_populate_sysroot_setscene
 
index 700ea539118a239d48586bbdc7f5d2eaf97d9257..c4596bffcea0c4b3f64dce67b65a41941744001d 100644 (file)
@@ -54,146 +54,146 @@ syslinux_hddimg_install() {
 }
 
 python build_syslinux_menu () {
-       import copy
-       import sys
-
-       workdir = d.getVar('WORKDIR', True)
-       if not workdir:
-               bb.error("WORKDIR is not defined")
-               return
-               
-       labels = d.getVar('LABELS', True)
-       if not labels:
-               bb.debug(1, "LABELS not defined, nothing to do")
-               return
-       
-       if labels == []:
-               bb.debug(1, "No labels, nothing to do")
-               return
-
-       cfile = d.getVar('SYSLINUXMENU', True)
-       if not cfile:
-               raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
-
-       try:
-               cfgfile = file(cfile, 'w')
-       except OSError:
-               raise bb.build.funcFailed('Unable to open %s' % (cfile))
-
-       # Beep the speaker and Clear the screen
-       cfgfile.write('\x07\x0C')
-
-       # The title should be configurable
-       cfgfile.write('Linux Boot Menu\n')
-       cfgfile.write('The following targets are available on this image:\n')
-       cfgfile.write('\n')
-
-       for label in labels.split():
-               from copy import deepcopy
-               localdata = deepcopy(d)
-
-               overrides = localdata.getVar('OVERRIDES')
-               if not overrides:
-                       raise bb.build.FuncFailed('OVERRIDES not defined')
-               overrides = localdata.expand(overrides)
-       
-               localdata.setVar('OVERRIDES', label + ':' + overrides)
-               bb.data.update_data(localdata)
-
-               usage = localdata.getVar('USAGE', True)
-               cfgfile.write('  \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
-               cfgfile.write('%s\n' % (usage))
-
-               del localdata
-
-       cfgfile.write('\n')
-       cfgfile.close()
+    import copy
+    import sys
+
+    workdir = d.getVar('WORKDIR', True)
+    if not workdir:
+        bb.error("WORKDIR is not defined")
+        return
+        
+    labels = d.getVar('LABELS', True)
+    if not labels:
+        bb.debug(1, "LABELS not defined, nothing to do")
+        return
+    
+    if labels == []:
+        bb.debug(1, "No labels, nothing to do")
+        return
+
+    cfile = d.getVar('SYSLINUXMENU', True)
+    if not cfile:
+        raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
+
+    try:
+        cfgfile = file(cfile, 'w')
+    except (IOError, OSError):
+        raise bb.build.FuncFailed('Unable to open %s' % (cfile))
+
+    # Beep the speaker and Clear the screen
+    cfgfile.write('\x07\x0C')
+
+    # The title should be configurable
+    cfgfile.write('Linux Boot Menu\n')
+    cfgfile.write('The following targets are available on this image:\n')
+    cfgfile.write('\n')
+
+    for label in labels.split():
+        from copy import deepcopy
+        localdata = deepcopy(d)
+
+        overrides = localdata.getVar('OVERRIDES')
+        if not overrides:
+            raise bb.build.FuncFailed('OVERRIDES not defined')
+        overrides = localdata.expand(overrides)
+    
+        localdata.setVar('OVERRIDES', label + ':' + overrides)
+        bb.data.update_data(localdata)
+
+        usage = localdata.getVar('USAGE', True)
+        cfgfile.write('  \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
+        cfgfile.write('%s\n' % (usage))
+
+        del localdata
+
+    cfgfile.write('\n')
+    cfgfile.close()
 }
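Note: build_syslinux_menu copies the datastore with copy.deepcopy(d) while
build_syslinux_cfg below uses bb.data.createCopy(d); the latter is the
idiomatic (copy-on-write, so much cheaper) form:

    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', label + ':' + overrides)
    bb.data.update_data(localdata)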
 
 python build_syslinux_cfg () {
-       import copy
-       import sys
-
-       workdir = d.getVar('WORKDIR', True)
-       if not workdir:
-               bb.error("WORKDIR not defined, unable to package")
-               return
-               
-       labels = d.getVar('LABELS', True)
-       if not labels:
-               bb.debug(1, "LABELS not defined, nothing to do")
-               return
-       
-       if labels == []:
-               bb.debug(1, "No labels, nothing to do")
-               return
-
-       cfile = d.getVar('SYSLINUXCFG', True)
-       if not cfile:
-               raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
-
-       try:
-               cfgfile = file(cfile, 'w')
-       except OSError:
-               raise bb.build.funcFailed('Unable to open %s' % (cfile))
-
-       cfgfile.write('# Automatically created by OE\n')
-
-       opts = d.getVar('SYSLINUX_OPTS', True)
-
-       if opts:
-               for opt in opts.split(';'):
-                       cfgfile.write('%s\n' % opt)
-
-       cfgfile.write('ALLOWOPTIONS 1\n');
-       cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
-
-       timeout = d.getVar('SYSLINUX_TIMEOUT', True)
-
-       if timeout:
-               cfgfile.write('TIMEOUT %s\n' % timeout)
-       else:
-               cfgfile.write('TIMEOUT 50\n')
-
-       prompt = d.getVar('SYSLINUX_PROMPT', True)
-       if prompt:
-               cfgfile.write('PROMPT %s\n' % prompt)
-       else:
-               cfgfile.write('PROMPT 1\n')
-
-       menu = d.getVar('AUTO_SYSLINUXMENU', True)
-
-       # This is ugly.  My bad.
-
-       if menu:
-               bb.build.exec_func('build_syslinux_menu', d)
-               mfile = d.getVar('SYSLINUXMENU', True)
-               cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
-       
-       for label in labels.split():
-               localdata = bb.data.createCopy(d)
-
-               overrides = localdata.getVar('OVERRIDES', True)
-               if not overrides:
-                       raise bb.build.FuncFailed('OVERRIDES not defined')
-       
-               localdata.setVar('OVERRIDES', label + ':' + overrides)
-               bb.data.update_data(localdata)
-       
-               cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
-
-               append = localdata.getVar('APPEND', True)
-               initrd = localdata.getVar('INITRD', True)
-
-               if append:
-                       cfgfile.write('APPEND ')
-
-                       if initrd:
-                               cfgfile.write('initrd=/initrd ')
-
-                       cfgfile.write('LABEL=%s '% (label))
-
-                       cfgfile.write('%s\n' % (append))
-
-       cfgfile.close()
+    import copy
+    import sys
+
+    workdir = d.getVar('WORKDIR', True)
+    if not workdir:
+        bb.error("WORKDIR not defined, unable to package")
+        return
+        
+    labels = d.getVar('LABELS', True)
+    if not labels:
+        bb.debug(1, "LABELS not defined, nothing to do")
+        return
+    
+    if labels == []:
+        bb.debug(1, "No labels, nothing to do")
+        return
+
+    cfile = d.getVar('SYSLINUXCFG', True)
+    if not cfile:
+        raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
+
+    try:
+        cfgfile = file(cfile, 'w')
+    except (IOError, OSError):
+        raise bb.build.FuncFailed('Unable to open %s' % (cfile))
+
+    cfgfile.write('# Automatically created by OE\n')
+
+    opts = d.getVar('SYSLINUX_OPTS', True)
+
+    if opts:
+        for opt in opts.split(';'):
+            cfgfile.write('%s\n' % opt)
+
+    cfgfile.write('ALLOWOPTIONS 1\n')
+    cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
+
+    timeout = d.getVar('SYSLINUX_TIMEOUT', True)
+
+    if timeout:
+        cfgfile.write('TIMEOUT %s\n' % timeout)
+    else:
+        cfgfile.write('TIMEOUT 50\n')
+
+    prompt = d.getVar('SYSLINUX_PROMPT', True)
+    if prompt:
+        cfgfile.write('PROMPT %s\n' % prompt)
+    else:
+        cfgfile.write('PROMPT 1\n')
+
+    menu = d.getVar('AUTO_SYSLINUXMENU', True)
+
+    # This is ugly.  My bad.
+
+    if menu:
+        bb.build.exec_func('build_syslinux_menu', d)
+        mfile = d.getVar('SYSLINUXMENU', True)
+        cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
+    
+    for label in labels.split():
+        localdata = bb.data.createCopy(d)
+
+        overrides = localdata.getVar('OVERRIDES', True)
+        if not overrides:
+            raise bb.build.FuncFailed('OVERRIDES not defined')
+    
+        localdata.setVar('OVERRIDES', label + ':' + overrides)
+        bb.data.update_data(localdata)
+    
+        cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
+
+        append = localdata.getVar('APPEND', True)
+        initrd = localdata.getVar('INITRD', True)
+
+        if append:
+            cfgfile.write('APPEND ')
+
+            if initrd:
+                cfgfile.write('initrd=/initrd ')
+
+            cfgfile.write('LABEL=%s '% (label))
+
+            cfgfile.write('%s\n' % (append))
+
+    cfgfile.close()
 }
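Note: with the defaults and hypothetical values LABELS = "boot",
APPEND = "root=/dev/sda2 ro" and INITRD set, the generated config reads:

    # Automatically created by OE
    ALLOWOPTIONS 1
    DEFAULT boot
    TIMEOUT 50
    PROMPT 1
    LABEL boot
    KERNEL /vmlinuz
    APPEND initrd=/initrd LABEL=boot root=/dev/sda2 ro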
index 9f2e59f5846ba604da9f1e076f849202f436361f..47215add1e149fe144c542aa40232f791a1b906d 100644 (file)
@@ -113,13 +113,13 @@ def update_alternatives_after_parse(d):
         # Convert old format to new format...
         alt_links = d.getVar('ALTERNATIVE_LINKS', True) or ""
         for alt_link in alt_links.split():
-               alt_name = os.path.basename(alt_link)
+            alt_name = os.path.basename(alt_link)
 
-               alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
-               alternative += " " + alt_name
-               d.setVar('ALTERNATIVE_%s' % pn, alternative)
-               d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
-               d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link)
+            alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
+            alternative += " " + alt_name
+            d.setVar('ALTERNATIVE_%s' % pn, alternative)
+            d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
+            d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link)
         return
 
     if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None:
@@ -128,15 +128,15 @@ def update_alternatives_after_parse(d):
         alt_path = d.getVar('ALTERNATIVE_PATH', True)
         alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name))
         if alt_name == None:
-               raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
+            raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
         if alt_path == None:
-               raise bb.build.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
+            raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
 
         alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
         alternative += " " + alt_name
 
-       # Fix the alt_path if it's relative
-       alt_path = os.path.join(os.path.dirname(alt_link), alt_path)
+        # Fix the alt_path if it's relative
+        alt_path = os.path.join(os.path.dirname(alt_link), alt_path)
 
         d.setVar('ALTERNATIVE_%s' % pn, alternative)
         d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
@@ -199,144 +199,144 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
 # the split and strip steps..  packagecopy seems to be the earliest reasonable
 # place.
 python perform_packagecopy_append () {
-       # Check for deprecated usage...
-       pn = d.getVar('BPN', True)
-       if d.getVar('ALTERNATIVE_LINKS', True) != None:
-               bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn)
-
-       if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None:
-               bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn)
-
-       # Do actual update alternatives processing
-       pkgdest = d.getVar('PKGD', True)
-       for pkg in (d.getVar('PACKAGES', True) or "").split():
-               # If the src == dest, we know we need to rename the dest by appending ${BPN}
-               link_rename = {}
-               for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
-                       alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
-                       if not alt_link:
-                               alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
-                               d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
-
-                       alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-                       alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
-                       # Sometimes alt_target is specified as relative to the link name.
-                       alt_target   = os.path.join(os.path.dirname(alt_link), alt_target)
-
-                       # If the link and target are the same name, we need to rename the target.
-                       if alt_link == alt_target:
-                               src = '%s/%s' % (pkgdest, alt_target)
-                               alt_target_rename = '%s.%s' % (alt_target, pn)
-                               dest = '%s/%s' % (pkgdest, alt_target_rename)
-                               if os.path.lexists(dest):
-                                       bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
-                               elif os.path.lexists(src):
-                                       if os.path.islink(src):
-                                               # Delay rename of links
-                                               link_rename[alt_target] = alt_target_rename
-                                       else:
-                                               bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
-                                               os.rename(src, dest)
-                               else:
-                                       bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
-                                       continue
-                               d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)
-
-               # Process delayed link names
-               # Do these after other renames so we can correct broken links
-               for alt_target in link_rename:
-                       src = '%s/%s' % (pkgdest, alt_target)
-                       dest = '%s/%s' % (pkgdest, link_rename[alt_target])
-                       link = os.readlink(src)
-                       if os.path.isabs(link):
-                               link_target = pkgdest + os.readlink(src)
-                       else:
-                               link_target = os.path.join(os.path.dirname(src), link)
-
-                       if os.path.lexists(link_target):
-                               # Ok, the link_target exists, we can rename
-                               bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
-                               os.rename(src, dest)
-                       else:
-                               # Try to resolve the broken link to link.${BPN}
-                               link_maybe = '%s.%s' % (os.readlink(src), pn)
-                               if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
-                                       # Ok, the renamed link target exists.. create a new link, and remove the original
-                                       bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
-                                       os.symlink(link_maybe, dest)
-                                       os.unlink(src)
-                               else:
-                                       bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
+    # Check for deprecated usage...
+    pn = d.getVar('BPN', True)
+    if d.getVar('ALTERNATIVE_LINKS', True) != None:
+        bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn)
+
+    if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None:
+        bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn)
+
+    # Do actual update alternatives processing
+    pkgdest = d.getVar('PKGD', True)
+    for pkg in (d.getVar('PACKAGES', True) or "").split():
+        # If the src == dest, we know we need to rename the dest by appending ${BPN}
+        link_rename = {}
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+            alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
+            if not alt_link:
+                alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
+                d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
+
+            alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
+            alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            # Sometimes alt_target is specified as relative to the link name.
+            alt_target   = os.path.join(os.path.dirname(alt_link), alt_target)
+
+            # If the link and target are the same name, we need to rename the target.
+            if alt_link == alt_target:
+                src = '%s/%s' % (pkgdest, alt_target)
+                alt_target_rename = '%s.%s' % (alt_target, pn)
+                dest = '%s/%s' % (pkgdest, alt_target_rename)
+                if os.path.lexists(dest):
+                    bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
+                elif os.path.lexists(src):
+                    if os.path.islink(src):
+                        # Delay rename of links
+                        link_rename[alt_target] = alt_target_rename
+                    else:
+                        bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
+                        os.rename(src, dest)
+                else:
+                    bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
+                    continue
+                d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)
+
+        # Process delayed link names
+        # Do these after other renames so we can correct broken links
+        for alt_target in link_rename:
+            src = '%s/%s' % (pkgdest, alt_target)
+            dest = '%s/%s' % (pkgdest, link_rename[alt_target])
+            link = os.readlink(src)
+            if os.path.isabs(link):
+                link_target = pkgdest + os.readlink(src)
+            else:
+                link_target = os.path.join(os.path.dirname(src), link)
+
+            if os.path.lexists(link_target):
+                # Ok, the link_target exists, we can rename
+                bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
+                os.rename(src, dest)
+            else:
+                # Try to resolve the broken link to link.${BPN}
+                link_maybe = '%s.%s' % (os.readlink(src), pn)
+                if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
+                    # Ok, the renamed link target exists.. create a new link, and remove the original
+                    bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
+                    os.symlink(link_maybe, dest)
+                    os.unlink(src)
+                else:
+                    bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
 }
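Note: concretely, when link and target collide the payload is renamed by
suffixing the recipe name, and symlinks are renamed last so their targets can
be fixed up first. For a hypothetical recipe 'bar':

    ALTERNATIVE_bar = "foo"
    ALTERNATIVE_LINK_NAME[foo] = "${bindir}/foo"
    # result: the package ships ${bindir}/foo.bar, and
    # ALTERNATIVE_TARGET_bar[foo] is updated to point at it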
 
 python populate_packages_prepend () {
-       pn = d.getVar('BPN', True)
-
-       # Do actual update alternatives processing
-       pkgdest = d.getVar('PKGD', True)
-       for pkg in (d.getVar('PACKAGES', True) or "").split():
-               # Create post install/removal scripts
-               alt_setup_links = ""
-               alt_remove_links = ""
-               for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
-                       alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
-                       alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-                       alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
-                       # Sometimes alt_target is specified as relative to the link name.
-                       alt_target   = os.path.join(os.path.dirname(alt_link), alt_target)
-
-                       alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg,  alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY',  alt_name, True)
-                       alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
-
-                       # This shouldn't trigger, as it should have been resolved earlier!
-                       if alt_link == alt_target:
-                               bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
-                               alt_target = '%s.%s' % (alt_target, pn)
-
-                       if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
-                               bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
-                               continue
-
-                       # Default to generate shell script.. eventually we may want to change this...
-                       alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link))
-
-                       alt_setup_links  += '   update-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
-                       alt_remove_links += '   update-alternatives --remove  %s %s\n' % (alt_name, alt_target)
-
-               if alt_setup_links:
-                       bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
-                       bb.note('%s' % alt_setup_links)
-                       postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n'
-                       postinst += alt_setup_links
-                       d.setVar('pkg_postinst_%s' % pkg, postinst)
-
-                       bb.note('%s' % alt_remove_links)
-                       postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n'
-                       postrm += alt_remove_links
-                       d.setVar('pkg_postrm_%s' % pkg, postrm)
+    pn = d.getVar('BPN', True)
+
+    # Do actual update alternatives processing
+    pkgdest = d.getVar('PKGD', True)
+    for pkg in (d.getVar('PACKAGES', True) or "").split():
+        # Create post install/removal scripts
+        alt_setup_links = ""
+        alt_remove_links = ""
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+            alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
+            alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
+            alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+            # Sometimes alt_target is specified as relative to the link name.
+            alt_target   = os.path.join(os.path.dirname(alt_link), alt_target)
+
+            alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg,  alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY',  alt_name, True)
+            alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
+
+            # This shouldn't trigger, as it should have been resolved earlier!
+            if alt_link == alt_target:
+                bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
+                alt_target = '%s.%s' % (alt_target, pn)
+
+            if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
+                bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
+                continue
+
+            # Default to generate shell script.. eventually we may want to change this...
+            alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link))
+
+            alt_setup_links  += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
+            alt_remove_links += '\tupdate-alternatives --remove  %s %s\n' % (alt_name, alt_target)
+
+        if alt_setup_links:
+            bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
+            bb.note('%s' % alt_setup_links)
+            postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n'
+            postinst += alt_setup_links
+            d.setVar('pkg_postinst_%s' % pkg, postinst)
+
+            bb.note('%s' % alt_remove_links)
+            postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n'
+            postrm += alt_remove_links
+            d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
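Note: the generated maintainer scripts end up with one update-alternatives
call per name (tab-indented by the '\t' in the format strings). Continuing
the hypothetical 'foo' example with priority 100, pkg_postinst gains:

    #!/bin/sh
        update-alternatives --install /usr/bin/foo foo foo.bar 100

and pkg_postrm the matching:

        update-alternatives --remove  foo foo.bar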
 
 python package_do_filedeps_append () {
-       pn = d.getVar('BPN', True)
-       pkgdest = d.getVar('PKGDEST', True)
-
-       for pkg in packages.split():
-               for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
-                       alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
-                       alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
-                       alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
-
-                       if alt_link == alt_target:
-                               bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target))
-                               alt_target = '%s.%s' % (alt_target, pn)
-
-                       if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
-                               continue
-
-                       # Add file provide
-                       trans_target = file_translate(alt_target)
-                       d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
-                       if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
-                               d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
+    pn = d.getVar('BPN', True)
+    pkgdest = d.getVar('PKGDEST', True)
+
+    for pkg in packages.split():
+        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
+            alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
+            alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
+            alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
+
+            if alt_link == alt_target:
+                bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target))
+                alt_target = '%s.%s' % (alt_target, pn)
+
+            if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
+                continue
+
+            # Add file provide
+            trans_target = file_translate(alt_target)
+            d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
+            if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
+                d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
 }
 
index bddead4a2532c13371a8f1638db2d17be4bda24d..eef2e8caa03fad4bcd728e52665b36dc8aeb315f 100644 (file)
@@ -44,42 +44,42 @@ python __anonymous() {
 }
 
 python populate_packages_prepend () {
-       def update_rcd_package(pkg):
-               bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
-               localdata = bb.data.createCopy(d)
-               overrides = localdata.getVar("OVERRIDES", True)
-               localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
-               bb.data.update_data(localdata)
+    def update_rcd_package(pkg):
+        bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
+        localdata = bb.data.createCopy(d)
+        overrides = localdata.getVar("OVERRIDES", True)
+        localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
+        bb.data.update_data(localdata)
 
-               """
-               update_rc.d postinst is appended here because pkg_postinst may require to
-               execute on the target. Not doing so may cause update_rc.d postinst invoked
-               twice to cause unwanted warnings.
-               """ 
-               postinst = localdata.getVar('pkg_postinst', True)
-               if not postinst:
-                       postinst = '#!/bin/sh\n'
-               postinst += localdata.getVar('updatercd_postinst', True)
-               d.setVar('pkg_postinst_%s' % pkg, postinst)
+        """
+        update_rc.d postinst is appended here because pkg_postinst may require to
+        execute on the target. Not doing so may cause update_rc.d postinst invoked
+        twice to cause unwanted warnings.
+        """ 
+        postinst = localdata.getVar('pkg_postinst', True)
+        if not postinst:
+            postinst = '#!/bin/sh\n'
+        postinst += localdata.getVar('updatercd_postinst', True)
+        d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-               prerm = localdata.getVar('pkg_prerm', True)
-               if not prerm:
-                       prerm = '#!/bin/sh\n'
-               prerm += localdata.getVar('updatercd_prerm', True)
-               d.setVar('pkg_prerm_%s' % pkg, prerm)
+        prerm = localdata.getVar('pkg_prerm', True)
+        if not prerm:
+            prerm = '#!/bin/sh\n'
+        prerm += localdata.getVar('updatercd_prerm', True)
+        d.setVar('pkg_prerm_%s' % pkg, prerm)
 
-               postrm = localdata.getVar('pkg_postrm', True)
-               if not postrm:
-                       postrm = '#!/bin/sh\n'
-                postrm += localdata.getVar('updatercd_postrm', True)
-               d.setVar('pkg_postrm_%s' % pkg, postrm)
+        postrm = localdata.getVar('pkg_postrm', True)
+        if not postrm:
+            postrm = '#!/bin/sh\n'
+        postrm += localdata.getVar('updatercd_postrm', True)
+        d.setVar('pkg_postrm_%s' % pkg, postrm)
 
-       pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
-       if pkgs == None:
-               pkgs = d.getVar('UPDATERCPN', True)
-               packages = (d.getVar('PACKAGES', True) or "").split()
-               if not pkgs in packages and packages != []:
-                       pkgs = packages[0]
-       for pkg in pkgs.split():
-               update_rcd_package(pkg)
+    pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
+    if pkgs == None:
+        pkgs = d.getVar('UPDATERCPN', True)
+        packages = (d.getVar('PACKAGES', True) or "").split()
+        if not pkgs in packages and packages != []:
+            pkgs = packages[0]
+    for pkg in pkgs.split():
+        update_rcd_package(pkg)
 }
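
A note on why this conversion is more than cosmetic: Python expands a tab to the
next multiple of eight columns, so a block mixing tabs and spaces can carry a
different structure than the one an editor showing 4-column tabs displays. A
minimal illustration (hypothetical code, not from this tree):

    # Both indented lines below land on column 8, so plain Python 2 accepts
    # them, yet the block reads very differently in a 4-column-tab editor.
    src = "if True:\n\tx = 1\n        y = 2\n"
    compile(src, "<sketch>", "exec")   # TabError under python -tt (and Python 3)

Normalizing every python block to four spaces removes that ambiguity outright.
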
index 6ae5e484209f68b3d9bd911522eb5d76fb7b65fd..bb8f42b357a631f1d73d754480e0e8c381535cc1 100644 (file)
@@ -154,61 +154,61 @@ do_package_setscene[depends] = "${USERADDSETSCENEDEPS}"
 
 # Recipe parse-time sanity checks
 def update_useradd_after_parse(d):
-       useradd_packages = d.getVar('USERADD_PACKAGES', True)
+    useradd_packages = d.getVar('USERADD_PACKAGES', True)
 
-       if not useradd_packages:
-               raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')
+    if not useradd_packages:
+        raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')
 
-       for pkg in useradd_packages.split():
-               if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
-                       raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)
+    for pkg in useradd_packages.split():
+        if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
+            raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)
 
 python __anonymous() {
-       update_useradd_after_parse(d)
+    update_useradd_after_parse(d)
 }
 
 # Return a single [GROUP|USER]ADD_PARAM formatted string which includes the
 # [group|user]add parameters for all USERADD_PACKAGES in this recipe
 def get_all_cmd_params(d, cmd_type):
-       import string
-       
-       param_type = cmd_type.upper() + "ADD_PARAM_%s"
-       params = []
+    import string
+    
+    param_type = cmd_type.upper() + "ADD_PARAM_%s"
+    params = []
 
-       useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
-       for pkg in useradd_packages.split():
-               param = d.getVar(param_type % pkg, True)
-               if param:
-                       params.append(param)
+    useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+    for pkg in useradd_packages.split():
+        param = d.getVar(param_type % pkg, True)
+        if param:
+            params.append(param)
 
-       return string.join(params, "; ")
+    return string.join(params, "; ")
 
 # Adds the preinst script into generated packages
 fakeroot python populate_packages_prepend () {
-       def update_useradd_package(pkg):
-               bb.debug(1, 'adding user/group calls to preinst for %s' % pkg)
-
-               """
-               useradd preinst is appended here because pkg_preinst may be
-               required to execute on the target. Not doing so may cause
-               useradd preinst to be invoked twice, causing unwanted warnings.
-               """
-               preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
-               if not preinst:
-                       preinst = '#!/bin/sh\n'
-               preinst += d.getVar('useradd_preinst', True)
-               d.setVar('pkg_preinst_%s' % pkg, preinst)
-
-               # RDEPENDS setup
-               rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
-               rdepends += ' ' + d.getVar('MLPREFIX') + 'base-passwd'
-               rdepends += ' ' + d.getVar('MLPREFIX') + 'shadow'
-               d.setVar("RDEPENDS_%s" % pkg, rdepends)
-
-       # Add the user/group preinstall scripts and RDEPENDS requirements
-       # to packages specified by USERADD_PACKAGES
-       if not bb.data.inherits_class('nativesdk', d):
-               useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
-               for pkg in useradd_packages.split():
-                       update_useradd_package(pkg)
+    def update_useradd_package(pkg):
+        bb.debug(1, 'adding user/group calls to preinst for %s' % pkg)
+
+        """
+        useradd preinst is appended here because pkg_preinst may be
+        required to execute on the target. Not doing so may cause
+        useradd preinst to be invoked twice, causing unwanted warnings.
+        """
+        preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
+        if not preinst:
+            preinst = '#!/bin/sh\n'
+        preinst += d.getVar('useradd_preinst', True)
+        d.setVar('pkg_preinst_%s' % pkg, preinst)
+
+        # RDEPENDS setup
+        rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
+        rdepends += ' ' + d.getVar('MLPREFIX') + 'base-passwd'
+        rdepends += ' ' + d.getVar('MLPREFIX') + 'shadow'
+        d.setVar("RDEPENDS_%s" % pkg, rdepends)
+
+    # Add the user/group preinstall scripts and RDEPENDS requirements
+    # to packages specified by USERADD_PACKAGES
+    if not bb.data.inherits_class('nativesdk', d):
+        useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
+        for pkg in useradd_packages.split():
+            update_useradd_package(pkg)
 }
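
An aside on get_all_cmd_params above: it still routes through the long-deprecated
string module. The equivalent string method needs no import at all; a sketch of
the same join:

    def join_params(params):
        # str.join replaces string.join(params, "; ")
        return "; ".join(params)   # ['-u 100 u1', '-g g1'] -> '-u 100 u1; -g g1'
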
index 6c2232ec57fa3f00dc31dcd1b4b1826b867ddba9..d150ec57baa028b916e1a174f4f3280afc98ea5f 100644 (file)
@@ -1,13 +1,13 @@
 addtask listtasks
 do_listtasks[nostamp] = "1"
 python do_listtasks() {
-       import sys
-       # emit variables and shell functions
-       #bb.data.emit_env(sys.__stdout__, d)
-       # emit the metadata which isnt valid shell
-       for e in d.keys():
-               if d.getVarFlag(e, 'task'):
-                       bb.plain("%s" % e)
+    import sys
+    # emit variables and shell functions
+    #bb.data.emit_env(sys.__stdout__, d)
+    # emit the metadata which isnt valid shell
+    for e in d.keys():
+        if d.getVarFlag(e, 'task'):
+            bb.plain("%s" % e)
 }
 
 CLEANFUNCS ?= ""
@@ -15,34 +15,34 @@ CLEANFUNCS ?= ""
 addtask clean
 do_clean[nostamp] = "1"
 python do_clean() {
-       """clear the build and temp directories"""
-       dir = d.expand("${WORKDIR}")
-       bb.note("Removing " + dir)
-       oe.path.remove(dir)
+    """clear the build and temp directories"""
+    dir = d.expand("${WORKDIR}")
+    bb.note("Removing " + dir)
+    oe.path.remove(dir)
 
-       dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
-       bb.note("Removing " + dir)
-       oe.path.remove(dir)
+    dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
+    bb.note("Removing " + dir)
+    oe.path.remove(dir)
 
-       for f in (d.getVar('CLEANFUNCS', True) or '').split():
-               bb.build.exec_func(f, d)
+    for f in (d.getVar('CLEANFUNCS', True) or '').split():
+        bb.build.exec_func(f, d)
 }
 
 addtask checkuri
 do_checkuri[nostamp] = "1"
 python do_checkuri() {
-       src_uri = (d.getVar('SRC_URI', True) or "").split()
-       if len(src_uri) == 0:
-               return
-
-       localdata = bb.data.createCopy(d)
-       bb.data.update_data(localdata)
-
-        try:
-            fetcher = bb.fetch2.Fetch(src_uri, localdata)
-            fetcher.checkstatus()
-        except bb.fetch2.BBFetchException, e:
-            raise bb.build.FuncFailed(e)
+    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    if len(src_uri) == 0:
+        return
+
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
+
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.checkstatus()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
 addtask checkuriall after do_checkuri
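
The handler at the end of do_checkuri uses the Python 2-only comma spelling of
except. The 'as' form, sketched below with the same names, parses on Python 2.6+
and Python 3 alike:

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.checkstatus()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
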
index ccf78fcfee99ae65d6cd72253927ea7a43b5546e..57406109decefb62a8dbaf0523440e1e83d6c4b7 100644 (file)
@@ -292,77 +292,77 @@ END
 }
 
 def check_app_exists(app, d):
-       from bb import which, data
+    from bb import which, data
 
-       app = data.expand(app, d)
-       path = data.getVar('PATH', d, 1)
-       return bool(which(path, app))
+    app = data.expand(app, d)
+    path = data.getVar('PATH', d, 1)
+    return bool(which(path, app))
 
 def explode_deps(s):
-       return bb.utils.explode_deps(s)
+    return bb.utils.explode_deps(s)
 
 def base_set_filespath(path, d):
-       filespath = []
-       extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
-       # Don't prepend empty strings to the path list
-       if extrapaths != "":
-               path = extrapaths.split(":") + path
-       # The ":" ensures we have an 'empty' override
-       overrides = (d.getVar("OVERRIDES", True) or "") + ":"
-       for p in path:
-               if p != "": 
-                       for o in overrides.split(":"):
-                               filespath.append(os.path.join(p, o))
-       return ":".join(filespath)
+    filespath = []
+    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    # Don't prepend empty strings to the path list
+    if extrapaths != "":
+        path = extrapaths.split(":") + path
+    # The ":" ensures we have an 'empty' override
+    overrides = (d.getVar("OVERRIDES", True) or "") + ":"
+    for p in path:
+        if p != "": 
+            for o in overrides.split(":"):
+                filespath.append(os.path.join(p, o))
+    return ":".join(filespath)
 
 def extend_variants(d, var, extend, delim=':'):
-       """Return a string of all bb class extend variants for the given extend"""
-       variants = []
-       whole = d.getVar(var, True) or ""
-       for ext in whole.split():
-               eext = ext.split(delim)
-               if len(eext) > 1 and eext[0] == extend:
-                       variants.append(eext[1])
-       return " ".join(variants)
+    """Return a string of all bb class extend variants for the given extend"""
+    variants = []
+    whole = d.getVar(var, True) or ""
+    for ext in whole.split():
+        eext = ext.split(delim)
+        if len(eext) > 1 and eext[0] == extend:
+            variants.append(eext[1])
+    return " ".join(variants)
 
 def multilib_pkg_extend(d, pkg):
-       variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
-       if not variants:
-           return pkg
-       pkgs = pkg
-       for v in variants:
-           pkgs = pkgs + " " + v + "-" + pkg
-       return pkgs
+    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
+    if not variants:
+        return pkg
+    pkgs = pkg
+    for v in variants:
+        pkgs = pkgs + " " + v + "-" + pkg
+    return pkgs
 
 def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
-       """Return a string of all ${var} in all multilib tune configuration"""
-       values = []
-       value = d.getVar(var, True) or ""
-       if value != "":
-               if need_split:
-                       for item in value.split(delim):
-                               values.append(item)
-               else:
-                       values.append(value)
-       variants = d.getVar("MULTILIB_VARIANTS", True) or ""
-       for item in variants.split():
-               localdata = bb.data.createCopy(d)
-               overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
-               localdata.setVar("OVERRIDES", overrides)
-               bb.data.update_data(localdata)
-               value = localdata.getVar(var, True) or ""
-               if value != "":
-                       if need_split:
-                               for item in value.split(delim):
-                                       values.append(item)
-                       else:
-                               values.append(value)
-       if unique:
-               #we do this to keep order as much as possible
-               ret = []
-               for value in values:
-                       if not value in ret:
-                               ret.append(value)
-       else:
-               ret = values
-       return " ".join(ret)
+    """Return a string of all ${var} in all multilib tune configuration"""
+    values = []
+    value = d.getVar(var, True) or ""
+    if value != "":
+        if need_split:
+            for item in value.split(delim):
+                values.append(item)
+        else:
+            values.append(value)
+    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    for item in variants.split():
+        localdata = bb.data.createCopy(d)
+        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
+        localdata.setVar("OVERRIDES", overrides)
+        bb.data.update_data(localdata)
+        value = localdata.getVar(var, True) or ""
+        if value != "":
+            if need_split:
+                for item in value.split(delim):
+                    values.append(item)
+            else:
+                values.append(value)
+    if unique:
+        #we do this to keep order as much as possible
+        ret = []
+        for value in values:
+            if not value in ret:
+                ret.append(value)
+    else:
+        ret = values
+    return " ".join(ret)
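
The unique branch at the end of all_multilib_tune_values is a hand-rolled,
order-preserving de-duplication. Assuming Python 2.7 or later, the standard
library offers the same in one line:

    from collections import OrderedDict
    ret = list(OrderedDict.fromkeys(values))   # keeps first-occurrence order
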
index bb6f5321ed4ec7616bf62556eeea3c46f976579f..9966f879a631f41612546c4832572500e8fd703c 100644 (file)
@@ -96,20 +96,19 @@ RDEPENDS_${PN} = "\
 PACKAGES_DYNAMIC = "${PN}-plugin-*"
 
 python populate_packages_prepend() {
-       depmap = dict( pppd="ppp",
-                                )
-       packages = []
-       multilib_prefix = (d.getVar("MLPREFIX", True) or "")
-       hook = lambda file,pkg,b,c,d:packages.append((file,pkg))
-       plugin_dir = d.expand('${libdir}/connman/plugins/')
-       plugin_name = d.expand('${PN}-plugin-%s')
-       do_split_packages(d, plugin_dir, '^(.*).so$', plugin_name, '${PN} plugin for %s', extra_depends='', hook=hook, prepend=True )
-       for (file, package) in packages:
-               plugintype = package.split( '-' )[-1]
-               if plugintype in depmap:
-                       rdepends = map(lambda x: multilib_prefix + x,  depmap[plugintype].split())
-                       bb.note( "Adding rdependency on %s to %s" % ( rdepends, package ) )
-                       d.setVar("RDEPENDS_%s" % package, " ".join(rdepends))
+    depmap = dict(pppd="ppp")
+    packages = []
+    multilib_prefix = (d.getVar("MLPREFIX", True) or "")
+    hook = lambda file,pkg,b,c,d:packages.append((file,pkg))
+    plugin_dir = d.expand('${libdir}/connman/plugins/')
+    plugin_name = d.expand('${PN}-plugin-%s')
+    do_split_packages(d, plugin_dir, '^(.*).so$', plugin_name, '${PN} plugin for %s', extra_depends='', hook=hook, prepend=True )
+    for (file, package) in packages:
+        plugintype = package.split( '-' )[-1]
+        if plugintype in depmap:
+            rdepends = map(lambda x: multilib_prefix + x,  depmap[plugintype].split())
+            bb.note( "Adding rdependency on %s to %s" % ( rdepends, package ) )
+            d.setVar("RDEPENDS_%s" % package, " ".join(rdepends))
 }
 
 PACKAGES =+ "${PN}-tools ${PN}-tests"
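
The one-line lambda hook in the connman function above is easier to follow as a
named function: do_split_packages calls it back with five arguments, of which
this recipe only keeps the first two. A sketch (parameter names are mine):

    packages = []
    def collect_hook(f, pkg, file_regex, output_pattern, modulename):
        # record the plugin file and its generated package name
        packages.append((f, pkg))
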
index 6eeee5cd5bdfdd6b26288e8562465b083da85b11..0d6f580f51bcb9f0eb6f3ff08a4c29f78e1b3680 100644 (file)
@@ -52,30 +52,30 @@ base_passwd_sstate_postinst() {
 }
 
 python populate_packages_prepend() {
-       # Add in the preinst function for ${PN}
-       # We have to do this here as prior to this, passwd/group.master
-       # would be unavailable. We need to create these files at preinst
-       # time before the files from the package may be available, hence
-       # storing the data from the files in the preinst directly.
+    # Add in the preinst function for ${PN}
+    # We have to do this here as prior to this, passwd/group.master
+    # would be unavailable. We need to create these files at preinst
+    # time before the files from the package may be available, hence
+    # storing the data from the files in the preinst directly.
 
-       f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
-       passwd = "".join(f.readlines())
-       f.close()
-       f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
-       group = "".join(f.readlines())
-       f.close()
+    f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
+    passwd = "".join(f.readlines())
+    f.close()
+    f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
+    group = "".join(f.readlines())
+    f.close()
 
-       preinst = """#!/bin/sh
+    preinst = """#!/bin/sh
 if [ ! -e $D${sysconfdir}/passwd ]; then
-       cat << EOF > $D${sysconfdir}/passwd
+\tcat << EOF > $D${sysconfdir}/passwd
 """ + passwd + """EOF
 fi
 if [ ! -e $D${sysconfdir}/group ]; then
-       cat << EOF > $D${sysconfdir}/group
+\tcat << EOF > $D${sysconfdir}/group
 """ + group + """EOF
 fi
 """
-       d.setVar('pkg_preinst_${PN}', preinst)
+    d.setVar('pkg_preinst_${PN}', preinst)
 }
 
 addtask do_package after do_populate_sysroot
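
Note how the payload inside the triple-quoted preinst survived the conversion:
the embedded shell script is data, not Python indentation, so its leading tab
became a \t escape instead of four spaces. The distinction, sketched:

    line = "\tcat << EOF > $D/etc/passwd\n"   # written out as a real tab
    # converting this tab to spaces would alter the generated script itself
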
index 82137a3c0e365d8d8cc82aeb1c35f0080aba3f2c..f5bf2478343c0b8315c63b783b6cc6854fe2803a 100644 (file)
@@ -218,23 +218,23 @@ ALTERNATIVE_TARGET[syslog-startup-conf] = "${sysconfdir}/syslog-startup.conf.${B
 ALTERNATIVE_TARGET = "/bin/busybox"
 
 python do_package_prepend () {
-       # We need to load the full set of busybox provides from the /etc/busybox.links
-       # Use this to see the update-alternatives with the right information
+    # We need to load the full set of busybox provides from the /etc/busybox.links
+    # Use this to see the update-alternatives with the right information
 
-       dvar = d.getVar('D', True)
-       pn = d.getVar('PN', True)
-       f = open('%s/etc/busybox.links' % (dvar), 'r')
+    dvar = d.getVar('D', True)
+    pn = d.getVar('PN', True)
+    f = open('%s/etc/busybox.links' % (dvar), 'r')
 
-       for alt_link_name in f:
-               alt_link_name = alt_link_name.strip()
-               alt_name = os.path.basename(alt_link_name)
+    for alt_link_name in f:
+        alt_link_name = alt_link_name.strip()
+        alt_name = os.path.basename(alt_link_name)
 
-               # Match coreutils
-               if alt_name == '[':
-                       alt_name = 'lbracket'
+        # Match coreutils
+        if alt_name == '[':
+            alt_name = 'lbracket'
 
-               d.appendVar('ALTERNATIVE_%s' % (pn), ' ' + alt_name)
-               d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link_name)
+        d.appendVar('ALTERNATIVE_%s' % (pn), ' ' + alt_name)
+        d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link_name)
 }
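
One thing the reindent makes easier to spot in do_package_prepend: the handle
returned by open() on etc/busybox.links is never closed. A sketch of the same
loop with deterministic cleanup:

    with open('%s/etc/busybox.links' % dvar) as f:   # closed on block exit
        for alt_link_name in f:
            alt_link_name = alt_link_name.strip()
            alt_name = os.path.basename(alt_link_name)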
 
 pkg_postinst_${PN} () {
index 493eb61a7a330da323da8188aae9ae07a70b574e..422e0bb118edbf998dc248380d4a0515ef65d23d 100644 (file)
@@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
 S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"
 
 do_unpack_append() {
-       bb.build.exec_func('do_move_ports', d)
+    bb.build.exec_func('do_move_ports', d)
 }
 
 do_move_ports() {
index 0f870be10cf1b736727ce6f8203405e7a44945f6..47f0834003dc2f07fcce7ae212fec5a284033b2c 100644 (file)
@@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
 S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"
 
 do_unpack_append() {
-       bb.build.exec_func('do_move_ports', d)
+    bb.build.exec_func('do_move_ports', d)
 }
 
 do_move_ports() {
index b3eb46e160362278b58964e755f31550190a4831..e596b5591f764b0f06e79ce62cd50576ca60c25b 100644 (file)
@@ -1,54 +1,54 @@
 def ld_append_if_tune_exists(d, infos, dict):
-       tune = d.getVar("DEFAULTTUNE", True) or ""
-       libdir = d.getVar("base_libdir", True) or ""
-       if dict.has_key(tune):
-               infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
-               infos['lddrewrite'].add(libdir+'/'+dict[tune][0])
+    tune = d.getVar("DEFAULTTUNE", True) or ""
+    libdir = d.getVar("base_libdir", True) or ""
+    if dict.has_key(tune):
+        infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
+        infos['lddrewrite'].add(libdir+'/'+dict[tune][0])
 
 def eglibc_dl_info(d):
-       ld_info_all = {
-               "mips": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mipsel": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64el-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64el": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64el-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "mips64el-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "powerpc": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "powerpc-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
-               "powerpc64": ["ld64.so.1", "FLAG_ELF_LIBC6"],
-               "powerpc64-nf": ["ld64.so.1", "FLAG_ELF_LIBC6"],
-               "core2": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
-               "core2-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
-               "x86": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
-               "x86-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
-               "i586": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
-       }
+    ld_info_all = {
+        "mips": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mipsel": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64el-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64el": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64el-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "mips64el-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "powerpc": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "powerpc-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
+        "powerpc64": ["ld64.so.1", "FLAG_ELF_LIBC6"],
+        "powerpc64-nf": ["ld64.so.1", "FLAG_ELF_LIBC6"],
+        "core2": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
+        "core2-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
+        "x86": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
+        "x86-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
+        "i586": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
+    }
 
-       infos = {'ldconfig':set(), 'lddrewrite':set()}
-       ld_append_if_tune_exists(d, infos, ld_info_all)
+    infos = {'ldconfig':set(), 'lddrewrite':set()}
+    ld_append_if_tune_exists(d, infos, ld_info_all)
 
-       #DEFAULTTUNE_MULTILIB_ORIGINAL
-       original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL",True)
-       if original_tune:
-               localdata = bb.data.createCopy(d)
-               localdata.setVar("DEFAULTTUNE", original_tune)
-               ld_append_if_tune_exists(localdata, infos, ld_info_all)
+    #DEFAULTTUNE_MULTILIB_ORIGINAL
+    original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL",True)
+    if original_tune:
+        localdata = bb.data.createCopy(d)
+        localdata.setVar("DEFAULTTUNE", original_tune)
+        ld_append_if_tune_exists(localdata, infos, ld_info_all)
 
-       variants = d.getVar("MULTILIB_VARIANTS", True) or ""
-       for item in variants.split():
-               localdata = bb.data.createCopy(d)
-               overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
-               localdata.setVar("OVERRIDES", overrides)
-               bb.data.update_data(localdata)
-               ld_append_if_tune_exists(localdata, infos, ld_info_all)
-       infos['ldconfig'] = ','.join(infos['ldconfig'])
-       infos['lddrewrite'] = ' '.join(infos['lddrewrite'])
-       return infos
+    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    for item in variants.split():
+        localdata = bb.data.createCopy(d)
+        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
+        localdata.setVar("OVERRIDES", overrides)
+        bb.data.update_data(localdata)
+        ld_append_if_tune_exists(localdata, infos, ld_info_all)
+    infos['ldconfig'] = ','.join(infos['ldconfig'])
+    infos['lddrewrite'] = ' '.join(infos['lddrewrite'])
+    return infos
 
 EGLIBC_KNOWN_INTERPRETER_NAMES = "${@eglibc_dl_info(d)['ldconfig']}"
 RTLDLIST = "${@eglibc_dl_info(d)['lddrewrite']}"
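
Two portability notes on ld_append_if_tune_exists: dict.has_key() exists only on
Python 2, and naming a parameter "dict" shadows the builtin. A sketch with both
addressed (the renamed parameter is mine):

    def ld_append_if_tune_exists(d, infos, ld_info):
        tune = d.getVar("DEFAULTTUNE", True) or ""
        libdir = d.getVar("base_libdir", True) or ""
        if tune in ld_info:   # the in operator replaces has_key()
            infos['ldconfig'].add('{"%s/%s",%s }' % (libdir, ld_info[tune][0], ld_info[tune][1]))
            infos['lddrewrite'].add(libdir + '/' + ld_info[tune][0])
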
index bd90ee79a29d4c312ac0bc87ea6ff66d2e261011..6009278577a207044c7fbf6530be618d46c8da1b 100644 (file)
@@ -1,14 +1,14 @@
 def eglibc_cfg(feature, features, tokens, cnf):
-       if type(tokens) == type(""):
-               tokens = [tokens]
-       if type(features) == type([]) and feature in features:
-               cnf.extend([token + ' = y' for token in tokens])
-       else:
-               for token in tokens:
-                       cnf.extend([token + ' = n'])
-                       if token == 'OPTION_EGLIBC_NSSWITCH':
-                               cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG = ${S}/nss/nsswitch.conf"])
-                               cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS = ${S}/nss/fixed-nsswitch.functions"])
+    if type(tokens) == type(""):
+        tokens = [tokens]
+    if type(features) == type([]) and feature in features:
+        cnf.extend([token + ' = y' for token in tokens])
+    else:
+        for token in tokens:
+            cnf.extend([token + ' = n'])
+            if token == 'OPTION_EGLIBC_NSSWITCH':
+                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG = ${S}/nss/nsswitch.conf"])
+                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS = ${S}/nss/fixed-nsswitch.functions"])
 
 # arrange the dependencies among eglibc configuable options according to file option-groups.def from eglibc source code
 def distro_features_check_deps(distro_features):
index 110febfe52038d46ff3e243fb7ef013173cbc292..4d4d2cf2ed8802abbb9fc1b5d467b808192ee9d4 100644 (file)
@@ -78,7 +78,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
 EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
 
 do_unpack_append() {
-       bb.build.exec_func('do_move_ports', d)
+    bb.build.exec_func('do_move_ports', d)
 }
 
 do_move_ports() {
@@ -89,8 +89,8 @@ do_move_ports() {
 }
 
 do_patch_append() {
-       bb.build.exec_func('do_fix_ia_headers', d)
-       bb.build.exec_func('do_fix_readlib_c', d)
+    bb.build.exec_func('do_fix_ia_headers', d)
+    bb.build.exec_func('do_fix_readlib_c', d)
 }
 
 # for mips eglibc now builds syscall tables for all abi's
index 23a41301855624279938c469392db9fa70ed2f6c..7e7d68b87d80b92fec9add09c4aedc29c0b8e8c7 100644 (file)
@@ -76,7 +76,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
 EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"
 
 do_unpack_append() {
-       bb.build.exec_func('do_move_ports', d)
+    bb.build.exec_func('do_move_ports', d)
 }
 
 do_move_ports() {
@@ -87,7 +87,7 @@ do_move_ports() {
 }
 
 do_patch_append() {
-       bb.build.exec_func('do_fix_readlib_c', d)
+    bb.build.exec_func('do_fix_readlib_c', d)
 }
 
 # for mips eglibc now builds syscall tables for all abi's
index d70fe479d14b03857091bcb836bfa6de37426ce8..ae9c8c2e8756437e4a72f46c8e50223894a1fce1 100644 (file)
@@ -38,9 +38,9 @@ export STAGING_INCDIR
 export LDFLAGS += "-ldl"
 
 python populate_packages_prepend () {
-       # autonamer would call this libxml2-2, but we don't want that
-       if d.getVar('DEBIAN_NAMES', True):
-               d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
+    # autonamer would call this libxml2-2, but we don't want that
+    if d.getVar('DEBIAN_NAMES', True):
+        d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
 }
 
 PACKAGES += "${PN}-utils"
index 7cffeca932e41aace796cc3788e868030504ddc5..c7438464384d1b13a875a7f863c8c25c1ed5f335 100644 (file)
@@ -119,8 +119,8 @@ _install_cfgs = "\
 "
 
 python do_install () {
-       bb.build.exec_func("shell_do_install", d)
-       oe.path.make_relative_symlink(d.expand("${D}${libdir}/libtinfo.so"))
+    bb.build.exec_func("shell_do_install", d)
+    oe.path.make_relative_symlink(d.expand("${D}${libdir}/libtinfo.so"))
 }
 
 shell_do_install() {
@@ -205,12 +205,12 @@ shell_do_install() {
 }
 
 python populate_packages_prepend () {
-        libdir = d.expand("${libdir}")
-        base_libdir = d.expand("${base_libdir}")
-        pnbase = d.expand("${PN}-lib%s")
-        do_split_packages(d, libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
-        if libdir is not base_libdir:
-            do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
+    libdir = d.expand("${libdir}")
+    base_libdir = d.expand("${base_libdir}")
+    pnbase = d.expand("${PN}-lib%s")
+    do_split_packages(d, libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
+    if libdir is not base_libdir:
+        do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
 }
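
The guard in the ncurses function compares object identity, not equality: two
equal path strings produced by separate d.expand() calls are normally distinct
objects, so "libdir is not base_libdir" can hold even when the paths match,
splitting the same directory twice. The value comparison, sketched:

    if libdir != base_libdir:   # compare path values, not object identity
        do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase,
                          'ncurses %s library', prepend=True,
                          extra_depends='', allow_links=True)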
 
 
index 51e1e3b5c4bab22fa404d4557181921b778b507d..ab89f71c4bd1860907ec9324d33582fffb8f1a45 100644 (file)
@@ -8,26 +8,26 @@ USE_NLS = "yes"
 SRC_URI += "file://db_linking_hack.patch"
 
 python do_install () {
-       bb.build.exec_func('do_install_base', d)
-       bb.build.exec_func('do_install_config', d)
+    bb.build.exec_func('do_install_base', d)
+    bb.build.exec_func('do_install_config', d)
 }
 
 python do_install_config () {
-       indir = os.path.dirname(d.getVar('FILE',1))
-       infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
-       data = infile.read()
-       infile.close()
+    indir = os.path.dirname(d.getVar('FILE',1))
+    infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
+    data = infile.read()
+    infile.close()
 
-       data = d.expand(data)
+    data = d.expand(data)
 
-       outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
-       if not os.path.exists(outdir):
-               os.makedirs(outdir)
-       outpath = os.path.join(outdir, 'apt.conf.sample')
+    outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
+    if not os.path.exists(outdir):
+        os.makedirs(outdir)
+    outpath = os.path.join(outdir, 'apt.conf.sample')
 
-       outfile = file(outpath, 'w')
-       outfile.write(data)
-       outfile.close()
+    outfile = file(outpath, 'w')
+    outfile.write(data)
+    outfile.close()
 }
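
The exists()-then-makedirs() pattern in do_install_config also carries a small
race when two tasks share a path; BitBake ships an idempotent helper for this.
A sketch using it:

    bb.utils.mkdirhier(outdir)   # creates parents, no error if already present
    outpath = os.path.join(outdir, 'apt.conf.sample')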
 
 do_install_base () {
index 736672c26cd4a3a158c90fa6633e05e4b9578ed4..26f3d749436a9547d1c75d9e5ed94930fcc196b7 100644 (file)
@@ -34,23 +34,23 @@ apt-utils-manpages="doc/apt-extracttemplates.1 \
 #                  doc/apt-ftparchive.1
 
 def get_files_apt_doc(d, bb, manpages):
-       import re
-       manpages = re.sub(r'\bdoc/(\S+)/(\S+)\.\1\.(.)\b', r'${mandir}/\1/man\3/\2.\3', manpages)
-       manpages = re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', manpages)
-       return manpages
+    import re
+    manpages = re.sub(r'\bdoc/(\S+)/(\S+)\.\1\.(.)\b', r'${mandir}/\1/man\3/\2.\3', manpages)
+    manpages = re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', manpages)
+    return manpages
 
 def get_commands_apt_doc(d, bb, manpages):
-       import os
-       s = list()
-       __dir_cache__ = list()
-       for m in manpages.split():
-               dest = get_files_apt_doc(d, bb, m)
-               dir = os.path.dirname(dest)
-               if not dir in __dir_cache__:
-                       s.append("install -d ${D}/%s" % dir)
-                       __dir_cache__.append(dir)
-               s.append("install -m 0644 %s ${D}/%s" % (m, dest))
-       return "\n".join(s)
+    import os
+    s = list()
+    __dir_cache__ = list()
+    for m in manpages.split():
+        dest = get_files_apt_doc(d, bb, m)
+        dir = os.path.dirname(dest)
+        if not dir in __dir_cache__:
+            s.append("install -d ${D}/%s" % dir)
+            __dir_cache__.append(dir)
+        s.append("install -m 0644 %s ${D}/%s" % (m, dest))
+    return "\n".join(s)
 
 PACKAGES += "${PN}-utils ${PN}-utils-doc"
 FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \
index 3ba12236a86952a2d6b91cdc33846cd62fe97f1d..561b30ed1817a850dc6838a19b8e3990f02e8443 100644 (file)
@@ -294,11 +294,11 @@ PACKAGES_append = " perl-modules "
 RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
 
 python populate_packages_prepend () {
-        libdir = d.expand('${libdir}/perl/${PV}')
-        do_split_packages(d, libdir, 'auto/(Encode/.[^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
-        do_split_packages(d, libdir, 'auto/([^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
-        do_split_packages(d, libdir, 'Module/([^\/]*).*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
-        do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+    libdir = d.expand('${libdir}/perl/${PV}')
+    do_split_packages(d, libdir, 'auto/(Encode/.[^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+    do_split_packages(d, libdir, 'auto/([^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+    do_split_packages(d, libdir, 'Module/([^\/]*).*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
+    do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
 }
 
 PACKAGES_DYNAMIC = "perl-module-*"
index 1970dda55ba1c52431c99728b0578eb0fb475f43..5a60bfda823f70f50520a989133f94166d4954f5 100644 (file)
@@ -9,10 +9,10 @@ def get_qemu_target_list(d):
     for arch in ['mips64', 'mips64el', 'ppcemb']:
         if arch in archs:
             targets += arch + "-softmmu,"
-           archs.remove(arch)
+            archs.remove(arch)
     for arch in ['armeb', 'alpha', 'ppc64abi32', 'sparc32plus']:
         if arch in archs:
             targets += arch + "-linux-user,"
-           archs.remove(arch)
+            archs.remove(arch)
     return targets + ''.join([arch + "-linux-user" + "," + arch + "-softmmu" + "," for arch in archs]).rstrip(',')
 
index 9cae33c5b6aa3de92a0f2622c13a3a867b9f9de6..4f2b0a1dc4352698d82132dd7402aba4023e56d2 100644 (file)
@@ -58,10 +58,10 @@ fakeroot do_install () {
 }
 
 python do_package_append() {
-       import subprocess
-       # Change permissions back the way they were, they probably had a reason...
-       workdir = d.getVar('WORKDIR', True)
-       subprocess.call('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir, shell=True)
+    import subprocess
+    # Change permissions back the way they were, they probably had a reason...
+    workdir = d.getVar('WORKDIR', True)
+    subprocess.call('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir, shell=True)
 }
 
 PACKAGES =+ "${PN}-lib ${PN}-libimage"
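
do_package_append above shells out to chmod for a single directory; the stdlib
call avoids both the subprocess and the shell=True string interpolation.
Sketched, assuming the same path:

    os.chmod('%s/install/cups/var/run/cups/certs' % workdir, 0511)   # Python 2 octal literal
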
index ea41d82267de9f2c8c48d322a97e7beab94c8ccd..85ea7083d01abd8c1637bcb6248c95601d128f39 100644 (file)
@@ -59,6 +59,6 @@ CONFFILES_${PN} = "${sysconfdir}/lighttpd.conf"
 PACKAGES_DYNAMIC = "lighttpd-module-*"
 
 python populate_packages_prepend () {
-        lighttpd_libdir = d.expand('${libdir}')
-        do_split_packages(d, lighttpd_libdir, '^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
+    lighttpd_libdir = d.expand('${libdir}')
+    do_split_packages(d, lighttpd_libdir, '^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
 }
index 563bd58bf8147b3d003556ea4d68c03fcd84b5fe..31babf9d3decf596fb9eb8e6885b883e9f0fe154 100644 (file)
@@ -49,7 +49,7 @@ FILES_${PN} += "/opt/ltp/* /opt/ltp/runtest/* /opt/ltp/scenario_groups/* /opt/lt
 TARGET_CC_ARCH += "${LDFLAGS}"
 
 do_unpack_append() {
-        bb.build.exec_func('do_extract_tarball', d)
+    bb.build.exec_func('do_extract_tarball', d)
 }
 
 do_extract_tarball() {
index b9a05c6fed47cdb9f1e97ec6afd2f24c16afe529..764955e5767cbf7f5592ecda9eb5c8a2ac9f3309 100644 (file)
@@ -46,8 +46,8 @@ do_unpack[cleandirs] += "${S}"
 
 # We invoke base do_patch at end, to incorporate any local patch
 python do_patch() {
-       bb.build.exec_func('nettools_do_patch', d)
-       bb.build.exec_func('patch_do_patch', d)
+    bb.build.exec_func('nettools_do_patch', d)
+    bb.build.exec_func('patch_do_patch', d)
 }
 
 do_configure() {
index 275e5d842472449355b41b47677972442ea74491..9274ed3832be65c2f56dbcfe649e45ae35f366c4 100644 (file)
@@ -53,28 +53,28 @@ RDEPENDS_${PN}-xtests = "libpam pam-plugin-access pam-plugin-debug pam-plugin-cr
 RRECOMMENDS_${PN} = "libpam-runtime"
 
 python populate_packages_prepend () {
-       import os.path
-
-       def pam_plugin_append_file(pn, dir, file):
-               nf = os.path.join(dir, file)
-               of = d.getVar('FILES_' + pn, True)
-               if of:
-                       nf = of + " " + nf
-               d.setVar('FILES_' + pn, nf)
-
-       dvar = bb.data.expand('${WORKDIR}/package', d, True)
-       pam_libdir = d.expand('${base_libdir}/security')
-       pam_sbindir = d.expand('${sbindir}')
-       pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
-
-       do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', extra_depends='')
-       pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_chkpwd')
-       pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_update')
-       pam_plugin_append_file('pam-plugin-tally', pam_sbindir, 'pam_tally')
-       pam_plugin_append_file('pam-plugin-tally2', pam_sbindir, 'pam_tally2')
-       pam_plugin_append_file('pam-plugin-timestamp', pam_sbindir, 'pam_timestamp_check')
-       pam_plugin_append_file('pam-plugin-mkhomedir', pam_sbindir, 'mkhomedir_helper')
-       do_split_packages(d, pam_filterdir, '^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
+    import os.path
+
+    def pam_plugin_append_file(pn, dir, file):
+        nf = os.path.join(dir, file)
+        of = d.getVar('FILES_' + pn, True)
+        if of:
+            nf = of + " " + nf
+        d.setVar('FILES_' + pn, nf)
+
+    dvar = bb.data.expand('${WORKDIR}/package', d, True)
+    pam_libdir = d.expand('${base_libdir}/security')
+    pam_sbindir = d.expand('${sbindir}')
+    pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
+
+    do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', extra_depends='')
+    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_chkpwd')
+    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_update')
+    pam_plugin_append_file('pam-plugin-tally', pam_sbindir, 'pam_tally')
+    pam_plugin_append_file('pam-plugin-tally2', pam_sbindir, 'pam_tally2')
+    pam_plugin_append_file('pam-plugin-timestamp', pam_sbindir, 'pam_timestamp_check')
+    pam_plugin_append_file('pam-plugin-mkhomedir', pam_sbindir, 'mkhomedir_helper')
+    do_split_packages(d, pam_filterdir, '^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
 }
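
pam_plugin_append_file does a get/concatenate/set round trip on FILES_<pn>; the
datastore's append primitive, already used elsewhere in this commit, collapses
it to one call (modulo a leading space when the variable was unset):

    def pam_plugin_append_file(pn, dir, file):
        d.appendVar('FILES_' + pn, ' ' + os.path.join(dir, file))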
 
 do_install() {
index a7efa5b2b890bc607da0f5f1f851bf28c7ae1ce3..8e13115821789db65c034cdf7f9bfbe48c825ad4 100644 (file)
@@ -68,11 +68,11 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*"
 PACKAGES_DYNAMIC_virtclass-native = ""
 
 python populate_packages_prepend () {
-       postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
+    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-       loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
+    loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
 
-       do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
+    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
 }
 
 do_install_append_virtclass-native() {
index 66be75aee016d591942982cfbc80d8bd1524f401..f90b3fcd64cb185ef7632881b5c93a197f4b7e00 100644 (file)
@@ -32,20 +32,20 @@ LIBV = "2.10.0"
 PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
 
 python populate_packages_prepend () {
-       import os.path
+    import os.path
 
-       prologue = d.getVar("postinst_prologue", True)
-       postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
+    prologue = d.getVar("postinst_prologue", True)
+    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-       gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
-       loaders_root = os.path.join(gtk_libdir, 'loaders')
-       immodules_root = os.path.join(gtk_libdir, 'immodules')
-       printmodules_root = os.path.join(gtk_libdir, 'printbackends');
+    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
+    loaders_root = os.path.join(gtk_libdir, 'loaders')
+    immodules_root = os.path.join(gtk_libdir, 'immodules')
+    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
 
-       do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
-       do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
-       do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
+    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
+    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
+    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
 
-        if (d.getVar('DEBIAN_NAMES', True)):
-                d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
+    if (d.getVar('DEBIAN_NAMES', True)):
+        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
 }
index aee18c58808dce31f8dc72fbfab1a28f51f630c3..e2d6ef1edd4aad8fab70c7f6f045add9a49b0341 100644 (file)
@@ -32,20 +32,20 @@ LIBV = "2.10.0"
 PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"
 
 python populate_packages_prepend () {
-       import os.path
+    import os.path
 
-       prologue = d.getVar("postinst_prologue", True)
-       postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
+    prologue = d.getVar("postinst_prologue", True)
+    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-       gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
-       loaders_root = os.path.join(gtk_libdir, 'loaders')
-       immodules_root = os.path.join(gtk_libdir, 'immodules')
-       printmodules_root = os.path.join(gtk_libdir, 'printbackends');
+    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
+    loaders_root = os.path.join(gtk_libdir, 'loaders')
+    immodules_root = os.path.join(gtk_libdir, 'immodules')
+    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
 
-       do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
-       do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
-       do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
+    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
+    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
+    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
 
-        if (d.getVar('DEBIAN_NAMES', True)):
-                d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
+    if (d.getVar('DEBIAN_NAMES', True)):
+        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
 }
index 2cc3a03576cf1155da2f7fc31d382d6d3097a48d..e49e6e7e17974b0dc9733b497b6e78fd52c94f4f 100644 (file)
@@ -39,17 +39,17 @@ LIBV = "2.10.0"
 PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*"
 
 python populate_packages_prepend () {
-       import os.path
+    import os.path
 
-       prologue = d.getVar("postinst_prologue", True)
+    prologue = d.getVar("postinst_prologue", True)
 
-       gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
-       immodules_root = os.path.join(gtk_libdir, 'immodules')
-       printmodules_root = os.path.join(gtk_libdir, 'printbackends');
+    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
+    immodules_root = os.path.join(gtk_libdir, 'immodules')
+    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
 
-       do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
-       do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
+    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
+    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')
 
-        if (d.getVar('DEBIAN_NAMES', True)):
-                d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
+    if (d.getVar('DEBIAN_NAMES', True)):
+        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
 }
index 5b8ee88bdfc03f6b5236bb596691ee151cd53627..3baa46e540e60436a9183185e59e7c494531336f 100644 (file)
@@ -29,14 +29,14 @@ CFLAGS_prepend = "-DHAVE_ANIMATION "
 inherit gnome
 
 python populate_packages_prepend() {
-       import os.path
+    import os.path
 
-       engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
-       themes_root = os.path.join(d.getVar('datadir', True), "themes")
+    engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
+    themes_root = os.path.join(d.getVar('datadir', True), "themes")
 
-       do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
-       do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')
-       # TODO: mark theme packages as arch all
+    do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
+    do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')
+    # TODO: mark theme packages as arch all
 }
 
 SRC_URI += "file://glib-2.32.patch"
index bf5a1b01e1560fa43a7288565a1f7400af4473d4..ef72e8904b4d9a92e7ed80f64dc9eee97b7a4030 100644 (file)
@@ -1,6 +1,6 @@
 
 def get_cairo_fpu_setting(bb, d):
-       if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
-               return "--disable-some-floating-point"
-       return ""
+    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
+        return "--disable-some-floating-point"
+    return ""
 
index 7b5dc68e3ca80b14c7afadecd1f3643aafb5846d..c541fe315106bc308acff66ef68e0d3aa5e8422a 100644 (file)
@@ -1,6 +1,6 @@
 
 def get_clutter_fpu_setting(bb, d):
-       if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
-               return "--without-fpu"
-       return ""
+    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
+        return "--without-fpu"
+    return ""
 
index 480672fd081541e2083727c338767decb50968e3..6eb5fd8e0538e291c50bb113572fca1d6b659ab1 100644 (file)
@@ -11,11 +11,11 @@ DRIDRIVERS_append_x86-64 = ",i915,i965"
 EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gallium-drivers --with-dri-drivers=${DRIDRIVERS}"
 
 python populate_packages_prepend() {
-       import os.path
+    import os.path
 
-       dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri")
+    dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri")
 
-       do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
+    do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
 }
 
 PACKAGES_DYNAMIC = "mesa-dri-driver-*"
index 754a720b04cb0978f8dc2d2956a72d66cee8aee6..4fcc94f9519adfecf375dbb6758b4f0b61958aa0 100644 (file)
@@ -61,11 +61,11 @@ do_install_append () {
 
 
 python populate_packages_prepend () {
-       prologue = d.getVar("postinst_prologue", True)
+    prologue = d.getVar("postinst_prologue", True)
 
-       modules_root = d.expand('${libdir}/pango/${LIBV}/modules')
+    modules_root = d.expand('${libdir}/pango/${LIBV}/modules')
 
-       do_split_packages(d, modules_root, '^pango-(.*)\.so$', 'pango-module-%s', 'Pango module %s', prologue + '${bindir}/${MLPREFIX}pango-querymodules > /etc/pango/${MLPREFIX}pango.modules')
+    do_split_packages(d, modules_root, '^pango-(.*)\.so$', 'pango-module-%s', 'Pango module %s', prologue + '${bindir}/${MLPREFIX}pango-querymodules > /etc/pango/${MLPREFIX}pango.modules')
 }
 
 FILES_${PN} = "${sysconfdir}/pango/* ${bindir}/* ${libdir}/libpango*${SOLIBS}"
index d112751417308568115af0718d52620b0b8b1340..b8859ab7d5e76cc1cca092ade1adef292d828150 100644 (file)
@@ -18,5 +18,5 @@ def perf_feature_enabled(feature, trueval, falseval, d):
     """
     enabled_features = d.getVar("PERF_FEATURES_ENABLE", True) or ""
     if feature in enabled_features:
-               return trueval
+        return trueval
     return falseval
index 4572265ef47487efd24868639e66467c78becfad..f26aca5c85bacc2621ec7c8e458c5de9e3ffe74d 100644 (file)
@@ -1,27 +1,27 @@
 LIBV = "0.10"
 
 python populate_packages_prepend () {
-       gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
-       postinst = d.getVar('plugin_postinst', True)
-       glibdir = d.getVar('libdir', True)
+    gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
+    postinst = d.getVar('plugin_postinst', True)
+    glibdir = d.getVar('libdir', True)
 
-       do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
-       do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends=d.expand('${PN}'))
-       do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', d.expand('${PN}-%s-dev'), 'GStreamer plugin for %s (development files)', extra_depends=d.expand('${PN}-dev'))
-       do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', d.expand('${PN}-%s-staticdev'), 'GStreamer plugin for %s (static development files)', extra_depends=d.expand('${PN}-staticdev'))
-       
-       pn = d.getVar('PN', True)
-       metapkg =  pn + '-meta'
-       d.setVar('ALLOW_EMPTY_' + metapkg, "1")
-       d.setVar('FILES_' + metapkg, "")
-       blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ]
-       metapkg_rdepends = []
-       packages = d.getVar('PACKAGES', True).split()
-       for pkg in packages[1:]:
-               if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
-                       metapkg_rdepends.append(pkg)
-       d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
-       d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
+    do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
+    do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends=d.expand('${PN}'))
+    do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', d.expand('${PN}-%s-dev'), 'GStreamer plugin for %s (development files)', extra_depends=d.expand('${PN}-dev'))
+    do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', d.expand('${PN}-%s-staticdev'), 'GStreamer plugin for %s (static development files)', extra_depends=d.expand('${PN}-staticdev'))
+    
+    pn = d.getVar('PN', True)
+    metapkg =  pn + '-meta'
+    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
+    d.setVar('FILES_' + metapkg, "")
+    blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ]
+    metapkg_rdepends = []
+    packages = d.getVar('PACKAGES', True).split()
+    for pkg in packages[1:]:
+        if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
+            metapkg_rdepends.append(pkg)
+    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
+    d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
 }
 
 ALLOW_EMPTY = "1"
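
The meta-package filter in the function above reads more directly as a list
comprehension; the same conditions, sketched (the duplicate check is dropped on
the assumption that PACKAGES entries are already unique):

    metapkg_rdepends = [p for p in packages[1:]
                        if p not in blacklist
                        and not p.endswith('-dev') and not p.endswith('-dbg')
                        and 'locale' not in p and '-staticdev' not in p]
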
index c3d6d9858e5daf55172831bd2bb7377f91e721c4..ca8b71e952407bca80fa26da616eeb7e2cb14365 100644 (file)
@@ -74,10 +74,10 @@ FILES_libpulse = "${libdir}/libpulse.so.*"
 FILES_libpulse-simple = "${libdir}/libpulse-simple.so.*"
 FILES_libpulse-browse = "${libdir}/libpulse-browse.so.*"
 FILES_libpulse-mainloop-glib = "${libdir}/libpulse-mainloop-glib.so.*"
-                                        
+
 FILES_${PN}-dbg += "${libexecdir}/pulse/.debug \
                     ${libdir}/pulse-${PV}/modules/.debug"
-FILES_${PN}-dev += "${libdir}/pulse-${PV}/modules/*.la ${datadir}/vala ${libdir}/cmake"                    
+FILES_${PN}-dev += "${libdir}/pulse-${PV}/modules/*.la ${datadir}/vala ${libdir}/cmake"   
 FILES_${PN}-conf = "${sysconfdir}"
 FILES_${PN}-bin += "${sysconfdir}/default/volatiles/volatiles.04_pulse"
 FILES_${PN}-server = "${bindir}/pulseaudio ${bindir}/start-* ${sysconfdir} ${bindir}/pactl ${base_libdir}/udev/rules.d/*.rules"
@@ -105,11 +105,11 @@ pkg_postinst_${PN}-server() {
 }
 
 python populate_packages_prepend() {
-        #d.setVar('PKG_pulseaudio', 'pulseaudio')
+    #d.setVar('PKG_pulseaudio', 'pulseaudio')
 
-        plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
-        do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' )
-        do_split_packages(d, plugindir, '^lib(.*)\.so$', 'pulseaudio-lib-%s', 'PulseAudio library for %s', extra_depends='' )
+    plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
+    do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' )
+    do_split_packages(d, plugindir, '^lib(.*)\.so$', 'pulseaudio-lib-%s', 'PulseAudio library for %s', extra_depends='' )
 }
 
 RDEPENDS_pulseaudio-module-console-kit =+ "consolekit"
index df18f91c90eae77114517976fe9361ed5666ceca..ebaced4247b3de96a41a6633f5658962ee17b981 100644 (file)
@@ -30,23 +30,23 @@ PACKAGES_DYNAMIC = "qmmp-plugin-* "
 
 
 python populate_packages_prepend () {
-       import os
-       qmmp_libdir = d.expand('${libdir}/qmmp')
-       gd = d.expand('${D}/${libdir}/qmmp')
-       plug_dirs = os.listdir(gd)
-
-       for plug_dir in plug_dirs:
-               g_plug_dir = os.path.join(qmmp_libdir,plug_dir)
-               do_split_packages(d, g_plug_dir, '^lib(.*)\.so$', 'qmmp-plugin-' + plug_dir.lower() + '-%s', 'Qmmp' + plug_dir  + 'plugin for %s')
+    import os
+    qmmp_libdir = d.expand('${libdir}/qmmp')
+    gd = d.expand('${D}/${libdir}/qmmp')
+    plug_dirs = os.listdir(gd)
+
+    for plug_dir in plug_dirs:
+        g_plug_dir = os.path.join(qmmp_libdir,plug_dir)
+        do_split_packages(d, g_plug_dir, '^lib(.*)\.so$', 'qmmp-plugin-' + plug_dir.lower() + '-%s', 'Qmmp' + plug_dir  + 'plugin for %s')
 } 
 
 FILES_${PN} = "\
-               ${bindir}/qmmp \
+                ${bindir}/qmmp \
                 ${libdir}/lib*${SOLIBS} \ 
-               ${datadir}/icons/* \
+                ${datadir}/icons/* \
                 ${datadir}/qmmp/images/* \
                 ${datadir}/applications/* \
-               "
+                "
 
 FILES_${PN}-dbg += "\
                 ${libdir}/qmmp/*/.debug/* \
index 23fda336b2ef2c11c36e7200473d486ce3bf22cc..3f61a20a2cb129b2c418a8a6b40cf6942a154eaa 100644
@@ -274,59 +274,59 @@ do_compile() {
 }
 
 python populate_packages_prepend() {
-        translation_dir = d.expand('${datadir}/${QT_DIR_NAME}/translations/')
-        translation_name = d.expand('${QT_BASE_NAME}-translation-%s')
-        do_split_packages(d, translation_dir, '^(assistant|designer|linguist|qt|qtconfig|qvfb)_(.*)\.qm$', translation_name, '${PN} translation for %s', extra_depends='' )
+    translation_dir = d.expand('${datadir}/${QT_DIR_NAME}/translations/')
+    translation_name = d.expand('${QT_BASE_NAME}-translation-%s')
+    do_split_packages(d, translation_dir, '^(assistant|designer|linguist|qt|qtconfig|qvfb)_(.*)\.qm$', translation_name, '${PN} translation for %s', extra_depends='' )
  
-        phrasebook_dir = d.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/')
-        phrasebook_name = d.expand('${QT_BASE_NAME}-phrasebook-%s')
-        import os;
-        if os.path.exists("%s%s" % (d.expand('${D}'), phrasebook_dir)):
-                do_split_packages(d, phrasebook_dir, '^(.*)\.qph$', phrasebook_name, '${PN} phrasebook for %s', extra_depends='' )
-        else:
-                bb.note("The path does not exist:", d.expand('${D}'), phrasebook_dir)
+    phrasebook_dir = d.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/')
+    phrasebook_name = d.expand('${QT_BASE_NAME}-phrasebook-%s')
+    import os;
+    if os.path.exists("%s%s" % (d.expand('${D}'), phrasebook_dir)):
+        do_split_packages(d, phrasebook_dir, '^(.*)\.qph$', phrasebook_name, '${PN} phrasebook for %s', extra_depends='' )
+    else:
+        bb.note("The path does not exist:", d.expand('${D}'), phrasebook_dir)
  
-        # Package all the plugins and their -dbg version and create a meta package
-        def qtopia_split(path, name, glob):
-                """
-                Split the package into a normal and -dbg package and then add the
-                new packages to the meta package.
-                """
-                plugin_dir = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path)
-                if not os.path.exists("%s%s" % (d.expand('${D}'), plugin_dir)):
-                        bb.note("The path does not exist:", d.expand('${D}'), plugin_dir)
-                        return
+    # Package all the plugins and their -dbg version and create a meta package
+    def qtopia_split(path, name, glob):
+        """
+        Split the package into a normal and -dbg package and then add the
+        new packages to the meta package.
+        """
+        plugin_dir = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path)
+        if not os.path.exists("%s%s" % (d.expand('${D}'), plugin_dir)):
+            bb.note("The path does not exist:", d.expand('${D}'), plugin_dir)
+            return
  
-                plugin_name = d.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name)
-                dev_packages = []
-                dev_hook = lambda file,pkg,b,c,d:dev_packages.append((file,pkg))
-                do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
-                # Create a -dbg package as well
-                plugin_dir_dbg = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path)
-                packages = d.getVar('PACKAGES')
-                for (file,package) in dev_packages:
-                        packages = "%s %s-dbg" % (packages, package)
-                        file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
-                        d.setVar("FILES_%s-dbg" % package, file_name)
-                        d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))
-
-                d.setVar('PACKAGES', packages)
-
-        qtopia_split('accessible',    'accessible',    '^libq(.*)\.so$')
-        qtopia_split('codecs',        'codec',         '^libq(.*)\.so$')
-        qtopia_split('decorations',   'decoration',    '^libqdecoration(.*)\.so$')
-        qtopia_split('designer',      'designer',      '^lib(.*)\.so$')
-        qtopia_split('gfxdrivers',    'gfxdriver',     '^libq(.*)\.so$')
-        qtopia_split('graphicssystems','graphicssystems', '^libq(.*)\.so$')
-        qtopia_split('mousedrivers',  'mousedriver',   '^libq(.*)mousedriver\.so$')
-        qtopia_split('iconengines',   'iconengine',    '^libq(.*)\.so$')
-        qtopia_split('imageformats',  'imageformat',   '^libq(.*)\.so$')
-        qtopia_split('inputmethods',  'inputmethod',   '^libq(.*)\.so$')
-        qtopia_split('sqldrivers',    'sqldriver',     '^libq(.*)\.so$')
-        qtopia_split('script',        'script',        '^libqtscript(.*)\.so$')
-        qtopia_split('styles',        'style',         '^libq(.*)\.so$')
-        qtopia_split('phonon_backend','phonon-backend','^libphonon_(.*)\.so$')
-        qtopia_split('bearer',        'bearer',        '^libq(.*)bearer\.so$')
+        plugin_name = d.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name)
+        dev_packages = []
+        dev_hook = lambda file,pkg,b,c,d:dev_packages.append((file,pkg))
+        do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
+        # Create a -dbg package as well
+        plugin_dir_dbg = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path)
+        packages = d.getVar('PACKAGES')
+        for (file,package) in dev_packages:
+            packages = "%s %s-dbg" % (packages, package)
+            file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
+            d.setVar("FILES_%s-dbg" % package, file_name)
+            d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))
+
+        d.setVar('PACKAGES', packages)
+
+    qtopia_split('accessible',    'accessible',    '^libq(.*)\.so$')
+    qtopia_split('codecs',        'codec',         '^libq(.*)\.so$')
+    qtopia_split('decorations',   'decoration',    '^libqdecoration(.*)\.so$')
+    qtopia_split('designer',      'designer',      '^lib(.*)\.so$')
+    qtopia_split('gfxdrivers',    'gfxdriver',     '^libq(.*)\.so$')
+    qtopia_split('graphicssystems','graphicssystems', '^libq(.*)\.so$')
+    qtopia_split('mousedrivers',  'mousedriver',   '^libq(.*)mousedriver\.so$')
+    qtopia_split('iconengines',   'iconengine',    '^libq(.*)\.so$')
+    qtopia_split('imageformats',  'imageformat',   '^libq(.*)\.so$')
+    qtopia_split('inputmethods',  'inputmethod',   '^libq(.*)\.so$')
+    qtopia_split('sqldrivers',    'sqldriver',     '^libq(.*)\.so$')
+    qtopia_split('script',        'script',        '^libqtscript(.*)\.so$')
+    qtopia_split('styles',        'style',         '^libq(.*)\.so$')
+    qtopia_split('phonon_backend','phonon-backend','^libphonon_(.*)\.so$')
+    qtopia_split('bearer',        'bearer',        '^libq(.*)bearer\.so$')
 }
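
The heart of qtopia_split() above is the hook argument: do_split_packages() calls it for every package it generates, and the collected (file, package) pairs are then used to append a matching -dbg package whose FILES entry points at the .debug copy of the same plugin. Reduced to plain data, that step looks like this (package and path names are made up for illustration):

    import os

    def dbg_companions(dev_packages, plugin_dir_dbg):
        # For each (file, package) pair reported by the hook, derive the
        # -dbg package name and the .debug path its FILES entry points at.
        return [(pkg + '-dbg',
                 os.path.join(plugin_dir_dbg, os.path.basename(f)))
                for (f, pkg) in dev_packages]

    # dbg_companions([('/usr/lib/qt4/plugins/styles/libqplastique.so',
    #                  'qt4-plugin-style-plastique')],
    #                '/usr/lib/qt4/plugins/styles/.debug')
    # returns [('qt4-plugin-style-plastique-dbg',
    #           '/usr/lib/qt4/plugins/styles/.debug/libqplastique.so')]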
 
 do_install() {
index 9e2474ff534dadab0ecd54445ae345b3dab2318c..ac4810df8e5d3fd7c3a5624f43b20f05e24c9e72 100644
@@ -17,7 +17,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
 S = "${WORKDIR}/git"
 
 do_unpack_append () {
-       bb.build.exec_func('do_remove_patches', d)
+    bb.build.exec_func('do_remove_patches', d)
 }
 
 do_remove_patches () {
index c62b940808bd9eb216a0ed0cba70521d7cc4f8ce..4c6dfae287d7f7c386cd2a27c28f4566534970d7 100644
@@ -46,8 +46,8 @@ do_compile () {
 }
 
 python populate_packages_prepend () {
-       pcre_libdir = d.expand('${libdir}')
-       do_split_packages(d, pcre_libdir, '^lib(.*)\.so\.+', 'lib%s', 'libpcre %s library', extra_depends='', allow_links=True, prepend=True)
+    pcre_libdir = d.expand('${libdir}')
+    do_split_packages(d, pcre_libdir, '^lib(.*)\.so\.+', 'lib%s', 'libpcre %s library', extra_depends='', allow_links=True, prepend=True)
 }
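
To make the pattern above concrete: '^lib(.*)\.so\.+' only matches names that continue past '.so' with a version dot, so each versioned runtime library gets its own lib* package while the bare development symlink is left out of the split; prepend=True places the generated packages ahead of ${PN} in PACKAGES so they claim the files first. A quick illustration (file names are typical libpcre artifacts, not read from a build):

    import re

    pattern = re.compile(r'^lib(.*)\.so\.+')
    for name in ('libpcre.so.1.0.0', 'libpcreposix.so.0', 'libpcre.so'):
        m = pattern.match(name)
        # versioned names yield a package via 'lib%s'; the bare .so
        # symlink does not match and stays with the default packaging
        print(name, '->', 'lib%s' % m.group(1) if m else 'no match')

    # libpcre.so.1.0.0  -> libpcre
    # libpcreposix.so.0 -> libpcreposix
    # libpcre.so        -> no match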
 
 BBCLASSEXTEND = "native"
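
Finally, for reference, the rewrite applied in every hunk of this commit is mechanical: each tab in the leading indentation of a python block becomes four spaces, with trailing whitespace dropped where the conversion exposed it. A rough sketch of the per-line transformation (an illustration only, not the script actually used):

    import re

    def retab(line, width=4):
        # Replace tabs in the leading indentation only and strip any
        # trailing whitespace; tabs elsewhere in the line are untouched.
        indent = re.match(r'[ \t]*', line).group(0)
        return (indent.replace('\t', ' ' * width) + line[len(indent):]).rstrip()

    assert retab('\t\tbb.build.exec_func(func, d)') == '        bb.build.exec_func(func, d)'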